]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/vax/vax.c
Merge in trunk.
[thirdparty/gcc.git] / gcc / config / vax / vax.c
1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "function.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "recog.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "flags.h"
38 #include "debug.h"
39 #include "diagnostic-core.h"
40 #include "reload.h"
41 #include "tm-preds.h"
42 #include "tm-constrs.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
/* Forward declarations of the static functions that implement the
   target hooks installed in the targetm vector below.  */
static void vax_option_override (void);
static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, enum machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
static rtx vax_function_arg (cumulative_args_t, enum machine_mode,
			     const_tree, bool);
static void vax_function_arg_advance (cumulative_args_t, enum machine_mode,
				      const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static int vax_return_pops_args (tree, tree, int);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
66 \f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

/* A frame pointer is always required (hook returns true
   unconditionally).  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

/* The single instance of the target hook vector.  */
struct gcc_target targetm = TARGET_INITIALIZER;
122 \f
123 /* Set global variables as needed for the options enabled. */
124
static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  -mg-float
     switches DFmode over to the VAX G_floating format.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  /* Let the OS-specific subtarget adjust the options as well.  */
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
136
137 static void
138 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
139 {
140 rtx x;
141
142 x = plus_constant (Pmode, frame_pointer_rtx, offset);
143 x = gen_rtx_MEM (SImode, x);
144 x = gen_rtx_SET (VOIDmode, x, src);
145 add_reg_note (insn, REG_CFA_OFFSET, x);
146 }
147
/* Generate RTL for the function prologue: emit the procedure entry
   mask, attach CFA notes describing where the CALLS/CALLG frame saves
   the registers, and allocate the local stack frame.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */
void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  /* Build the procedure entry mask: one bit per call-saved register
     that is live anywhere in the function.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  /* The mask-saved registers sit above the fixed frame area,
     starting at CFA + 16, in ascending register-number order.  */
  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= STARTING_FRAME_OFFSET;
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
219
220 /* When debugging with stabs, we want to output an extra dummy label
221 so that gas can distinguish between D_float and G_float prior to
222 processing the .stabs directive identifying type double. */
223 static void
224 vax_file_start (void)
225 {
226 default_file_start ();
227
228 if (write_symbols == DBX_DEBUG)
229 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
230 }
231
232 /* We can use the BSD C library routines for the libgcc calls that are
233 still generated, since that's what they boil down to anyways. When
234 ELF, avoid the user's namespace. */
235
236 static void
237 vax_init_libfuncs (void)
238 {
239 if (TARGET_BSD_DIVMOD)
240 {
241 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
242 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
243 }
244 }
245
/* Split the DImode operands of INSN (whose operation has rtx code
   CODE) into SImode word halves: on return LOW[i] holds word 0 (the
   least significant half) and OPERANDS[i] word 1 of each of the N
   original operands, with auto-modify and dying-register memory
   addresses handled specially.  */
static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Auto-modify address: both halves use the same SImode MEM;
	     the side effect of the first access addresses the second.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The base register dies in this insn, so when optimizing
	     for size it can be reused as a post-increment pointer:
	     low word via (reg)+, high word via the updated (reg).  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* Ordinary operand: subword 0 (least significant) goes to
	     LOW, subword 1 stays in OPERANDS.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
284 \f
/* Print to FILE the VAX assembler syntax for memory address ADDR:
   register deferred "(rn)", autodecrement "-(rn)", autoincrement
   "(rn)+", indirection "*", displacement/symbolic offsets, and
   indexing "[rn]".  Reports an operand-lossage error for address
   forms that are invalid under PIC.  */
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      /* First pass: strip one operand of the top-level PLUS into its
	 slot, leaving the rest in ADDR.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      /* Second pass: classify what remains in ADDR (a REG, a MULT, or
	 a nested PLUS holding the remaining two terms).  */
      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else
	{
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      /* NOTE(review): the combined offset computed just above
		 is immediately discarded by this assignment.  It looks
		 dead unless OFFSET can never already be set on this
		 path — confirm before relying on the combination.  */
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      /* NOTE(review): same discarded-combination pattern as
		 above.  */
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  /* An index term may still be a MULT; only the register part
	     is printed (the scale is implied by the operand size).  */
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}
498
/* Print operand X to FILE under control of template modifier CODE:

   '#'	the double-precision suffix character (d or g)
   '|'	the register prefix
   'c'	the condition name for comparison X
   'C'	the reversed condition name for comparison X
   'D'	a negative constant, printed as unsigned hex
   'P'	the constant plus 1
   'N'	the bitwise complement of the constant
   'R'	32 minus the constant (for rotate counts)
   'H'	the low 16 bits of the complement of the constant
   'h'	the negated constant truncated to 16 bits
   'B'	the low 8 bits of the complement of the constant
   'b'	the low 8 bits of the negated constant
   'M'	the complement of a mask of the low INTVAL(X) bits
   'x'	the constant in hex

   With no (matching) code: registers, memory operands, float
   constants, and immediates are printed in assembler syntax.
   NB: the if-chain falls through to the generic cases when a letter
   code's side conditions do not hold, so do not reorder it.  */
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      /* Anything else is an immediate; symbolic immediates are not
	 representable under full PIC.  */
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
560 \f
561 const char *
562 cond_name (rtx op)
563 {
564 switch (GET_CODE (op))
565 {
566 case NE:
567 return "neq";
568 case EQ:
569 return "eql";
570 case GE:
571 return "geq";
572 case GT:
573 return "gtr";
574 case LE:
575 return "leq";
576 case LT:
577 return "lss";
578 case GEU:
579 return "gequ";
580 case GTU:
581 return "gtru";
582 case LEU:
583 return "lequ";
584 case LTU:
585 return "lssu";
586
587 default:
588 gcc_unreachable ();
589 }
590 }
591
592 const char *
593 rev_cond_name (rtx op)
594 {
595 switch (GET_CODE (op))
596 {
597 case EQ:
598 return "neq";
599 case NE:
600 return "eql";
601 case LT:
602 return "geq";
603 case LE:
604 return "gtr";
605 case GT:
606 return "leq";
607 case GE:
608 return "lss";
609 case LTU:
610 return "gequ";
611 case LEU:
612 return "gtru";
613 case GTU:
614 return "lequ";
615 case GEU:
616 return "lssu";
617
618 default:
619 gcc_unreachable ();
620 }
621 }
622
/* Return true if C is a floating point constant that is cheap to
   materialize: one of the const_tiny_rtx values for its mode, an
   integral power of two 1, 2, 4, ..., 64, or the exact reciprocal of
   one of those powers.  */
static bool
vax_float_literal (rtx c)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE r, s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  REAL_VALUE_FROM_CONST_DOUBLE (r, c);

  /* Check 2**i and 2**-i for i = 0 .. 6.  */
  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      REAL_VALUE_FROM_INT (s, x, mode);

      if (REAL_VALUES_EQUAL (r, s))
	return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (REAL_VALUES_EQUAL (r, s))
	return true;
    }
  return false;
}
657
658
659 /* Return the cost in cycles of a memory address, relative to register
660 indirect.
661
662 Each of the following adds the indicated number of cycles:
663
664 1 - symbolic address
665 1 - pre-decrement
666 1 - indexing and/or offset(register)
667 2 - indirect */
668
669
static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU: a pre-decrement still consumes a register.  */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Queue one addend and keep walking the other; the queued
	 operands are drained below via the restart label.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
737
738 static int
739 vax_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
740 addr_space_t as ATTRIBUTE_UNUSED,
741 bool speed ATTRIBUTE_UNUSED)
742 {
743 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
744 }
745
746 /* Cost of an expression on a VAX. This version has costs tuned for the
747 CVAX chip (found in the VAX 3 series) with comments for variations on
748 other models.
749
750 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
751 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
752 costs on a per cpu basis. */
753
static bool
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  enum machine_mode mode = GET_MODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  /* AND complements its constant operand, so the complement
	     must fit the 6-bit short literal to be cheap.  */
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case SImode:
	case HImode:
	case QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e";		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  /* Skip operand 0 in the operand walk below: only the second
	     operand's cost matters once the constant is accounted.  */
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && GET_MODE (x) != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
1026 \f
1027 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1028 Used for C++ multiple inheritance.
1029 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1030 addl2 $DELTA, 4(ap) #adjust first argument
1031 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
1032 */
1033
1034 static void
1035 vax_output_mi_thunk (FILE * file,
1036 tree thunk ATTRIBUTE_UNUSED,
1037 HOST_WIDE_INT delta,
1038 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1039 tree function)
1040 {
1041 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1042 asm_fprintf (file, ",4(%Rap)\n");
1043 fprintf (file, "\tjmp ");
1044 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1045 fprintf (file, "+2\n");
1046 }
1047 \f
/* Implement TARGET_STRUCT_VALUE_RTX: the address where a returned
   structure is to be stored is passed in the register
   VAX_STRUCT_VALUE_REGNUM, for both caller and callee.  */
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
1054
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE: the frame value saved
   in a __builtin_setjmp buffer is the hard frame pointer.  */
static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}
1060
1061 /* Worker function for NOTICE_UPDATE_CC. */
1062
void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU: integer NEG joins the CC_NO_OVERFLOW group.  */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* Invalidate value2 when it may alias value1: a register result
     mentioned in the source, or two memory references.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1129
1130 /* Output integer move instructions. */
1131
1132 const char *
1133 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1134 enum machine_mode mode)
1135 {
1136 rtx hi[3], lo[3];
1137 const char *pattern_hi, *pattern_lo;
1138
1139 switch (mode)
1140 {
1141 case DImode:
1142 if (operands[1] == const0_rtx)
1143 return "clrq %0";
1144 if (TARGET_QMATH && optimize_size
1145 && (CONST_INT_P (operands[1])
1146 || GET_CODE (operands[1]) == CONST_DOUBLE))
1147 {
1148 unsigned HOST_WIDE_INT hval, lval;
1149 int n;
1150
1151 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1152 {
1153 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1154
1155 /* Make sure only the low 32 bits are valid. */
1156 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1157 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1158 }
1159 else
1160 {
1161 lval = INTVAL (operands[1]);
1162 hval = 0;
1163 }
1164
1165 /* Here we see if we are trying to see if the 64bit value is really
1166 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1167 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1168 8 bytes - 1 shift byte - 1 short literal byte. */
1169 if (lval != 0
1170 && (n = exact_log2 (lval & (- lval))) != -1
1171 && (lval >> n) < 64)
1172 {
1173 lval >>= n;
1174
1175 /* On 32bit platforms, if the 6bits didn't overflow into the
1176 upper 32bit value that value better be 0. If we have
1177 overflowed, make sure it wasn't too much. */
1178 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1179 {
1180 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1181 n = 0; /* failure */
1182 else
1183 lval |= hval << (32 - n);
1184 }
1185 /* If n is 0, then ashq is not the best way to emit this. */
1186 if (n > 0)
1187 {
1188 operands[1] = GEN_INT (lval);
1189 operands[2] = GEN_INT (n);
1190 return "ashq %2,%D1,%0";
1191 }
1192 #if HOST_BITS_PER_WIDE_INT == 32
1193 }
1194 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1195 upper 32bit value. */
1196 else if (hval != 0
1197 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1198 && (hval >> n) < 64)
1199 {
1200 operands[1] = GEN_INT (hval >> n);
1201 operands[2] = GEN_INT (n + 32);
1202 return "ashq %2,%D1,%0";
1203 #endif
1204 }
1205 }
1206
1207 if (TARGET_QMATH
1208 && (!MEM_P (operands[0])
1209 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1210 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1211 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1212 && ((CONST_INT_P (operands[1])
1213 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1214 || GET_CODE (operands[1]) == CONST_DOUBLE))
1215 {
1216 hi[0] = operands[0];
1217 hi[1] = operands[1];
1218
1219 split_quadword_operands (insn, SET, hi, lo, 2);
1220
1221 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1222 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1223
1224 /* The patterns are just movl/movl or pushl/pushl then a movq will
1225 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1226 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1227 value bytes. */
1228 if ((!strncmp (pattern_lo, "movl", 4)
1229 && !strncmp (pattern_hi, "movl", 4))
1230 || (!strncmp (pattern_lo, "pushl", 5)
1231 && !strncmp (pattern_hi, "pushl", 5)))
1232 return "movq %1,%0";
1233
1234 if (MEM_P (operands[0])
1235 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1236 {
1237 output_asm_insn (pattern_hi, hi);
1238 operands[0] = lo[0];
1239 operands[1] = lo[1];
1240 operands[2] = lo[2];
1241 return pattern_lo;
1242 }
1243 else
1244 {
1245 output_asm_insn (pattern_lo, lo);
1246 operands[0] = hi[0];
1247 operands[1] = hi[1];
1248 operands[2] = hi[2];
1249 return pattern_hi;
1250 }
1251 }
1252 return "movq %1,%0";
1253
1254 case SImode:
1255 if (symbolic_operand (operands[1], SImode))
1256 {
1257 if (push_operand (operands[0], SImode))
1258 return "pushab %a1";
1259 return "movab %a1,%0";
1260 }
1261
1262 if (operands[1] == const0_rtx)
1263 {
1264 if (push_operand (operands[1], SImode))
1265 return "pushl %1";
1266 return "clrl %0";
1267 }
1268
1269 if (CONST_INT_P (operands[1])
1270 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1271 {
1272 HOST_WIDE_INT i = INTVAL (operands[1]);
1273 int n;
1274 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1275 return "mcoml %N1,%0";
1276 if ((unsigned HOST_WIDE_INT)i < 0x100)
1277 return "movzbl %1,%0";
1278 if (i >= -0x80 && i < 0)
1279 return "cvtbl %1,%0";
1280 if (optimize_size
1281 && (n = exact_log2 (i & (-i))) != -1
1282 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1283 {
1284 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1285 operands[2] = GEN_INT (n);
1286 return "ashl %2,%1,%0";
1287 }
1288 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1289 return "movzwl %1,%0";
1290 if (i >= -0x8000 && i < 0)
1291 return "cvtwl %1,%0";
1292 }
1293 if (push_operand (operands[0], SImode))
1294 return "pushl %1";
1295 return "movl %1,%0";
1296
1297 case HImode:
1298 if (CONST_INT_P (operands[1]))
1299 {
1300 HOST_WIDE_INT i = INTVAL (operands[1]);
1301 if (i == 0)
1302 return "clrw %0";
1303 else if ((unsigned HOST_WIDE_INT)i < 64)
1304 return "movw %1,%0";
1305 else if ((unsigned HOST_WIDE_INT)~i < 64)
1306 return "mcomw %H1,%0";
1307 else if ((unsigned HOST_WIDE_INT)i < 256)
1308 return "movzbw %1,%0";
1309 else if (i >= -0x80 && i < 0)
1310 return "cvtbw %1,%0";
1311 }
1312 return "movw %1,%0";
1313
1314 case QImode:
1315 if (CONST_INT_P (operands[1]))
1316 {
1317 HOST_WIDE_INT i = INTVAL (operands[1]);
1318 if (i == 0)
1319 return "clrb %0";
1320 else if ((unsigned HOST_WIDE_INT)~i < 64)
1321 return "mcomb %B1,%0";
1322 }
1323 return "movb %1,%0";
1324
1325 default:
1326 gcc_unreachable ();
1327 }
1328 }
1329
1330 /* Output integer add instructions.
1331
1332 The space-time-opcode tradeoffs for addition vary by model of VAX.
1333
1334 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1335 but it not faster on other models.
1336
1337 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1338 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1339 a register is used in an address too soon after it is set.
1340 Compromise by using movab only when it is shorter than the add
1341 or the base register in the address is one of sp, ap, and fp,
1342 which are not modified very often. */
1343
1344 const char *
1345 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1346 {
1347 switch (mode)
1348 {
1349 case DImode:
1350 {
1351 rtx low[3];
1352 const char *pattern;
1353 int carry = 1;
1354 bool sub;
1355
1356 if (TARGET_QMATH && 0)
1357 debug_rtx (insn);
1358
1359 split_quadword_operands (insn, PLUS, operands, low, 3);
1360
1361 if (TARGET_QMATH)
1362 {
1363 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1364 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1365 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1366 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1367 #endif
1368
1369 /* No reason to add a 0 to the low part and thus no carry, so just
1370 emit the appropriate add/sub instruction. */
1371 if (low[2] == const0_rtx)
1372 return vax_output_int_add (NULL, operands, SImode);
1373
1374 /* Are we doing addition or subtraction? */
1375 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1376
1377 /* We can't use vax_output_int_add since some the patterns don't
1378 modify the carry bit. */
1379 if (sub)
1380 {
1381 if (low[2] == constm1_rtx)
1382 pattern = "decl %0";
1383 else
1384 pattern = "subl2 $%n2,%0";
1385 }
1386 else
1387 {
1388 if (low[2] == const1_rtx)
1389 pattern = "incl %0";
1390 else
1391 pattern = "addl2 %2,%0";
1392 }
1393 output_asm_insn (pattern, low);
1394
1395 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1396 two 32bit parts, we complement each and then add one to
1397 low part. We know that the low part can't overflow since
1398 it's value can never be 0. */
1399 if (sub)
1400 return "sbwc %N2,%0";
1401 return "adwc %2,%0";
1402 }
1403
1404 /* Add low parts. */
1405 if (rtx_equal_p (operands[0], operands[1]))
1406 {
1407 if (low[2] == const0_rtx)
1408 /* Should examine operand, punt if not POST_INC. */
1409 pattern = "tstl %0", carry = 0;
1410 else if (low[2] == const1_rtx)
1411 pattern = "incl %0";
1412 else
1413 pattern = "addl2 %2,%0";
1414 }
1415 else
1416 {
1417 if (low[2] == const0_rtx)
1418 pattern = "movl %1,%0", carry = 0;
1419 else
1420 pattern = "addl3 %2,%1,%0";
1421 }
1422 if (pattern)
1423 output_asm_insn (pattern, low);
1424 if (!carry)
1425 /* If CARRY is 0, we don't have any carry value to worry about. */
1426 return get_insn_template (CODE_FOR_addsi3, insn);
1427 /* %0 = C + %1 + %2 */
1428 if (!rtx_equal_p (operands[0], operands[1]))
1429 output_asm_insn ((operands[1] == const0_rtx
1430 ? "clrl %0"
1431 : "movl %1,%0"), operands);
1432 return "adwc %2,%0";
1433 }
1434
1435 case SImode:
1436 if (rtx_equal_p (operands[0], operands[1]))
1437 {
1438 if (operands[2] == const1_rtx)
1439 return "incl %0";
1440 if (operands[2] == constm1_rtx)
1441 return "decl %0";
1442 if (CONST_INT_P (operands[2])
1443 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1444 return "subl2 $%n2,%0";
1445 if (CONST_INT_P (operands[2])
1446 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1447 && REG_P (operands[1])
1448 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1449 || REGNO (operands[1]) > 11))
1450 return "movab %c2(%1),%0";
1451 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1452 return "movab %a2[%0],%0";
1453 return "addl2 %2,%0";
1454 }
1455
1456 if (rtx_equal_p (operands[0], operands[2]))
1457 {
1458 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1459 return "movab %a1[%0],%0";
1460 return "addl2 %1,%0";
1461 }
1462
1463 if (CONST_INT_P (operands[2])
1464 && INTVAL (operands[2]) < 32767
1465 && INTVAL (operands[2]) > -32768
1466 && REG_P (operands[1])
1467 && push_operand (operands[0], SImode))
1468 return "pushab %c2(%1)";
1469
1470 if (CONST_INT_P (operands[2])
1471 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1472 return "subl3 $%n2,%1,%0";
1473
1474 if (CONST_INT_P (operands[2])
1475 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1476 && REG_P (operands[1])
1477 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1478 || REGNO (operands[1]) > 11))
1479 return "movab %c2(%1),%0";
1480
1481 /* Add this if using gcc on a VAX 3xxx:
1482 if (REG_P (operands[1]) && REG_P (operands[2]))
1483 return "movab (%1)[%2],%0";
1484 */
1485
1486 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1487 {
1488 if (push_operand (operands[0], SImode))
1489 return "pushab %a2[%1]";
1490 return "movab %a2[%1],%0";
1491 }
1492
1493 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1494 {
1495 if (push_operand (operands[0], SImode))
1496 return "pushab %a1[%2]";
1497 return "movab %a1[%2],%0";
1498 }
1499
1500 if (flag_pic && REG_P (operands[0])
1501 && symbolic_operand (operands[2], SImode))
1502 return "movab %a2,%0;addl2 %1,%0";
1503
1504 if (flag_pic
1505 && (symbolic_operand (operands[1], SImode)
1506 || symbolic_operand (operands[1], SImode)))
1507 debug_rtx (insn);
1508
1509 return "addl3 %1,%2,%0";
1510
1511 case HImode:
1512 if (rtx_equal_p (operands[0], operands[1]))
1513 {
1514 if (operands[2] == const1_rtx)
1515 return "incw %0";
1516 if (operands[2] == constm1_rtx)
1517 return "decw %0";
1518 if (CONST_INT_P (operands[2])
1519 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1520 return "subw2 $%n2,%0";
1521 return "addw2 %2,%0";
1522 }
1523 if (rtx_equal_p (operands[0], operands[2]))
1524 return "addw2 %1,%0";
1525 if (CONST_INT_P (operands[2])
1526 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1527 return "subw3 $%n2,%1,%0";
1528 return "addw3 %1,%2,%0";
1529
1530 case QImode:
1531 if (rtx_equal_p (operands[0], operands[1]))
1532 {
1533 if (operands[2] == const1_rtx)
1534 return "incb %0";
1535 if (operands[2] == constm1_rtx)
1536 return "decb %0";
1537 if (CONST_INT_P (operands[2])
1538 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1539 return "subb2 $%n2,%0";
1540 return "addb2 %2,%0";
1541 }
1542 if (rtx_equal_p (operands[0], operands[2]))
1543 return "addb2 %1,%0";
1544 if (CONST_INT_P (operands[2])
1545 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1546 return "subb3 $%n2,%1,%0";
1547 return "addb3 %1,%2,%0";
1548
1549 default:
1550 gcc_unreachable ();
1551 }
1552 }
1553
/* Output integer subtract instructions.  Only DImode arrives here; the
   quadword subtraction is synthesized from two longword operations with
   an explicit borrow (sbwc).  */

const char *
vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;	/* Nonzero if the high part must subtract a borrow.  */

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	/* After this call OPERANDS holds the high words and LOW the low
	   words of each quadword operand.  */
	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    /* Both halves of operand 1 are zero: this is %0 = -%2.  */
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    /* Otherwise the qmath patterns guarantee two-operand form.  */
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    /* High part, minus the borrow from the low part.  */
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;	/* Low part is unchanged; no borrow.  */
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
    }
}
1628
1629 /* True if X is an rtx for a constant that is a valid address. */
1630
1631 bool
1632 legitimate_constant_address_p (rtx x)
1633 {
1634 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1635 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1636 return true;
1637 if (GET_CODE (x) != CONST)
1638 return false;
1639 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1640 if (flag_pic
1641 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1642 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1643 return false;
1644 #endif
1645 return true;
1646 }
1647
/* The other macros defined here are used only in vax_legitimate_address_p ()
   and its helper predicates below.  */
1649
1650 /* Nonzero if X is a hard reg that can be used as an index
1651 or, if not strict, if it is a pseudo reg. */
1652 #define INDEX_REGISTER_P(X, STRICT) \
1653 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1654
1655 /* Nonzero if X is a hard reg that can be used as a base reg
1656 or, if not strict, if it is a pseudo reg. */
1657 #define BASE_REGISTER_P(X, STRICT) \
1658 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1659
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  /* A symbol is acceptable unless PIC is on, the symbol is non-local,
     and the access would be indirect.  */
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  /* For (const ...) expressions, apply the same locality test to the
     inner symbol, if any.  */
  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* Without the restriction, any constant address may be indirected.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1688
1689 /* True if X is an address which can be indirected. External symbols
1690 could be in a sharable image library, so we disallow those. */
1691
1692 static bool
1693 indirectable_address_p (rtx x, bool strict, bool indirect)
1694 {
1695 if (indirectable_constant_address_p (x, indirect)
1696 || BASE_REGISTER_P (x, strict))
1697 return true;
1698 if (GET_CODE (x) != PLUS
1699 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1700 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1701 return false;
1702 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1703 }
1704
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      /* During reload a pseudo may stand for a stack slot; accept the
	 register unless its memory equivalent exists and is itself not
	 an indirectable address.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* NOTE(review): XEXP (x, 0) is read before X's code is checked; the
     value is only used inside the MEM / PRE_DEC / POST_INC branches
     below, for which operand 0 is well-defined.  */
  xfoo0 = XEXP (x, 0);
  /* A MEM is indirect addressing: its inner address must be indirectable.  */
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  /* Auto-decrement/increment through a base register.  */
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1730
1731 /* True if PROD is either a reg times size of mode MODE and MODE is less
1732 than or equal 8 bytes, or just a reg if MODE is one byte. */
1733
1734 static bool
1735 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1736 {
1737 rtx xfoo0, xfoo1;
1738
1739 if (GET_MODE_SIZE (mode) == 1)
1740 return BASE_REGISTER_P (prod, strict);
1741
1742 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1743 return false;
1744
1745 xfoo0 = XEXP (prod, 0);
1746 xfoo1 = XEXP (prod, 1);
1747
1748 if (CONST_INT_P (xfoo0)
1749 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1750 && INDEX_REGISTER_P (xfoo1, strict))
1751 return true;
1752
1753 if (CONST_INT_P (xfoo1)
1754 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1755 && INDEX_REGISTER_P (xfoo0, strict))
1756 return true;
1757
1758 return false;
1759 }
1760
1761 /* Return true if X is the sum of a register
1762 and a valid index term for mode MODE. */
1763 static bool
1764 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1765 {
1766 rtx xfoo0, xfoo1;
1767
1768 if (GET_CODE (x) != PLUS)
1769 return false;
1770
1771 xfoo0 = XEXP (x, 0);
1772 xfoo1 = XEXP (x, 1);
1773
1774 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1775 return true;
1776
1777 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1778 return true;
1779
1780 return false;
1781 }
1782
1783 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1784 static bool
1785 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1786 {
1787 if (!CONSTANT_ADDRESS_P (xfoo0))
1788 return false;
1789 if (BASE_REGISTER_P (xfoo1, strict))
1790 return !flag_pic || mode == QImode;
1791 if (flag_pic && symbolic_operand (xfoo0, SImode))
1792 return false;
1793 return reg_plus_index_p (xfoo1, mode, strict);
1794 }
1795
1796 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1797 that is a valid memory address for an instruction.
1798 The MODE argument is the machine mode for the MEM expression
1799 that wants to use this address. */
1800 bool
1801 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1802 {
1803 rtx xfoo0, xfoo1;
1804
1805 if (nonindexed_address_p (x, strict))
1806 return true;
1807
1808 if (GET_CODE (x) != PLUS)
1809 return false;
1810
1811 /* Handle <address>[index] represented with index-sum outermost */
1812
1813 xfoo0 = XEXP (x, 0);
1814 xfoo1 = XEXP (x, 1);
1815
1816 if (index_term_p (xfoo0, mode, strict)
1817 && nonindexed_address_p (xfoo1, strict))
1818 return true;
1819
1820 if (index_term_p (xfoo1, mode, strict)
1821 && nonindexed_address_p (xfoo0, strict))
1822 return true;
1823
1824 /* Handle offset(reg)[index] with offset added outermost */
1825
1826 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1827 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1828 return true;
1829
1830 return false;
1831 }
1832
1833 /* Return true if x (a legitimate address expression) has an effect that
1834 depends on the machine mode it is used for. On the VAX, the predecrement
1835 and postincrement address depend thus (the amount of decrement or
1836 increment being the length of the operand) and all indexed address depend
1837 thus (because the index scale factor is the length of the operand). */
1838
1839 static bool
1840 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1841 {
1842 rtx xfoo0, xfoo1;
1843
1844 /* Auto-increment cases are now dealt with generically in recog.c. */
1845 if (GET_CODE (x) != PLUS)
1846 return false;
1847
1848 xfoo0 = XEXP (x, 0);
1849 xfoo1 = XEXP (x, 1);
1850
1851 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1852 return false;
1853 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1854 return false;
1855 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1856 return false;
1857 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1858 return false;
1859
1860 return true;
1861 }
1862
/* If X is a DImode memory operand that the add/sub patterns cannot
   handle directly (see illegal_addsub_di_memory_operand), load its
   address into a fresh pseudo and return a MEM through that register;
   otherwise return X unchanged.
   NOTE(review): the rewritten MEM is always DImode regardless of MODE;
   every caller in this file passes DImode.  */
static rtx
fixup_mathdi_operand (rtx x, enum machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      /* Under PIC, split a (const (plus sym off)) address so that only
	 the symbol is loaded; the offset is re-added below.  */
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
1885
1886 void
1887 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1888 {
1889 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1890 rtx temp;
1891
1892 rtx (*gen_old_insn)(rtx, rtx, rtx);
1893 rtx (*gen_si_insn)(rtx, rtx, rtx);
1894 rtx (*gen_insn)(rtx, rtx, rtx);
1895
1896 if (code == PLUS)
1897 {
1898 gen_old_insn = gen_adddi3_old;
1899 gen_si_insn = gen_addsi3;
1900 gen_insn = gen_adcdi3;
1901 }
1902 else if (code == MINUS)
1903 {
1904 gen_old_insn = gen_subdi3_old;
1905 gen_si_insn = gen_subsi3;
1906 gen_insn = gen_sbcdi3;
1907 }
1908 else
1909 gcc_unreachable ();
1910
1911 /* If this is addition (thus operands are commutative) and if there is one
1912 addend that duplicates the desination, we want that addend to be the
1913 first addend. */
1914 if (code == PLUS
1915 && rtx_equal_p (operands[0], operands[2])
1916 && !rtx_equal_p (operands[1], operands[2]))
1917 {
1918 temp = operands[2];
1919 operands[2] = operands[1];
1920 operands[1] = temp;
1921 }
1922
1923 if (!TARGET_QMATH)
1924 {
1925 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1926 }
1927 else if (hi_only)
1928 {
1929 if (!rtx_equal_p (operands[0], operands[1])
1930 && (REG_P (operands[0]) && MEM_P (operands[1])))
1931 {
1932 emit_move_insn (operands[0], operands[1]);
1933 operands[1] = operands[0];
1934 }
1935
1936 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1937 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1938 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1939
1940 if (!rtx_equal_p (operands[0], operands[1]))
1941 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1942 operand_subword (operands[1], 0, 0, DImode));
1943
1944 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1945 operand_subword (operands[1], 1, 0, DImode),
1946 operand_subword (operands[2], 1, 0, DImode)));
1947 }
1948 else
1949 {
1950 /* If are adding the same value together, that's really a multiply by 2,
1951 and that's just a left shift of 1. */
1952 if (rtx_equal_p (operands[1], operands[2]))
1953 {
1954 gcc_assert (code != MINUS);
1955 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1956 return;
1957 }
1958
1959 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1960
1961 /* If an operand is the same as operand[0], use the operand[0] rtx
1962 because fixup will an equivalent rtx but not an equal one. */
1963
1964 if (rtx_equal_p (operands[0], operands[1]))
1965 operands[1] = operands[0];
1966 else
1967 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1968
1969 if (rtx_equal_p (operands[0], operands[2]))
1970 operands[2] = operands[0];
1971 else
1972 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1973
1974 /* If we are subtracting not from ourselves [d = a - b], and because the
1975 carry ops are two operand only, we would need to do a move prior to
1976 the subtract. And if d == b, we would need a temp otherwise
1977 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1978 into d = -b, d += a. Since -b can never overflow, even if b == d,
1979 no temp is needed.
1980
1981 If we are doing addition, since the carry ops are two operand, if
1982 we aren't adding to ourselves, move the first addend to the
1983 destination first. */
1984
1985 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1986 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1987 {
1988 if (code == MINUS && CONSTANT_P (operands[1]))
1989 {
1990 temp = gen_reg_rtx (DImode);
1991 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1992 code = PLUS;
1993 gen_insn = gen_adcdi3;
1994 operands[2] = operands[1];
1995 operands[1] = operands[0];
1996 }
1997 else
1998 emit_move_insn (operands[0], operands[1]);
1999 }
2000
2001 /* Subtracting a constant will have been rewritten to an addition of the
2002 negative of that constant before we get here. */
2003 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2004 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2005 }
2006 }
2007
2008 bool
2009 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
2010 {
2011 HOST_WIDE_INT lo_offset;
2012 HOST_WIDE_INT hi_offset;
2013
2014 if (GET_CODE (lo) != GET_CODE (hi))
2015 return false;
2016
2017 if (REG_P (lo))
2018 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2019 if (CONST_INT_P (lo))
2020 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2021 if (CONST_INT_P (lo))
2022 return mode != SImode;
2023
2024 if (!MEM_P (lo))
2025 return false;
2026
2027 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2028 return false;
2029
2030 lo = XEXP (lo, 0);
2031 hi = XEXP (hi, 0);
2032
2033 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2034 return rtx_equal_p (lo, hi);
2035
2036 switch (GET_CODE (lo))
2037 {
2038 case REG:
2039 case SYMBOL_REF:
2040 lo_offset = 0;
2041 break;
2042 case CONST:
2043 lo = XEXP (lo, 0);
2044 /* FALLTHROUGH */
2045 case PLUS:
2046 if (!CONST_INT_P (XEXP (lo, 1)))
2047 return false;
2048 lo_offset = INTVAL (XEXP (lo, 1));
2049 lo = XEXP (lo, 0);
2050 break;
2051 default:
2052 return false;
2053 }
2054
2055 switch (GET_CODE (hi))
2056 {
2057 case REG:
2058 case SYMBOL_REF:
2059 hi_offset = 0;
2060 break;
2061 case CONST:
2062 hi = XEXP (hi, 0);
2063 /* FALLTHROUGH */
2064 case PLUS:
2065 if (!CONST_INT_P (XEXP (hi, 1)))
2066 return false;
2067 hi_offset = INTVAL (XEXP (hi, 1));
2068 hi = XEXP (hi, 0);
2069 break;
2070 default:
2071 return false;
2072 }
2073
2074 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2075 return false;
2076
2077 return rtx_equal_p (lo, hi)
2078 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2079 }
2080
/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.  */

/* On the VAX, the trampoline contains an entry mask and two instructions:
     .word NN
     movl $STATIC,r0 (store the functions static chain)
     jmp  *$FUNCTION (jump to function code at address FUNCTION)  */

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Entry mask placeholder; vax_trampoline_init copies the real mask
     from the target function.  */
  assemble_aligned_integer (2, const0_rtx);
  /* movl opcode (0xd0) followed by the immediate operand specifier
     (0x8f), little-endian.  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* 4-byte immediate: static chain value, patched at init time.  */
  assemble_aligned_integer (4, const0_rtx);
  /* Register-mode operand specifier (0x50 + regno) naming the static
     chain register as the movl destination.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* jmp opcode (0x17) followed by the absolute-deferred operand
     specifier (0x9f), little-endian.  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* 4-byte absolute address: jump target, patched at init time.  */
  assemble_aligned_integer (4, const0_rtx);
}
2099
/* We copy the register-mask from the function's pure code
   to the start of the trampoline.  */

static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Lay down the constant template emitted by
     vax_asm_trampoline_template above.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Copy the target function's 2-byte entry mask into the trampoline's
     own entry-mask slot at offset 0.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Patch the static chain into the movl immediate at offset 4.  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Patch the jump target at offset 11 (2-byte mask + 7-byte movl +
     2-byte jmp opcode/specifier).  fnaddr + 2 skips the target's entry
     mask, since the trampoline's own mask was already processed.  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* Flush the stores into the instruction stream.  */
  emit_insn (gen_sync_istream ());
}
2121
2122 /* Value is the number of bytes of arguments automatically
2123 popped when returning from a subroutine call.
2124 FUNDECL is the declaration node of the function (as a tree),
2125 FUNTYPE is the data type of the function (as a tree),
2126 or for a library call it is an identifier node for the subroutine name.
2127 SIZE is the number of bytes of arguments passed on the stack.
2128
2129 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2130
2131 static int
2132 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2133 tree funtype ATTRIBUTE_UNUSED, int size)
2134 {
2135 return size > 255 * 4 ? 0 : size;
2136 }
2137
2138 /* Define where to put the arguments to a function.
2139 Value is zero to push the argument on the stack,
2140 or a hard register in which to store the argument.
2141
2142 MODE is the argument's machine mode.
2143 TYPE is the data type of the argument (as a tree).
2144 This is null for libcalls where that information may
2145 not be available.
2146 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2147 the preceding args and about the function being called.
2148 NAMED is nonzero if this argument is a named parameter
2149 (otherwise it is an extra parameter matching an ellipsis). */
2150
2151 /* On the VAX all args are pushed. */
2152
2153 static rtx
2154 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
2155 enum machine_mode mode ATTRIBUTE_UNUSED,
2156 const_tree type ATTRIBUTE_UNUSED,
2157 bool named ATTRIBUTE_UNUSED)
2158 {
2159 return NULL_RTX;
2160 }
2161
2162 /* Update the data in CUM to advance over an argument of mode MODE and
2163 data type TYPE. (TYPE is null for libcalls where that information
2164 may not be available.) */
2165
2166 static void
2167 vax_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2168 const_tree type, bool named ATTRIBUTE_UNUSED)
2169 {
2170 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2171
2172 *cum += (mode != BLKmode
2173 ? (GET_MODE_SIZE (mode) + 3) & ~3
2174 : (int_size_in_bytes (type) + 3) & ~3);
2175 }