1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
34 #include "conditions.h"
40 /* This file should be included last. */
41 #include "target-def.h"
43 static void vax_option_override (void);
44 static bool vax_legitimate_address_p (machine_mode
, rtx
, bool);
45 static void vax_file_start (void);
46 static void vax_init_libfuncs (void);
47 static void vax_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
49 static int vax_address_cost_1 (rtx
);
50 static int vax_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
51 static bool vax_rtx_costs (rtx
, machine_mode
, int, int, int *, bool);
52 static rtx
vax_function_arg (cumulative_args_t
, machine_mode
,
54 static void vax_function_arg_advance (cumulative_args_t
, machine_mode
,
56 static rtx
vax_struct_value_rtx (tree
, int);
57 static rtx
vax_builtin_setjmp_frame_value (void);
58 static void vax_asm_trampoline_template (FILE *);
59 static void vax_trampoline_init (rtx
, tree
, rtx
);
60 static int vax_return_pops_args (tree
, tree
, int);
61 static bool vax_mode_dependent_address_p (const_rtx
, addr_space_t
);
63 /* Initialize the GCC target structure. */
64 #undef TARGET_ASM_ALIGNED_HI_OP
65 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
67 #undef TARGET_ASM_FILE_START
68 #define TARGET_ASM_FILE_START vax_file_start
69 #undef TARGET_ASM_FILE_START_APP_OFF
70 #define TARGET_ASM_FILE_START_APP_OFF true
72 #undef TARGET_INIT_LIBFUNCS
73 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
75 #undef TARGET_ASM_OUTPUT_MI_THUNK
76 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
77 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
78 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
80 #undef TARGET_RTX_COSTS
81 #define TARGET_RTX_COSTS vax_rtx_costs
82 #undef TARGET_ADDRESS_COST
83 #define TARGET_ADDRESS_COST vax_address_cost
85 #undef TARGET_PROMOTE_PROTOTYPES
86 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
88 #undef TARGET_FUNCTION_ARG
89 #define TARGET_FUNCTION_ARG vax_function_arg
90 #undef TARGET_FUNCTION_ARG_ADVANCE
91 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
93 #undef TARGET_STRUCT_VALUE_RTX
94 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
96 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
97 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
99 #undef TARGET_LEGITIMATE_ADDRESS_P
100 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
101 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
102 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
104 #undef TARGET_FRAME_POINTER_REQUIRED
105 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
107 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
108 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
109 #undef TARGET_TRAMPOLINE_INIT
110 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
111 #undef TARGET_RETURN_POPS_ARGS
112 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
114 #undef TARGET_OPTION_OVERRIDE
115 #define TARGET_OPTION_OVERRIDE vax_option_override
117 struct gcc_target targetm
= TARGET_INITIALIZER
;
119 /* Set global variables as needed for the options enabled. */
122 vax_option_override (void)
124 /* We're VAX floating point, not IEEE floating point. */
126 REAL_MODE_FORMAT (DFmode
) = &vax_g_format
;
128 #ifdef SUBTARGET_OVERRIDE_OPTIONS
129 SUBTARGET_OVERRIDE_OPTIONS
;
134 vax_add_reg_cfa_offset (rtx insn
, int offset
, rtx src
)
138 x
= plus_constant (Pmode
, frame_pointer_rtx
, offset
);
139 x
= gen_rtx_MEM (SImode
, x
);
140 x
= gen_rtx_SET (x
, src
);
141 add_reg_note (insn
, REG_CFA_OFFSET
, x
);
144 /* Generate the assembly code for function entry. FILE is a stdio
145 stream to output the code to. SIZE is an int: how many units of
146 temporary storage to allocate.
148 Refer to the array `regs_ever_live' to determine which registers to
149 save; `regs_ever_live[I]' is nonzero if register number I is ever
150 used in the function. This function is responsible for knowing
151 which registers should not be saved even if used. */
154 vax_expand_prologue (void)
161 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
162 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
165 insn
= emit_insn (gen_procedure_entry_mask (GEN_INT (mask
)));
166 RTX_FRAME_RELATED_P (insn
) = 1;
168 /* The layout of the CALLG/S stack frame is follows:
173 ... Registers saved as specified by MASK
183 The rest of the prologue will adjust the SP for the local frame. */
185 vax_add_reg_cfa_offset (insn
, 4, arg_pointer_rtx
);
186 vax_add_reg_cfa_offset (insn
, 8, frame_pointer_rtx
);
187 vax_add_reg_cfa_offset (insn
, 12, pc_rtx
);
190 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
191 if (mask
& (1 << regno
))
193 vax_add_reg_cfa_offset (insn
, offset
, gen_rtx_REG (SImode
, regno
));
197 /* Because add_reg_note pushes the notes, adding this last means that
198 it will be processed first. This is required to allow the other
199 notes be interpreted properly. */
200 add_reg_note (insn
, REG_CFA_DEF_CFA
,
201 plus_constant (Pmode
, frame_pointer_rtx
, offset
));
203 /* Allocate the local stack frame. */
204 size
= get_frame_size ();
205 size
-= STARTING_FRAME_OFFSET
;
206 emit_insn (gen_addsi3 (stack_pointer_rtx
,
207 stack_pointer_rtx
, GEN_INT (-size
)));
209 /* Do not allow instructions referencing local stack memory to be
210 scheduled before the frame is allocated. This is more pedantic
211 than anything else, given that VAX does not currently have a
212 scheduling description. */
213 emit_insn (gen_blockage ());
216 /* When debugging with stabs, we want to output an extra dummy label
217 so that gas can distinguish between D_float and G_float prior to
218 processing the .stabs directive identifying type double. */
220 vax_file_start (void)
222 default_file_start ();
224 if (write_symbols
== DBX_DEBUG
)
225 fprintf (asm_out_file
, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR
);
228 /* We can use the BSD C library routines for the libgcc calls that are
229 still generated, since that's what they boil down to anyways. When
230 ELF, avoid the user's namespace. */
233 vax_init_libfuncs (void)
235 if (TARGET_BSD_DIVMOD
)
237 set_optab_libfunc (udiv_optab
, SImode
, TARGET_ELF
? "*__udiv" : "*udiv");
238 set_optab_libfunc (umod_optab
, SImode
, TARGET_ELF
? "*__urem" : "*urem");
242 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
245 split_quadword_operands (rtx insn
, enum rtx_code code
, rtx
* operands
,
250 for (i
= 0; i
< n
; i
++)
253 for (i
= 0; i
< n
; i
++)
255 if (MEM_P (operands
[i
])
256 && (GET_CODE (XEXP (operands
[i
], 0)) == PRE_DEC
257 || GET_CODE (XEXP (operands
[i
], 0)) == POST_INC
))
259 rtx addr
= XEXP (operands
[i
], 0);
260 operands
[i
] = low
[i
] = gen_rtx_MEM (SImode
, addr
);
262 else if (optimize_size
&& MEM_P (operands
[i
])
263 && REG_P (XEXP (operands
[i
], 0))
264 && (code
!= MINUS
|| operands
[1] != const0_rtx
)
265 && find_regno_note (insn
, REG_DEAD
,
266 REGNO (XEXP (operands
[i
], 0))))
268 low
[i
] = gen_rtx_MEM (SImode
,
269 gen_rtx_POST_INC (Pmode
,
270 XEXP (operands
[i
], 0)));
271 operands
[i
] = gen_rtx_MEM (SImode
, XEXP (operands
[i
], 0));
275 low
[i
] = operand_subword (operands
[i
], 0, 0, DImode
);
276 operands
[i
] = operand_subword (operands
[i
], 1, 0, DImode
);
282 print_operand_address (FILE * file
, rtx addr
)
285 rtx reg1
, breg
, ireg
;
289 switch (GET_CODE (addr
))
293 addr
= XEXP (addr
, 0);
297 fprintf (file
, "(%s)", reg_names
[REGNO (addr
)]);
301 fprintf (file
, "-(%s)", reg_names
[REGNO (XEXP (addr
, 0))]);
305 fprintf (file
, "(%s)+", reg_names
[REGNO (XEXP (addr
, 0))]);
309 /* There can be either two or three things added here. One must be a
310 REG. One can be either a REG or a MULT of a REG and an appropriate
311 constant, and the third can only be a constant or a MEM.
313 We get these two or three things and put the constant or MEM in
314 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
315 a register and can't tell yet if it is a base or index register,
318 reg1
= 0; ireg
= 0; breg
= 0; offset
= 0;
320 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
321 || MEM_P (XEXP (addr
, 0)))
323 offset
= XEXP (addr
, 0);
324 addr
= XEXP (addr
, 1);
326 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
327 || MEM_P (XEXP (addr
, 1)))
329 offset
= XEXP (addr
, 1);
330 addr
= XEXP (addr
, 0);
332 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
334 ireg
= XEXP (addr
, 1);
335 addr
= XEXP (addr
, 0);
337 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
339 ireg
= XEXP (addr
, 0);
340 addr
= XEXP (addr
, 1);
342 else if (REG_P (XEXP (addr
, 1)))
344 reg1
= XEXP (addr
, 1);
345 addr
= XEXP (addr
, 0);
347 else if (REG_P (XEXP (addr
, 0)))
349 reg1
= XEXP (addr
, 0);
350 addr
= XEXP (addr
, 1);
362 else if (GET_CODE (addr
) == MULT
)
366 gcc_assert (GET_CODE (addr
) == PLUS
);
367 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
368 || MEM_P (XEXP (addr
, 0)))
372 if (CONST_INT_P (offset
))
373 offset
= plus_constant (Pmode
, XEXP (addr
, 0),
377 gcc_assert (CONST_INT_P (XEXP (addr
, 0)));
378 offset
= plus_constant (Pmode
, offset
,
379 INTVAL (XEXP (addr
, 0)));
382 offset
= XEXP (addr
, 0);
384 else if (REG_P (XEXP (addr
, 0)))
387 ireg
= reg1
, breg
= XEXP (addr
, 0), reg1
= 0;
389 reg1
= XEXP (addr
, 0);
393 gcc_assert (GET_CODE (XEXP (addr
, 0)) == MULT
);
395 ireg
= XEXP (addr
, 0);
398 if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
399 || MEM_P (XEXP (addr
, 1)))
403 if (CONST_INT_P (offset
))
404 offset
= plus_constant (Pmode
, XEXP (addr
, 1),
408 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
409 offset
= plus_constant (Pmode
, offset
,
410 INTVAL (XEXP (addr
, 1)));
413 offset
= XEXP (addr
, 1);
415 else if (REG_P (XEXP (addr
, 1)))
418 ireg
= reg1
, breg
= XEXP (addr
, 1), reg1
= 0;
420 reg1
= XEXP (addr
, 1);
424 gcc_assert (GET_CODE (XEXP (addr
, 1)) == MULT
);
426 ireg
= XEXP (addr
, 1);
430 /* If REG1 is nonzero, figure out if it is a base or index register. */
434 || (flag_pic
&& GET_CODE (addr
) == SYMBOL_REF
)
437 || (flag_pic
&& symbolic_operand (offset
, SImode
)))))
448 if (flag_pic
&& symbolic_operand (offset
, SImode
))
453 output_operand_lossage ("symbol used with both base and indexed registers");
456 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
457 if (flag_pic
> 1 && GET_CODE (offset
) == CONST
458 && GET_CODE (XEXP (XEXP (offset
, 0), 0)) == SYMBOL_REF
459 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset
, 0), 0)))
462 output_operand_lossage ("symbol with offset used in PIC mode");
466 /* symbol(reg) isn't PIC, but symbol[reg] is. */
475 output_address (VOIDmode
, offset
);
479 fprintf (file
, "(%s)", reg_names
[REGNO (breg
)]);
483 if (GET_CODE (ireg
) == MULT
)
484 ireg
= XEXP (ireg
, 0);
485 gcc_assert (REG_P (ireg
));
486 fprintf (file
, "[%s]", reg_names
[REGNO (ireg
)]);
491 output_addr_const (file
, addr
);
496 print_operand (FILE *file
, rtx x
, int code
)
499 fputc (ASM_DOUBLE_CHAR
, file
);
500 else if (code
== '|')
501 fputs (REGISTER_PREFIX
, file
);
502 else if (code
== 'c')
503 fputs (cond_name (x
), file
);
504 else if (code
== 'C')
505 fputs (rev_cond_name (x
), file
);
506 else if (code
== 'D' && CONST_INT_P (x
) && INTVAL (x
) < 0)
507 fprintf (file
, "$" NEG_HWI_PRINT_HEX16
, INTVAL (x
));
508 else if (code
== 'P' && CONST_INT_P (x
))
509 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + 1);
510 else if (code
== 'N' && CONST_INT_P (x
))
511 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, ~ INTVAL (x
));
512 /* rotl instruction cannot deal with negative arguments. */
513 else if (code
== 'R' && CONST_INT_P (x
))
514 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, 32 - INTVAL (x
));
515 else if (code
== 'H' && CONST_INT_P (x
))
516 fprintf (file
, "$%d", (int) (0xffff & ~ INTVAL (x
)));
517 else if (code
== 'h' && CONST_INT_P (x
))
518 fprintf (file
, "$%d", (short) - INTVAL (x
));
519 else if (code
== 'B' && CONST_INT_P (x
))
520 fprintf (file
, "$%d", (int) (0xff & ~ INTVAL (x
)));
521 else if (code
== 'b' && CONST_INT_P (x
))
522 fprintf (file
, "$%d", (int) (0xff & - INTVAL (x
)));
523 else if (code
== 'M' && CONST_INT_P (x
))
524 fprintf (file
, "$%d", ~((1 << INTVAL (x
)) - 1));
525 else if (code
== 'x' && CONST_INT_P (x
))
526 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (x
));
528 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
530 output_address (GET_MODE (x
), XEXP (x
, 0));
531 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
534 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
535 sizeof (dstr
), 0, 1);
536 fprintf (file
, "$0f%s", dstr
);
538 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
541 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
542 sizeof (dstr
), 0, 1);
543 fprintf (file
, "$0%c%s", ASM_DOUBLE_CHAR
, dstr
);
547 if (flag_pic
> 1 && symbolic_operand (x
, SImode
))
550 output_operand_lossage ("symbol used as immediate operand");
553 output_addr_const (file
, x
);
560 switch (GET_CODE (op
))
589 rev_cond_name (rtx op
)
591 switch (GET_CODE (op
))
620 vax_float_literal (rtx c
)
623 const REAL_VALUE_TYPE
*r
;
627 if (GET_CODE (c
) != CONST_DOUBLE
)
632 if (c
== const_tiny_rtx
[(int) mode
][0]
633 || c
== const_tiny_rtx
[(int) mode
][1]
634 || c
== const_tiny_rtx
[(int) mode
][2])
637 r
= CONST_DOUBLE_REAL_VALUE (c
);
639 for (i
= 0; i
< 7; i
++)
643 real_from_integer (&s
, mode
, x
, SIGNED
);
645 if (real_equal (r
, &s
))
647 ok
= exact_real_inverse (mode
, &s
);
649 if (real_equal (r
, &s
))
656 /* Return the cost in cycles of a memory address, relative to register
659 Each of the following adds the indicated number of cycles:
663 1 - indexing and/or offset(register)
668 vax_address_cost_1 (rtx addr
)
670 int reg
= 0, indexed
= 0, indir
= 0, offset
= 0, predec
= 0;
671 rtx plus_op0
= 0, plus_op1
= 0;
673 switch (GET_CODE (addr
))
683 indexed
= 1; /* 2 on VAX 2 */
686 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
688 offset
= (unsigned HOST_WIDE_INT
)(INTVAL(addr
)+128) > 256;
692 offset
= 1; /* 2 on VAX 2 */
694 case LABEL_REF
: /* this is probably a byte offset from the pc */
700 plus_op1
= XEXP (addr
, 0);
702 plus_op0
= XEXP (addr
, 0);
703 addr
= XEXP (addr
, 1);
706 indir
= 2; /* 3 on VAX 2 */
707 addr
= XEXP (addr
, 0);
713 /* Up to 3 things can be added in an address. They are stored in
714 plus_op0, plus_op1, and addr. */
728 /* Indexing and register+offset can both be used (except on a VAX 2)
729 without increasing execution time over either one alone. */
730 if (reg
&& indexed
&& offset
)
731 return reg
+ indir
+ offset
+ predec
;
732 return reg
+ indexed
+ indir
+ offset
+ predec
;
736 vax_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
737 addr_space_t as ATTRIBUTE_UNUSED
,
738 bool speed ATTRIBUTE_UNUSED
)
740 return (1 + (REG_P (x
) ? 0 : vax_address_cost_1 (x
)));
743 /* Cost of an expression on a VAX. This version has costs tuned for the
744 CVAX chip (found in the VAX 3 series) with comments for variations on
747 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
748 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
749 costs on a per cpu basis. */
752 vax_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
753 int opno ATTRIBUTE_UNUSED
,
754 int *total
, bool speed ATTRIBUTE_UNUSED
)
756 int code
= GET_CODE (x
);
757 int i
= 0; /* may be modified in switch */
758 const char *fmt
= GET_RTX_FORMAT (code
); /* may be modified in switch */
762 /* On a VAX, constants from 0..63 are cheap because they can use the
763 1 byte literal constant format. Compare to -1 should be made cheap
764 so that decrement-and-branch insns can be formed more easily (if
765 the value -1 is copied to a register some decrement-and-branch
766 patterns will not match). */
773 if (outer_code
== AND
)
775 *total
= ((unsigned HOST_WIDE_INT
) ~INTVAL (x
) <= 077) ? 1 : 2;
778 if ((unsigned HOST_WIDE_INT
) INTVAL (x
) <= 077
779 || (outer_code
== COMPARE
781 || ((outer_code
== PLUS
|| outer_code
== MINUS
)
782 && (unsigned HOST_WIDE_INT
) -INTVAL (x
) <= 077))
796 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
797 *total
= vax_float_literal (x
) ? 5 : 8;
799 *total
= ((CONST_DOUBLE_HIGH (x
) == 0
800 && (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
) < 64)
801 || (outer_code
== PLUS
802 && CONST_DOUBLE_HIGH (x
) == -1
803 && (unsigned HOST_WIDE_INT
)-CONST_DOUBLE_LOW (x
) < 64))
809 return true; /* Implies register operand. */
813 return true; /* Implies register operand. */
819 *total
= 16; /* 4 on VAX 9000 */
822 *total
= 9; /* 4 on VAX 9000, 12 on VAX 2 */
825 *total
= 16; /* 6 on VAX 9000, 28 on VAX 2 */
830 *total
= 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
833 *total
= MAX_COST
; /* Mode is not supported. */
841 *total
= MAX_COST
; /* Mode is not supported. */
849 *total
= 30; /* Highly variable. */
850 else if (mode
== DFmode
)
851 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
854 *total
= 11; /* 25 on VAX 2 */
864 *total
= MAX_COST
; /* Mode is not supported. */
871 *total
= (6 /* 4 on VAX 9000 */
872 + (mode
== DFmode
) + (GET_MODE (XEXP (x
, 0)) != SImode
));
876 *total
= 7; /* 17 on VAX 2 */
885 *total
= 10; /* 6 on VAX 9000 */
890 *total
= 6; /* 5 on VAX 2, 4 on VAX 9000 */
891 if (CONST_INT_P (XEXP (x
, 1)))
892 fmt
= "e"; /* all constant rotate counts are short */
897 *total
= (mode
== DFmode
) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
898 /* Small integer operands can use subl2 and addl2. */
899 if ((CONST_INT_P (XEXP (x
, 1)))
900 && (unsigned HOST_WIDE_INT
)(INTVAL (XEXP (x
, 1)) + 63) < 127)
910 /* AND is special because the first operand is complemented. */
912 if (CONST_INT_P (XEXP (x
, 0)))
914 if ((unsigned HOST_WIDE_INT
)~INTVAL (XEXP (x
, 0)) > 63)
924 else if (mode
== SFmode
)
926 else if (mode
== DImode
)
942 if (mode
== DImode
|| mode
== DFmode
)
943 *total
= 5; /* 7 on VAX 2 */
945 *total
= 3; /* 4 on VAX 2 */
947 if (!REG_P (x
) && GET_CODE (x
) != POST_INC
)
948 *total
+= vax_address_cost_1 (x
);
954 *total
= 3; /* FIXME: Costs need to be checked */
961 /* Now look inside the expression. Operands which are not registers or
962 short constants add to the cost.
964 FMT and I may have been adjusted in the switch above for instructions
965 which require special handling. */
967 while (*fmt
++ == 'e')
969 rtx op
= XEXP (x
, i
);
972 code
= GET_CODE (op
);
974 /* A NOT is likely to be found as the first operand of an AND
975 (in which case the relevant cost is of the operand inside
976 the not) and not likely to be found anywhere else. */
978 op
= XEXP (op
, 0), code
= GET_CODE (op
);
983 if ((unsigned HOST_WIDE_INT
)INTVAL (op
) > 63
985 *total
+= 1; /* 2 on VAX 2 */
990 *total
+= 1; /* 2 on VAX 2 */
993 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
)
995 /* Registers are faster than floating point constants -- even
996 those constants which can be encoded in a single byte. */
997 if (vax_float_literal (op
))
1000 *total
+= (GET_MODE (x
) == DFmode
) ? 3 : 2;
1004 if (CONST_DOUBLE_HIGH (op
) != 0
1005 || (unsigned HOST_WIDE_INT
)CONST_DOUBLE_LOW (op
) > 63)
1010 *total
+= 1; /* 2 on VAX 2 */
1011 if (!REG_P (XEXP (op
, 0)))
1012 *total
+= vax_address_cost_1 (XEXP (op
, 0));
1025 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1026 Used for C++ multiple inheritance.
1027 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1028 addl2 $DELTA, 4(ap) #adjust first argument
1029 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
1033 vax_output_mi_thunk (FILE * file
,
1034 tree thunk ATTRIBUTE_UNUSED
,
1035 HOST_WIDE_INT delta
,
1036 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1039 fprintf (file
, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC
, delta
);
1040 asm_fprintf (file
, ",4(%Rap)\n");
1041 fprintf (file
, "\tjmp ");
1042 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1043 fprintf (file
, "+2\n");
1047 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1048 int incoming ATTRIBUTE_UNUSED
)
1050 return gen_rtx_REG (Pmode
, VAX_STRUCT_VALUE_REGNUM
);
1054 vax_builtin_setjmp_frame_value (void)
1056 return hard_frame_pointer_rtx
;
1059 /* Worker function for NOTICE_UPDATE_CC. */
1062 vax_notice_update_cc (rtx exp
, rtx insn ATTRIBUTE_UNUSED
)
1064 if (GET_CODE (exp
) == SET
)
1066 if (GET_CODE (SET_SRC (exp
)) == CALL
)
1068 else if (GET_CODE (SET_DEST (exp
)) != ZERO_EXTRACT
1069 && GET_CODE (SET_DEST (exp
)) != PC
)
1071 cc_status
.flags
= 0;
1072 /* The integer operations below don't set carry or
1073 set it in an incompatible way. That's ok though
1074 as the Z bit is all we need when doing unsigned
1075 comparisons on the result of these insns (since
1076 they're always with 0). Set CC_NO_OVERFLOW to
1077 generate the correct unsigned branches. */
1078 switch (GET_CODE (SET_SRC (exp
)))
1081 if (GET_MODE_CLASS (GET_MODE (exp
)) == MODE_FLOAT
)
1089 cc_status
.flags
= CC_NO_OVERFLOW
;
1094 cc_status
.value1
= SET_DEST (exp
);
1095 cc_status
.value2
= SET_SRC (exp
);
1098 else if (GET_CODE (exp
) == PARALLEL
1099 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
1101 if (GET_CODE (SET_SRC (XVECEXP (exp
, 0, 0))) == CALL
)
1103 else if (GET_CODE (SET_DEST (XVECEXP (exp
, 0, 0))) != PC
)
1105 cc_status
.flags
= 0;
1106 cc_status
.value1
= SET_DEST (XVECEXP (exp
, 0, 0));
1107 cc_status
.value2
= SET_SRC (XVECEXP (exp
, 0, 0));
1110 /* PARALLELs whose first element sets the PC are aob,
1111 sob insns. They do change the cc's. */
1116 if (cc_status
.value1
&& REG_P (cc_status
.value1
)
1118 && reg_overlap_mentioned_p (cc_status
.value1
, cc_status
.value2
))
1119 cc_status
.value2
= 0;
1120 if (cc_status
.value1
&& MEM_P (cc_status
.value1
)
1122 && MEM_P (cc_status
.value2
))
1123 cc_status
.value2
= 0;
1124 /* Actual condition, one line up, should be that value2's address
1125 depends on value1, but that is too much of a pain. */
1128 /* Output integer move instructions. */
1131 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
,
1135 const char *pattern_hi
, *pattern_lo
;
1140 if (operands
[1] == const0_rtx
)
1142 if (TARGET_QMATH
&& optimize_size
1143 && (CONST_INT_P (operands
[1])
1144 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1146 unsigned HOST_WIDE_INT hval
, lval
;
1149 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1151 gcc_assert (HOST_BITS_PER_WIDE_INT
!= 64);
1153 /* Make sure only the low 32 bits are valid. */
1154 lval
= CONST_DOUBLE_LOW (operands
[1]) & 0xffffffff;
1155 hval
= CONST_DOUBLE_HIGH (operands
[1]) & 0xffffffff;
1159 lval
= INTVAL (operands
[1]);
1163 /* Here we see if we are trying to see if the 64bit value is really
1164 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1165 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1166 8 bytes - 1 shift byte - 1 short literal byte. */
1168 && (n
= exact_log2 (lval
& (- lval
))) != -1
1169 && (lval
>> n
) < 64)
1173 /* On 32bit platforms, if the 6bits didn't overflow into the
1174 upper 32bit value that value better be 0. If we have
1175 overflowed, make sure it wasn't too much. */
1176 if (HOST_BITS_PER_WIDE_INT
== 32 && hval
!= 0)
1178 if (n
<= 26 || hval
>= ((unsigned)1 << (n
- 26)))
1179 n
= 0; /* failure */
1181 lval
|= hval
<< (32 - n
);
1183 /* If n is 0, then ashq is not the best way to emit this. */
1186 operands
[1] = GEN_INT (lval
);
1187 operands
[2] = GEN_INT (n
);
1188 return "ashq %2,%D1,%0";
1190 #if HOST_BITS_PER_WIDE_INT == 32
1192 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1193 upper 32bit value. */
1195 && (n
= exact_log2 (hval
& (- hval
)) - 1) != -1
1196 && (hval
>> n
) < 64)
1198 operands
[1] = GEN_INT (hval
>> n
);
1199 operands
[2] = GEN_INT (n
+ 32);
1200 return "ashq %2,%D1,%0";
1206 && (!MEM_P (operands
[0])
1207 || GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
1208 || GET_CODE (XEXP (operands
[0], 0)) == POST_INC
1209 || !illegal_addsub_di_memory_operand (operands
[0], DImode
))
1210 && ((CONST_INT_P (operands
[1])
1211 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1212 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1214 hi
[0] = operands
[0];
1215 hi
[1] = operands
[1];
1217 split_quadword_operands (insn
, SET
, hi
, lo
, 2);
1219 pattern_lo
= vax_output_int_move (NULL
, lo
, SImode
);
1220 pattern_hi
= vax_output_int_move (NULL
, hi
, SImode
);
1222 /* The patterns are just movl/movl or pushl/pushl then a movq will
1223 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1224 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1226 if ((!strncmp (pattern_lo
, "movl", 4)
1227 && !strncmp (pattern_hi
, "movl", 4))
1228 || (!strncmp (pattern_lo
, "pushl", 5)
1229 && !strncmp (pattern_hi
, "pushl", 5)))
1230 return "movq %1,%0";
1232 if (MEM_P (operands
[0])
1233 && GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1235 output_asm_insn (pattern_hi
, hi
);
1236 operands
[0] = lo
[0];
1237 operands
[1] = lo
[1];
1238 operands
[2] = lo
[2];
1243 output_asm_insn (pattern_lo
, lo
);
1244 operands
[0] = hi
[0];
1245 operands
[1] = hi
[1];
1246 operands
[2] = hi
[2];
1250 return "movq %1,%0";
1253 if (symbolic_operand (operands
[1], SImode
))
1255 if (push_operand (operands
[0], SImode
))
1256 return "pushab %a1";
1257 return "movab %a1,%0";
1260 if (operands
[1] == const0_rtx
)
1262 if (push_operand (operands
[1], SImode
))
1267 if (CONST_INT_P (operands
[1])
1268 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1270 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1272 if ((unsigned HOST_WIDE_INT
)(~i
) < 64)
1273 return "mcoml %N1,%0";
1274 if ((unsigned HOST_WIDE_INT
)i
< 0x100)
1275 return "movzbl %1,%0";
1276 if (i
>= -0x80 && i
< 0)
1277 return "cvtbl %1,%0";
1279 && (n
= exact_log2 (i
& (-i
))) != -1
1280 && ((unsigned HOST_WIDE_INT
)i
>> n
) < 64)
1282 operands
[1] = GEN_INT ((unsigned HOST_WIDE_INT
)i
>> n
);
1283 operands
[2] = GEN_INT (n
);
1284 return "ashl %2,%1,%0";
1286 if ((unsigned HOST_WIDE_INT
)i
< 0x10000)
1287 return "movzwl %1,%0";
1288 if (i
>= -0x8000 && i
< 0)
1289 return "cvtwl %1,%0";
1291 if (push_operand (operands
[0], SImode
))
1293 return "movl %1,%0";
1296 if (CONST_INT_P (operands
[1]))
1298 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1301 else if ((unsigned HOST_WIDE_INT
)i
< 64)
1302 return "movw %1,%0";
1303 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1304 return "mcomw %H1,%0";
1305 else if ((unsigned HOST_WIDE_INT
)i
< 256)
1306 return "movzbw %1,%0";
1307 else if (i
>= -0x80 && i
< 0)
1308 return "cvtbw %1,%0";
1310 return "movw %1,%0";
1313 if (CONST_INT_P (operands
[1]))
1315 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1318 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1319 return "mcomb %B1,%0";
1321 return "movb %1,%0";
1328 /* Output integer add instructions.
1330 The space-time-opcode tradeoffs for addition vary by model of VAX.
1332 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1333 but it not faster on other models.
1335 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1336 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1337 a register is used in an address too soon after it is set.
1338 Compromise by using movab only when it is shorter than the add
1339 or the base register in the address is one of sp, ap, and fp,
1340 which are not modified very often. */
1343 vax_output_int_add (rtx insn
, rtx
*operands
, machine_mode mode
)
1350 const char *pattern
;
1354 if (TARGET_QMATH
&& 0)
1357 split_quadword_operands (insn
, PLUS
, operands
, low
, 3);
1361 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1362 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1363 gcc_assert (!flag_pic
|| !external_memory_operand (low
[2], SImode
));
1364 gcc_assert (!flag_pic
|| !external_memory_operand (low
[0], SImode
));
1367 /* No reason to add a 0 to the low part and thus no carry, so just
1368 emit the appropriate add/sub instruction. */
1369 if (low
[2] == const0_rtx
)
1370 return vax_output_int_add (NULL
, operands
, SImode
);
1372 /* Are we doing addition or subtraction? */
1373 sub
= CONST_INT_P (operands
[2]) && INTVAL (operands
[2]) < 0;
1375 /* We can't use vax_output_int_add since some the patterns don't
1376 modify the carry bit. */
1379 if (low
[2] == constm1_rtx
)
1380 pattern
= "decl %0";
1382 pattern
= "subl2 $%n2,%0";
1386 if (low
[2] == const1_rtx
)
1387 pattern
= "incl %0";
1389 pattern
= "addl2 %2,%0";
1391 output_asm_insn (pattern
, low
);
1393 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1394 two 32bit parts, we complement each and then add one to
1395 low part. We know that the low part can't overflow since
1396 it's value can never be 0. */
1398 return "sbwc %N2,%0";
1399 return "adwc %2,%0";
1402 /* Add low parts. */
1403 if (rtx_equal_p (operands
[0], operands
[1]))
1405 if (low
[2] == const0_rtx
)
1406 /* Should examine operand, punt if not POST_INC. */
1407 pattern
= "tstl %0", carry
= 0;
1408 else if (low
[2] == const1_rtx
)
1409 pattern
= "incl %0";
1411 pattern
= "addl2 %2,%0";
1415 if (low
[2] == const0_rtx
)
1416 pattern
= "movl %1,%0", carry
= 0;
1418 pattern
= "addl3 %2,%1,%0";
1421 output_asm_insn (pattern
, low
);
1423 /* If CARRY is 0, we don't have any carry value to worry about. */
1424 return get_insn_template (CODE_FOR_addsi3
, insn
);
1425 /* %0 = C + %1 + %2 */
1426 if (!rtx_equal_p (operands
[0], operands
[1]))
1427 output_asm_insn ((operands
[1] == const0_rtx
1429 : "movl %1,%0"), operands
);
1430 return "adwc %2,%0";
1434 if (rtx_equal_p (operands
[0], operands
[1]))
1436 if (operands
[2] == const1_rtx
)
1438 if (operands
[2] == constm1_rtx
)
1440 if (CONST_INT_P (operands
[2])
1441 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1442 return "subl2 $%n2,%0";
1443 if (CONST_INT_P (operands
[2])
1444 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1445 && REG_P (operands
[1])
1446 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1447 || REGNO (operands
[1]) > 11))
1448 return "movab %c2(%1),%0";
1449 if (REG_P (operands
[0]) && symbolic_operand (operands
[2], SImode
))
1450 return "movab %a2[%0],%0";
1451 return "addl2 %2,%0";
1454 if (rtx_equal_p (operands
[0], operands
[2]))
1456 if (REG_P (operands
[0]) && symbolic_operand (operands
[1], SImode
))
1457 return "movab %a1[%0],%0";
1458 return "addl2 %1,%0";
1461 if (CONST_INT_P (operands
[2])
1462 && INTVAL (operands
[2]) < 32767
1463 && INTVAL (operands
[2]) > -32768
1464 && REG_P (operands
[1])
1465 && push_operand (operands
[0], SImode
))
1466 return "pushab %c2(%1)";
1468 if (CONST_INT_P (operands
[2])
1469 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1470 return "subl3 $%n2,%1,%0";
1472 if (CONST_INT_P (operands
[2])
1473 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1474 && REG_P (operands
[1])
1475 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1476 || REGNO (operands
[1]) > 11))
1477 return "movab %c2(%1),%0";
1479 /* Add this if using gcc on a VAX 3xxx:
1480 if (REG_P (operands[1]) && REG_P (operands[2]))
1481 return "movab (%1)[%2],%0";
1484 if (REG_P (operands
[1]) && symbolic_operand (operands
[2], SImode
))
1486 if (push_operand (operands
[0], SImode
))
1487 return "pushab %a2[%1]";
1488 return "movab %a2[%1],%0";
1491 if (REG_P (operands
[2]) && symbolic_operand (operands
[1], SImode
))
1493 if (push_operand (operands
[0], SImode
))
1494 return "pushab %a1[%2]";
1495 return "movab %a1[%2],%0";
1498 if (flag_pic
&& REG_P (operands
[0])
1499 && symbolic_operand (operands
[2], SImode
))
1500 return "movab %a2,%0;addl2 %1,%0";
1503 && (symbolic_operand (operands
[1], SImode
)
1504 || symbolic_operand (operands
[1], SImode
)))
1507 return "addl3 %1,%2,%0";
1510 if (rtx_equal_p (operands
[0], operands
[1]))
1512 if (operands
[2] == const1_rtx
)
1514 if (operands
[2] == constm1_rtx
)
1516 if (CONST_INT_P (operands
[2])
1517 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1518 return "subw2 $%n2,%0";
1519 return "addw2 %2,%0";
1521 if (rtx_equal_p (operands
[0], operands
[2]))
1522 return "addw2 %1,%0";
1523 if (CONST_INT_P (operands
[2])
1524 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1525 return "subw3 $%n2,%1,%0";
1526 return "addw3 %1,%2,%0";
1529 if (rtx_equal_p (operands
[0], operands
[1]))
1531 if (operands
[2] == const1_rtx
)
1533 if (operands
[2] == constm1_rtx
)
1535 if (CONST_INT_P (operands
[2])
1536 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1537 return "subb2 $%n2,%0";
1538 return "addb2 %2,%0";
1540 if (rtx_equal_p (operands
[0], operands
[2]))
1541 return "addb2 %1,%0";
1542 if (CONST_INT_P (operands
[2])
1543 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1544 return "subb3 $%n2,%1,%0";
1545 return "addb3 %1,%2,%0";
1553 vax_output_int_subtract (rtx insn
, rtx
*operands
, machine_mode mode
)
1560 const char *pattern
;
1563 if (TARGET_QMATH
&& 0)
1566 split_quadword_operands (insn
, MINUS
, operands
, low
, 3);
1570 if (operands
[1] == const0_rtx
&& low
[1] == const0_rtx
)
1572 /* Negation is tricky. It's basically complement and increment.
1573 Negate hi, then lo, and subtract the carry back. */
1574 if ((MEM_P (low
[0]) && GET_CODE (XEXP (low
[0], 0)) == POST_INC
)
1575 || (MEM_P (operands
[0])
1576 && GET_CODE (XEXP (operands
[0], 0)) == POST_INC
))
1577 fatal_insn ("illegal operand detected", insn
);
1578 output_asm_insn ("mnegl %2,%0", operands
);
1579 output_asm_insn ("mnegl %2,%0", low
);
1580 return "sbwc $0,%0";
1582 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1583 gcc_assert (rtx_equal_p (low
[0], low
[1]));
1584 if (low
[2] == const1_rtx
)
1585 output_asm_insn ("decl %0", low
);
1587 output_asm_insn ("subl2 %2,%0", low
);
1588 return "sbwc %2,%0";
1591 /* Subtract low parts. */
1592 if (rtx_equal_p (operands
[0], operands
[1]))
1594 if (low
[2] == const0_rtx
)
1595 pattern
= 0, carry
= 0;
1596 else if (low
[2] == constm1_rtx
)
1597 pattern
= "decl %0";
1599 pattern
= "subl2 %2,%0";
1603 if (low
[2] == constm1_rtx
)
1604 pattern
= "decl %0";
1605 else if (low
[2] == const0_rtx
)
1606 pattern
= get_insn_template (CODE_FOR_movsi
, insn
), carry
= 0;
1608 pattern
= "subl3 %2,%1,%0";
1611 output_asm_insn (pattern
, low
);
1614 if (!rtx_equal_p (operands
[0], operands
[1]))
1615 return "movl %1,%0;sbwc %2,%0";
1616 return "sbwc %2,%0";
1617 /* %0 = %2 - %1 - C */
1619 return get_insn_template (CODE_FOR_subsi3
, insn
);
1627 /* True if X is an rtx for a constant that is a valid address. */
1630 legitimate_constant_address_p (rtx x
)
1632 if (GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == SYMBOL_REF
1633 || CONST_INT_P (x
) || GET_CODE (x
) == HIGH
)
1635 if (GET_CODE (x
) != CONST
)
1637 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1639 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1640 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0)))
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1658 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1660 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1661 are no SYMBOL_REFs for external symbols present. */
1664 indirectable_constant_address_p (rtx x
, bool indirect
)
1666 if (GET_CODE (x
) == SYMBOL_REF
)
1667 return !flag_pic
|| SYMBOL_REF_LOCAL_P (x
) || !indirect
;
1669 if (GET_CODE (x
) == CONST
)
1671 || GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
1672 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0));
1674 return CONSTANT_ADDRESS_P (x
);
1677 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1680 indirectable_constant_address_p (rtx x
, bool indirect ATTRIBUTE_UNUSED
)
1682 return CONSTANT_ADDRESS_P (x
);
1685 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1687 /* True if X is an address which can be indirected. External symbols
1688 could be in a sharable image library, so we disallow those. */
1691 indirectable_address_p (rtx x
, bool strict
, bool indirect
)
1693 if (indirectable_constant_address_p (x
, indirect
)
1694 || BASE_REGISTER_P (x
, strict
))
1696 if (GET_CODE (x
) != PLUS
1697 || !BASE_REGISTER_P (XEXP (x
, 0), strict
)
1698 || (flag_pic
&& !CONST_INT_P (XEXP (x
, 1))))
1700 return indirectable_constant_address_p (XEXP (x
, 1), indirect
);
1703 /* Return true if x is a valid address not using indexing.
1704 (This much is the easy part.) */
1706 nonindexed_address_p (rtx x
, bool strict
)
1711 if (! reload_in_progress
1712 || reg_equiv_mem (REGNO (x
)) == 0
1713 || indirectable_address_p (reg_equiv_mem (REGNO (x
)), strict
, false))
1716 if (indirectable_constant_address_p (x
, false))
1718 if (indirectable_address_p (x
, strict
, false))
1720 xfoo0
= XEXP (x
, 0);
1721 if (MEM_P (x
) && indirectable_address_p (xfoo0
, strict
, true))
1723 if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1724 && BASE_REGISTER_P (xfoo0
, strict
))
1729 /* True if PROD is either a reg times size of mode MODE and MODE is less
1730 than or equal 8 bytes, or just a reg if MODE is one byte. */
1733 index_term_p (rtx prod
, machine_mode mode
, bool strict
)
1737 if (GET_MODE_SIZE (mode
) == 1)
1738 return BASE_REGISTER_P (prod
, strict
);
1740 if (GET_CODE (prod
) != MULT
|| GET_MODE_SIZE (mode
) > 8)
1743 xfoo0
= XEXP (prod
, 0);
1744 xfoo1
= XEXP (prod
, 1);
1746 if (CONST_INT_P (xfoo0
)
1747 && INTVAL (xfoo0
) == (int)GET_MODE_SIZE (mode
)
1748 && INDEX_REGISTER_P (xfoo1
, strict
))
1751 if (CONST_INT_P (xfoo1
)
1752 && INTVAL (xfoo1
) == (int)GET_MODE_SIZE (mode
)
1753 && INDEX_REGISTER_P (xfoo0
, strict
))
1759 /* Return true if X is the sum of a register
1760 and a valid index term for mode MODE. */
1762 reg_plus_index_p (rtx x
, machine_mode mode
, bool strict
)
1766 if (GET_CODE (x
) != PLUS
)
1769 xfoo0
= XEXP (x
, 0);
1770 xfoo1
= XEXP (x
, 1);
1772 if (BASE_REGISTER_P (xfoo0
, strict
) && index_term_p (xfoo1
, mode
, strict
))
1775 if (BASE_REGISTER_P (xfoo1
, strict
) && index_term_p (xfoo0
, mode
, strict
))
1781 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1783 indexable_address_p (rtx xfoo0
, rtx xfoo1
, machine_mode mode
, bool strict
)
1785 if (!CONSTANT_ADDRESS_P (xfoo0
))
1787 if (BASE_REGISTER_P (xfoo1
, strict
))
1788 return !flag_pic
|| mode
== QImode
;
1789 if (flag_pic
&& symbolic_operand (xfoo0
, SImode
))
1791 return reg_plus_index_p (xfoo1
, mode
, strict
);
1794 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1795 that is a valid memory address for an instruction.
1796 The MODE argument is the machine mode for the MEM expression
1797 that wants to use this address. */
1799 vax_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
1803 if (nonindexed_address_p (x
, strict
))
1806 if (GET_CODE (x
) != PLUS
)
1809 /* Handle <address>[index] represented with index-sum outermost */
1811 xfoo0
= XEXP (x
, 0);
1812 xfoo1
= XEXP (x
, 1);
1814 if (index_term_p (xfoo0
, mode
, strict
)
1815 && nonindexed_address_p (xfoo1
, strict
))
1818 if (index_term_p (xfoo1
, mode
, strict
)
1819 && nonindexed_address_p (xfoo0
, strict
))
1822 /* Handle offset(reg)[index] with offset added outermost */
1824 if (indexable_address_p (xfoo0
, xfoo1
, mode
, strict
)
1825 || indexable_address_p (xfoo1
, xfoo0
, mode
, strict
))
1831 /* Return true if x (a legitimate address expression) has an effect that
1832 depends on the machine mode it is used for. On the VAX, the predecrement
1833 and postincrement address depend thus (the amount of decrement or
1834 increment being the length of the operand) and all indexed address depend
1835 thus (because the index scale factor is the length of the operand). */
1838 vax_mode_dependent_address_p (const_rtx x
, addr_space_t as ATTRIBUTE_UNUSED
)
1842 /* Auto-increment cases are now dealt with generically in recog.c. */
1843 if (GET_CODE (x
) != PLUS
)
1846 xfoo0
= XEXP (x
, 0);
1847 xfoo1
= XEXP (x
, 1);
1849 if (CONST_INT_P (xfoo0
) && REG_P (xfoo1
))
1851 if (CONST_INT_P (xfoo1
) && REG_P (xfoo0
))
1853 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo0
) && REG_P (xfoo1
))
1855 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo1
) && REG_P (xfoo0
))
1862 fixup_mathdi_operand (rtx x
, machine_mode mode
)
1864 if (illegal_addsub_di_memory_operand (x
, mode
))
1866 rtx addr
= XEXP (x
, 0);
1867 rtx temp
= gen_reg_rtx (Pmode
);
1869 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1870 if (GET_CODE (addr
) == CONST
&& flag_pic
)
1872 offset
= XEXP (XEXP (addr
, 0), 1);
1873 addr
= XEXP (XEXP (addr
, 0), 0);
1876 emit_move_insn (temp
, addr
);
1878 temp
= gen_rtx_PLUS (Pmode
, temp
, offset
);
1879 x
= gen_rtx_MEM (DImode
, temp
);
1885 vax_expand_addsub_di_operands (rtx
* operands
, enum rtx_code code
)
1887 int hi_only
= operand_subword (operands
[2], 0, 0, DImode
) == const0_rtx
;
1890 rtx (*gen_old_insn
)(rtx
, rtx
, rtx
);
1891 rtx (*gen_si_insn
)(rtx
, rtx
, rtx
);
1892 rtx (*gen_insn
)(rtx
, rtx
, rtx
);
1896 gen_old_insn
= gen_adddi3_old
;
1897 gen_si_insn
= gen_addsi3
;
1898 gen_insn
= gen_adcdi3
;
1900 else if (code
== MINUS
)
1902 gen_old_insn
= gen_subdi3_old
;
1903 gen_si_insn
= gen_subsi3
;
1904 gen_insn
= gen_sbcdi3
;
1909 /* If this is addition (thus operands are commutative) and if there is one
1910 addend that duplicates the desination, we want that addend to be the
1913 && rtx_equal_p (operands
[0], operands
[2])
1914 && !rtx_equal_p (operands
[1], operands
[2]))
1917 operands
[2] = operands
[1];
1923 emit_insn ((*gen_old_insn
) (operands
[0], operands
[1], operands
[2]));
1927 if (!rtx_equal_p (operands
[0], operands
[1])
1928 && (REG_P (operands
[0]) && MEM_P (operands
[1])))
1930 emit_move_insn (operands
[0], operands
[1]);
1931 operands
[1] = operands
[0];
1934 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1935 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1936 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1938 if (!rtx_equal_p (operands
[0], operands
[1]))
1939 emit_move_insn (operand_subword (operands
[0], 0, 0, DImode
),
1940 operand_subword (operands
[1], 0, 0, DImode
));
1942 emit_insn ((*gen_si_insn
) (operand_subword (operands
[0], 1, 0, DImode
),
1943 operand_subword (operands
[1], 1, 0, DImode
),
1944 operand_subword (operands
[2], 1, 0, DImode
)));
1948 /* If are adding the same value together, that's really a multiply by 2,
1949 and that's just a left shift of 1. */
1950 if (rtx_equal_p (operands
[1], operands
[2]))
1952 gcc_assert (code
!= MINUS
);
1953 emit_insn (gen_ashldi3 (operands
[0], operands
[1], const1_rtx
));
1957 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1959 /* If an operand is the same as operand[0], use the operand[0] rtx
1960 because fixup will an equivalent rtx but not an equal one. */
1962 if (rtx_equal_p (operands
[0], operands
[1]))
1963 operands
[1] = operands
[0];
1965 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1967 if (rtx_equal_p (operands
[0], operands
[2]))
1968 operands
[2] = operands
[0];
1970 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1972 /* If we are subtracting not from ourselves [d = a - b], and because the
1973 carry ops are two operand only, we would need to do a move prior to
1974 the subtract. And if d == b, we would need a temp otherwise
1975 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1976 into d = -b, d += a. Since -b can never overflow, even if b == d,
1979 If we are doing addition, since the carry ops are two operand, if
1980 we aren't adding to ourselves, move the first addend to the
1981 destination first. */
1983 gcc_assert (operands
[1] != const0_rtx
|| code
== MINUS
);
1984 if (!rtx_equal_p (operands
[0], operands
[1]) && operands
[1] != const0_rtx
)
1986 if (code
== MINUS
&& CONSTANT_P (operands
[1]))
1988 temp
= gen_reg_rtx (DImode
);
1989 emit_insn (gen_sbcdi3 (operands
[0], const0_rtx
, operands
[2]));
1991 gen_insn
= gen_adcdi3
;
1992 operands
[2] = operands
[1];
1993 operands
[1] = operands
[0];
1996 emit_move_insn (operands
[0], operands
[1]);
1999 /* Subtracting a constant will have been rewritten to an addition of the
2000 negative of that constant before we get here. */
2001 gcc_assert (!CONSTANT_P (operands
[2]) || code
== PLUS
);
2002 emit_insn ((*gen_insn
) (operands
[0], operands
[1], operands
[2]));
2007 adjacent_operands_p (rtx lo
, rtx hi
, machine_mode mode
)
2009 HOST_WIDE_INT lo_offset
;
2010 HOST_WIDE_INT hi_offset
;
2012 if (GET_CODE (lo
) != GET_CODE (hi
))
2016 return mode
== SImode
&& REGNO (lo
) + 1 == REGNO (hi
);
2017 if (CONST_INT_P (lo
))
2018 return INTVAL (hi
) == 0 && 0 <= INTVAL (lo
) && INTVAL (lo
) < 64;
2019 if (CONST_INT_P (lo
))
2020 return mode
!= SImode
;
2025 if (MEM_VOLATILE_P (lo
) || MEM_VOLATILE_P (hi
))
2031 if (GET_CODE (lo
) == POST_INC
/* || GET_CODE (lo) == PRE_DEC */)
2032 return rtx_equal_p (lo
, hi
);
2034 switch (GET_CODE (lo
))
2044 if (!CONST_INT_P (XEXP (lo
, 1)))
2046 lo_offset
= INTVAL (XEXP (lo
, 1));
2053 switch (GET_CODE (hi
))
2063 if (!CONST_INT_P (XEXP (hi
, 1)))
2065 hi_offset
= INTVAL (XEXP (hi
, 1));
2072 if (GET_CODE (lo
) == MULT
|| GET_CODE (lo
) == PLUS
)
2075 return rtx_equal_p (lo
, hi
)
2076 && hi_offset
- lo_offset
== GET_MODE_SIZE (mode
);
2079 /* Output assembler code for a block containing the constant parts
2080 of a trampoline, leaving space for the variable parts. */
2082 /* On the VAX, the trampoline contains an entry mask and two instructions:
2084 movl $STATIC,r0 (store the functions static chain)
2085 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2088 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED
)
2090 assemble_aligned_integer (2, const0_rtx
);
2091 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2092 assemble_aligned_integer (4, const0_rtx
);
2093 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM
));
2094 assemble_aligned_integer (2, GEN_INT (0x9f17));
2095 assemble_aligned_integer (4, const0_rtx
);
2098 /* We copy the register-mask from the function's pure code
2099 to the start of the trampoline. */
2102 vax_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2104 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2107 emit_block_move (m_tramp
, assemble_trampoline_template (),
2108 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
2110 mem
= adjust_address (m_tramp
, HImode
, 0);
2111 emit_move_insn (mem
, gen_const_mem (HImode
, fnaddr
));
2113 mem
= adjust_address (m_tramp
, SImode
, 4);
2114 emit_move_insn (mem
, cxt
);
2115 mem
= adjust_address (m_tramp
, SImode
, 11);
2116 emit_move_insn (mem
, plus_constant (Pmode
, fnaddr
, 2));
2117 emit_insn (gen_sync_istream ());
2120 /* Value is the number of bytes of arguments automatically
2121 popped when returning from a subroutine call.
2122 FUNDECL is the declaration node of the function (as a tree),
2123 FUNTYPE is the data type of the function (as a tree),
2124 or for a library call it is an identifier node for the subroutine name.
2125 SIZE is the number of bytes of arguments passed on the stack.
2127 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2130 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED
,
2131 tree funtype ATTRIBUTE_UNUSED
, int size
)
2133 return size
> 255 * 4 ? 0 : size
;
2136 /* Define where to put the arguments to a function.
2137 Value is zero to push the argument on the stack,
2138 or a hard register in which to store the argument.
2140 MODE is the argument's machine mode.
2141 TYPE is the data type of the argument (as a tree).
2142 This is null for libcalls where that information may
2144 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2145 the preceding args and about the function being called.
2146 NAMED is nonzero if this argument is a named parameter
2147 (otherwise it is an extra parameter matching an ellipsis). */
2149 /* On the VAX all args are pushed. */
2152 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED
,
2153 machine_mode mode ATTRIBUTE_UNUSED
,
2154 const_tree type ATTRIBUTE_UNUSED
,
2155 bool named ATTRIBUTE_UNUSED
)
2160 /* Update the data in CUM to advance over an argument of mode MODE and
2161 data type TYPE. (TYPE is null for libcalls where that information
2162 may not be available.) */
2165 vax_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2166 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2168 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2170 *cum
+= (mode
!= BLKmode
2171 ? (GET_MODE_SIZE (mode
) + 3) & ~3
2172 : (int_size_in_bytes (type
) + 3) & ~3);