1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "tm-constrs.h"
47 #include "target-def.h"
49 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT
);
50 static void vax_file_start (void);
51 static void vax_init_libfuncs (void);
52 static void vax_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
54 static int vax_address_cost_1 (rtx
);
55 static int vax_address_cost (rtx
, bool);
56 static bool vax_rtx_costs (rtx
, int, int, int *, bool);
57 static rtx
vax_struct_value_rtx (tree
, int);
58 static rtx
vax_builtin_setjmp_frame_value (void);
60 /* Initialize the GCC target structure. */
61 #undef TARGET_ASM_ALIGNED_HI_OP
62 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
64 #undef TARGET_ASM_FUNCTION_PROLOGUE
65 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
67 #undef TARGET_ASM_FILE_START
68 #define TARGET_ASM_FILE_START vax_file_start
69 #undef TARGET_ASM_FILE_START_APP_OFF
70 #define TARGET_ASM_FILE_START_APP_OFF true
72 #undef TARGET_INIT_LIBFUNCS
73 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
75 #undef TARGET_ASM_OUTPUT_MI_THUNK
76 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
77 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
78 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
80 #undef TARGET_DEFAULT_TARGET_FLAGS
81 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
83 #undef TARGET_RTX_COSTS
84 #define TARGET_RTX_COSTS vax_rtx_costs
85 #undef TARGET_ADDRESS_COST
86 #define TARGET_ADDRESS_COST vax_address_cost
88 #undef TARGET_PROMOTE_PROTOTYPES
89 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
91 #undef TARGET_STRUCT_VALUE_RTX
92 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
94 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
95 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
97 struct gcc_target targetm
= TARGET_INITIALIZER
;
99 /* Set global variables as needed for the options enabled. */
102 override_options (void)
104 /* We're VAX floating point, not IEEE floating point. */
106 REAL_MODE_FORMAT (DFmode
) = &vax_g_format
;
109 /* Generate the assembly code for function entry. FILE is a stdio
110 stream to output the code to. SIZE is an int: how many units of
111 temporary storage to allocate.
113 Refer to the array `regs_ever_live' to determine which registers to
114 save; `regs_ever_live[I]' is nonzero if register number I is ever
115 used in the function. This function is responsible for knowing
116 which registers should not be saved even if used. */
119 vax_output_function_prologue (FILE * file
, HOST_WIDE_INT size
)
124 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
125 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
128 fprintf (file
, "\t.word 0x%x\n", mask
);
130 if (dwarf2out_do_frame ())
132 const char *label
= dwarf2out_cfi_label ();
135 for (regno
= FIRST_PSEUDO_REGISTER
-1; regno
>= 0; --regno
)
136 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
137 dwarf2out_reg_save (label
, regno
, offset
-= 4);
139 dwarf2out_reg_save (label
, PC_REGNUM
, offset
-= 4);
140 dwarf2out_reg_save (label
, FRAME_POINTER_REGNUM
, offset
-= 4);
141 dwarf2out_reg_save (label
, ARG_POINTER_REGNUM
, offset
-= 4);
142 dwarf2out_def_cfa (label
, FRAME_POINTER_REGNUM
, -(offset
- 4));
145 size
-= STARTING_FRAME_OFFSET
;
147 asm_fprintf (file
, "\tmovab %wd(%Rsp),%Rsp\n", -size
);
149 asm_fprintf (file
, "\tsubl2 $%wd,%Rsp\n", size
);
152 /* When debugging with stabs, we want to output an extra dummy label
153 so that gas can distinguish between D_float and G_float prior to
154 processing the .stabs directive identifying type double. */
156 vax_file_start (void)
158 default_file_start ();
160 if (write_symbols
== DBX_DEBUG
)
161 fprintf (asm_out_file
, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR
);
164 /* We can use the BSD C library routines for the libgcc calls that are
165 still generated, since that's what they boil down to anyways. When
166 ELF, avoid the user's namespace. */
169 vax_init_libfuncs (void)
171 set_optab_libfunc (udiv_optab
, SImode
, TARGET_ELF
? "*__udiv" : "*udiv");
172 set_optab_libfunc (umod_optab
, SImode
, TARGET_ELF
? "*__urem" : "*urem");
175 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
178 split_quadword_operands (rtx insn
, enum rtx_code code
, rtx
* operands
,
183 for (i
= 0; i
< n
; i
++)
186 for (i
= 0; i
< n
; i
++)
188 if (MEM_P (operands
[i
])
189 && (GET_CODE (XEXP (operands
[i
], 0)) == PRE_DEC
190 || GET_CODE (XEXP (operands
[i
], 0)) == POST_INC
))
192 rtx addr
= XEXP (operands
[i
], 0);
193 operands
[i
] = low
[i
] = gen_rtx_MEM (SImode
, addr
);
195 else if (optimize_size
&& MEM_P (operands
[i
])
196 && REG_P (XEXP (operands
[i
], 0))
197 && (code
!= MINUS
|| operands
[1] != const0_rtx
)
198 && find_regno_note (insn
, REG_DEAD
,
199 REGNO (XEXP (operands
[i
], 0))))
201 low
[i
] = gen_rtx_MEM (SImode
,
202 gen_rtx_POST_INC (Pmode
,
203 XEXP (operands
[i
], 0)));
204 operands
[i
] = gen_rtx_MEM (SImode
, XEXP (operands
[i
], 0));
208 low
[i
] = operand_subword (operands
[i
], 0, 0, DImode
);
209 operands
[i
] = operand_subword (operands
[i
], 1, 0, DImode
);
215 print_operand_address (FILE * file
, rtx addr
)
218 rtx reg1
, breg
, ireg
;
222 switch (GET_CODE (addr
))
226 addr
= XEXP (addr
, 0);
230 fprintf (file
, "(%s)", reg_names
[REGNO (addr
)]);
234 fprintf (file
, "-(%s)", reg_names
[REGNO (XEXP (addr
, 0))]);
238 fprintf (file
, "(%s)+", reg_names
[REGNO (XEXP (addr
, 0))]);
242 /* There can be either two or three things added here. One must be a
243 REG. One can be either a REG or a MULT of a REG and an appropriate
244 constant, and the third can only be a constant or a MEM.
246 We get these two or three things and put the constant or MEM in
247 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
248 a register and can't tell yet if it is a base or index register,
251 reg1
= 0; ireg
= 0; breg
= 0; offset
= 0;
253 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
254 || MEM_P (XEXP (addr
, 0)))
256 offset
= XEXP (addr
, 0);
257 addr
= XEXP (addr
, 1);
259 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
260 || MEM_P (XEXP (addr
, 1)))
262 offset
= XEXP (addr
, 1);
263 addr
= XEXP (addr
, 0);
265 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
267 ireg
= XEXP (addr
, 1);
268 addr
= XEXP (addr
, 0);
270 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
272 ireg
= XEXP (addr
, 0);
273 addr
= XEXP (addr
, 1);
275 else if (REG_P (XEXP (addr
, 1)))
277 reg1
= XEXP (addr
, 1);
278 addr
= XEXP (addr
, 0);
280 else if (REG_P (XEXP (addr
, 0)))
282 reg1
= XEXP (addr
, 0);
283 addr
= XEXP (addr
, 1);
295 else if (GET_CODE (addr
) == MULT
)
299 gcc_assert (GET_CODE (addr
) == PLUS
);
300 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
301 || MEM_P (XEXP (addr
, 0)))
305 if (CONST_INT_P (offset
))
306 offset
= plus_constant (XEXP (addr
, 0), INTVAL (offset
));
309 gcc_assert (CONST_INT_P (XEXP (addr
, 0)));
310 offset
= plus_constant (offset
, INTVAL (XEXP (addr
, 0)));
313 offset
= XEXP (addr
, 0);
315 else if (REG_P (XEXP (addr
, 0)))
318 ireg
= reg1
, breg
= XEXP (addr
, 0), reg1
= 0;
320 reg1
= XEXP (addr
, 0);
324 gcc_assert (GET_CODE (XEXP (addr
, 0)) == MULT
);
326 ireg
= XEXP (addr
, 0);
329 if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
330 || MEM_P (XEXP (addr
, 1)))
334 if (CONST_INT_P (offset
))
335 offset
= plus_constant (XEXP (addr
, 1), INTVAL (offset
));
338 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
339 offset
= plus_constant (offset
, INTVAL (XEXP (addr
, 1)));
342 offset
= XEXP (addr
, 1);
344 else if (REG_P (XEXP (addr
, 1)))
347 ireg
= reg1
, breg
= XEXP (addr
, 1), reg1
= 0;
349 reg1
= XEXP (addr
, 1);
353 gcc_assert (GET_CODE (XEXP (addr
, 1)) == MULT
);
355 ireg
= XEXP (addr
, 1);
359 /* If REG1 is nonzero, figure out if it is a base or index register. */
363 || (flag_pic
&& GET_CODE (addr
) == SYMBOL_REF
)
366 || (flag_pic
&& symbolic_operand (offset
, SImode
)))))
377 if (flag_pic
&& symbolic_operand (offset
, SImode
))
382 output_operand_lossage ("symbol used with both base and indexed registers");
385 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
386 if (flag_pic
> 1 && GET_CODE (offset
) == CONST
387 && GET_CODE (XEXP (XEXP (offset
, 0), 0)) == SYMBOL_REF
388 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset
, 0), 0)))
391 output_operand_lossage ("symbol with offset used in PIC mode");
395 /* symbol(reg) isn't PIC, but symbol[reg] is. */
404 output_address (offset
);
408 fprintf (file
, "(%s)", reg_names
[REGNO (breg
)]);
412 if (GET_CODE (ireg
) == MULT
)
413 ireg
= XEXP (ireg
, 0);
414 gcc_assert (REG_P (ireg
));
415 fprintf (file
, "[%s]", reg_names
[REGNO (ireg
)]);
420 output_addr_const (file
, addr
);
425 print_operand (FILE *file
, rtx x
, int code
)
428 fputc (ASM_DOUBLE_CHAR
, file
);
429 else if (code
== '|')
430 fputs (REGISTER_PREFIX
, file
);
431 else if (code
== 'C')
432 fputs (rev_cond_name (x
), file
);
433 else if (code
== 'D' && CONST_INT_P (x
) && INTVAL (x
) < 0)
434 fprintf (file
, "$" NEG_HWI_PRINT_HEX16
, INTVAL (x
));
435 else if (code
== 'P' && CONST_INT_P (x
))
436 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + 1);
437 else if (code
== 'N' && CONST_INT_P (x
))
438 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, ~ INTVAL (x
));
439 /* rotl instruction cannot deal with negative arguments. */
440 else if (code
== 'R' && CONST_INT_P (x
))
441 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, 32 - INTVAL (x
));
442 else if (code
== 'H' && CONST_INT_P (x
))
443 fprintf (file
, "$%d", (int) (0xffff & ~ INTVAL (x
)));
444 else if (code
== 'h' && CONST_INT_P (x
))
445 fprintf (file
, "$%d", (short) - INTVAL (x
));
446 else if (code
== 'B' && CONST_INT_P (x
))
447 fprintf (file
, "$%d", (int) (0xff & ~ INTVAL (x
)));
448 else if (code
== 'b' && CONST_INT_P (x
))
449 fprintf (file
, "$%d", (int) (0xff & - INTVAL (x
)));
450 else if (code
== 'M' && CONST_INT_P (x
))
451 fprintf (file
, "$%d", ~((1 << INTVAL (x
)) - 1));
453 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
455 output_address (XEXP (x
, 0));
456 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
459 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
460 sizeof (dstr
), 0, 1);
461 fprintf (file
, "$0f%s", dstr
);
463 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
466 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
467 sizeof (dstr
), 0, 1);
468 fprintf (file
, "$0%c%s", ASM_DOUBLE_CHAR
, dstr
);
472 if (flag_pic
> 1 && symbolic_operand (x
, SImode
))
475 output_operand_lossage ("symbol used as immediate operand");
478 output_addr_const (file
, x
);
483 rev_cond_name (rtx op
)
485 switch (GET_CODE (op
))
514 vax_float_literal (rtx c
)
516 enum machine_mode mode
;
517 REAL_VALUE_TYPE r
, s
;
520 if (GET_CODE (c
) != CONST_DOUBLE
)
525 if (c
== const_tiny_rtx
[(int) mode
][0]
526 || c
== const_tiny_rtx
[(int) mode
][1]
527 || c
== const_tiny_rtx
[(int) mode
][2])
530 REAL_VALUE_FROM_CONST_DOUBLE (r
, c
);
532 for (i
= 0; i
< 7; i
++)
536 REAL_VALUE_FROM_INT (s
, x
, 0, mode
);
538 if (REAL_VALUES_EQUAL (r
, s
))
540 ok
= exact_real_inverse (mode
, &s
);
542 if (REAL_VALUES_EQUAL (r
, s
))
549 /* Return the cost in cycles of a memory address, relative to register
552 Each of the following adds the indicated number of cycles:
556 1 - indexing and/or offset(register)
561 vax_address_cost_1 (rtx addr
)
563 int reg
= 0, indexed
= 0, indir
= 0, offset
= 0, predec
= 0;
564 rtx plus_op0
= 0, plus_op1
= 0;
566 switch (GET_CODE (addr
))
576 indexed
= 1; /* 2 on VAX 2 */
579 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
581 offset
= (unsigned HOST_WIDE_INT
)(INTVAL(addr
)+128) > 256;
585 offset
= 1; /* 2 on VAX 2 */
587 case LABEL_REF
: /* this is probably a byte offset from the pc */
593 plus_op1
= XEXP (addr
, 0);
595 plus_op0
= XEXP (addr
, 0);
596 addr
= XEXP (addr
, 1);
599 indir
= 2; /* 3 on VAX 2 */
600 addr
= XEXP (addr
, 0);
606 /* Up to 3 things can be added in an address. They are stored in
607 plus_op0, plus_op1, and addr. */
621 /* Indexing and register+offset can both be used (except on a VAX 2)
622 without increasing execution time over either one alone. */
623 if (reg
&& indexed
&& offset
)
624 return reg
+ indir
+ offset
+ predec
;
625 return reg
+ indexed
+ indir
+ offset
+ predec
;
629 vax_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
631 return (1 + (REG_P (x
) ? 0 : vax_address_cost_1 (x
)));
634 /* Cost of an expression on a VAX. This version has costs tuned for the
635 CVAX chip (found in the VAX 3 series) with comments for variations on
638 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
639 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
640 costs on a per cpu basis. */
643 vax_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
644 bool speed ATTRIBUTE_UNUSED
)
646 enum machine_mode mode
= GET_MODE (x
);
647 int i
= 0; /* may be modified in switch */
648 const char *fmt
= GET_RTX_FORMAT (code
); /* may be modified in switch */
652 /* On a VAX, constants from 0..63 are cheap because they can use the
653 1 byte literal constant format. Compare to -1 should be made cheap
654 so that decrement-and-branch insns can be formed more easily (if
655 the value -1 is copied to a register some decrement-and-branch
656 patterns will not match). */
663 if (outer_code
== AND
)
665 *total
= ((unsigned HOST_WIDE_INT
) ~INTVAL (x
) <= 077) ? 1 : 2;
668 if ((unsigned HOST_WIDE_INT
) INTVAL (x
) <= 077
669 || (outer_code
== COMPARE
671 || ((outer_code
== PLUS
|| outer_code
== MINUS
)
672 && (unsigned HOST_WIDE_INT
) -INTVAL (x
) <= 077))
686 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
687 *total
= vax_float_literal (x
) ? 5 : 8;
689 *total
= ((CONST_DOUBLE_HIGH (x
) == 0
690 && (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
) < 64)
691 || (outer_code
== PLUS
692 && CONST_DOUBLE_HIGH (x
) == -1
693 && (unsigned HOST_WIDE_INT
)-CONST_DOUBLE_LOW (x
) < 64))
699 return true; /* Implies register operand. */
703 return true; /* Implies register operand. */
709 *total
= 16; /* 4 on VAX 9000 */
712 *total
= 9; /* 4 on VAX 9000, 12 on VAX 2 */
715 *total
= 16; /* 6 on VAX 9000, 28 on VAX 2 */
720 *total
= 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
723 *total
= MAX_COST
; /* Mode is not supported. */
731 *total
= MAX_COST
; /* Mode is not supported. */
739 *total
= 30; /* Highly variable. */
740 else if (mode
== DFmode
)
741 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
744 *total
= 11; /* 25 on VAX 2 */
754 *total
= MAX_COST
; /* Mode is not supported. */
761 *total
= (6 /* 4 on VAX 9000 */
762 + (mode
== DFmode
) + (GET_MODE (XEXP (x
, 0)) != SImode
));
766 *total
= 7; /* 17 on VAX 2 */
775 *total
= 10; /* 6 on VAX 9000 */
780 *total
= 6; /* 5 on VAX 2, 4 on VAX 9000 */
781 if (CONST_INT_P (XEXP (x
, 1)))
782 fmt
= "e"; /* all constant rotate counts are short */
787 *total
= (mode
== DFmode
) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
788 /* Small integer operands can use subl2 and addl2. */
789 if ((CONST_INT_P (XEXP (x
, 1)))
790 && (unsigned HOST_WIDE_INT
)(INTVAL (XEXP (x
, 1)) + 63) < 127)
800 /* AND is special because the first operand is complemented. */
802 if (CONST_INT_P (XEXP (x
, 0)))
804 if ((unsigned HOST_WIDE_INT
)~INTVAL (XEXP (x
, 0)) > 63)
814 else if (mode
== SFmode
)
816 else if (mode
== DImode
)
832 if (mode
== DImode
|| mode
== DFmode
)
833 *total
= 5; /* 7 on VAX 2 */
835 *total
= 3; /* 4 on VAX 2 */
837 if (!REG_P (x
) && GET_CODE (x
) != POST_INC
)
838 *total
+= vax_address_cost_1 (x
);
844 *total
= 3; /* FIXME: Costs need to be checked */
851 /* Now look inside the expression. Operands which are not registers or
852 short constants add to the cost.
854 FMT and I may have been adjusted in the switch above for instructions
855 which require special handling. */
857 while (*fmt
++ == 'e')
859 rtx op
= XEXP (x
, i
);
862 code
= GET_CODE (op
);
864 /* A NOT is likely to be found as the first operand of an AND
865 (in which case the relevant cost is of the operand inside
866 the not) and not likely to be found anywhere else. */
868 op
= XEXP (op
, 0), code
= GET_CODE (op
);
873 if ((unsigned HOST_WIDE_INT
)INTVAL (op
) > 63
874 && GET_MODE (x
) != QImode
)
875 *total
+= 1; /* 2 on VAX 2 */
880 *total
+= 1; /* 2 on VAX 2 */
883 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
)
885 /* Registers are faster than floating point constants -- even
886 those constants which can be encoded in a single byte. */
887 if (vax_float_literal (op
))
890 *total
+= (GET_MODE (x
) == DFmode
) ? 3 : 2;
894 if (CONST_DOUBLE_HIGH (op
) != 0
895 || (unsigned HOST_WIDE_INT
)CONST_DOUBLE_LOW (op
) > 63)
900 *total
+= 1; /* 2 on VAX 2 */
901 if (!REG_P (XEXP (op
, 0)))
902 *total
+= vax_address_cost_1 (XEXP (op
, 0));
915 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
916 Used for C++ multiple inheritance.
917 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
918 addl2 $DELTA, 4(ap) #adjust first argument
919 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
923 vax_output_mi_thunk (FILE * file
,
924 tree thunk ATTRIBUTE_UNUSED
,
926 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
929 fprintf (file
, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC
, delta
);
930 asm_fprintf (file
, ",4(%Rap)\n");
931 fprintf (file
, "\tjmp ");
932 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
933 fprintf (file
, "+2\n");
937 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
938 int incoming ATTRIBUTE_UNUSED
)
940 return gen_rtx_REG (Pmode
, VAX_STRUCT_VALUE_REGNUM
);
944 vax_builtin_setjmp_frame_value (void)
946 return hard_frame_pointer_rtx
;
949 /* Worker function for NOTICE_UPDATE_CC. */
952 vax_notice_update_cc (rtx exp
, rtx insn ATTRIBUTE_UNUSED
)
954 if (GET_CODE (exp
) == SET
)
956 if (GET_CODE (SET_SRC (exp
)) == CALL
)
958 else if (GET_CODE (SET_DEST (exp
)) != ZERO_EXTRACT
959 && GET_CODE (SET_DEST (exp
)) != PC
)
962 /* The integer operations below don't set carry or
963 set it in an incompatible way. That's ok though
964 as the Z bit is all we need when doing unsigned
965 comparisons on the result of these insns (since
966 they're always with 0). Set CC_NO_OVERFLOW to
967 generate the correct unsigned branches. */
968 switch (GET_CODE (SET_SRC (exp
)))
971 if (GET_MODE_CLASS (GET_MODE (exp
)) == MODE_FLOAT
)
979 cc_status
.flags
= CC_NO_OVERFLOW
;
984 cc_status
.value1
= SET_DEST (exp
);
985 cc_status
.value2
= SET_SRC (exp
);
988 else if (GET_CODE (exp
) == PARALLEL
989 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
991 if (GET_CODE (SET_SRC (XVECEXP (exp
, 0, 0))) == CALL
)
993 else if (GET_CODE (SET_DEST (XVECEXP (exp
, 0, 0))) != PC
)
996 cc_status
.value1
= SET_DEST (XVECEXP (exp
, 0, 0));
997 cc_status
.value2
= SET_SRC (XVECEXP (exp
, 0, 0));
1000 /* PARALLELs whose first element sets the PC are aob,
1001 sob insns. They do change the cc's. */
1006 if (cc_status
.value1
&& REG_P (cc_status
.value1
)
1008 && reg_overlap_mentioned_p (cc_status
.value1
, cc_status
.value2
))
1009 cc_status
.value2
= 0;
1010 if (cc_status
.value1
&& MEM_P (cc_status
.value1
)
1012 && MEM_P (cc_status
.value2
))
1013 cc_status
.value2
= 0;
1014 /* Actual condition, one line up, should be that value2's address
1015 depends on value1, but that is too much of a pain. */
1018 /* Output integer move instructions. */
1021 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
,
1022 enum machine_mode mode
)
1025 const char *pattern_hi
, *pattern_lo
;
1030 if (operands
[1] == const0_rtx
)
1032 if (TARGET_QMATH
&& optimize_size
1033 && (CONST_INT_P (operands
[1])
1034 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1036 unsigned HOST_WIDE_INT hval
, lval
;
1039 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1041 gcc_assert (HOST_BITS_PER_WIDE_INT
!= 64);
1043 /* Make sure only the low 32 bits are valid. */
1044 lval
= CONST_DOUBLE_LOW (operands
[1]) & 0xffffffff;
1045 hval
= CONST_DOUBLE_HIGH (operands
[1]) & 0xffffffff;
1049 lval
= INTVAL (operands
[1]);
1053 /* Here we see if we are trying to see if the 64bit value is really
1054 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1055 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1056 8 bytes - 1 shift byte - 1 short literal byte. */
1058 && (n
= exact_log2 (lval
& (- lval
))) != -1
1059 && (lval
>> n
) < 64)
1063 #if HOST_BITS_PER_WIDE_INT == 32
1064 /* On 32bit platforms, if the 6bits didn't overflow into the
1065 upper 32bit value that value better be 0. If we have
1066 overflowed, make sure it wasn't too much. */
1069 if (n
<= 26 || hval
>= ((unsigned)1 << (n
- 26)))
1070 n
= 0; /* failure */
1072 lval
|= hval
<< (32 - n
);
1075 /* If n is 0, then ashq is not the best way to emit this. */
1078 operands
[1] = GEN_INT (lval
);
1079 operands
[2] = GEN_INT (n
);
1080 return "ashq %2,%1,%0";
1082 #if HOST_BITS_PER_WIDE_INT == 32
1084 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1085 upper 32bit value. */
1087 && (n
= exact_log2 (hval
& (- hval
)) - 1) != -1
1088 && (hval
>> n
) < 64)
1090 operands
[1] = GEN_INT (hval
>> n
);
1091 operands
[2] = GEN_INT (n
+ 32);
1092 return "ashq %2,%1,%0";
1098 && (!MEM_P (operands
[0])
1099 || GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
1100 || GET_CODE (XEXP (operands
[0], 0)) == POST_INC
1101 || !illegal_addsub_di_memory_operand (operands
[0], DImode
))
1102 && ((CONST_INT_P (operands
[1])
1103 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1104 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1106 hi
[0] = operands
[0];
1107 hi
[1] = operands
[1];
1109 split_quadword_operands (insn
, SET
, hi
, lo
, 2);
1111 pattern_lo
= vax_output_int_move (NULL
, lo
, SImode
);
1112 pattern_hi
= vax_output_int_move (NULL
, hi
, SImode
);
1114 /* The patterns are just movl/movl or pushl/pushl then a movq will
1115 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1116 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1118 if ((!strncmp (pattern_lo
, "movl", 4)
1119 && !strncmp (pattern_hi
, "movl", 4))
1120 || (!strncmp (pattern_lo
, "pushl", 5)
1121 && !strncmp (pattern_hi
, "pushl", 5)))
1122 return "movq %1,%0";
1124 if (MEM_P (operands
[0])
1125 && GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1127 output_asm_insn (pattern_hi
, hi
);
1128 operands
[0] = lo
[0];
1129 operands
[1] = lo
[1];
1130 operands
[2] = lo
[2];
1135 output_asm_insn (pattern_lo
, lo
);
1136 operands
[0] = hi
[0];
1137 operands
[1] = hi
[1];
1138 operands
[2] = hi
[2];
1142 return "movq %1,%0";
1145 if (symbolic_operand (operands
[1], SImode
))
1147 if (push_operand (operands
[0], SImode
))
1148 return "pushab %a1";
1149 return "movab %a1,%0";
1152 if (operands
[1] == const0_rtx
)
1154 if (push_operand (operands
[1], SImode
))
1159 if (CONST_INT_P (operands
[1])
1160 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1162 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1164 if ((unsigned HOST_WIDE_INT
)(~i
) < 64)
1165 return "mcoml %N1,%0";
1166 if ((unsigned HOST_WIDE_INT
)i
< 0x100)
1167 return "movzbl %1,%0";
1168 if (i
>= -0x80 && i
< 0)
1169 return "cvtbl %1,%0";
1171 && (n
= exact_log2 (i
& (-i
))) != -1
1172 && ((unsigned HOST_WIDE_INT
)i
>> n
) < 64)
1174 operands
[1] = GEN_INT ((unsigned HOST_WIDE_INT
)i
>> n
);
1175 operands
[2] = GEN_INT (n
);
1176 return "ashl %2,%1,%0";
1178 if ((unsigned HOST_WIDE_INT
)i
< 0x10000)
1179 return "movzwl %1,%0";
1180 if (i
>= -0x8000 && i
< 0)
1181 return "cvtwl %1,%0";
1183 if (push_operand (operands
[0], SImode
))
1185 return "movl %1,%0";
1188 if (CONST_INT_P (operands
[1]))
1190 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1193 else if ((unsigned HOST_WIDE_INT
)i
< 64)
1194 return "movw %1,%0";
1195 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1196 return "mcomw %H1,%0";
1197 else if ((unsigned HOST_WIDE_INT
)i
< 256)
1198 return "movzbw %1,%0";
1199 else if (i
>= -0x80 && i
< 0)
1200 return "cvtbw %1,%0";
1202 return "movw %1,%0";
1205 if (CONST_INT_P (operands
[1]))
1207 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1210 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1211 return "mcomb %B1,%0";
1213 return "movb %1,%0";
1220 /* Output integer add instructions.
1222 The space-time-opcode tradeoffs for addition vary by model of VAX.
1224 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1225 but it not faster on other models.
1227 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1228 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1229 a register is used in an address too soon after it is set.
1230 Compromise by using movab only when it is shorter than the add
1231 or the base register in the address is one of sp, ap, and fp,
1232 which are not modified very often. */
1235 vax_output_int_add (rtx insn
, rtx
*operands
, enum machine_mode mode
)
1242 const char *pattern
;
1246 if (TARGET_QMATH
&& 0)
1249 split_quadword_operands (insn
, PLUS
, operands
, low
, 3);
1253 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1254 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1255 gcc_assert (!flag_pic
|| !external_memory_operand (low
[2], SImode
));
1256 gcc_assert (!flag_pic
|| !external_memory_operand (low
[0], SImode
));
1259 /* No reason to add a 0 to the low part and thus no carry, so just
1260 emit the appropriate add/sub instruction. */
1261 if (low
[2] == const0_rtx
)
1262 return vax_output_int_add (NULL
, operands
, SImode
);
1264 /* Are we doing addition or subtraction? */
1265 sub
= CONST_INT_P (operands
[2]) && INTVAL (operands
[2]) < 0;
1267 /* We can't use vax_output_int_add since some the patterns don't
1268 modify the carry bit. */
1271 if (low
[2] == constm1_rtx
)
1272 pattern
= "decl %0";
1274 pattern
= "subl2 $%n2,%0";
1278 if (low
[2] == const1_rtx
)
1279 pattern
= "incl %0";
1281 pattern
= "addl2 %2,%0";
1283 output_asm_insn (pattern
, low
);
1285 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1286 two 32bit parts, we complement each and then add one to
1287 low part. We know that the low part can't overflow since
1288 it's value can never be 0. */
1290 return "sbwc %N2,%0";
1291 return "adwc %2,%0";
1294 /* Add low parts. */
1295 if (rtx_equal_p (operands
[0], operands
[1]))
1297 if (low
[2] == const0_rtx
)
1298 /* Should examine operand, punt if not POST_INC. */
1299 pattern
= "tstl %0", carry
= 0;
1300 else if (low
[2] == const1_rtx
)
1301 pattern
= "incl %0";
1303 pattern
= "addl2 %2,%0";
1307 if (low
[2] == const0_rtx
)
1308 pattern
= "movl %1,%0", carry
= 0;
1310 pattern
= "addl3 %2,%1,%0";
1313 output_asm_insn (pattern
, low
);
1315 /* If CARRY is 0, we don't have any carry value to worry about. */
1316 return get_insn_template (CODE_FOR_addsi3
, insn
);
1317 /* %0 = C + %1 + %2 */
1318 if (!rtx_equal_p (operands
[0], operands
[1]))
1319 output_asm_insn ((operands
[1] == const0_rtx
1321 : "movl %1,%0"), operands
);
1322 return "adwc %2,%0";
1326 if (rtx_equal_p (operands
[0], operands
[1]))
1328 if (operands
[2] == const1_rtx
)
1330 if (operands
[2] == constm1_rtx
)
1332 if (CONST_INT_P (operands
[2])
1333 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1334 return "subl2 $%n2,%0";
1335 if (CONST_INT_P (operands
[2])
1336 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1337 && REG_P (operands
[1])
1338 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1339 || REGNO (operands
[1]) > 11))
1340 return "movab %c2(%1),%0";
1341 if (REG_P (operands
[0]) && symbolic_operand (operands
[2], SImode
))
1342 return "movab %a2[%0],%0";
1343 return "addl2 %2,%0";
1346 if (rtx_equal_p (operands
[0], operands
[2]))
1348 if (REG_P (operands
[0]) && symbolic_operand (operands
[1], SImode
))
1349 return "movab %a1[%0],%0";
1350 return "addl2 %1,%0";
1353 if (CONST_INT_P (operands
[2])
1354 && INTVAL (operands
[2]) < 32767
1355 && INTVAL (operands
[2]) > -32768
1356 && REG_P (operands
[1])
1357 && push_operand (operands
[0], SImode
))
1358 return "pushab %c2(%1)";
1360 if (CONST_INT_P (operands
[2])
1361 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1362 return "subl3 $%n2,%1,%0";
1364 if (CONST_INT_P (operands
[2])
1365 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1366 && REG_P (operands
[1])
1367 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1368 || REGNO (operands
[1]) > 11))
1369 return "movab %c2(%1),%0";
1371 /* Add this if using gcc on a VAX 3xxx:
1372 if (REG_P (operands[1]) && REG_P (operands[2]))
1373 return "movab (%1)[%2],%0";
1376 if (REG_P (operands
[1]) && symbolic_operand (operands
[2], SImode
))
1378 if (push_operand (operands
[0], SImode
))
1379 return "pushab %a2[%1]";
1380 return "movab %a2[%1],%0";
1383 if (REG_P (operands
[2]) && symbolic_operand (operands
[1], SImode
))
1385 if (push_operand (operands
[0], SImode
))
1386 return "pushab %a1[%2]";
1387 return "movab %a1[%2],%0";
1390 if (flag_pic
&& REG_P (operands
[0])
1391 && symbolic_operand (operands
[2], SImode
))
1392 return "movab %a2,%0;addl2 %1,%0";
1395 && (symbolic_operand (operands
[1], SImode
)
1396 || symbolic_operand (operands
[1], SImode
)))
1399 return "addl3 %1,%2,%0";
1402 if (rtx_equal_p (operands
[0], operands
[1]))
1404 if (operands
[2] == const1_rtx
)
1406 if (operands
[2] == constm1_rtx
)
1408 if (CONST_INT_P (operands
[2])
1409 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1410 return "subw2 $%n2,%0";
1411 return "addw2 %2,%0";
1413 if (rtx_equal_p (operands
[0], operands
[2]))
1414 return "addw2 %1,%0";
1415 if (CONST_INT_P (operands
[2])
1416 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1417 return "subw3 $%n2,%1,%0";
1418 return "addw3 %1,%2,%0";
1421 if (rtx_equal_p (operands
[0], operands
[1]))
1423 if (operands
[2] == const1_rtx
)
1425 if (operands
[2] == constm1_rtx
)
1427 if (CONST_INT_P (operands
[2])
1428 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1429 return "subb2 $%n2,%0";
1430 return "addb2 %2,%0";
1432 if (rtx_equal_p (operands
[0], operands
[2]))
1433 return "addb2 %1,%0";
1434 if (CONST_INT_P (operands
[2])
1435 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1436 return "subb3 $%n2,%1,%0";
1437 return "addb3 %1,%2,%0";
/* NOTE(review): this region was mangled by extraction -- statements are
   split across physical lines and the original lines between the
   embedded numbered fragments (braces, local declarations, the
   enclosing switch on MODE) were dropped.  Only comments are added
   here; do not edit the code from this partial view.  */
/* Emits VAX assembly for a wide (quadword) subtract: the low halves
   are handled with output_asm_insn and the returned template handles
   the high halves with subtract-with-carry ("sbwc").  */
1445 vax_output_int_subtract (rtx insn
, rtx
*operands
, enum machine_mode mode
)
1452 const char *pattern
;
1455 if (TARGET_QMATH
&& 0)
1458 split_quadword_operands (insn
, MINUS
, operands
, low
, 3);
/* Special case: subtracting from zero, i.e. quadword negation.  */
1462 if (operands
[1] == const0_rtx
&& low
[1] == const0_rtx
)
1464 /* Negation is tricky. It's basically complement and increment.
1465 Negate hi, then lo, and subtract the carry back. */
/* Auto-increment addressing would be clobbered by the two-insn
   sequence below, so it is rejected outright.  */
1466 if ((MEM_P (low
[0]) && GET_CODE (XEXP (low
[0], 0)) == POST_INC
)
1467 || (MEM_P (operands
[0])
1468 && GET_CODE (XEXP (operands
[0], 0)) == POST_INC
))
1469 fatal_insn ("illegal operand detected", insn
);
1470 output_asm_insn ("mnegl %2,%0", operands
);
1471 output_asm_insn ("mnegl %2,%0", low
);
1472 return "sbwc $0,%0";
/* The two-operand forms below require destination == first source,
   for both the high and the low halves.  */
1474 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1475 gcc_assert (rtx_equal_p (low
[0], low
[1]));
1476 if (low
[2] == const1_rtx
)
1477 output_asm_insn ("decl %0", low
);
1479 output_asm_insn ("subl2 %2,%0", low
);
1480 return "sbwc %2,%0";
1483 /* Subtract low parts. */
1484 if (rtx_equal_p (operands
[0], operands
[1]))
1486 if (low
[2] == const0_rtx
)
1487 pattern
= 0, carry
= 0;
1488 else if (low
[2] == constm1_rtx
)
1489 pattern
= "decl %0";
1491 pattern
= "subl2 %2,%0";
/* Three-operand variants when destination differs from source 1.  */
1495 if (low
[2] == constm1_rtx
)
1496 pattern
= "decl %0";
1497 else if (low
[2] == const0_rtx
)
1498 pattern
= get_insn_template (CODE_FOR_movsi
, insn
), carry
= 0;
1500 pattern
= "subl3 %2,%1,%0";
/* Emit the chosen low-part instruction (if any), then return the
   template for the high part.  */
1503 output_asm_insn (pattern
, low
);
1506 if (!rtx_equal_p (operands
[0], operands
[1]))
1507 return "movl %1,%0;sbwc %2,%0";
1508 return "sbwc %2,%0";
1509 /* %0 = %2 - %1 - C */
/* No borrow generated by the low part: a plain SImode subtract
   suffices for the high part.  */
1511 return get_insn_template (CODE_FOR_subsi3
, insn
);
1519 /* Output a conditional branch. */
1521 vax_output_conditional_branch (enum rtx_code code
)
1525 case EQ
: return "jeql %l0";
1526 case NE
: return "jneq %l0";
1527 case GT
: return "jgtr %l0";
1528 case LT
: return "jlss %l0";
1529 case GTU
: return "jgtru %l0";
1530 case LTU
: return "jlssu %l0";
1531 case GE
: return "jgeq %l0";
1532 case LE
: return "jleq %l0";
1533 case GEU
: return "jgequ %l0";
1534 case LEU
: return "jlequ %l0";
1540 /* True if X is an rtx for a constant that is a valid address. */
1543 legitimate_constant_address_p (rtx x
)
1545 if (GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == SYMBOL_REF
1546 || CONST_INT_P (x
) || GET_CODE (x
) == HIGH
)
1548 if (GET_CODE (x
) != CONST
)
1550 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1552 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1553 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0)))
1559 /* True if the constant value X is a legitimate general operand.
1560 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1563 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED
)
1568 /* The other macros defined here are used only in legitimate_address_p (). */
/* NOTE(review): STRICT selects strict register checking (the register
   number must pass REGNO_OK_FOR_INDEX_P / REGNO_OK_FOR_BASE_P); when
   STRICT is false any register, including a pseudo, is accepted.  */
1570 /* Nonzero if X is a hard reg that can be used as an index
1571 or, if not strict, if it is a pseudo reg. */
1572 #define INDEX_REGISTER_P(X, STRICT) \
1573 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1575 /* Nonzero if X is a hard reg that can be used as a base reg
1576 or, if not strict, if it is a pseudo reg. */
1577 #define BASE_REGISTER_P(X, STRICT) \
1578 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1580 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1582 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1583 are no SYMBOL_REFs for external symbols present. */
1586 indirectable_constant_address_p (rtx x
, bool indirect
)
1588 if (GET_CODE (x
) == SYMBOL_REF
)
1589 return !flag_pic
|| SYMBOL_REF_LOCAL_P (x
) || !indirect
;
1591 if (GET_CODE (x
) == CONST
)
1593 || GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
1594 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0));
1596 return CONSTANT_ADDRESS_P (x
);
1599 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1602 indirectable_constant_address_p (rtx x
, bool indirect ATTRIBUTE_UNUSED
)
1604 return CONSTANT_ADDRESS_P (x
);
1607 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1609 /* True if X is an address which can be indirected. External symbols
1610 could be in a sharable image library, so we disallow those. */
1613 indirectable_address_p (rtx x
, bool strict
, bool indirect
)
1615 if (indirectable_constant_address_p (x
, indirect
)
1616 || BASE_REGISTER_P (x
, strict
))
1618 if (GET_CODE (x
) != PLUS
1619 || !BASE_REGISTER_P (XEXP (x
, 0), strict
)
1620 || (flag_pic
&& !CONST_INT_P (XEXP (x
, 1))))
1622 return indirectable_constant_address_p (XEXP (x
, 1), indirect
);
1625 /* Return true if x is a valid address not using indexing.
1626 (This much is the easy part.) */
1628 nonindexed_address_p (rtx x
, bool strict
)
1633 extern rtx
*reg_equiv_mem
;
1634 if (! reload_in_progress
1635 || reg_equiv_mem
[REGNO (x
)] == 0
1636 || indirectable_address_p (reg_equiv_mem
[REGNO (x
)], strict
, false))
1639 if (indirectable_constant_address_p (x
, false))
1641 if (indirectable_address_p (x
, strict
, false))
1643 xfoo0
= XEXP (x
, 0);
1644 if (MEM_P (x
) && indirectable_address_p (xfoo0
, strict
, true))
1646 if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1647 && BASE_REGISTER_P (xfoo0
, strict
))
1652 /* True if PROD is either a reg times size of mode MODE and MODE is less
1653 than or equal 8 bytes, or just a reg if MODE is one byte. */
1656 index_term_p (rtx prod
, enum machine_mode mode
, bool strict
)
1660 if (GET_MODE_SIZE (mode
) == 1)
1661 return BASE_REGISTER_P (prod
, strict
);
1663 if (GET_CODE (prod
) != MULT
|| GET_MODE_SIZE (mode
) > 8)
1666 xfoo0
= XEXP (prod
, 0);
1667 xfoo1
= XEXP (prod
, 1);
1669 if (CONST_INT_P (xfoo0
)
1670 && INTVAL (xfoo0
) == (int)GET_MODE_SIZE (mode
)
1671 && INDEX_REGISTER_P (xfoo1
, strict
))
1674 if (CONST_INT_P (xfoo1
)
1675 && INTVAL (xfoo1
) == (int)GET_MODE_SIZE (mode
)
1676 && INDEX_REGISTER_P (xfoo0
, strict
))
1682 /* Return true if X is the sum of a register
1683 and a valid index term for mode MODE. */
1685 reg_plus_index_p (rtx x
, enum machine_mode mode
, bool strict
)
1689 if (GET_CODE (x
) != PLUS
)
1692 xfoo0
= XEXP (x
, 0);
1693 xfoo1
= XEXP (x
, 1);
1695 if (BASE_REGISTER_P (xfoo0
, strict
) && index_term_p (xfoo1
, mode
, strict
))
1698 if (BASE_REGISTER_P (xfoo1
, strict
) && index_term_p (xfoo0
, mode
, strict
))
1704 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1706 indexable_address_p (rtx xfoo0
, rtx xfoo1
, enum machine_mode mode
, bool strict
)
1708 if (!CONSTANT_ADDRESS_P (xfoo0
))
1710 if (BASE_REGISTER_P (xfoo1
, strict
))
1711 return !flag_pic
|| mode
== QImode
;
1712 if (flag_pic
&& symbolic_operand (xfoo0
, SImode
))
1714 return reg_plus_index_p (xfoo1
, mode
, strict
);
1717 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1718 that is a valid memory address for an instruction.
1719 The MODE argument is the machine mode for the MEM expression
1720 that wants to use this address. */
1722 legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1726 if (nonindexed_address_p (x
, strict
))
1729 if (GET_CODE (x
) != PLUS
)
1732 /* Handle <address>[index] represented with index-sum outermost */
1734 xfoo0
= XEXP (x
, 0);
1735 xfoo1
= XEXP (x
, 1);
1737 if (index_term_p (xfoo0
, mode
, strict
)
1738 && nonindexed_address_p (xfoo1
, strict
))
1741 if (index_term_p (xfoo1
, mode
, strict
)
1742 && nonindexed_address_p (xfoo0
, strict
))
1745 /* Handle offset(reg)[index] with offset added outermost */
1747 if (indexable_address_p (xfoo0
, xfoo1
, mode
, strict
)
1748 || indexable_address_p (xfoo1
, xfoo0
, mode
, strict
))
1754 /* Return true if x (a legitimate address expression) has an effect that
1755 depends on the machine mode it is used for. On the VAX, the predecrement
1756 and postincrement address depend thus (the amount of decrement or
1757 increment being the length of the operand) and all indexed address depend
1758 thus (because the index scale factor is the length of the operand). */
1761 vax_mode_dependent_address_p (rtx x
)
1765 /* Auto-increment cases are now dealt with generically in recog.c. */
1766 if (GET_CODE (x
) != PLUS
)
1769 xfoo0
= XEXP (x
, 0);
1770 xfoo1
= XEXP (x
, 1);
1772 if (CONST_INT_P (xfoo0
) && REG_P (xfoo1
))
1774 if (CONST_INT_P (xfoo1
) && REG_P (xfoo0
))
1776 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo0
) && REG_P (xfoo1
))
1778 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo1
) && REG_P (xfoo0
))
1785 fixup_mathdi_operand (rtx x
, enum machine_mode mode
)
1787 if (illegal_addsub_di_memory_operand (x
, mode
))
1789 rtx addr
= XEXP (x
, 0);
1790 rtx temp
= gen_reg_rtx (Pmode
);
1792 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1793 if (GET_CODE (addr
) == CONST
&& flag_pic
)
1795 offset
= XEXP (XEXP (addr
, 0), 1);
1796 addr
= XEXP (XEXP (addr
, 0), 0);
1799 emit_move_insn (temp
, addr
);
1801 temp
= gen_rtx_PLUS (Pmode
, temp
, offset
);
1802 x
= gen_rtx_MEM (DImode
, temp
);
/* NOTE(review): this region was mangled by extraction -- statements are
   split across physical lines and many original lines (braces, the
   condition selecting the PLUS generator group, else-branches) were
   dropped.  Only comments are added here; do not edit the code from
   this partial view.  */
/* Expands a DImode add (CODE == PLUS) or subtract (CODE == MINUS):
   selects generator functions for the chosen operation, then emits
   either an old-style quadword insn, a single SImode op on the high
   word (when the low word of operand 2 is zero), or a full
   carry-propagating sequence.  */
1808 vax_expand_addsub_di_operands (rtx
* operands
, enum rtx_code code
)
/* hi_only: the low subword of the third operand is zero, so only the
   high words need arithmetic.  */
1810 int hi_only
= operand_subword (operands
[2], 0, 0, DImode
) == const0_rtx
;
1813 rtx (*gen_old_insn
)(rtx
, rtx
, rtx
);
1814 rtx (*gen_si_insn
)(rtx
, rtx
, rtx
);
1815 rtx (*gen_insn
)(rtx
, rtx
, rtx
);
/* Generator selection -- this first group is presumably the PLUS case
   (the test was dropped by extraction); the MINUS case follows.  */
1819 gen_old_insn
= gen_adddi3_old
;
1820 gen_si_insn
= gen_addsi3
;
1821 gen_insn
= gen_adcdi3
;
1823 else if (code
== MINUS
)
1825 gen_old_insn
= gen_subdi3_old
;
1826 gen_si_insn
= gen_subsi3
;
1827 gen_insn
= gen_sbcdi3
;
1832 /* If this is addition (thus operands are commutative) and if there is one
1833 addend that duplicates the desination, we want that addend to be the
1836 && rtx_equal_p (operands
[0], operands
[2])
1837 && !rtx_equal_p (operands
[1], operands
[2]))
1840 operands
[2] = operands
[1];
/* Fallback path: emit the old-style full quadword insn directly.  */
1846 emit_insn ((*gen_old_insn
) (operands
[0], operands
[1], operands
[2]));
/* hi_only path: copy operand 1 into a register destination if needed,
   legalize the DImode memory operands, move the untouched low word,
   then operate on the high words only.  */
1850 if (!rtx_equal_p (operands
[0], operands
[1])
1851 && (REG_P (operands
[0]) && MEM_P (operands
[1])))
1853 emit_move_insn (operands
[0], operands
[1]);
1854 operands
[1] = operands
[0];
1857 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1858 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1859 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1861 if (!rtx_equal_p (operands
[0], operands
[1]))
1862 emit_move_insn (operand_subword (operands
[0], 0, 0, DImode
),
1863 operand_subword (operands
[1], 0, 0, DImode
));
1865 emit_insn ((*gen_si_insn
) (operand_subword (operands
[0], 1, 0, DImode
),
1866 operand_subword (operands
[1], 1, 0, DImode
),
1867 operand_subword (operands
[2], 1, 0, DImode
)));
1871 /* If are adding the same value together, that's really a multiply by 2,
1872 and that's just a left shift of 1. */
1873 if (rtx_equal_p (operands
[1], operands
[2]))
1875 gcc_assert (code
!= MINUS
);
1876 emit_insn (gen_ashldi3 (operands
[0], operands
[1], const1_rtx
));
1880 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1882 /* If an operand is the same as operand[0], use the operand[0] rtx
1883 because fixup will an equivalent rtx but not an equal one. */
1885 if (rtx_equal_p (operands
[0], operands
[1]))
1886 operands
[1] = operands
[0];
1888 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1890 if (rtx_equal_p (operands
[0], operands
[2]))
1891 operands
[2] = operands
[0];
1893 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1895 /* If we are subtracting not from ourselves [d = a - b], and because the
1896 carry ops are two operand only, we would need to do a move prior to
1897 the subtract. And if d == b, we would need a temp otherwise
1898 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1899 into d = -b, d += a. Since -b can never overflow, even if b == d,
1902 If we are doing addition, since the carry ops are two operand, if
1903 we aren't adding to ourselves, move the first addend to the
1904 destination first. */
1906 gcc_assert (operands
[1] != const0_rtx
|| code
== MINUS
);
1907 if (!rtx_equal_p (operands
[0], operands
[1]) && operands
[1] != const0_rtx
)
/* Rewrite d = a - b as d = -b; d += a (see block comment above);
   the generator is switched to the add-with-carry form.  */
1909 if (code
== MINUS
&& CONSTANT_P (operands
[1]))
1911 temp
= gen_reg_rtx (DImode
);
1912 emit_insn (gen_sbcdi3 (operands
[0], const0_rtx
, operands
[2]));
1914 gen_insn
= gen_adcdi3
;
1915 operands
[2] = operands
[1];
1916 operands
[1] = operands
[0];
1919 emit_move_insn (operands
[0], operands
[1]);
1922 /* Subtracting a constant will have been rewritten to an addition of the
1923 negative of that constant before we get here. */
1924 gcc_assert (!CONSTANT_P (operands
[2]) || code
== PLUS
);
1925 emit_insn ((*gen_insn
) (operands
[0], operands
[1], operands
[2]));
1930 adjacent_operands_p (rtx lo
, rtx hi
, enum machine_mode mode
)
1932 HOST_WIDE_INT lo_offset
;
1933 HOST_WIDE_INT hi_offset
;
1935 if (GET_CODE (lo
) != GET_CODE (hi
))
1939 return mode
== SImode
&& REGNO (lo
) + 1 == REGNO (hi
);
1940 if (CONST_INT_P (lo
))
1941 return INTVAL (hi
) == 0 && 0 <= INTVAL (lo
) && INTVAL (lo
) < 64;
1942 if (CONST_INT_P (lo
))
1943 return mode
!= SImode
;
1948 if (MEM_VOLATILE_P (lo
) || MEM_VOLATILE_P (hi
))
1954 if (GET_CODE (lo
) == POST_INC
/* || GET_CODE (lo) == PRE_DEC */)
1955 return rtx_equal_p (lo
, hi
);
1957 switch (GET_CODE (lo
))
1967 if (!CONST_INT_P (XEXP (lo
, 1)))
1969 lo_offset
= INTVAL (XEXP (lo
, 1));
1976 switch (GET_CODE (hi
))
1986 if (!CONST_INT_P (XEXP (hi
, 1)))
1988 hi_offset
= INTVAL (XEXP (hi
, 1));
1995 if (GET_CODE (lo
) == MULT
|| GET_CODE (lo
) == PLUS
)
1998 return rtx_equal_p (lo
, hi
)
1999 && hi_offset
- lo_offset
== GET_MODE_SIZE (mode
);