1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "stringpool.h"
30 #include "insn-config.h"
33 #include "diagnostic-core.h"
35 #include "stor-layout.h"
39 #include "insn-attr.h"
42 #include "tm-constrs.h"
45 /* This file should be included last. */
46 #include "target-def.h"
/* Array of valid operand punctuation characters.
   Indexed by character code; nonzero entries are accepted by
   m32r_print_operand_punct_valid_p.  */
static char m32r_punct_chars[256];
/* Machine-specific symbol_ref flags.  The code model chosen for a symbol
   is stored in the target-dependent bits of SYMBOL_REF_FLAGS.  */
#define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
#define SYMBOL_REF_MODEL(X) \
  ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))

/* For string literals, etc.  */
#define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
59 /* Forward declaration. */
60 static void m32r_option_override (void);
61 static void init_reg_tables (void);
62 static void block_move_call (rtx
, rtx
, rtx
);
63 static int m32r_is_insn (rtx
);
64 static bool m32r_legitimate_address_p (machine_mode
, rtx
, bool);
65 static rtx
m32r_legitimize_address (rtx
, rtx
, machine_mode
);
66 static bool m32r_mode_dependent_address_p (const_rtx
, addr_space_t
);
67 static tree
m32r_handle_model_attribute (tree
*, tree
, tree
, int, bool *);
68 static void m32r_print_operand (FILE *, rtx
, int);
69 static void m32r_print_operand_address (FILE *, machine_mode
, rtx
);
70 static bool m32r_print_operand_punct_valid_p (unsigned char code
);
71 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT
);
72 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT
);
74 static void m32r_file_start (void);
76 static int m32r_adjust_priority (rtx_insn
*, int);
77 static int m32r_issue_rate (void);
79 static void m32r_encode_section_info (tree
, rtx
, int);
80 static bool m32r_in_small_data_p (const_tree
);
81 static bool m32r_return_in_memory (const_tree
, const_tree
);
82 static rtx
m32r_function_value (const_tree
, const_tree
, bool);
83 static rtx
m32r_libcall_value (machine_mode
, const_rtx
);
84 static bool m32r_function_value_regno_p (const unsigned int);
85 static void m32r_setup_incoming_varargs (cumulative_args_t
, machine_mode
,
87 static void init_idents (void);
88 static bool m32r_rtx_costs (rtx
, machine_mode
, int, int, int *, bool speed
);
89 static int m32r_memory_move_cost (machine_mode
, reg_class_t
, bool);
90 static bool m32r_pass_by_reference (cumulative_args_t
, machine_mode
,
92 static int m32r_arg_partial_bytes (cumulative_args_t
, machine_mode
,
94 static rtx
m32r_function_arg (cumulative_args_t
, machine_mode
,
96 static void m32r_function_arg_advance (cumulative_args_t
, machine_mode
,
98 static bool m32r_can_eliminate (const int, const int);
99 static void m32r_conditional_register_usage (void);
100 static void m32r_trampoline_init (rtx
, tree
, rtx
);
101 static bool m32r_legitimate_constant_p (machine_mode
, rtx
);
102 static bool m32r_attribute_identifier (const_tree
);
104 /* M32R specific attributes. */
106 static const struct attribute_spec m32r_attribute_table
[] =
108 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
109 affects_type_identity } */
110 { "interrupt", 0, 0, true, false, false, NULL
, false },
111 { "model", 1, 1, true, false, false, m32r_handle_model_attribute
,
113 { NULL
, 0, 0, false, false, false, NULL
, false }
/* Initialize the GCC target structure.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
#undef  TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
#define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND m32r_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START m32r_file_start

#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE m32r_issue_rate

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE m32r_option_override

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS m32r_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY m32r_return_in_memory

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE m32r_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE m32r_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32r_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE m32r_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT m32r_trampoline_init

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
207 struct gcc_target targetm
= TARGET_INITIALIZER
;
209 /* Called by m32r_option_override to initialize various things. */
216 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
217 memset (m32r_punct_chars
, 0, sizeof (m32r_punct_chars
));
218 m32r_punct_chars
['#'] = 1;
219 m32r_punct_chars
['@'] = 1; /* ??? no longer used */
221 /* Provide default value if not specified. */
222 if (!global_options_set
.x_g_switch_value
)
223 g_switch_value
= SDATA_DEFAULT_SIZE
;
227 m32r_option_override (void)
229 /* These need to be done at start up.
230 It's convenient to do them here. */
232 SUBTARGET_OVERRIDE_OPTIONS
;
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)
266 /* Value is 1 if register/mode pair is acceptable on arc. */
268 const unsigned int m32r_hard_regno_mode_ok
[FIRST_PSEUDO_REGISTER
] =
270 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
,
271 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, S_MODES
, S_MODES
, S_MODES
,
272 S_MODES
, C_MODES
, A_MODES
, A_MODES
275 unsigned int m32r_mode_class
[NUM_MACHINE_MODES
];
277 enum reg_class m32r_regno_reg_class
[FIRST_PSEUDO_REGISTER
];
280 init_reg_tables (void)
284 for (i
= 0; i
< NUM_MACHINE_MODES
; i
++)
286 machine_mode m
= (machine_mode
) i
;
288 switch (GET_MODE_CLASS (m
))
291 case MODE_PARTIAL_INT
:
292 case MODE_COMPLEX_INT
:
293 if (GET_MODE_SIZE (m
) <= 4)
294 m32r_mode_class
[i
] = 1 << (int) S_MODE
;
295 else if (GET_MODE_SIZE (m
) == 8)
296 m32r_mode_class
[i
] = 1 << (int) D_MODE
;
297 else if (GET_MODE_SIZE (m
) == 16)
298 m32r_mode_class
[i
] = 1 << (int) T_MODE
;
299 else if (GET_MODE_SIZE (m
) == 32)
300 m32r_mode_class
[i
] = 1 << (int) O_MODE
;
302 m32r_mode_class
[i
] = 0;
305 case MODE_COMPLEX_FLOAT
:
306 if (GET_MODE_SIZE (m
) <= 4)
307 m32r_mode_class
[i
] = 1 << (int) SF_MODE
;
308 else if (GET_MODE_SIZE (m
) == 8)
309 m32r_mode_class
[i
] = 1 << (int) DF_MODE
;
310 else if (GET_MODE_SIZE (m
) == 16)
311 m32r_mode_class
[i
] = 1 << (int) TF_MODE
;
312 else if (GET_MODE_SIZE (m
) == 32)
313 m32r_mode_class
[i
] = 1 << (int) OF_MODE
;
315 m32r_mode_class
[i
] = 0;
318 m32r_mode_class
[i
] = 1 << (int) C_MODE
;
321 m32r_mode_class
[i
] = 0;
326 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
329 m32r_regno_reg_class
[i
] = GENERAL_REGS
;
330 else if (i
== ARG_POINTER_REGNUM
)
331 m32r_regno_reg_class
[i
] = GENERAL_REGS
;
333 m32r_regno_reg_class
[i
] = NO_REGS
;
337 /* M32R specific attribute support.
339 interrupt - for interrupt functions
341 model - select code model used to access object
343 small: addresses use 24 bits, use bl to make calls
344 medium: addresses use 32 bits, use bl to make calls
345 large: addresses use 32 bits, use seth/add3/jl to make calls
347 Grep for MODEL in m32r.h for more info. */
349 static tree small_ident1
;
350 static tree small_ident2
;
351 static tree medium_ident1
;
352 static tree medium_ident2
;
353 static tree large_ident1
;
354 static tree large_ident2
;
359 if (small_ident1
== 0)
361 small_ident1
= get_identifier ("small");
362 small_ident2
= get_identifier ("__small__");
363 medium_ident1
= get_identifier ("medium");
364 medium_ident2
= get_identifier ("__medium__");
365 large_ident1
= get_identifier ("large");
366 large_ident2
= get_identifier ("__large__");
370 /* Handle an "model" attribute; arguments as in
371 struct attribute_spec.handler. */
373 m32r_handle_model_attribute (tree
*node ATTRIBUTE_UNUSED
, tree name
,
374 tree args
, int flags ATTRIBUTE_UNUSED
,
380 arg
= TREE_VALUE (args
);
382 if (arg
!= small_ident1
383 && arg
!= small_ident2
384 && arg
!= medium_ident1
385 && arg
!= medium_ident2
386 && arg
!= large_ident1
387 && arg
!= large_ident2
)
389 warning (OPT_Wattributes
, "invalid argument of %qs attribute",
390 IDENTIFIER_POINTER (name
));
391 *no_add_attrs
= true;
398 m32r_attribute_identifier (const_tree name
)
400 return strcmp (IDENTIFIER_POINTER (name
), "model") == 0
401 || strcmp (IDENTIFIER_POINTER (name
), "__model__") == 0;
404 /* Encode section information of DECL, which is either a VAR_DECL,
405 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
407 For the M32R we want to record:
409 - whether the object lives in .sdata/.sbss.
410 - what code model should be used to access the object
414 m32r_encode_section_info (tree decl
, rtx rtl
, int first
)
418 enum m32r_model model
;
420 default_encode_section_info (decl
, rtl
, first
);
425 model_attr
= lookup_attribute ("model", DECL_ATTRIBUTES (decl
));
432 id
= TREE_VALUE (TREE_VALUE (model_attr
));
434 if (id
== small_ident1
|| id
== small_ident2
)
435 model
= M32R_MODEL_SMALL
;
436 else if (id
== medium_ident1
|| id
== medium_ident2
)
437 model
= M32R_MODEL_MEDIUM
;
438 else if (id
== large_ident1
|| id
== large_ident2
)
439 model
= M32R_MODEL_LARGE
;
441 gcc_unreachable (); /* shouldn't happen */
445 if (TARGET_MODEL_SMALL
)
446 model
= M32R_MODEL_SMALL
;
447 else if (TARGET_MODEL_MEDIUM
)
448 model
= M32R_MODEL_MEDIUM
;
449 else if (TARGET_MODEL_LARGE
)
450 model
= M32R_MODEL_LARGE
;
452 gcc_unreachable (); /* shouldn't happen */
454 extra_flags
|= model
<< SYMBOL_FLAG_MODEL_SHIFT
;
457 SYMBOL_REF_FLAGS (XEXP (rtl
, 0)) |= extra_flags
;
460 /* Only mark the object as being small data area addressable if
461 it hasn't been explicitly marked with a code model.
463 The user can explicitly put an object in the small data area with the
464 section attribute. If the object is in sdata/sbss and marked with a
465 code model do both [put the object in .sdata and mark it as being
466 addressed with a specific code model - don't mark it as being addressed
467 with an SDA reloc though]. This is ok and might be useful at times. If
468 the object doesn't fit the linker will give an error. */
471 m32r_in_small_data_p (const_tree decl
)
475 if (TREE_CODE (decl
) != VAR_DECL
)
478 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl
)))
481 section
= DECL_SECTION_NAME (decl
);
484 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
489 if (! TREE_READONLY (decl
) && ! TARGET_SDATA_NONE
)
491 int size
= int_size_in_bytes (TREE_TYPE (decl
));
493 if (size
> 0 && size
<= g_switch_value
)
/* Do anything needed before RTL is emitted for each function.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
511 call_operand (rtx op
, machine_mode mode
)
516 return call_address_operand (op
, mode
);
519 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
522 small_data_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
524 if (! TARGET_SDATA_USE
)
527 if (GET_CODE (op
) == SYMBOL_REF
)
528 return SYMBOL_REF_SMALL_P (op
);
530 if (GET_CODE (op
) == CONST
531 && GET_CODE (XEXP (op
, 0)) == PLUS
532 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
533 && satisfies_constraint_J (XEXP (XEXP (op
, 0), 1)))
534 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op
, 0), 0));
539 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
542 addr24_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
549 if (GET_CODE (op
) == LABEL_REF
)
550 return TARGET_ADDR24
;
552 if (GET_CODE (op
) == SYMBOL_REF
)
554 else if (GET_CODE (op
) == CONST
555 && GET_CODE (XEXP (op
, 0)) == PLUS
556 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
557 && satisfies_constraint_M (XEXP (XEXP (op
, 0), 1)))
558 sym
= XEXP (XEXP (op
, 0), 0);
562 if (SYMBOL_REF_MODEL (sym
) == M32R_MODEL_SMALL
)
566 && (CONSTANT_POOL_ADDRESS_P (sym
)
567 || LIT_NAME_P (XSTR (sym
, 0))))
573 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
576 addr32_operand (rtx op
, machine_mode mode
)
580 if (GET_CODE (op
) == LABEL_REF
)
581 return TARGET_ADDR32
;
583 if (GET_CODE (op
) == SYMBOL_REF
)
585 else if (GET_CODE (op
) == CONST
586 && GET_CODE (XEXP (op
, 0)) == PLUS
587 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
588 && CONST_INT_P (XEXP (XEXP (op
, 0), 1))
590 sym
= XEXP (XEXP (op
, 0), 0);
594 return (! addr24_operand (sym
, mode
)
595 && ! small_data_operand (sym
, mode
));
598 /* Return 1 if OP is a function that can be called with the `bl' insn. */
601 call26_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
606 if (GET_CODE (op
) == SYMBOL_REF
)
607 return SYMBOL_REF_MODEL (op
) != M32R_MODEL_LARGE
;
609 return TARGET_CALL26
;
612 /* Return 1 if OP is a DImode const we want to handle inline.
613 This must match the code in the movdi pattern.
614 It is used by the 'G' constraint. */
617 easy_di_const (rtx op
)
619 rtx high_rtx
, low_rtx
;
620 HOST_WIDE_INT high
, low
;
622 split_double (op
, &high_rtx
, &low_rtx
);
623 high
= INTVAL (high_rtx
);
624 low
= INTVAL (low_rtx
);
625 /* Pick constants loadable with 2 16-bit `ldi' insns. */
626 if (high
>= -128 && high
<= 127
627 && low
>= -128 && low
<= 127)
632 /* Return 1 if OP is a DFmode const we want to handle inline.
633 This must match the code in the movdf pattern.
634 It is used by the 'H' constraint. */
637 easy_df_const (rtx op
)
641 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op
), l
);
642 if (l
[0] == 0 && l
[1] == 0)
644 if ((l
[0] & 0xffff) == 0 && l
[1] == 0)
649 /* Return 1 if OP is (mem (reg ...)).
650 This is used in insn length calcs. */
653 memreg_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
655 return MEM_P (op
) && REG_P (XEXP (op
, 0));
658 /* Return nonzero if TYPE must be passed by indirect reference. */
661 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED
,
662 machine_mode mode
, const_tree type
,
663 bool named ATTRIBUTE_UNUSED
)
668 size
= int_size_in_bytes (type
);
670 size
= GET_MODE_SIZE (mode
);
672 return (size
< 0 || size
> 8);
677 /* X and Y are two things to compare using CODE. Emit the compare insn and
678 return the rtx for compare [arg0 of the if_then_else].
679 If need_compare is true then the comparison insn must be generated, rather
680 than being subsumed into the following branch instruction. */
683 gen_compare (enum rtx_code code
, rtx x
, rtx y
, int need_compare
)
685 enum rtx_code compare_code
;
686 enum rtx_code branch_code
;
687 rtx cc_reg
= gen_rtx_REG (CCmode
, CARRY_REGNUM
);
692 case EQ
: compare_code
= EQ
; branch_code
= NE
; break;
693 case NE
: compare_code
= EQ
; branch_code
= EQ
; break;
694 case LT
: compare_code
= LT
; branch_code
= NE
; break;
695 case LE
: compare_code
= LT
; branch_code
= EQ
; must_swap
= 1; break;
696 case GT
: compare_code
= LT
; branch_code
= NE
; must_swap
= 1; break;
697 case GE
: compare_code
= LT
; branch_code
= EQ
; break;
698 case LTU
: compare_code
= LTU
; branch_code
= NE
; break;
699 case LEU
: compare_code
= LTU
; branch_code
= EQ
; must_swap
= 1; break;
700 case GTU
: compare_code
= LTU
; branch_code
= NE
; must_swap
= 1; break;
701 case GEU
: compare_code
= LTU
; branch_code
= EQ
; break;
709 switch (compare_code
)
712 if (satisfies_constraint_P (y
) /* Reg equal to small const. */
715 rtx tmp
= gen_reg_rtx (SImode
);
717 emit_insn (gen_addsi3 (tmp
, x
, GEN_INT (-INTVAL (y
))));
721 else if (CONSTANT_P (y
)) /* Reg equal to const. */
723 rtx tmp
= force_reg (GET_MODE (x
), y
);
727 if (register_operand (y
, SImode
) /* Reg equal to reg. */
728 || y
== const0_rtx
) /* Reg equal to zero. */
730 emit_insn (gen_cmp_eqsi_insn (x
, y
));
732 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
737 if (register_operand (y
, SImode
)
738 || satisfies_constraint_P (y
))
740 rtx tmp
= gen_reg_rtx (SImode
); /* Reg compared to reg. */
745 emit_insn (gen_cmp_ltsi_insn (x
, y
));
752 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
753 emit_insn (gen_cmp_ltsi_insn (x
, tmp
));
758 tmp
= gen_rtx_PLUS (SImode
, y
, const1_rtx
);
760 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
761 emit_insn (gen_cmp_ltsi_insn (x
, tmp
));
765 emit_insn (gen_cmp_ltsi_insn (x
, y
));
772 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
777 if (register_operand (y
, SImode
)
778 || satisfies_constraint_P (y
))
780 rtx tmp
= gen_reg_rtx (SImode
); /* Reg (unsigned) compared to reg. */
785 emit_insn (gen_cmp_ltusi_insn (x
, y
));
792 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
793 emit_insn (gen_cmp_ltusi_insn (x
, tmp
));
798 tmp
= gen_rtx_PLUS (SImode
, y
, const1_rtx
);
800 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
801 emit_insn (gen_cmp_ltusi_insn (x
, tmp
));
805 emit_insn (gen_cmp_ltusi_insn (x
, y
));
812 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
822 /* Reg/reg equal comparison. */
823 if (compare_code
== EQ
824 && register_operand (y
, SImode
))
825 return gen_rtx_fmt_ee (code
, CCmode
, x
, y
);
827 /* Reg/zero signed comparison. */
828 if ((compare_code
== EQ
|| compare_code
== LT
)
830 return gen_rtx_fmt_ee (code
, CCmode
, x
, y
);
832 /* Reg/smallconst equal comparison. */
833 if (compare_code
== EQ
834 && satisfies_constraint_P (y
))
836 rtx tmp
= gen_reg_rtx (SImode
);
838 emit_insn (gen_addsi3 (tmp
, x
, GEN_INT (-INTVAL (y
))));
839 return gen_rtx_fmt_ee (code
, CCmode
, tmp
, const0_rtx
);
842 /* Reg/const equal comparison. */
843 if (compare_code
== EQ
846 rtx tmp
= force_reg (GET_MODE (x
), y
);
848 return gen_rtx_fmt_ee (code
, CCmode
, x
, tmp
);
855 y
= force_reg (GET_MODE (x
), y
);
858 int ok_const
= reg_or_int16_operand (y
, GET_MODE (y
));
861 y
= force_reg (GET_MODE (x
), y
);
865 switch (compare_code
)
868 emit_insn (gen_cmp_eqsi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
871 emit_insn (gen_cmp_ltsi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
874 emit_insn (gen_cmp_ltusi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
881 return gen_rtx_fmt_ee (branch_code
, VOIDmode
, cc_reg
, CONST0_RTX (CCmode
));
885 gen_cond_store (enum rtx_code code
, rtx op0
, rtx op1
, rtx op2
)
887 machine_mode mode
= GET_MODE (op0
);
889 gcc_assert (mode
== SImode
);
893 if (!register_operand (op1
, mode
))
894 op1
= force_reg (mode
, op1
);
896 if (TARGET_M32RX
|| TARGET_M32R2
)
898 if (!reg_or_zero_operand (op2
, mode
))
899 op2
= force_reg (mode
, op2
);
901 emit_insn (gen_seq_insn_m32rx (op0
, op1
, op2
));
904 if (CONST_INT_P (op2
) && INTVAL (op2
) == 0)
906 emit_insn (gen_seq_zero_insn (op0
, op1
));
910 if (!reg_or_eq_int16_operand (op2
, mode
))
911 op2
= force_reg (mode
, op2
);
913 emit_insn (gen_seq_insn (op0
, op1
, op2
));
917 if (!CONST_INT_P (op2
)
918 || (INTVAL (op2
) != 0 && satisfies_constraint_K (op2
)))
922 if (reload_completed
|| reload_in_progress
)
925 reg
= gen_reg_rtx (SImode
);
926 emit_insn (gen_xorsi3 (reg
, op1
, op2
));
929 if (!register_operand (op1
, mode
))
930 op1
= force_reg (mode
, op1
);
932 emit_insn (gen_sne_zero_insn (op0
, op1
));
947 if (!register_operand (op1
, mode
))
948 op1
= force_reg (mode
, op1
);
950 if (!reg_or_int16_operand (op2
, mode
))
951 op2
= force_reg (mode
, op2
);
953 emit_insn (gen_slt_insn (op0
, op1
, op2
));
966 if (!register_operand (op1
, mode
))
967 op1
= force_reg (mode
, op1
);
969 if (!reg_or_int16_operand (op2
, mode
))
970 op2
= force_reg (mode
, op2
);
972 emit_insn (gen_sltu_insn (op0
, op1
, op2
));
977 if (!register_operand (op1
, mode
))
978 op1
= force_reg (mode
, op1
);
980 if (!reg_or_int16_operand (op2
, mode
))
981 op2
= force_reg (mode
, op2
);
984 emit_insn (gen_sge_insn (op0
, op1
, op2
));
986 emit_insn (gen_sgeu_insn (op0
, op1
, op2
));
991 if (!register_operand (op1
, mode
))
992 op1
= force_reg (mode
, op1
);
994 if (CONST_INT_P (op2
))
996 HOST_WIDE_INT value
= INTVAL (op2
);
997 if (value
>= 2147483647)
999 emit_move_insn (op0
, const1_rtx
);
1003 op2
= GEN_INT (value
+ 1);
1004 if (value
< -32768 || value
>= 32767)
1005 op2
= force_reg (mode
, op2
);
1008 emit_insn (gen_sltu_insn (op0
, op1
, op2
));
1010 emit_insn (gen_slt_insn (op0
, op1
, op2
));
1014 if (!register_operand (op2
, mode
))
1015 op2
= force_reg (mode
, op2
);
1018 emit_insn (gen_sleu_insn (op0
, op1
, op2
));
1020 emit_insn (gen_sle_insn (op0
, op1
, op2
));
1029 /* Split a 2 word move (DI or DF) into component parts. */
1032 gen_split_move_double (rtx operands
[])
1034 machine_mode mode
= GET_MODE (operands
[0]);
1035 rtx dest
= operands
[0];
1036 rtx src
= operands
[1];
1039 /* We might have (SUBREG (MEM)) here, so just get rid of the
1040 subregs to make this code simpler. It is safe to call
1041 alter_subreg any time after reload. */
1042 if (GET_CODE (dest
) == SUBREG
)
1043 alter_subreg (&dest
, true);
1044 if (GET_CODE (src
) == SUBREG
)
1045 alter_subreg (&src
, true);
1050 int dregno
= REGNO (dest
);
1055 int sregno
= REGNO (src
);
1057 int reverse
= (dregno
== sregno
+ 1);
1059 /* We normally copy the low-numbered register first. However, if
1060 the first register operand 0 is the same as the second register of
1061 operand 1, we must copy in the opposite order. */
1062 emit_insn (gen_rtx_SET (operand_subword (dest
, reverse
, TRUE
, mode
),
1063 operand_subword (src
, reverse
, TRUE
, mode
)));
1065 emit_insn (gen_rtx_SET (operand_subword (dest
, !reverse
, TRUE
, mode
),
1066 operand_subword (src
, !reverse
, TRUE
, mode
)));
1069 /* Reg = constant. */
1070 else if (CONST_INT_P (src
) || GET_CODE (src
) == CONST_DOUBLE
)
1073 split_double (src
, &words
[0], &words
[1]);
1074 emit_insn (gen_rtx_SET (operand_subword (dest
, 0, TRUE
, mode
),
1077 emit_insn (gen_rtx_SET (operand_subword (dest
, 1, TRUE
, mode
),
1082 else if (MEM_P (src
))
1084 /* If the high-address word is used in the address, we must load it
1085 last. Otherwise, load it first. */
1086 int reverse
= refers_to_regno_p (dregno
, XEXP (src
, 0));
1088 /* We used to optimize loads from single registers as
1092 if r3 were not used subsequently. However, the REG_NOTES aren't
1093 propagated correctly by the reload phase, and it can cause bad
1094 code to be generated. We could still try:
1096 ld r1,r3+; ld r2,r3; addi r3,-4
1098 which saves 2 bytes and doesn't force longword alignment. */
1099 emit_insn (gen_rtx_SET (operand_subword (dest
, reverse
, TRUE
, mode
),
1100 adjust_address (src
, SImode
,
1101 reverse
* UNITS_PER_WORD
)));
1103 emit_insn (gen_rtx_SET (operand_subword (dest
, !reverse
, TRUE
, mode
),
1104 adjust_address (src
, SImode
,
1105 !reverse
* UNITS_PER_WORD
)));
1112 /* We used to optimize loads from single registers as
1116 if r3 were not used subsequently. However, the REG_NOTES aren't
1117 propagated correctly by the reload phase, and it can cause bad
1118 code to be generated. We could still try:
1120 st r1,r3; st r2,+r3; addi r3,-4
1122 which saves 2 bytes and doesn't force longword alignment. */
1123 else if (MEM_P (dest
) && REG_P (src
))
1125 emit_insn (gen_rtx_SET (adjust_address (dest
, SImode
, 0),
1126 operand_subword (src
, 0, TRUE
, mode
)));
1128 emit_insn (gen_rtx_SET (adjust_address (dest
, SImode
, UNITS_PER_WORD
),
1129 operand_subword (src
, 1, TRUE
, mode
)));
1142 m32r_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
1143 tree type
, bool named ATTRIBUTE_UNUSED
)
1145 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1149 (((mode
== BLKmode
&& type
)
1150 ? (unsigned int) int_size_in_bytes (type
)
1151 : GET_MODE_SIZE (mode
)) + UNITS_PER_WORD
- 1)
1154 if (*cum
>= M32R_MAX_PARM_REGS
)
1156 else if (*cum
+ size
> M32R_MAX_PARM_REGS
)
1157 words
= (*cum
+ size
) - M32R_MAX_PARM_REGS
;
1161 return words
* UNITS_PER_WORD
;
/* The ROUND_ADVANCE* macros are local to this file.  */

/* Round SIZE up to a word boundary.  */
#define ROUND_ADVANCE(SIZE) \
  (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)

/* Round arg MODE/TYPE up to the next word boundary.  */
#define ROUND_ADVANCE_ARG(MODE, TYPE) \
  ((MODE) == BLKmode \
   ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
   : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))

/* Round CUM up to the necessary point for argument MODE/TYPE.  */
#define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)

/* Return boolean indicating arg of type TYPE and mode MODE will be passed in
   a reg.  This includes arguments that have to be passed by reference as the
   pointer to them is passed in a reg if one is available (and that is what
   we're given).
   This macro is only used in this file.  */
#define PASS_IN_REG_P(CUM, MODE, TYPE) \
  (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1186 /* Determine where to put an argument to a function.
1187 Value is zero to push the argument on the stack,
1188 or a hard register in which to store the argument.
1190 MODE is the argument's machine mode.
1191 TYPE is the data type of the argument (as a tree).
1192 This is null for libcalls where that information may
1194 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1195 the preceding args and about the function being called.
1196 NAMED is nonzero if this argument is a named parameter
1197 (otherwise it is an extra parameter matching an ellipsis). */
1198 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1199 and the rest are pushed. */
1202 m32r_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1203 const_tree type ATTRIBUTE_UNUSED
,
1204 bool named ATTRIBUTE_UNUSED
)
1206 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1208 return (PASS_IN_REG_P (*cum
, mode
, type
)
1209 ? gen_rtx_REG (mode
, ROUND_ADVANCE_CUM (*cum
, mode
, type
))
1213 /* Update the data in CUM to advance over an argument
1214 of mode MODE and data type TYPE.
1215 (TYPE is null for libcalls where that information may not be available.) */
1218 m32r_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1219 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1221 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1223 *cum
= (ROUND_ADVANCE_CUM (*cum
, mode
, type
)
1224 + ROUND_ADVANCE_ARG (mode
, type
));
1227 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1230 m32r_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1232 cumulative_args_t dummy
= pack_cumulative_args (NULL
);
1234 return m32r_pass_by_reference (dummy
, TYPE_MODE (type
), type
, false);
1237 /* Worker function for TARGET_FUNCTION_VALUE. */
1240 m32r_function_value (const_tree valtype
,
1241 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
1242 bool outgoing ATTRIBUTE_UNUSED
)
1244 return gen_rtx_REG (TYPE_MODE (valtype
), 0);
1247 /* Worker function for TARGET_LIBCALL_VALUE. */
1250 m32r_libcall_value (machine_mode mode
,
1251 const_rtx fun ATTRIBUTE_UNUSED
)
1253 return gen_rtx_REG (mode
, 0);
1256 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1258 ??? What about r1 in DI/DF values. */
1261 m32r_function_value_regno_p (const unsigned int regno
)
1263 return (regno
== 0);
1266 /* Do any needed setup for a variadic function. For the M32R, we must
1267 create a register parameter block, and then copy any anonymous arguments
1268 in registers to memory.
1270 CUM has not been updated for the last named argument which has type TYPE
1271 and mode MODE, and we rely on this fact. */
1274 m32r_setup_incoming_varargs (cumulative_args_t cum
, machine_mode mode
,
1275 tree type
, int *pretend_size
, int no_rtl
)
1282 /* All BLKmode values are passed by reference. */
1283 gcc_assert (mode
!= BLKmode
);
1285 first_anon_arg
= (ROUND_ADVANCE_CUM (*get_cumulative_args (cum
), mode
, type
)
1286 + ROUND_ADVANCE_ARG (mode
, type
));
1288 if (first_anon_arg
< M32R_MAX_PARM_REGS
)
1290 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1291 int first_reg_offset
= first_anon_arg
;
1292 /* Size in words to "pretend" allocate. */
1293 int size
= M32R_MAX_PARM_REGS
- first_reg_offset
;
1296 regblock
= gen_frame_mem (BLKmode
,
1297 plus_constant (Pmode
, arg_pointer_rtx
,
1298 FIRST_PARM_OFFSET (0)));
1299 set_mem_alias_set (regblock
, get_varargs_alias_set ());
1300 move_block_from_reg (first_reg_offset
, regblock
, size
);
1302 *pretend_size
= (size
* UNITS_PER_WORD
);
1307 /* Return true if INSN is real instruction bearing insn. */
1310 m32r_is_insn (rtx insn
)
1312 return (NONDEBUG_INSN_P (insn
)
1313 && GET_CODE (PATTERN (insn
)) != USE
1314 && GET_CODE (PATTERN (insn
)) != CLOBBER
);
1317 /* Increase the priority of long instructions so that the
1318 short instructions are scheduled ahead of the long ones. */
1321 m32r_adjust_priority (rtx_insn
*insn
, int priority
)
1323 if (m32r_is_insn (insn
)
1324 && get_attr_insn_size (insn
) != INSN_SIZE_SHORT
)
1331 /* Indicate how many instructions can be issued at the same time.
1332 This is sort of a lie. The m32r can issue only 1 long insn at
1333 once, but it can issue 2 short insns. The default therefore is
1334 set at 2, but this can be overridden by the command line option
1338 m32r_issue_rate (void)
1340 return ((TARGET_LOW_ISSUE_RATE
) ? 1 : 2);
1343 /* Cost functions. */
1344 /* Memory is 3 times as expensive as registers.
1345 ??? Is that the right way to look at it? */
1348 m32r_memory_move_cost (machine_mode mode
,
1349 reg_class_t rclass ATTRIBUTE_UNUSED
,
1350 bool in ATTRIBUTE_UNUSED
)
1352 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
)
1359 m32r_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
1360 int outer_code ATTRIBUTE_UNUSED
,
1361 int opno ATTRIBUTE_UNUSED
, int *total
,
1362 bool speed ATTRIBUTE_UNUSED
)
1364 int code
= GET_CODE (x
);
1368 /* Small integers are as cheap as registers. 4 byte values can be
1369 fetched as immediate constants - let's give that the cost of an
1372 if (INT16_P (INTVAL (x
)))
1382 *total
= COSTS_N_INSNS (1);
1389 split_double (x
, &high
, &low
);
1390 *total
= COSTS_N_INSNS (!INT16_P (INTVAL (high
))
1391 + !INT16_P (INTVAL (low
)));
1396 *total
= COSTS_N_INSNS (3);
1403 *total
= COSTS_N_INSNS (10);
1411 /* Type of function DECL.
1413 The result is cached. To reset the cache at the end of a function,
1414 call with DECL = NULL_TREE. */
1416 enum m32r_function_type
1417 m32r_compute_function_type (tree decl
)
1420 static enum m32r_function_type fn_type
= M32R_FUNCTION_UNKNOWN
;
1421 /* Last function we were called for. */
1422 static tree last_fn
= NULL_TREE
;
1424 /* Resetting the cached value? */
1425 if (decl
== NULL_TREE
)
1427 fn_type
= M32R_FUNCTION_UNKNOWN
;
1428 last_fn
= NULL_TREE
;
1432 if (decl
== last_fn
&& fn_type
!= M32R_FUNCTION_UNKNOWN
)
1435 /* Compute function type. */
1436 fn_type
= (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl
)) != NULL_TREE
1437 ? M32R_FUNCTION_INTERRUPT
1438 : M32R_FUNCTION_NORMAL
);
1443 \f/* Function prologue/epilogue handlers. */
1445 /* M32R stack frames look like:
1447 Before call After call
1448 +-----------------------+ +-----------------------+
1450 high | local variables, | | local variables, |
1451 mem | reg save area, etc. | | reg save area, etc. |
1453 +-----------------------+ +-----------------------+
1455 | arguments on stack. | | arguments on stack. |
1457 SP+0->+-----------------------+ +-----------------------+
1458 | reg parm save area, |
1459 | only created for |
1460 | variable argument |
1462 +-----------------------+
1463 | previous frame ptr |
1464 +-----------------------+
1466 | register save area |
1468 +-----------------------+
1470 +-----------------------+
1474 +-----------------------+
1476 | alloca allocations |
1478 +-----------------------+
1480 low | arguments on stack |
1482 SP+0->+-----------------------+
1485 1) The "reg parm save area" does not exist for non variable argument fns.
1486 2) The "reg parm save area" can be eliminated completely if we saved regs
1487 containing anonymous args separately but that complicates things too
1488 much (so it's not done).
1489 3) The return address is saved after the register save area so as to have as
1490 many insns as possible between the restoration of `lr' and the `jmp lr'. */
/* Structure to be filled in by m32r_compute_frame_size with register
   save masks, and offsets for the current function.  */

struct m32r_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  unsigned int save_fp;		/* Nonzero if fp must be saved.  */
  unsigned int save_lr;		/* Nonzero if lr (return addr) must be saved.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by m32r_compute_frame_size.  */
static struct m32r_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct m32r_frame_info zero_frame_info;
1514 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1515 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1517 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1518 The return address and frame pointer are treated separately.
1519 Don't consider them here. */
1520 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1521 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1522 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1524 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1525 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1527 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1528 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1530 /* Return the bytes needed to compute the frame pointer from the current
1533 SIZE is the size needed for local variables. */
1536 m32r_compute_frame_size (int size
) /* # of var. bytes allocated. */
1539 unsigned int total_size
, var_size
, args_size
, pretend_size
, extra_size
;
1540 unsigned int reg_size
;
1542 enum m32r_function_type fn_type
;
1544 int pic_reg_used
= flag_pic
&& (crtl
->uses_pic_offset_table
1547 var_size
= M32R_STACK_ALIGN (size
);
1548 args_size
= M32R_STACK_ALIGN (crtl
->outgoing_args_size
);
1549 pretend_size
= crtl
->args
.pretend_args_size
;
1550 extra_size
= FIRST_PARM_OFFSET (0);
1551 total_size
= extra_size
+ pretend_size
+ args_size
+ var_size
;
1555 /* See if this is an interrupt handler. Call used registers must be saved
1557 fn_type
= m32r_compute_function_type (current_function_decl
);
1558 interrupt_p
= M32R_INTERRUPT_P (fn_type
);
1560 /* Calculate space needed for registers. */
1561 for (regno
= 0; regno
< M32R_MAX_INT_REGS
; regno
++)
1563 if (MUST_SAVE_REGISTER (regno
, interrupt_p
)
1564 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
1566 reg_size
+= UNITS_PER_WORD
;
1567 gmask
|= 1 << regno
;
1571 current_frame_info
.save_fp
= MUST_SAVE_FRAME_POINTER
;
1572 current_frame_info
.save_lr
= MUST_SAVE_RETURN_ADDR
|| pic_reg_used
;
1574 reg_size
+= ((current_frame_info
.save_fp
+ current_frame_info
.save_lr
)
1576 total_size
+= reg_size
;
1578 /* ??? Not sure this is necessary, and I don't think the epilogue
1579 handler will do the right thing if this changes total_size. */
1580 total_size
= M32R_STACK_ALIGN (total_size
);
1582 /* frame_size = total_size - (pretend_size + reg_size); */
1584 /* Save computed information. */
1585 current_frame_info
.total_size
= total_size
;
1586 current_frame_info
.extra_size
= extra_size
;
1587 current_frame_info
.pretend_size
= pretend_size
;
1588 current_frame_info
.var_size
= var_size
;
1589 current_frame_info
.args_size
= args_size
;
1590 current_frame_info
.reg_size
= reg_size
;
1591 current_frame_info
.gmask
= gmask
;
1592 current_frame_info
.initialized
= reload_completed
;
1594 /* Ok, we're done. */
1598 /* Worker function for TARGET_CAN_ELIMINATE. */
1601 m32r_can_eliminate (const int from
, const int to
)
1603 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
1604 ? ! frame_pointer_needed
1609 /* The table we use to reference PIC data. */
1610 static rtx global_offset_table
;
1613 m32r_reload_lr (rtx sp
, int size
)
1615 rtx lr
= gen_rtx_REG (Pmode
, RETURN_ADDR_REGNUM
);
1618 emit_insn (gen_movsi (lr
, gen_frame_mem (Pmode
, sp
)));
1619 else if (size
< 32768)
1620 emit_insn (gen_movsi (lr
, gen_frame_mem (Pmode
,
1621 gen_rtx_PLUS (Pmode
, sp
,
1625 rtx tmp
= gen_rtx_REG (Pmode
, PROLOGUE_TMP_REGNUM
);
1627 emit_insn (gen_movsi (tmp
, GEN_INT (size
)));
1628 emit_insn (gen_addsi3 (tmp
, tmp
, sp
));
1629 emit_insn (gen_movsi (lr
, gen_frame_mem (Pmode
, tmp
)));
1636 m32r_load_pic_register (void)
1638 global_offset_table
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
1639 emit_insn (gen_get_pc (pic_offset_table_rtx
, global_offset_table
,
1640 GEN_INT (TARGET_MODEL_SMALL
)));
1642 /* Need to emit this whether or not we obey regdecls,
1643 since setjmp/longjmp can cause life info to screw up. */
1644 emit_use (pic_offset_table_rtx
);
1647 /* Expand the m32r prologue as a series of insns. */
1650 m32r_expand_prologue (void)
1655 int pic_reg_used
= flag_pic
&& (crtl
->uses_pic_offset_table
1658 if (! current_frame_info
.initialized
)
1659 m32r_compute_frame_size (get_frame_size ());
1661 if (flag_stack_usage_info
)
1662 current_function_static_stack_size
= current_frame_info
.total_size
;
1664 gmask
= current_frame_info
.gmask
;
1666 /* These cases shouldn't happen. Catch them now. */
1667 gcc_assert (current_frame_info
.total_size
|| !gmask
);
1669 /* Allocate space for register arguments if this is a variadic function. */
1670 if (current_frame_info
.pretend_size
!= 0)
1672 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1673 the wrong result on a 64-bit host. */
1674 HOST_WIDE_INT pretend_size
= current_frame_info
.pretend_size
;
1675 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1677 GEN_INT (-pretend_size
)));
1680 /* Save any registers we need to and set up fp. */
1681 if (current_frame_info
.save_fp
)
1682 emit_insn (gen_movsi_push (stack_pointer_rtx
, frame_pointer_rtx
));
1684 gmask
&= ~(FRAME_POINTER_MASK
| RETURN_ADDR_MASK
);
1686 /* Save any needed call-saved regs (and call-used if this is an
1687 interrupt handler). */
1688 for (regno
= 0; regno
<= M32R_MAX_INT_REGS
; ++regno
)
1690 if ((gmask
& (1 << regno
)) != 0)
1691 emit_insn (gen_movsi_push (stack_pointer_rtx
,
1692 gen_rtx_REG (Pmode
, regno
)));
1695 if (current_frame_info
.save_lr
)
1696 emit_insn (gen_movsi_push (stack_pointer_rtx
,
1697 gen_rtx_REG (Pmode
, RETURN_ADDR_REGNUM
)));
1699 /* Allocate the stack frame. */
1700 frame_size
= (current_frame_info
.total_size
1701 - (current_frame_info
.pretend_size
1702 + current_frame_info
.reg_size
));
1704 if (frame_size
== 0)
1705 ; /* Nothing to do. */
1706 else if (frame_size
<= 32768)
1707 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
1708 GEN_INT (-frame_size
)));
1711 rtx tmp
= gen_rtx_REG (Pmode
, PROLOGUE_TMP_REGNUM
);
1713 emit_insn (gen_movsi (tmp
, GEN_INT (frame_size
)));
1714 emit_insn (gen_subsi3 (stack_pointer_rtx
, stack_pointer_rtx
, tmp
));
1717 if (frame_pointer_needed
)
1718 emit_insn (gen_movsi (frame_pointer_rtx
, stack_pointer_rtx
));
1721 /* Push lr for mcount (form_pc, x). */
1722 emit_insn (gen_movsi_push (stack_pointer_rtx
,
1723 gen_rtx_REG (Pmode
, RETURN_ADDR_REGNUM
)));
1727 m32r_load_pic_register ();
1728 m32r_reload_lr (stack_pointer_rtx
,
1729 (crtl
->profile
? 0 : frame_size
));
1732 if (crtl
->profile
&& !pic_reg_used
)
1733 emit_insn (gen_blockage ());
1737 /* Set up the stack and frame pointer (if desired) for the function.
1738 Note, if this is changed, you need to mirror the changes in
1739 m32r_compute_frame_size which calculates the prolog size. */
1742 m32r_output_function_prologue (FILE * file
, HOST_WIDE_INT size
)
1744 enum m32r_function_type fn_type
= m32r_compute_function_type (current_function_decl
);
1746 /* If this is an interrupt handler, mark it as such. */
1747 if (M32R_INTERRUPT_P (fn_type
))
1748 fprintf (file
, "\t%s interrupt handler\n", ASM_COMMENT_START
);
1750 if (! current_frame_info
.initialized
)
1751 m32r_compute_frame_size (size
);
1753 /* This is only for the human reader. */
1755 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1757 current_frame_info
.var_size
,
1758 current_frame_info
.reg_size
/ 4,
1759 current_frame_info
.args_size
,
1760 current_frame_info
.extra_size
);
1763 /* Output RTL to pop register REGNO from the stack. */
1770 x
= emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode
, regno
),
1771 stack_pointer_rtx
));
1772 add_reg_note (x
, REG_INC
, stack_pointer_rtx
);
1775 /* Expand the m32r epilogue as a series of insns. */
1778 m32r_expand_epilogue (void)
1781 int noepilogue
= FALSE
;
1784 gcc_assert (current_frame_info
.initialized
);
1785 total_size
= current_frame_info
.total_size
;
1787 if (total_size
== 0)
1789 rtx insn
= get_last_insn ();
1791 /* If the last insn was a BARRIER, we don't have to write any code
1792 because a jump (aka return) was put there. */
1793 if (insn
&& NOTE_P (insn
))
1794 insn
= prev_nonnote_insn (insn
);
1795 if (insn
&& BARRIER_P (insn
))
1801 unsigned int var_size
= current_frame_info
.var_size
;
1802 unsigned int args_size
= current_frame_info
.args_size
;
1803 unsigned int gmask
= current_frame_info
.gmask
;
1804 int can_trust_sp_p
= !cfun
->calls_alloca
;
1806 if (flag_exceptions
)
1807 emit_insn (gen_blockage ());
1809 /* The first thing to do is point the sp at the bottom of the register
1813 unsigned int reg_offset
= var_size
+ args_size
;
1815 if (reg_offset
== 0)
1816 ; /* Nothing to do. */
1817 else if (reg_offset
< 32768)
1818 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
1819 GEN_INT (reg_offset
)));
1822 rtx tmp
= gen_rtx_REG (Pmode
, PROLOGUE_TMP_REGNUM
);
1824 emit_insn (gen_movsi (tmp
, GEN_INT (reg_offset
)));
1825 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
1829 else if (frame_pointer_needed
)
1831 unsigned int reg_offset
= var_size
+ args_size
;
1833 if (reg_offset
== 0)
1834 emit_insn (gen_movsi (stack_pointer_rtx
, frame_pointer_rtx
));
1835 else if (reg_offset
< 32768)
1836 emit_insn (gen_addsi3 (stack_pointer_rtx
, frame_pointer_rtx
,
1837 GEN_INT (reg_offset
)));
1840 rtx tmp
= gen_rtx_REG (Pmode
, PROLOGUE_TMP_REGNUM
);
1842 emit_insn (gen_movsi (tmp
, GEN_INT (reg_offset
)));
1843 emit_insn (gen_movsi (stack_pointer_rtx
, frame_pointer_rtx
));
1844 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
1851 if (current_frame_info
.save_lr
)
1852 pop (RETURN_ADDR_REGNUM
);
1854 /* Restore any saved registers, in reverse order of course. */
1855 gmask
&= ~(FRAME_POINTER_MASK
| RETURN_ADDR_MASK
);
1856 for (regno
= M32R_MAX_INT_REGS
- 1; regno
>= 0; --regno
)
1858 if ((gmask
& (1L << regno
)) != 0)
1862 if (current_frame_info
.save_fp
)
1863 pop (FRAME_POINTER_REGNUM
);
1865 /* Remove varargs area if present. */
1866 if (current_frame_info
.pretend_size
!= 0)
1867 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
1868 GEN_INT (current_frame_info
.pretend_size
)));
1870 emit_insn (gen_blockage ());
1874 /* Do any necessary cleanup after a function to restore stack, frame,
1878 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED
,
1879 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
1881 /* Reset state info for each function. */
1882 current_frame_info
= zero_frame_info
;
1883 m32r_compute_function_type (NULL_TREE
);
1886 /* Return nonzero if this function is known to have a null or 1 instruction
1890 direct_return (void)
1892 if (!reload_completed
)
1895 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl
)))
1898 if (! current_frame_info
.initialized
)
1899 m32r_compute_frame_size (get_frame_size ());
1901 return current_frame_info
.total_size
== 0;
1908 m32r_legitimate_pic_operand_p (rtx x
)
1910 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
)
1913 if (GET_CODE (x
) == CONST
1914 && GET_CODE (XEXP (x
, 0)) == PLUS
1915 && (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1916 || GET_CODE (XEXP (XEXP (x
, 0), 0)) == LABEL_REF
)
1917 && (CONST_INT_P (XEXP (XEXP (x
, 0), 1))))
1924 m32r_legitimize_pic_address (rtx orig
, rtx reg
)
1927 printf("m32r_legitimize_pic_address()\n");
1930 if (GET_CODE (orig
) == SYMBOL_REF
|| GET_CODE (orig
) == LABEL_REF
)
1932 rtx pic_ref
, address
;
1937 gcc_assert (!reload_in_progress
&& !reload_completed
);
1938 reg
= gen_reg_rtx (Pmode
);
1944 address
= gen_reg_rtx (Pmode
);
1948 crtl
->uses_pic_offset_table
= 1;
1950 if (GET_CODE (orig
) == LABEL_REF
1951 || (GET_CODE (orig
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (orig
)))
1953 emit_insn (gen_gotoff_load_addr (reg
, orig
));
1954 emit_insn (gen_addsi3 (reg
, reg
, pic_offset_table_rtx
));
1958 emit_insn (gen_pic_load_addr (address
, orig
));
1960 emit_insn (gen_addsi3 (address
, address
, pic_offset_table_rtx
));
1961 pic_ref
= gen_const_mem (Pmode
, address
);
1962 emit_move_insn (reg
, pic_ref
);
1965 else if (GET_CODE (orig
) == CONST
)
1969 if (GET_CODE (XEXP (orig
, 0)) == PLUS
1970 && XEXP (XEXP (orig
, 0), 1) == pic_offset_table_rtx
)
1975 gcc_assert (!reload_in_progress
&& !reload_completed
);
1976 reg
= gen_reg_rtx (Pmode
);
1979 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
1981 base
= m32r_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0), reg
);
1983 offset
= m32r_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1), NULL_RTX
);
1985 offset
= m32r_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1), reg
);
1990 if (CONST_INT_P (offset
))
1992 if (INT16_P (INTVAL (offset
)))
1993 return plus_constant (Pmode
, base
, INTVAL (offset
));
1996 gcc_assert (! reload_in_progress
&& ! reload_completed
);
1997 offset
= force_reg (Pmode
, offset
);
2001 return gen_rtx_PLUS (Pmode
, base
, offset
);
2008 m32r_legitimize_address (rtx x
, rtx orig_x ATTRIBUTE_UNUSED
,
2009 machine_mode mode ATTRIBUTE_UNUSED
)
2012 return m32r_legitimize_pic_address (x
, NULL_RTX
);
2017 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2020 m32r_mode_dependent_address_p (const_rtx addr
, addr_space_t as ATTRIBUTE_UNUSED
)
2022 if (GET_CODE (addr
) == LO_SUM
)
2028 /* Nested function support. */
2030 /* Emit RTL insns to initialize the variable parts of a trampoline.
2031 FNADDR is an RTX for the address of the function's pure code.
2032 CXT is an RTX for the static chain value for the function. */
2035 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED
,
2036 rtx fnaddr ATTRIBUTE_UNUSED
,
2037 rtx cxt ATTRIBUTE_UNUSED
)
2042 m32r_file_start (void)
2044 default_file_start ();
2046 if (flag_verbose_asm
)
2047 fprintf (asm_out_file
,
2048 "%s M32R/D special options: -G %d\n",
2049 ASM_COMMENT_START
, g_switch_value
);
2051 if (TARGET_LITTLE_ENDIAN
)
2052 fprintf (asm_out_file
, "\t.little\n");
2055 /* Print operand X (an rtx) in assembler syntax to file FILE.
2056 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2057 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2060 m32r_print_operand (FILE * file
, rtx x
, int code
)
2066 /* The 's' and 'p' codes are used by output_block_move() to
2067 indicate post-increment 's'tores and 'p're-increment loads. */
2070 fprintf (file
, "@+%s", reg_names
[REGNO (x
)]);
2072 output_operand_lossage ("invalid operand to %%s code");
2077 fprintf (file
, "@%s+", reg_names
[REGNO (x
)]);
2079 output_operand_lossage ("invalid operand to %%p code");
2083 /* Write second word of DImode or DFmode reference,
2084 register or memory. */
2086 fputs (reg_names
[REGNO (x
)+1], file
);
2089 machine_mode mode
= GET_MODE (x
);
2091 fprintf (file
, "@(");
2092 /* Handle possible auto-increment. Since it is pre-increment and
2093 we have already done it, we can just use an offset of four. */
2094 /* ??? This is taken from rs6000.c I think. I don't think it is
2095 currently necessary, but keep it around. */
2096 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
2097 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
2098 output_address (mode
, plus_constant (Pmode
,
2099 XEXP (XEXP (x
, 0), 0), 4));
2101 output_address (mode
, plus_constant (Pmode
, XEXP (x
, 0), 4));
2105 output_operand_lossage ("invalid operand to %%R code");
2108 case 'H' : /* High word. */
2109 case 'L' : /* Low word. */
2112 /* L = least significant word, H = most significant word. */
2113 if ((WORDS_BIG_ENDIAN
!= 0) ^ (code
== 'L'))
2114 fputs (reg_names
[REGNO (x
)], file
);
2116 fputs (reg_names
[REGNO (x
)+1], file
);
2118 else if (CONST_INT_P (x
)
2119 || GET_CODE (x
) == CONST_DOUBLE
)
2123 split_double (x
, &first
, &second
);
2124 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2125 code
== 'L' ? INTVAL (first
) : INTVAL (second
));
2128 output_operand_lossage ("invalid operand to %%H/%%L code");
2135 if (GET_CODE (x
) != CONST_DOUBLE
2136 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
)
2137 fatal_insn ("bad insn for 'A'", x
);
2139 real_to_decimal (str
, CONST_DOUBLE_REAL_VALUE (x
), sizeof (str
), 0, 1);
2140 fprintf (file
, "%s", str
);
2144 case 'B' : /* Bottom half. */
2145 case 'T' : /* Top half. */
2146 /* Output the argument to a `seth' insn (sets the Top half-word).
2147 For constants output arguments to a seth/or3 pair to set Top and
2148 Bottom halves. For symbols output arguments to a seth/add3 pair to
2149 set Top and Bottom halves. The difference exists because for
2150 constants seth/or3 is more readable but for symbols we need to use
2151 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2152 switch (GET_CODE (x
))
2159 split_double (x
, &first
, &second
);
2160 x
= WORDS_BIG_ENDIAN
? second
: first
;
2161 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2163 ? INTVAL (x
) & 0xffff
2164 : (INTVAL (x
) >> 16) & 0xffff));
2170 && small_data_operand (x
, VOIDmode
))
2172 fputs ("sda(", file
);
2173 output_addr_const (file
, x
);
2179 fputs (code
== 'T' ? "shigh(" : "low(", file
);
2180 output_addr_const (file
, x
);
2184 output_operand_lossage ("invalid operand to %%T/%%B code");
2191 /* Output a load/store with update indicator if appropriate. */
2194 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
2195 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
2199 output_operand_lossage ("invalid operand to %%U code");
2203 /* Print a constant value negated. */
2204 if (CONST_INT_P (x
))
2205 output_addr_const (file
, GEN_INT (- INTVAL (x
)));
2207 output_operand_lossage ("invalid operand to %%N code");
2211 /* Print a const_int in hex. Used in comments. */
2212 if (CONST_INT_P (x
))
2213 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (x
));
2217 fputs (IMMEDIATE_PREFIX
, file
);
2221 /* Do nothing special. */
2226 output_operand_lossage ("invalid operand output code");
2229 switch (GET_CODE (x
))
2232 fputs (reg_names
[REGNO (x
)], file
);
2237 if (GET_CODE (addr
) == PRE_INC
)
2239 if (!REG_P (XEXP (addr
, 0)))
2240 fatal_insn ("pre-increment address is not a register", x
);
2242 fprintf (file
, "@+%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2244 else if (GET_CODE (addr
) == PRE_DEC
)
2246 if (!REG_P (XEXP (addr
, 0)))
2247 fatal_insn ("pre-decrement address is not a register", x
);
2249 fprintf (file
, "@-%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2251 else if (GET_CODE (addr
) == POST_INC
)
2253 if (!REG_P (XEXP (addr
, 0)))
2254 fatal_insn ("post-increment address is not a register", x
);
2256 fprintf (file
, "@%s+", reg_names
[REGNO (XEXP (addr
, 0))]);
2261 output_address (GET_MODE (x
), addr
);
2267 /* We handle SFmode constants here as output_addr_const doesn't. */
2268 if (GET_MODE (x
) == SFmode
)
2272 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), l
);
2273 fprintf (file
, "0x%08lx", l
);
2277 /* Fall through. Let output_addr_const deal with it. */
2280 output_addr_const (file
, x
);
2285 /* Print a memory address as an operand to reference that memory location. */
2288 m32r_print_operand_address (FILE * file
, machine_mode
/*mode*/, rtx addr
)
2294 switch (GET_CODE (addr
))
2297 fputs (reg_names
[REGNO (addr
)], file
);
2301 if (CONST_INT_P (XEXP (addr
, 0)))
2302 offset
= INTVAL (XEXP (addr
, 0)), base
= XEXP (addr
, 1);
2303 else if (CONST_INT_P (XEXP (addr
, 1)))
2304 offset
= INTVAL (XEXP (addr
, 1)), base
= XEXP (addr
, 0);
2306 base
= XEXP (addr
, 0), index
= XEXP (addr
, 1);
2309 /* Print the offset first (if present) to conform to the manual. */
2313 fprintf (file
, "%d,", offset
);
2314 fputs (reg_names
[REGNO (base
)], file
);
2316 /* The chip doesn't support this, but left in for generality. */
2317 else if (REG_P (index
))
2318 fprintf (file
, "%s,%s",
2319 reg_names
[REGNO (base
)], reg_names
[REGNO (index
)]);
2320 /* Not sure this can happen, but leave in for now. */
2321 else if (GET_CODE (index
) == SYMBOL_REF
)
2323 output_addr_const (file
, index
);
2325 fputs (reg_names
[REGNO (base
)], file
);
2328 fatal_insn ("bad address", addr
);
2330 else if (GET_CODE (base
) == LO_SUM
)
2332 gcc_assert (!index
&& REG_P (XEXP (base
, 0)));
2333 if (small_data_operand (XEXP (base
, 1), VOIDmode
))
2334 fputs ("sda(", file
);
2336 fputs ("low(", file
);
2337 output_addr_const (file
, plus_constant (Pmode
, XEXP (base
, 1),
2340 fputs (reg_names
[REGNO (XEXP (base
, 0))], file
);
2343 fatal_insn ("bad address", addr
);
2347 if (!REG_P (XEXP (addr
, 0)))
2348 fatal_insn ("lo_sum not of register", addr
);
2349 if (small_data_operand (XEXP (addr
, 1), VOIDmode
))
2350 fputs ("sda(", file
);
2352 fputs ("low(", file
);
2353 output_addr_const (file
, XEXP (addr
, 1));
2355 fputs (reg_names
[REGNO (XEXP (addr
, 0))], file
);
2358 case PRE_INC
: /* Assume SImode. */
2359 fprintf (file
, "+%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2362 case PRE_DEC
: /* Assume SImode. */
2363 fprintf (file
, "-%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2366 case POST_INC
: /* Assume SImode. */
2367 fprintf (file
, "%s+", reg_names
[REGNO (XEXP (addr
, 0))]);
2371 output_addr_const (file
, addr
);
2377 m32r_print_operand_punct_valid_p (unsigned char code
)
2379 return m32r_punct_chars
[code
];
2382 /* Return true if the operands are the constants 0 and 1. */
2385 zero_and_one (rtx operand1
, rtx operand2
)
2388 CONST_INT_P (operand1
)
2389 && CONST_INT_P (operand2
)
2390 && ( ((INTVAL (operand1
) == 0) && (INTVAL (operand2
) == 1))
2391 ||((INTVAL (operand1
) == 1) && (INTVAL (operand2
) == 0)));
2394 /* Generate the correct assembler code to handle the conditional loading of a
2395 value into a register. It is known that the operands satisfy the
2396 conditional_move_operand() function above. The destination is operand[0].
2397 The condition is operand [1]. The 'true' value is operand [2] and the
2398 'false' value is operand [3]. */
2401 emit_cond_move (rtx
* operands
, rtx insn ATTRIBUTE_UNUSED
)
2403 static char buffer
[100];
2404 const char * dest
= reg_names
[REGNO (operands
[0])];
2408 /* Destination must be a register. */
2409 gcc_assert (REG_P (operands
[0]));
2410 gcc_assert (conditional_move_operand (operands
[2], SImode
));
2411 gcc_assert (conditional_move_operand (operands
[3], SImode
));
2413 /* Check to see if the test is reversed. */
2414 if (GET_CODE (operands
[1]) == NE
)
2416 rtx tmp
= operands
[2];
2417 operands
[2] = operands
[3];
2421 sprintf (buffer
, "mvfc %s, cbr", dest
);
2423 /* If the true value was '0' then we need to invert the results of the move. */
2424 if (INTVAL (operands
[2]) == 0)
2425 sprintf (buffer
+ strlen (buffer
), "\n\txor3 %s, %s, #1",
2431 /* Returns true if the registers contained in the two
2432 rtl expressions are different. */
2435 m32r_not_same_reg (rtx a
, rtx b
)
2440 while (GET_CODE (a
) == SUBREG
)
2446 while (GET_CODE (b
) == SUBREG
)
2452 return reg_a
!= reg_b
;
2457 m32r_function_symbol (const char *name
)
2459 int extra_flags
= 0;
2460 enum m32r_model model
;
2461 rtx sym
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2463 if (TARGET_MODEL_SMALL
)
2464 model
= M32R_MODEL_SMALL
;
2465 else if (TARGET_MODEL_MEDIUM
)
2466 model
= M32R_MODEL_MEDIUM
;
2467 else if (TARGET_MODEL_LARGE
)
2468 model
= M32R_MODEL_LARGE
;
2470 gcc_unreachable (); /* Shouldn't happen. */
2471 extra_flags
|= model
<< SYMBOL_FLAG_MODEL_SHIFT
;
2474 SYMBOL_REF_FLAGS (sym
) |= extra_flags
;
2479 /* Use a library function to move some bytes. */
2482 block_move_call (rtx dest_reg
, rtx src_reg
, rtx bytes_rtx
)
2484 /* We want to pass the size as Pmode, which will normally be SImode
2485 but will be DImode if we are using 64-bit longs and pointers. */
2486 if (GET_MODE (bytes_rtx
) != VOIDmode
2487 && GET_MODE (bytes_rtx
) != Pmode
)
2488 bytes_rtx
= convert_to_mode (Pmode
, bytes_rtx
, 1);
2490 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL
,
2491 VOIDmode
, 3, dest_reg
, Pmode
, src_reg
, Pmode
,
2492 convert_to_mode (TYPE_MODE (sizetype
), bytes_rtx
,
2493 TYPE_UNSIGNED (sizetype
)),
2494 TYPE_MODE (sizetype
));
2497 /* Expand string/block move operations.
2499 operands[0] is the pointer to the destination.
2500 operands[1] is the pointer to the source.
2501 operands[2] is the number of bytes to move.
2502 operands[3] is the alignment.
2504 Returns 1 upon success, 0 otherwise. */
2507 m32r_expand_block_move (rtx operands
[])
2509 rtx orig_dst
= operands
[0];
2510 rtx orig_src
= operands
[1];
2511 rtx bytes_rtx
= operands
[2];
2512 rtx align_rtx
= operands
[3];
2513 int constp
= CONST_INT_P (bytes_rtx
);
2514 HOST_WIDE_INT bytes
= constp
? INTVAL (bytes_rtx
) : 0;
2515 int align
= INTVAL (align_rtx
);
2520 if (constp
&& bytes
<= 0)
2523 /* Move the address into scratch registers. */
2524 dst_reg
= copy_addr_to_reg (XEXP (orig_dst
, 0));
2525 src_reg
= copy_addr_to_reg (XEXP (orig_src
, 0));
2527 if (align
> UNITS_PER_WORD
)
2528 align
= UNITS_PER_WORD
;
2530 /* If we prefer size over speed, always use a function call.
2531 If we do not know the size, use a function call.
2532 If the blocks are not word aligned, use a function call. */
2533 if (optimize_size
|| ! constp
|| align
!= UNITS_PER_WORD
)
2535 block_move_call (dst_reg
, src_reg
, bytes_rtx
);
2539 leftover
= bytes
% MAX_MOVE_BYTES
;
2542 /* If necessary, generate a loop to handle the bulk of the copy. */
2545 rtx_code_label
*label
= NULL
;
2546 rtx final_src
= NULL_RTX
;
2547 rtx at_a_time
= GEN_INT (MAX_MOVE_BYTES
);
2548 rtx rounded_total
= GEN_INT (bytes
);
2549 rtx new_dst_reg
= gen_reg_rtx (SImode
);
2550 rtx new_src_reg
= gen_reg_rtx (SImode
);
2552 /* If we are going to have to perform this loop more than
2553 once, then generate a label and compute the address the
2554 source register will contain upon completion of the final
2556 if (bytes
> MAX_MOVE_BYTES
)
2558 final_src
= gen_reg_rtx (Pmode
);
2561 emit_insn (gen_addsi3 (final_src
, src_reg
, rounded_total
));
2564 emit_insn (gen_movsi (final_src
, rounded_total
));
2565 emit_insn (gen_addsi3 (final_src
, final_src
, src_reg
));
2568 label
= gen_label_rtx ();
2572 /* It is known that output_block_move() will update src_reg to point
2573 to the word after the end of the source block, and dst_reg to point
2574 to the last word of the destination block, provided that the block
2575 is MAX_MOVE_BYTES long. */
2576 emit_insn (gen_movmemsi_internal (dst_reg
, src_reg
, at_a_time
,
2577 new_dst_reg
, new_src_reg
));
2578 emit_move_insn (dst_reg
, new_dst_reg
);
2579 emit_move_insn (src_reg
, new_src_reg
);
2580 emit_insn (gen_addsi3 (dst_reg
, dst_reg
, GEN_INT (4)));
2582 if (bytes
> MAX_MOVE_BYTES
)
2584 rtx test
= gen_rtx_NE (VOIDmode
, src_reg
, final_src
);
2585 emit_jump_insn (gen_cbranchsi4 (test
, src_reg
, final_src
, label
));
2590 emit_insn (gen_movmemsi_internal (dst_reg
, src_reg
, GEN_INT (leftover
),
2591 gen_reg_rtx (SImode
),
2592 gen_reg_rtx (SImode
)));
/* Emit load/stores for a small constant word aligned block_move.

   operands[0] is the memory address of the destination.
   operands[1] is the memory address of the source.
   operands[2] is the number of bytes to move.
   operands[3] is a temp register.
   operands[4] is a temp register.  */

const char *
m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
{
  HOST_WIDE_INT bytes = INTVAL (operands[2]);
  int first_time;
  int got_extra = 0;

  /* expand_block_move() only hands us small, bounded copies.  */
  gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);

  /* We do not have a post-increment store available, so the first set of
     stores are done without any increment, then the remaining ones can use
     the pre-increment addressing mode.

     Note: expand_block_move() also relies upon this behavior when building
     loops to copy large blocks.  */
  first_time = 1;

  while (bytes > 0)
    {
      if (bytes >= 8)
	{
	  /* Copy two words at a time; only the stores differ between the
	     first iteration (plain @%0) and later ones (pre-increment).  */
	  if (first_time)
	    {
	      output_asm_insn ("ld\t%5, %p1", operands);
	      output_asm_insn ("ld\t%6, %p1", operands);
	      output_asm_insn ("st\t%5, @%0", operands);
	      output_asm_insn ("st\t%6, %s0", operands);
	    }
	  else
	    {
	      output_asm_insn ("ld\t%5, %p1", operands);
	      output_asm_insn ("ld\t%6, %p1", operands);
	      output_asm_insn ("st\t%5, %s0", operands);
	      output_asm_insn ("st\t%6, %s0", operands);
	    }

	  bytes -= 8;
	}
      else if (bytes >= 4)
	{
	  /* If a sub-word tail remains, pre-load the following word now
	     so the tail code below does not have to re-load it.  */
	  if (bytes > 4)
	    got_extra = 1;

	  output_asm_insn ("ld\t%5, %p1", operands);

	  if (got_extra)
	    output_asm_insn ("ld\t%6, %p1", operands);

	  if (first_time)
	    output_asm_insn ("st\t%5, @%0", operands);
	  else
	    output_asm_insn ("st\t%5, %s0", operands);

	  bytes -= 4;
	}
      else
	{
	  /* Get the entire next word, even though we do not want all of it.
	     That saves us from doing several smaller loads, and we assume that
	     we cannot cause a page fault when at least part of the word is in
	     valid memory [since we don't get called if things aren't properly
	     aligned].  */
	  int dst_offset = first_time ? 0 : 4;
	  /* The amount of increment we have to make to the
	     destination pointer.  */
	  int dst_inc_amount = dst_offset + bytes - 4;
	  /* The same for the source pointer.  */
	  int src_inc_amount = bytes;
	  int last_shift;
	  rtx my_operands[3];

	  /* If got_extra is true then we have already loaded
	     the next word as part of loading and storing the previous word.  */
	  if (! got_extra)
	    output_asm_insn ("ld\t%6, @%1", operands);

	  if (bytes >= 2)
	    {
	      bytes -= 2;

	      /* Store the high halfword of the loaded word.  */
	      output_asm_insn ("sra3\t%5, %6, #16", operands);
	      my_operands[0] = operands[5];
	      my_operands[1] = GEN_INT (dst_offset);
	      my_operands[2] = operands[0];
	      output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);

	      /* If there is a byte left to store then increment the
		 destination address and shift the contents of the source
		 register down by 8 bits.  We could not do the address
		 increment in the store half word instruction, because it does
		 not have an auto increment mode.  */
	      if (bytes > 0)	/* assert (bytes == 1) */
		{
		  dst_offset += 2;
		  last_shift = 8;
		}
	    }
	  else
	    last_shift = 24;

	  /* Store the final odd byte, shifted down into position.  */
	  if (bytes > 0)
	    {
	      my_operands[0] = operands[6];
	      my_operands[1] = GEN_INT (last_shift);
	      output_asm_insn ("srai\t%0, #%1", my_operands);
	      my_operands[0] = operands[6];
	      my_operands[1] = GEN_INT (dst_offset);
	      my_operands[2] = operands[0];
	      output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
	    }

	  /* Update the destination pointer if needed.  We have to do
	     this so that the pattern matches what we output in this
	     function.  */
	  if (dst_inc_amount
	      && !find_reg_note (insn, REG_UNUSED, operands[0]))
	    {
	      my_operands[0] = operands[0];
	      my_operands[1] = GEN_INT (dst_inc_amount);
	      output_asm_insn ("addi\t%0, #%1", my_operands);
	    }

	  /* Update the source pointer if needed.  We have to do this
	     so that the pattern matches what we output in this
	     function.  */
	  if (src_inc_amount
	      && !find_reg_note (insn, REG_UNUSED, operands[1]))
	    {
	      my_operands[0] = operands[1];
	      my_operands[1] = GEN_INT (src_inc_amount);
	      output_asm_insn ("addi\t%0, #%1", my_operands);
	    }

	  bytes = 0;
	}

      first_time = 0;
    }

  return "";
}
2745 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2748 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
2749 unsigned int new_reg
)
2751 /* Interrupt routines can't clobber any register that isn't already used. */
2752 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl
))
2753 && !df_regs_ever_live_p (new_reg
))
/* Return the RTX for the return address of frame COUNT.  Only the
   current frame (COUNT == 0) is supported; any other frame yields 0.  */

rtx
m32r_return_addr (int count)
{
  if (count != 0)
    return const0_rtx;

  /* Arrange for the incoming value of the return-address register to be
     preserved on entry so it can be read here.  */
  return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
}
/* Implement TARGET_TRAMPOLINE_INIT.

   Fill in M_TRAMP: four words of fixed code (presumably the pre-assembled
   trampoline instruction sequence, stored in the matching endianness —
   TODO confirm against the trampoline template), followed by the static
   chain value CHAIN_VALUE and the address of FNDECL.  Finally flush the
   instruction cache over the trampoline so the new code is executable.  */

static void
m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  emit_move_insn (adjust_address (m_tramp, SImode, 0),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x017e8e17 : 0x178e7e01, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 4),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x0c00ae86 : 0x86ae000c, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 8),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xe627871e : 0x1e8727e6, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 12),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xc616c626 : 0x26c61fc6, SImode));
  /* Word 4: the static chain; word 5: the target function's address.  */
  emit_move_insn (adjust_address (m_tramp, SImode, 16),
		  chain_value);
  emit_move_insn (adjust_address (m_tramp, SImode, 20),
		  XEXP (DECL_RTL (fndecl), 0));

  /* Flush the icache: either via the configured trap number, or by
     calling the configured cache-flush library function (address,
     size, and a mode argument of 3).  */
  if (m32r_cache_flush_trap >= 0)
    emit_insn (gen_flush_icache
	       (validize_mem (adjust_address (m_tramp, SImode, 0)),
		gen_int_mode (m32r_cache_flush_trap, SImode)));
  else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
    emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
		       LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
		       gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
		       GEN_INT (3), SImode);
}
/* True if X is a reg that can be used as a base reg.  */

int
m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
{
  if (! REG_P (x))
    return false;

  if (strict)
    {
      /* Strict checking: only a general-purpose hard register will do.  */
      if (GPR_P (REGNO (x)))
	return true;
    }
  else
    {
      /* Non-strict: also accept the argument pointer and pseudos, which
	 have not yet been assigned hard registers.  */
      if (GPR_P (REGNO (x))
	  || REGNO (x) == ARG_POINTER_REGNUM
	  || ! HARD_REGISTER_P (x))
	return true;
    }

  return false;
}
2824 m32r_rtx_ok_for_offset_p (const_rtx x
)
2826 return (CONST_INT_P (x
) && INT16_P (INTVAL (x
)));
2830 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED
,
2831 const_rtx x
, bool strict
)
2833 if (GET_CODE (x
) == PLUS
2834 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
)
2835 && m32r_rtx_ok_for_offset_p (XEXP (x
, 1)))
2841 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2842 since more than one instruction will be required. */
2845 m32r_legitimate_lo_sum_addres_p (machine_mode mode
, const_rtx x
,
2848 if (GET_CODE (x
) == LO_SUM
2849 && (mode
!= BLKmode
&& GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
)
2850 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
)
2851 && CONSTANT_P (XEXP (x
, 1)))
2857 /* Is this a load and increment operation. */
2860 m32r_load_postinc_p (machine_mode mode
, const_rtx x
, bool strict
)
2862 if ((mode
== SImode
|| mode
== SFmode
)
2863 && GET_CODE (x
) == POST_INC
2864 && REG_P (XEXP (x
, 0))
2865 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
))
2871 /* Is this an increment/decrement and store operation. */
2874 m32r_store_preinc_predec_p (machine_mode mode
, const_rtx x
, bool strict
)
2876 if ((mode
== SImode
|| mode
== SFmode
)
2877 && (GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2878 && REG_P (XEXP (x
, 0)) \
2879 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
))
2885 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2888 m32r_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2890 if (m32r_rtx_ok_for_base_p (x
, strict
)
2891 || m32r_legitimate_offset_addres_p (mode
, x
, strict
)
2892 || m32r_legitimate_lo_sum_addres_p (mode
, x
, strict
)
2893 || m32r_load_postinc_p (mode
, x
, strict
)
2894 || m32r_store_preinc_predec_p (mode
, x
, strict
))
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.
   NOTE(review): the guard was elided in the extracted text and is
   reconstructed here as flag_pic — confirm against upstream.  */

static void
m32r_conditional_register_usage (void)
{
  if (flag_pic)
    {
      /* When generating PIC, the PIC base register is dedicated: keep it
	 fixed and treat it as clobbered across calls.  */
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
2910 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2912 We don't allow (plus symbol large-constant) as the relocations can't
2913 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2914 We allow all CONST_DOUBLE's as the md file patterns will force the
2915 constant to memory if they can't handle them. */
2918 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2920 return !(GET_CODE (x
) == CONST
2921 && GET_CODE (XEXP (x
, 0)) == PLUS
2922 && (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
2923 || GET_CODE (XEXP (XEXP (x
, 0), 0)) == LABEL_REF
)
2924 && CONST_INT_P (XEXP (XEXP (x
, 0), 1))
2925 && UINTVAL (XEXP (XEXP (x
, 0), 1)) > 32767);