1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "stor-layout.h"
31 #include "stringpool.h"
34 #include "insn-config.h"
35 #include "conditions.h"
38 #include "insn-attr.h"
47 #include "diagnostic-core.h"
52 #include "cfgcleanup.h"
55 #include "tm-constrs.h"
59 /* This file should be included last. */
60 #include "target-def.h"
62 /* Array of valid operand punctuation characters. */
63 static char m32r_punct_chars
[256];
65 /* Machine-specific symbol_ref flags. */
66 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
67 #define SYMBOL_REF_MODEL(X) \
68 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
70 /* For string literals, etc. */
71 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
73 /* Forward declaration. */
74 static void m32r_option_override (void);
75 static void init_reg_tables (void);
76 static void block_move_call (rtx
, rtx
, rtx
);
77 static int m32r_is_insn (rtx
);
78 static bool m32r_legitimate_address_p (machine_mode
, rtx
, bool);
79 static rtx
m32r_legitimize_address (rtx
, rtx
, machine_mode
);
80 static bool m32r_mode_dependent_address_p (const_rtx
, addr_space_t
);
81 static tree
m32r_handle_model_attribute (tree
*, tree
, tree
, int, bool *);
82 static void m32r_print_operand (FILE *, rtx
, int);
83 static void m32r_print_operand_address (FILE *, rtx
);
84 static bool m32r_print_operand_punct_valid_p (unsigned char code
);
85 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT
);
86 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT
);
88 static void m32r_file_start (void);
90 static int m32r_adjust_priority (rtx_insn
*, int);
91 static int m32r_issue_rate (void);
93 static void m32r_encode_section_info (tree
, rtx
, int);
94 static bool m32r_in_small_data_p (const_tree
);
95 static bool m32r_return_in_memory (const_tree
, const_tree
);
96 static rtx
m32r_function_value (const_tree
, const_tree
, bool);
97 static rtx
m32r_libcall_value (machine_mode
, const_rtx
);
98 static bool m32r_function_value_regno_p (const unsigned int);
99 static void m32r_setup_incoming_varargs (cumulative_args_t
, machine_mode
,
101 static void init_idents (void);
102 static bool m32r_rtx_costs (rtx
, machine_mode
, int, int, int *, bool speed
);
103 static int m32r_memory_move_cost (machine_mode
, reg_class_t
, bool);
104 static bool m32r_pass_by_reference (cumulative_args_t
, machine_mode
,
106 static int m32r_arg_partial_bytes (cumulative_args_t
, machine_mode
,
108 static rtx
m32r_function_arg (cumulative_args_t
, machine_mode
,
110 static void m32r_function_arg_advance (cumulative_args_t
, machine_mode
,
112 static bool m32r_can_eliminate (const int, const int);
113 static void m32r_conditional_register_usage (void);
114 static void m32r_trampoline_init (rtx
, tree
, rtx
);
115 static bool m32r_legitimate_constant_p (machine_mode
, rtx
);
116 static bool m32r_attribute_identifier (const_tree
);
118 /* M32R specific attributes. */
/* M32R machine-attribute table: "interrupt" (no arguments, decls only)
   and "model" (one required argument, decls only).  NOTE(review):
   extraction dropped interior lines (the opening brace of the
   initializer and the tail of the "model" row, original lines
   121/126/128-129); restore from upstream before editing.  */
120 static const struct attribute_spec m32r_attribute_table
[] =
122 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
123 affects_type_identity } */
/* "interrupt": zero-argument attribute, no validation handler.  */
124 { "interrupt", 0, 0, true, false, false, NULL
, false },
/* "model": one argument, validated by m32r_handle_model_attribute.  */
125 { "model", 1, 1, true, false, false, m32r_handle_model_attribute
,
/* Terminating sentinel entry.  */
127 { NULL
, 0, 0, false, false, false, NULL
, false }
130 /* Initialize the GCC target structure. */
131 #undef TARGET_ATTRIBUTE_TABLE
132 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
133 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
134 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
136 #undef TARGET_LEGITIMATE_ADDRESS_P
137 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
138 #undef TARGET_LEGITIMIZE_ADDRESS
139 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
140 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
141 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
143 #undef TARGET_ASM_ALIGNED_HI_OP
144 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
145 #undef TARGET_ASM_ALIGNED_SI_OP
146 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
148 #undef TARGET_PRINT_OPERAND
149 #define TARGET_PRINT_OPERAND m32r_print_operand
150 #undef TARGET_PRINT_OPERAND_ADDRESS
151 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
152 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
153 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
155 #undef TARGET_ASM_FUNCTION_PROLOGUE
156 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
157 #undef TARGET_ASM_FUNCTION_EPILOGUE
158 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
160 #undef TARGET_ASM_FILE_START
161 #define TARGET_ASM_FILE_START m32r_file_start
163 #undef TARGET_SCHED_ADJUST_PRIORITY
164 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
165 #undef TARGET_SCHED_ISSUE_RATE
166 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
168 #undef TARGET_OPTION_OVERRIDE
169 #define TARGET_OPTION_OVERRIDE m32r_option_override
171 #undef TARGET_ENCODE_SECTION_INFO
172 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
173 #undef TARGET_IN_SMALL_DATA_P
174 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
177 #undef TARGET_MEMORY_MOVE_COST
178 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
179 #undef TARGET_RTX_COSTS
180 #define TARGET_RTX_COSTS m32r_rtx_costs
181 #undef TARGET_ADDRESS_COST
182 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
184 #undef TARGET_PROMOTE_PROTOTYPES
185 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
186 #undef TARGET_RETURN_IN_MEMORY
187 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
189 #undef TARGET_FUNCTION_VALUE
190 #define TARGET_FUNCTION_VALUE m32r_function_value
191 #undef TARGET_LIBCALL_VALUE
192 #define TARGET_LIBCALL_VALUE m32r_libcall_value
193 #undef TARGET_FUNCTION_VALUE_REGNO_P
194 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
196 #undef TARGET_SETUP_INCOMING_VARARGS
197 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
198 #undef TARGET_MUST_PASS_IN_STACK
199 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
200 #undef TARGET_PASS_BY_REFERENCE
201 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
202 #undef TARGET_ARG_PARTIAL_BYTES
203 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
204 #undef TARGET_FUNCTION_ARG
205 #define TARGET_FUNCTION_ARG m32r_function_arg
206 #undef TARGET_FUNCTION_ARG_ADVANCE
207 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
209 #undef TARGET_CAN_ELIMINATE
210 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
212 #undef TARGET_CONDITIONAL_REGISTER_USAGE
213 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
215 #undef TARGET_TRAMPOLINE_INIT
216 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
218 #undef TARGET_LEGITIMATE_CONSTANT_P
219 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
/* The one and only target hook vector; TARGET_INITIALIZER (from
   target-def.h) collects all the TARGET_* macro overrides above.  */
221 struct gcc_target targetm
= TARGET_INITIALIZER
;
223 /* Called by m32r_option_override to initialize various things. */
/* NOTE(review): the function header (name/return type/opening brace,
   original lines 224-229) was lost in extraction; only the body
   statements remain below.  */
230 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
231 memset (m32r_punct_chars
, 0, sizeof (m32r_punct_chars
));
/* '#' and '@' are the only punctuation characters the print-operand
   machinery accepts (see m32r_print_operand_punct_valid_p).  */
232 m32r_punct_chars
['#'] = 1;
233 m32r_punct_chars
['@'] = 1; /* ??? no longer used */
235 /* Provide default value if not specified. */
/* Only set g_switch_value when the user did not give -G on the
   command line.  */
236 if (!global_options_set
.x_g_switch_value
)
237 g_switch_value
= SDATA_DEFAULT_SIZE
;
/* Implements TARGET_OPTION_OVERRIDE.  NOTE(review): braces and
   original lines 242, 245 and 247-248 were lost in extraction.  */
241 m32r_option_override (void)
243 /* These need to be done at start up.
244 It's convenient to do them here. */
/* Let the subtarget (OS/board) adjust option settings last.  */
246 SUBTARGET_OVERRIDE_OPTIONS
;
249 /* Vectors to keep interesting information about registers where it can easily
250 be got. We use to use the actual mode value as the bit number, but there
251 is (or may be) more than 32 modes now. Instead we use two tables: one
252 indexed by hard register number, and one indexed by mode. */
254 /* The purpose of m32r_mode_class is to shrink the range of modes so that
255 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
256 mapped into one m32r_mode_class mode. */
261 S_MODE
, D_MODE
, T_MODE
, O_MODE
,
262 SF_MODE
, DF_MODE
, TF_MODE
, OF_MODE
, A_MODE
265 /* Modes for condition codes. */
266 #define C_MODES (1 << (int) C_MODE)
268 /* Modes for single-word and smaller quantities. */
269 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
271 /* Modes for double-word and smaller quantities. */
272 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
274 /* Modes for quad-word and smaller quantities. */
275 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
277 /* Modes for accumulators. */
278 #define A_MODES (1 << (int) A_MODE)
280 /* Value is 1 if register/mode pair is acceptable on arc. */
282 const unsigned int m32r_hard_regno_mode_ok
[FIRST_PSEUDO_REGISTER
] =
284 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
,
285 T_MODES
, T_MODES
, T_MODES
, T_MODES
, T_MODES
, S_MODES
, S_MODES
, S_MODES
,
286 S_MODES
, C_MODES
, A_MODES
, A_MODES
289 unsigned int m32r_mode_class
[NUM_MACHINE_MODES
];
291 enum reg_class m32r_regno_reg_class
[FIRST_PSEUDO_REGISTER
];
294 init_reg_tables (void)
298 for (i
= 0; i
< NUM_MACHINE_MODES
; i
++)
300 machine_mode m
= (machine_mode
) i
;
302 switch (GET_MODE_CLASS (m
))
305 case MODE_PARTIAL_INT
:
306 case MODE_COMPLEX_INT
:
307 if (GET_MODE_SIZE (m
) <= 4)
308 m32r_mode_class
[i
] = 1 << (int) S_MODE
;
309 else if (GET_MODE_SIZE (m
) == 8)
310 m32r_mode_class
[i
] = 1 << (int) D_MODE
;
311 else if (GET_MODE_SIZE (m
) == 16)
312 m32r_mode_class
[i
] = 1 << (int) T_MODE
;
313 else if (GET_MODE_SIZE (m
) == 32)
314 m32r_mode_class
[i
] = 1 << (int) O_MODE
;
316 m32r_mode_class
[i
] = 0;
319 case MODE_COMPLEX_FLOAT
:
320 if (GET_MODE_SIZE (m
) <= 4)
321 m32r_mode_class
[i
] = 1 << (int) SF_MODE
;
322 else if (GET_MODE_SIZE (m
) == 8)
323 m32r_mode_class
[i
] = 1 << (int) DF_MODE
;
324 else if (GET_MODE_SIZE (m
) == 16)
325 m32r_mode_class
[i
] = 1 << (int) TF_MODE
;
326 else if (GET_MODE_SIZE (m
) == 32)
327 m32r_mode_class
[i
] = 1 << (int) OF_MODE
;
329 m32r_mode_class
[i
] = 0;
332 m32r_mode_class
[i
] = 1 << (int) C_MODE
;
335 m32r_mode_class
[i
] = 0;
340 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
343 m32r_regno_reg_class
[i
] = GENERAL_REGS
;
344 else if (i
== ARG_POINTER_REGNUM
)
345 m32r_regno_reg_class
[i
] = GENERAL_REGS
;
347 m32r_regno_reg_class
[i
] = NO_REGS
;
351 /* M32R specific attribute support.
353 interrupt - for interrupt functions
355 model - select code model used to access object
357 small: addresses use 24 bits, use bl to make calls
358 medium: addresses use 32 bits, use bl to make calls
359 large: addresses use 32 bits, use seth/add3/jl to make calls
361 Grep for MODEL in m32r.h for more info. */
363 static tree small_ident1
;
364 static tree small_ident2
;
365 static tree medium_ident1
;
366 static tree medium_ident2
;
367 static tree large_ident1
;
368 static tree large_ident2
;
/* Lazily cache the identifier nodes accepted as the "model" attribute
   argument: "small"/"__small__", "medium"/"__medium__",
   "large"/"__large__".  NOTE(review): the enclosing function header
   (presumably init_idents, declared above) and braces were lost in
   extraction.  */
/* small_ident1 doubles as the "already initialized" flag.  */
373 if (small_ident1
== 0)
375 small_ident1
= get_identifier ("small");
376 small_ident2
= get_identifier ("__small__");
377 medium_ident1
= get_identifier ("medium");
378 medium_ident2
= get_identifier ("__medium__");
379 large_ident1
= get_identifier ("large");
380 large_ident2
= get_identifier ("__large__");
384 /* Handle an "model" attribute; arguments as in
385 struct attribute_spec.handler. */
/* NOTE(review): the "static tree" line, the final parameter line
   (no_add_attrs), braces, local declarations and the return were
   lost in extraction.  */
387 m32r_handle_model_attribute (tree
*node ATTRIBUTE_UNUSED
, tree name
,
388 tree args
, int flags ATTRIBUTE_UNUSED
,
/* The attribute carries exactly one argument; fetch it.  */
394 arg
= TREE_VALUE (args
);
/* Accept only the six cached identifier spellings.  */
396 if (arg
!= small_ident1
397 && arg
!= small_ident2
398 && arg
!= medium_ident1
399 && arg
!= medium_ident2
400 && arg
!= large_ident1
401 && arg
!= large_ident2
)
/* Unknown spelling: warn and drop the attribute.  */
403 warning (OPT_Wattributes
, "invalid argument of %qs attribute",
404 IDENTIFIER_POINTER (name
));
405 *no_add_attrs
= true;
412 m32r_attribute_identifier (const_tree name
)
414 return strcmp (IDENTIFIER_POINTER (name
), "model") == 0
415 || strcmp (IDENTIFIER_POINTER (name
), "__model__") == 0;
418 /* Encode section information of DECL, which is either a VAR_DECL,
419 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
421 For the M32R we want to record:
423 - whether the object lives in .sdata/.sbss.
424 - what code model should be used to access the object
428 m32r_encode_section_info (tree decl
, rtx rtl
, int first
)
432 enum m32r_model model
;
434 default_encode_section_info (decl
, rtl
, first
);
439 model_attr
= lookup_attribute ("model", DECL_ATTRIBUTES (decl
));
446 id
= TREE_VALUE (TREE_VALUE (model_attr
));
448 if (id
== small_ident1
|| id
== small_ident2
)
449 model
= M32R_MODEL_SMALL
;
450 else if (id
== medium_ident1
|| id
== medium_ident2
)
451 model
= M32R_MODEL_MEDIUM
;
452 else if (id
== large_ident1
|| id
== large_ident2
)
453 model
= M32R_MODEL_LARGE
;
455 gcc_unreachable (); /* shouldn't happen */
459 if (TARGET_MODEL_SMALL
)
460 model
= M32R_MODEL_SMALL
;
461 else if (TARGET_MODEL_MEDIUM
)
462 model
= M32R_MODEL_MEDIUM
;
463 else if (TARGET_MODEL_LARGE
)
464 model
= M32R_MODEL_LARGE
;
466 gcc_unreachable (); /* shouldn't happen */
468 extra_flags
|= model
<< SYMBOL_FLAG_MODEL_SHIFT
;
471 SYMBOL_REF_FLAGS (XEXP (rtl
, 0)) |= extra_flags
;
474 /* Only mark the object as being small data area addressable if
475 it hasn't been explicitly marked with a code model.
477 The user can explicitly put an object in the small data area with the
478 section attribute. If the object is in sdata/sbss and marked with a
479 code model do both [put the object in .sdata and mark it as being
480 addressed with a specific code model - don't mark it as being addressed
481 with an SDA reloc though]. This is ok and might be useful at times. If
482 the object doesn't fit the linker will give an error. */
/* Implements TARGET_IN_SMALL_DATA_P: decide whether DECL may live in
   .sdata/.sbss.  NOTE(review): the "static bool" line, braces and all
   the return statements were lost in extraction; only the tests
   survive.  */
485 m32r_in_small_data_p (const_tree decl
)
/* Only variables are candidates for the small data area.  */
489 if (TREE_CODE (decl
) != VAR_DECL
)
/* An explicit code model attribute takes precedence (see the big
   comment above this function).  */
492 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl
)))
495 section
= DECL_SECTION_NAME (decl
);
/* Honour an explicit .sdata/.sbss section attribute.  */
498 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
/* Otherwise apply the -G size heuristic to writable data, unless
   small-data placement is disabled entirely.  */
503 if (! TREE_READONLY (decl
) && ! TARGET_SDATA_NONE
)
505 int size
= int_size_in_bytes (TREE_TYPE (decl
));
507 if (size
> 0 && size
<= g_switch_value
)
/* Do anything needed before RTL is emitted for each function.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
/* Predicate for call operands.  NOTE(review): the return type, braces
   and original lines 526-529 (which preceded the final return) were
   lost in extraction.  */
525 call_operand (rtx op
, machine_mode mode
)
/* Defer the real test to call_address_operand.  */
530 return call_address_operand (op
, mode
);
533 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
/* NOTE(review): the return type, braces and the fall-through
   "return 0;" were lost in extraction.  */
536 small_data_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
/* Small-data addressing must be enabled at all.  */
538 if (! TARGET_SDATA_USE
)
/* Bare symbol: use the symbol's cached small-data flag.  */
541 if (GET_CODE (op
) == SYMBOL_REF
)
542 return SYMBOL_REF_SMALL_P (op
);
/* (const (plus (symbol_ref ...) (const_int ...))) with an offset
   satisfying constraint J also counts as a small-data reference.  */
544 if (GET_CODE (op
) == CONST
545 && GET_CODE (XEXP (op
, 0)) == PLUS
546 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
547 && satisfies_constraint_J (XEXP (XEXP (op
, 0), 1)))
548 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op
, 0), 0));
553 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
/* NOTE(review): several interior lines (return type, braces, the
   declaration of `sym', some branches and returns) were lost in
   extraction; only fragments of the logic survive.  */
556 addr24_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
/* Labels qualify exactly when TARGET_ADDR24 is in force.  */
563 if (GET_CODE (op
) == LABEL_REF
)
564 return TARGET_ADDR24
;
566 if (GET_CODE (op
) == SYMBOL_REF
)
/* symbol+offset form, with the offset satisfying constraint M.  */
568 else if (GET_CODE (op
) == CONST
569 && GET_CODE (XEXP (op
, 0)) == PLUS
570 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
571 && satisfies_constraint_M (XEXP (XEXP (op
, 0), 1)))
572 sym
= XEXP (XEXP (op
, 0), 0);
/* Symbols marked with the small code model qualify.  */
576 if (SYMBOL_REF_MODEL (sym
) == M32R_MODEL_SMALL
)
/* ... as do constant-pool entries and literal names (LIT_NAME_P),
   under a condition whose first line was lost in extraction.  */
580 && (CONSTANT_POOL_ADDRESS_P (sym
)
581 || LIT_NAME_P (XSTR (sym
, 0))))
587 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
/* NOTE(review): interior lines (return type, braces, the declaration
   of `sym', one condition line and some returns) were lost in
   extraction.  */
590 addr32_operand (rtx op
, machine_mode mode
)
594 if (GET_CODE (op
) == LABEL_REF
)
595 return TARGET_ADDR32
;
597 if (GET_CODE (op
) == SYMBOL_REF
)
/* symbol + constant-integer offset form.  */
599 else if (GET_CODE (op
) == CONST
600 && GET_CODE (XEXP (op
, 0)) == PLUS
601 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
602 && CONST_INT_P (XEXP (XEXP (op
, 0), 1))
604 sym
= XEXP (XEXP (op
, 0), 0);
/* 32-bit addressing is needed exactly when neither 24-bit nor
   small-data addressing applies to the symbol.  */
608 return (! addr24_operand (sym
, mode
)
609 && ! small_data_operand (sym
, mode
));
612 /* Return 1 if OP is a function that can be called with the `bl' insn. */
/* NOTE(review): the return type, braces and original lines 616-619
   were lost in extraction.  */
615 call26_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
/* A direct symbol is reachable unless it uses the large model.  */
620 if (GET_CODE (op
) == SYMBOL_REF
)
621 return SYMBOL_REF_MODEL (op
) != M32R_MODEL_LARGE
;
/* Otherwise depend on the TARGET_CALL26 setting.  */
623 return TARGET_CALL26
;
626 /* Return 1 if OP is a DImode const we want to handle inline.
627 This must match the code in the movdi pattern.
628 It is used by the 'G' constraint. */
/* NOTE(review): the return type, braces and the final return
   statements were lost in extraction.  */
631 easy_di_const (rtx op
)
633 rtx high_rtx
, low_rtx
;
634 HOST_WIDE_INT high
, low
;
/* Split the 64-bit constant into its two word-sized halves.  */
636 split_double (op
, &high_rtx
, &low_rtx
);
637 high
= INTVAL (high_rtx
);
638 low
= INTVAL (low_rtx
);
639 /* Pick constants loadable with 2 16-bit `ldi' insns. */
/* Each half must fit in 8 signed bits (-128..127).  */
640 if (high
>= -128 && high
<= 127
641 && low
>= -128 && low
<= 127)
646 /* Return 1 if OP is a DFmode const we want to handle inline.
647 This must match the code in the movdf pattern.
648 It is used by the 'H' constraint. */
/* NOTE(review): the return type, braces, the declaration of `l' and
   the return statements were lost in extraction.  */
651 easy_df_const (rtx op
)
/* Fetch the target-format bit image of the double into l[0]/l[1].  */
655 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op
), l
);
/* Cheap cases: an all-zero image, or one where only the upper bits
   of the first word are set.  */
656 if (l
[0] == 0 && l
[1] == 0)
658 if ((l
[0] & 0xffff) == 0 && l
[1] == 0)
663 /* Return 1 if OP is (mem (reg ...)).
664 This is used in insn length calcs. */
667 memreg_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
669 return MEM_P (op
) && REG_P (XEXP (op
, 0));
672 /* Return nonzero if TYPE must be passed by indirect reference. */
/* NOTE(review): braces, the declaration of `size' and the if/else
   lines selecting between the two assignments were lost in
   extraction.  */
675 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED
,
676 machine_mode mode
, const_tree type
,
677 bool named ATTRIBUTE_UNUSED
)
/* Size taken from the type when one is supplied ...  */
682 size
= int_size_in_bytes (type
);
/* ... otherwise from the machine mode.  */
684 size
= GET_MODE_SIZE (mode
);
/* Reference-pass anything of unknown (negative) size or bigger than
   8 bytes.  */
686 return (size
< 0 || size
> 8);
691 /* X and Y are two things to compare using CODE. Emit the compare insn and
692 return the rtx for compare [arg0 of the if_then_else].
693 If need_compare is true then the comparison insn must be generated, rather
694 than being subsumed into the following branch instruction. */
697 gen_compare (enum rtx_code code
, rtx x
, rtx y
, int need_compare
)
699 enum rtx_code compare_code
;
700 enum rtx_code branch_code
;
701 rtx cc_reg
= gen_rtx_REG (CCmode
, CARRY_REGNUM
);
706 case EQ
: compare_code
= EQ
; branch_code
= NE
; break;
707 case NE
: compare_code
= EQ
; branch_code
= EQ
; break;
708 case LT
: compare_code
= LT
; branch_code
= NE
; break;
709 case LE
: compare_code
= LT
; branch_code
= EQ
; must_swap
= 1; break;
710 case GT
: compare_code
= LT
; branch_code
= NE
; must_swap
= 1; break;
711 case GE
: compare_code
= LT
; branch_code
= EQ
; break;
712 case LTU
: compare_code
= LTU
; branch_code
= NE
; break;
713 case LEU
: compare_code
= LTU
; branch_code
= EQ
; must_swap
= 1; break;
714 case GTU
: compare_code
= LTU
; branch_code
= NE
; must_swap
= 1; break;
715 case GEU
: compare_code
= LTU
; branch_code
= EQ
; break;
723 switch (compare_code
)
726 if (satisfies_constraint_P (y
) /* Reg equal to small const. */
729 rtx tmp
= gen_reg_rtx (SImode
);
731 emit_insn (gen_addsi3 (tmp
, x
, GEN_INT (-INTVAL (y
))));
735 else if (CONSTANT_P (y
)) /* Reg equal to const. */
737 rtx tmp
= force_reg (GET_MODE (x
), y
);
741 if (register_operand (y
, SImode
) /* Reg equal to reg. */
742 || y
== const0_rtx
) /* Reg equal to zero. */
744 emit_insn (gen_cmp_eqsi_insn (x
, y
));
746 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
751 if (register_operand (y
, SImode
)
752 || satisfies_constraint_P (y
))
754 rtx tmp
= gen_reg_rtx (SImode
); /* Reg compared to reg. */
759 emit_insn (gen_cmp_ltsi_insn (x
, y
));
766 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
767 emit_insn (gen_cmp_ltsi_insn (x
, tmp
));
772 tmp
= gen_rtx_PLUS (SImode
, y
, const1_rtx
);
774 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
775 emit_insn (gen_cmp_ltsi_insn (x
, tmp
));
779 emit_insn (gen_cmp_ltsi_insn (x
, y
));
786 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
791 if (register_operand (y
, SImode
)
792 || satisfies_constraint_P (y
))
794 rtx tmp
= gen_reg_rtx (SImode
); /* Reg (unsigned) compared to reg. */
799 emit_insn (gen_cmp_ltusi_insn (x
, y
));
806 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
807 emit_insn (gen_cmp_ltusi_insn (x
, tmp
));
812 tmp
= gen_rtx_PLUS (SImode
, y
, const1_rtx
);
814 emit_insn (gen_addsi3 (tmp
, y
, constm1_rtx
));
815 emit_insn (gen_cmp_ltusi_insn (x
, tmp
));
819 emit_insn (gen_cmp_ltusi_insn (x
, y
));
826 return gen_rtx_fmt_ee (code
, CCmode
, cc_reg
, const0_rtx
);
836 /* Reg/reg equal comparison. */
837 if (compare_code
== EQ
838 && register_operand (y
, SImode
))
839 return gen_rtx_fmt_ee (code
, CCmode
, x
, y
);
841 /* Reg/zero signed comparison. */
842 if ((compare_code
== EQ
|| compare_code
== LT
)
844 return gen_rtx_fmt_ee (code
, CCmode
, x
, y
);
846 /* Reg/smallconst equal comparison. */
847 if (compare_code
== EQ
848 && satisfies_constraint_P (y
))
850 rtx tmp
= gen_reg_rtx (SImode
);
852 emit_insn (gen_addsi3 (tmp
, x
, GEN_INT (-INTVAL (y
))));
853 return gen_rtx_fmt_ee (code
, CCmode
, tmp
, const0_rtx
);
856 /* Reg/const equal comparison. */
857 if (compare_code
== EQ
860 rtx tmp
= force_reg (GET_MODE (x
), y
);
862 return gen_rtx_fmt_ee (code
, CCmode
, x
, tmp
);
869 y
= force_reg (GET_MODE (x
), y
);
872 int ok_const
= reg_or_int16_operand (y
, GET_MODE (y
));
875 y
= force_reg (GET_MODE (x
), y
);
879 switch (compare_code
)
882 emit_insn (gen_cmp_eqsi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
885 emit_insn (gen_cmp_ltsi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
888 emit_insn (gen_cmp_ltusi_insn (must_swap
? y
: x
, must_swap
? x
: y
));
895 return gen_rtx_fmt_ee (branch_code
, VOIDmode
, cc_reg
, CONST0_RTX (CCmode
));
899 gen_cond_store (enum rtx_code code
, rtx op0
, rtx op1
, rtx op2
)
901 machine_mode mode
= GET_MODE (op0
);
903 gcc_assert (mode
== SImode
);
907 if (!register_operand (op1
, mode
))
908 op1
= force_reg (mode
, op1
);
910 if (TARGET_M32RX
|| TARGET_M32R2
)
912 if (!reg_or_zero_operand (op2
, mode
))
913 op2
= force_reg (mode
, op2
);
915 emit_insn (gen_seq_insn_m32rx (op0
, op1
, op2
));
918 if (CONST_INT_P (op2
) && INTVAL (op2
) == 0)
920 emit_insn (gen_seq_zero_insn (op0
, op1
));
924 if (!reg_or_eq_int16_operand (op2
, mode
))
925 op2
= force_reg (mode
, op2
);
927 emit_insn (gen_seq_insn (op0
, op1
, op2
));
931 if (!CONST_INT_P (op2
)
932 || (INTVAL (op2
) != 0 && satisfies_constraint_K (op2
)))
936 if (reload_completed
|| reload_in_progress
)
939 reg
= gen_reg_rtx (SImode
);
940 emit_insn (gen_xorsi3 (reg
, op1
, op2
));
943 if (!register_operand (op1
, mode
))
944 op1
= force_reg (mode
, op1
);
946 emit_insn (gen_sne_zero_insn (op0
, op1
));
961 if (!register_operand (op1
, mode
))
962 op1
= force_reg (mode
, op1
);
964 if (!reg_or_int16_operand (op2
, mode
))
965 op2
= force_reg (mode
, op2
);
967 emit_insn (gen_slt_insn (op0
, op1
, op2
));
980 if (!register_operand (op1
, mode
))
981 op1
= force_reg (mode
, op1
);
983 if (!reg_or_int16_operand (op2
, mode
))
984 op2
= force_reg (mode
, op2
);
986 emit_insn (gen_sltu_insn (op0
, op1
, op2
));
991 if (!register_operand (op1
, mode
))
992 op1
= force_reg (mode
, op1
);
994 if (!reg_or_int16_operand (op2
, mode
))
995 op2
= force_reg (mode
, op2
);
998 emit_insn (gen_sge_insn (op0
, op1
, op2
));
1000 emit_insn (gen_sgeu_insn (op0
, op1
, op2
));
1005 if (!register_operand (op1
, mode
))
1006 op1
= force_reg (mode
, op1
);
1008 if (CONST_INT_P (op2
))
1010 HOST_WIDE_INT value
= INTVAL (op2
);
1011 if (value
>= 2147483647)
1013 emit_move_insn (op0
, const1_rtx
);
1017 op2
= GEN_INT (value
+ 1);
1018 if (value
< -32768 || value
>= 32767)
1019 op2
= force_reg (mode
, op2
);
1022 emit_insn (gen_sltu_insn (op0
, op1
, op2
));
1024 emit_insn (gen_slt_insn (op0
, op1
, op2
));
1028 if (!register_operand (op2
, mode
))
1029 op2
= force_reg (mode
, op2
);
1032 emit_insn (gen_sleu_insn (op0
, op1
, op2
));
1034 emit_insn (gen_sle_insn (op0
, op1
, op2
));
1043 /* Split a 2 word move (DI or DF) into component parts. */
1046 gen_split_move_double (rtx operands
[])
1048 machine_mode mode
= GET_MODE (operands
[0]);
1049 rtx dest
= operands
[0];
1050 rtx src
= operands
[1];
1053 /* We might have (SUBREG (MEM)) here, so just get rid of the
1054 subregs to make this code simpler. It is safe to call
1055 alter_subreg any time after reload. */
1056 if (GET_CODE (dest
) == SUBREG
)
1057 alter_subreg (&dest
, true);
1058 if (GET_CODE (src
) == SUBREG
)
1059 alter_subreg (&src
, true);
1064 int dregno
= REGNO (dest
);
1069 int sregno
= REGNO (src
);
1071 int reverse
= (dregno
== sregno
+ 1);
1073 /* We normally copy the low-numbered register first. However, if
1074 the first register operand 0 is the same as the second register of
1075 operand 1, we must copy in the opposite order. */
1076 emit_insn (gen_rtx_SET (operand_subword (dest
, reverse
, TRUE
, mode
),
1077 operand_subword (src
, reverse
, TRUE
, mode
)));
1079 emit_insn (gen_rtx_SET (operand_subword (dest
, !reverse
, TRUE
, mode
),
1080 operand_subword (src
, !reverse
, TRUE
, mode
)));
1083 /* Reg = constant. */
1084 else if (CONST_INT_P (src
) || GET_CODE (src
) == CONST_DOUBLE
)
1087 split_double (src
, &words
[0], &words
[1]);
1088 emit_insn (gen_rtx_SET (operand_subword (dest
, 0, TRUE
, mode
),
1091 emit_insn (gen_rtx_SET (operand_subword (dest
, 1, TRUE
, mode
),
1096 else if (MEM_P (src
))
1098 /* If the high-address word is used in the address, we must load it
1099 last. Otherwise, load it first. */
1100 int reverse
= refers_to_regno_p (dregno
, XEXP (src
, 0));
1102 /* We used to optimize loads from single registers as
1106 if r3 were not used subsequently. However, the REG_NOTES aren't
1107 propagated correctly by the reload phase, and it can cause bad
1108 code to be generated. We could still try:
1110 ld r1,r3+; ld r2,r3; addi r3,-4
1112 which saves 2 bytes and doesn't force longword alignment. */
1113 emit_insn (gen_rtx_SET (operand_subword (dest
, reverse
, TRUE
, mode
),
1114 adjust_address (src
, SImode
,
1115 reverse
* UNITS_PER_WORD
)));
1117 emit_insn (gen_rtx_SET (operand_subword (dest
, !reverse
, TRUE
, mode
),
1118 adjust_address (src
, SImode
,
1119 !reverse
* UNITS_PER_WORD
)));
1126 /* We used to optimize loads from single registers as
1130 if r3 were not used subsequently. However, the REG_NOTES aren't
1131 propagated correctly by the reload phase, and it can cause bad
1132 code to be generated. We could still try:
1134 st r1,r3; st r2,+r3; addi r3,-4
1136 which saves 2 bytes and doesn't force longword alignment. */
1137 else if (MEM_P (dest
) && REG_P (src
))
1139 emit_insn (gen_rtx_SET (adjust_address (dest
, SImode
, 0),
1140 operand_subword (src
, 0, TRUE
, mode
)));
1142 emit_insn (gen_rtx_SET (adjust_address (dest
, SImode
, UNITS_PER_WORD
),
1143 operand_subword (src
, 1, TRUE
, mode
)));
/* Implements TARGET_ARG_PARTIAL_BYTES (see the macro table above).
   NOTE(review): interior lines (the "static int" line, braces, the
   declarations of `words'/`size', the start of the size expression
   and two arms of the if/else chain) were lost in extraction.  */
1156 m32r_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
1157 tree type
, bool named ATTRIBUTE_UNUSED
)
1159 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* Argument size rounded up to whole words: BLKmode args measure the
   type, others measure the mode.  */
1163 (((mode
== BLKmode
&& type
)
1164 ? (unsigned int) int_size_in_bytes (type
)
1165 : GET_MODE_SIZE (mode
)) + UNITS_PER_WORD
- 1)
/* All parameter registers already consumed by earlier args.  */
1168 if (*cum
>= M32R_MAX_PARM_REGS
)
/* Argument straddles the register/stack boundary.  */
1170 else if (*cum
+ size
> M32R_MAX_PARM_REGS
)
1171 words
= (*cum
+ size
) - M32R_MAX_PARM_REGS
;
/* Convert the word count back to bytes.  */
1175 return words
* UNITS_PER_WORD
;
1178 /* The ROUND_ADVANCE* macros are local to this file. */
1179 /* Round SIZE up to a word boundary. */
1180 #define ROUND_ADVANCE(SIZE) \
1181 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1183 /* Round arg MODE/TYPE up to the next word boundary. */
1184 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1185 ((MODE) == BLKmode \
1186 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1187 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1189 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1190 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1192 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1193 a reg. This includes arguments that have to be passed by reference as the
1194 pointer to them is passed in a reg if one is available (and that is what
1196 This macro is only used in this file. */
1197 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1198 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1200 /* Determine where to put an argument to a function.
1201 Value is zero to push the argument on the stack,
1202 or a hard register in which to store the argument.
1204 MODE is the argument's machine mode.
1205 TYPE is the data type of the argument (as a tree).
1206 This is null for libcalls where that information may
1208 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1209 the preceding args and about the function being called.
1210 NAMED is nonzero if this argument is a named parameter
1211 (otherwise it is an extra parameter matching an ellipsis). */
1212 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1213 and the rest are pushed. */
/* Implements TARGET_FUNCTION_ARG.  NOTE(review): the "static rtx"
   line, braces and the other arm of the conditional (original lines
   1224-1225) were lost in extraction.  */
1216 m32r_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1217 const_tree type ATTRIBUTE_UNUSED
,
1218 bool named ATTRIBUTE_UNUSED
)
1220 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* In a register while parameter registers remain (PASS_IN_REG_P),
   using the rounded cumulative count as the register number.  */
1222 return (PASS_IN_REG_P (*cum
, mode
, type
)
1223 ? gen_rtx_REG (mode
, ROUND_ADVANCE_CUM (*cum
, mode
, type
))
1227 /* Update the data in CUM to advance over an argument
1228 of mode MODE and data type TYPE.
1229 (TYPE is null for libcalls where that information may not be available.) */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  NOTE(review): the
   "static void" line and braces were lost in extraction.  */
1232 m32r_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1233 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1235 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* Step CUM past this argument: round it for the argument's alignment,
   then add the argument's size in words.  */
1237 *cum
= (ROUND_ADVANCE_CUM (*cum
, mode
, type
)
1238 + ROUND_ADVANCE_ARG (mode
, type
));
1241 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1244 m32r_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1246 cumulative_args_t dummy
= pack_cumulative_args (NULL
);
1248 return m32r_pass_by_reference (dummy
, TYPE_MODE (type
), type
, false);
1251 /* Worker function for TARGET_FUNCTION_VALUE. */
1254 m32r_function_value (const_tree valtype
,
1255 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
1256 bool outgoing ATTRIBUTE_UNUSED
)
1258 return gen_rtx_REG (TYPE_MODE (valtype
), 0);
1261 /* Worker function for TARGET_LIBCALL_VALUE. */
1264 m32r_libcall_value (machine_mode mode
,
1265 const_rtx fun ATTRIBUTE_UNUSED
)
1267 return gen_rtx_REG (mode
, 0);
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.

   ??? What about r1 in DI/DF values.  */

static bool
m32r_function_value_regno_p (const unsigned int regno)
{
  /* Register 0 is the only value-return register.  */
  return regno == 0;
}
1280 /* Do any needed setup for a variadic function. For the M32R, we must
1281 create a register parameter block, and then copy any anonymous arguments
1282 in registers to memory.
1284 CUM has not been updated for the last named argument which has type TYPE
1285 and mode MODE, and we rely on this fact. */
/* Implements TARGET_SETUP_INCOMING_VARARGS.  NOTE(review): interior
   lines (the "static void" line, braces, the declarations of
   first_anon_arg/regblock and some guards) were lost in extraction.  */
1288 m32r_setup_incoming_varargs (cumulative_args_t cum
, machine_mode mode
,
1289 tree type
, int *pretend_size
, int no_rtl
)
1296 /* All BLKmode values are passed by reference. */
1297 gcc_assert (mode
!= BLKmode
);
/* Index of the first parameter register that holds an anonymous
   argument: everything up to and including the last named arg.  */
1299 first_anon_arg
= (ROUND_ADVANCE_CUM (*get_cumulative_args (cum
), mode
, type
)
1300 + ROUND_ADVANCE_ARG (mode
, type
));
/* Some anonymous args landed in registers: copy those registers out
   to an arg-pointer-relative memory block.  */
1302 if (first_anon_arg
< M32R_MAX_PARM_REGS
)
1304 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1305 int first_reg_offset
= first_anon_arg
;
1306 /* Size in words to "pretend" allocate. */
1307 int size
= M32R_MAX_PARM_REGS
- first_reg_offset
;
1310 regblock
= gen_frame_mem (BLKmode
,
1311 plus_constant (Pmode
, arg_pointer_rtx
,
1312 FIRST_PARM_OFFSET (0)));
1313 set_mem_alias_set (regblock
, get_varargs_alias_set ());
1314 move_block_from_reg (first_reg_offset
, regblock
, size
);
1316 *pretend_size
= (size
* UNITS_PER_WORD
);
1321 /* Return true if INSN is real instruction bearing insn. */
1324 m32r_is_insn (rtx insn
)
1326 return (NONDEBUG_INSN_P (insn
)
1327 && GET_CODE (PATTERN (insn
)) != USE
1328 && GET_CODE (PATTERN (insn
)) != CLOBBER
);
/* Increase the priority of long instructions so that the
   short instructions are scheduled ahead of the long ones.  */

static int
m32r_adjust_priority (rtx_insn *insn, int priority)
{
  /* Boost everything that is a real insn and not short-encoded.  */
  if (m32r_is_insn (insn)
      && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
    priority <<= 3;

  return priority;
}
1345 /* Indicate how many instructions can be issued at the same time.
1346 This is sort of a lie. The m32r can issue only 1 long insn at
1347 once, but it can issue 2 short insns. The default therefore is
1348 set at 2, but this can be overridden by the command line option
1352 m32r_issue_rate (void)
1354 return ((TARGET_LOW_ISSUE_RATE
) ? 1 : 2);
/* Cost functions.  */

/* Memory is 3 times as expensive as registers.
   ??? Is that the right way to look at it?  */

static int
m32r_memory_move_cost (machine_mode mode,
		       reg_class_t rclass ATTRIBUTE_UNUSED,
		       bool in ATTRIBUTE_UNUSED)
{
  /* Word-or-smaller accesses are cheaper than multi-word ones.  */
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
    return 6;
  else
    return 12;
}
/* Worker for TARGET_RTX_COSTS: fill in *TOTAL for rtx X and return true
   when this hook fully determined the cost.  */

static bool
m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		int outer_code ATTRIBUTE_UNUSED,
		int opno ATTRIBUTE_UNUSED, int *total,
		bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can be
	 fetched as immediate constants - let's give that the cost of an
	 extra insn.  */
    case CONST_INT:
      if (INT16_P (INTVAL (x)))
	{
	  /* Fits in a 16-bit immediate: free.  */
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	rtx high, low;

	/* Each half that does not fit in 16 bits costs one insn.  */
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
				+ !INT16_P (INTVAL (low)));
	return true;
      }

    case MULT:
      *total = COSTS_N_INSNS (3);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (10);
      return true;

    default:
      return false;
    }
}
/* Type of function DECL.

   The result is cached.  To reset the cache at the end of a function,
   call with DECL = NULL_TREE.  */

enum m32r_function_type
m32r_compute_function_type (tree decl)
{
  /* Cached value.  */
  static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
  /* Last function we were called for.  */
  static tree last_fn = NULL_TREE;

  /* Resetting the cached value?  */
  if (decl == NULL_TREE)
    {
      fn_type = M32R_FUNCTION_UNKNOWN;
      last_fn = NULL_TREE;
      return fn_type;
    }

  /* Cache hit: same function as last time and a valid classification.  */
  if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
    return fn_type;

  /* Compute function type: an "interrupt" attribute marks interrupt
     handlers; everything else is a normal function.  */
  fn_type = (lookup_attribute ("interrupt",
			       DECL_ATTRIBUTES (current_function_decl))
	     != NULL_TREE
	     ? M32R_FUNCTION_INTERRUPT
	     : M32R_FUNCTION_NORMAL);

  last_fn = decl;
  return fn_type;
}
/* Function prologue/epilogue handlers.  */

/* M32R stack frames look like:

             Before call                       After call
        +-----------------------+       +-----------------------+
   high |  local variables,     |       |  local variables,     |
   mem  |  reg save area, etc.  |       |  reg save area, etc.  |
        +-----------------------+       +-----------------------+
        |  arguments on stack.  |       |  arguments on stack.  |
  SP+0->+-----------------------+       +-----------------------+
                                        |  reg parm save area,  |
                                        |  only created for     |
                                        |  variable argument    |
                                        |  functions            |
                                        +-----------------------+
                                        |  previous frame ptr   |
                                        +-----------------------+
                                        |  register save area   |
                                        +-----------------------+
                                        |  local variables      |
                                        +-----------------------+
                                        |  alloca allocations   |
                                        +-----------------------+
   low                                  |  arguments on stack   |
   memory                         SP+0->+-----------------------+

  Notes:
  1) The "reg parm save area" does not exist for non variable argument fns.
  2) The "reg parm save area" can be eliminated completely if we saved regs
     containing anonymous args separately but that complicates things too
     much (so it's not done).
  3) The return address is saved after the register save area so as to have as
     many insns as possible between the restoration of `lr' and the `jmp lr'.  */

/* Structure to be filled in by m32r_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct m32r_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  unsigned int save_fp;		/* Nonzero if fp must be saved.  */
  unsigned int save_lr;		/* Nonzero if lr (return addr) must be saved.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by m32r_compute_frame_size.  */
static struct m32r_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct m32r_frame_info zero_frame_info;

/* Single-register bit masks used in gmask bookkeeping.  */
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK   (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
  ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
   && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
#define MUST_SAVE_RETURN_ADDR   (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)

#define SHORT_INSN_SIZE 2	/* Size of small instructions.  */
#define LONG_INSN_SIZE 4	/* Size of long instructions.  */
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.

   SIZE is the size needed for local variables.  */

unsigned int
m32r_compute_frame_size (int size)	/* # of var. bytes allocated.  */
{
  unsigned int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size;
  unsigned int gmask;
  enum m32r_function_type fn_type;
  int interrupt_p;
  /* The PIC register must be treated as live if PIC is in use and either
     the GOT pointer is referenced or profiling is on.  */
  int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
				  | crtl->profile);

  var_size	= M32R_STACK_ALIGN (size);
  args_size	= M32R_STACK_ALIGN (crtl->outgoing_args_size);
  pretend_size	= crtl->args.pretend_args_size;
  extra_size	= FIRST_PARM_OFFSET (0);
  total_size	= extra_size + pretend_size + args_size + var_size;
  reg_size	= 0;
  gmask		= 0;

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */
  fn_type = m32r_compute_function_type (current_function_decl);
  interrupt_p = M32R_INTERRUPT_P (fn_type);

  /* Calculate space needed for registers.  */
  for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
    {
      if (MUST_SAVE_REGISTER (regno, interrupt_p)
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  reg_size += UNITS_PER_WORD;
	  gmask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
  current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;

  /* fp and lr are saved outside the gmask loop; account for them here.  */
  reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
	       * UNITS_PER_WORD);
  total_size += reg_size;

  /* ??? Not sure this is necessary, and I don't think the epilogue
     handler will do the right thing if this changes total_size.  */
  total_size = M32R_STACK_ALIGN (total_size);

  /* frame_size = total_size - (pretend_size + reg_size); */

  /* Save computed information.  */
  current_frame_info.total_size   = total_size;
  current_frame_info.extra_size   = extra_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size     = var_size;
  current_frame_info.args_size    = args_size;
  current_frame_info.reg_size     = reg_size;
  current_frame_info.gmask        = gmask;
  /* Only trust the cached result once register allocation is done.  */
  current_frame_info.initialized  = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}
/* Worker function for TARGET_CAN_ELIMINATE.  Eliminating the argument
   pointer directly into the stack pointer is only possible when no
   frame pointer is needed; every other elimination is always allowed.  */

static bool
m32r_can_eliminate (const int from, const int to)
{
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
	  ? ! frame_pointer_needed
	  : true);
}
/* The table we use to reference PIC data.  */
static rtx global_offset_table;

/* Reload the link register `lr' from its stack slot.  SP is the stack
   pointer and SIZE the byte offset from SP to the saved lr.  */

static void
m32r_reload_lr (rtx sp, int size)
{
  rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);

  if (size == 0)
    emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
  else if (size < 32768)
    /* Offset fits in a 16-bit displacement.  */
    emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
					     gen_rtx_PLUS (Pmode, sp,
							   GEN_INT (size)))));
  else
    {
      /* Large offset: materialize the address in a scratch register.  */
      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

      emit_insn (gen_movsi (tmp, GEN_INT (size)));
      emit_insn (gen_addsi3 (tmp, tmp, sp));
      emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
    }

  emit_use (lr);
}
/* Emit the sequence that initializes the PIC register with the address
   of the global offset table.  */

void
m32r_load_pic_register (void)
{
  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
			 GEN_INT (TARGET_MODEL_SMALL)));

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_use (pic_offset_table_rtx);
}
/* Expand the m32r prologue as a series of insns.  */

void
m32r_expand_prologue (void)
{
  int regno;
  int frame_size;
  unsigned int gmask;
  int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
				  | crtl->profile);

  if (! current_frame_info.initialized)
    m32r_compute_frame_size (get_frame_size ());

  if (flag_stack_usage_info)
    current_function_static_stack_size = current_frame_info.total_size;

  gmask = current_frame_info.gmask;

  /* These cases shouldn't happen.  Catch them now.  */
  gcc_assert (current_frame_info.total_size || !gmask);

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    {
      /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
	 the wrong result on a 64-bit host.  */
      HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;

      emit_insn (gen_addsi3 (stack_pointer_rtx,
			     stack_pointer_rtx,
			     GEN_INT (-pretend_size)));
    }

  /* Save any registers we need to and set up fp.  */
  if (current_frame_info.save_fp)
    emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));

  gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  /* NOTE(review): this loop uses `regno <= M32R_MAX_INT_REGS' while
     m32r_compute_frame_size uses `regno < M32R_MAX_INT_REGS'; harmless
     since bit M32R_MAX_INT_REGS is never set in gmask, but inconsistent.  */
  for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
    {
      if ((gmask & (1 << regno)) != 0)
	emit_insn (gen_movsi_push (stack_pointer_rtx,
				   gen_rtx_REG (Pmode, regno)));
    }

  if (current_frame_info.save_lr)
    emit_insn (gen_movsi_push (stack_pointer_rtx,
			       gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));

  /* Allocate the stack frame.  */
  frame_size = (current_frame_info.total_size
		- (current_frame_info.pretend_size
		   + current_frame_info.reg_size));

  if (frame_size == 0)
    ; /* Nothing to do.  */
  else if (frame_size <= 32768)
    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			   GEN_INT (-frame_size)));
  else
    {
      /* Frame too large for an immediate: go through a scratch reg.  */
      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

      emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
      emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
    }

  if (frame_pointer_needed)
    emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));

  if (crtl->profile)
    /* Push lr for mcount (form_pc, x).  */
    emit_insn (gen_movsi_push (stack_pointer_rtx,
			       gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));

  if (pic_reg_used)
    {
      m32r_load_pic_register ();
      m32r_reload_lr (stack_pointer_rtx,
		      (crtl->profile ? 0 : frame_size));
    }

  /* Keep the scheduler from moving the frame setup past later code.  */
  if (crtl->profile && !pic_reg_used)
    emit_insn (gen_blockage ());
}
/* Set up the stack and frame pointer (if desired) for the function.
   Note, if this is changed, you need to mirror the changes in
   m32r_compute_frame_size which calculates the prolog size.  */

static void
m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
{
  enum m32r_function_type fn_type
    = m32r_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, mark it as such.  */
  if (M32R_INTERRUPT_P (fn_type))
    fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);

  if (! current_frame_info.initialized)
    m32r_compute_frame_size (size);

  /* This is only for the human reader.  */
  fprintf (file,
	   "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);
}
/* Output RTL to pop register REGNO from the stack.  The generated
   load auto-increments sp, hence the REG_INC note.  */

static void
pop (int regno)
{
  rtx x;

  x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
				stack_pointer_rtx));
  add_reg_note (x, REG_INC, stack_pointer_rtx);
}
/* Expand the m32r epilogue as a series of insns.  */

void
m32r_expand_epilogue (void)
{
  int regno;
  int noepilogue = FALSE;
  int total_size;

  gcc_assert (current_frame_info.initialized);
  total_size = current_frame_info.total_size;

  if (total_size == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (insn && NOTE_P (insn))
	insn = prev_nonnote_insn (insn);
      if (insn && BARRIER_P (insn))
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int var_size = current_frame_info.var_size;
      unsigned int args_size = current_frame_info.args_size;
      unsigned int gmask = current_frame_info.gmask;
      /* alloca may have moved sp; only trust it when alloca is absent.  */
      int can_trust_sp_p = !cfun->calls_alloca;

      if (flag_exceptions)
	emit_insn (gen_blockage ());

      /* The first thing to do is point the sp at the bottom of the register
	 save area.  */
      if (can_trust_sp_p)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    ; /* Nothing to do.  */
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else if (frame_pointer_needed)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else
	gcc_unreachable ();

      if (current_frame_info.save_lr)
	pop (RETURN_ADDR_REGNUM);

      /* Restore any saved registers, in reverse order of course.  */
      gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
      for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
	{
	  if ((gmask & (1L << regno)) != 0)
	    pop (regno);
	}

      if (current_frame_info.save_fp)
	pop (FRAME_POINTER_REGNUM);

      /* Remove varargs area if present.  */
      if (current_frame_info.pretend_size != 0)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (current_frame_info.pretend_size)));

      emit_insn (gen_blockage ());
    }
}
1888 /* Do any necessary cleanup after a function to restore stack, frame,
1892 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED
,
1893 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
1895 /* Reset state info for each function. */
1896 current_frame_info
= zero_frame_info
;
1897 m32r_compute_function_type (NULL_TREE
);
/* Return nonzero if this function is known to have a null or 1 instruction
   epilogue.  */

int
direct_return (void)
{
  /* Frame layout is only final after reload.  */
  if (!reload_completed)
    return FALSE;

  /* Interrupt handlers need their full epilogue.  */
  if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
    return FALSE;

  if (! current_frame_info.initialized)
    m32r_compute_frame_size (get_frame_size ());

  return current_frame_info.total_size == 0;
}
/* Return nonzero if X is already a legitimate operand under PIC: bare
   symbols/labels (and symbol+constant forms) must first go through the
   GOT, so they are rejected.  */

int
m32r_legitimate_pic_operand_p (rtx x)
{
  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return 0;

  /* (const (plus (symbol_ref/label_ref) (const_int))) also needs
     legitimizing.  */
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
    return 0;

  return 1;
}
/* Rewrite ORIG, a symbolic address, into a PIC-legitimate form,
   using REG (or a fresh pseudo) as scratch.  Returns the new rtx.  */

rtx
m32r_legitimize_pic_address (rtx orig, rtx reg)
{
#ifdef DEBUG_PIC
  printf("m32r_legitimize_pic_address()\n");
#endif

  if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
    {
      rtx pic_ref, address;

      if (reg == 0)
	{
	  /* Fresh pseudos are only available before reload.  */
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);
	}

      address = gen_reg_rtx (Pmode);

      crtl->uses_pic_offset_table = 1;

      /* Local symbols can be addressed GOT-relative without a load.  */
      if (GET_CODE (orig) == LABEL_REF
	  || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
	{
	  emit_insn (gen_gotoff_load_addr (reg, orig));
	  emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
	  return reg;
	}

      /* Global symbol: load its address out of the GOT.  */
      emit_insn (gen_pic_load_addr (address, orig));

      emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
      pic_ref = gen_const_mem (Pmode, address);
      emit_move_insn (reg, pic_ref);
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already relative to the GOT pointer: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize each side of the PLUS separately; reuse REG for
	     the offset only if the base did not consume it.  */
	  base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
	  if (base == reg)
	    offset
	      = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
					     NULL_RTX);
	  else
	    offset
	      = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
	}
      else
	return orig;

      if (CONST_INT_P (offset))
	{
	  if (INT16_P (INTVAL (offset)))
	    return plus_constant (Pmode, base, INTVAL (offset));
	  else
	    {
	      gcc_assert (! reload_in_progress && ! reload_completed);
	      offset = force_reg (Pmode, offset);
	    }
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
/* Worker for TARGET_LEGITIMIZE_ADDRESS: under PIC, route symbolic
   addresses through m32r_legitimize_pic_address; otherwise X is
   returned unchanged.  */

static rtx
m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
  if (flag_pic)
    return m32r_legitimize_pic_address (x, NULL_RTX);
  else
    return x;
}
/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.  LO_SUM
   addresses are mode-dependent; everything else is not.  */

static bool
m32r_mode_dependent_address_p (const_rtx addr,
			       addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == LO_SUM)
    return true;

  return false;
}
/* Nested function support.  */

/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

static void
m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			    rtx fnaddr ATTRIBUTE_UNUSED,
			    rtx cxt ATTRIBUTE_UNUSED)
{
  /* All parameters are marked unused; the body appears to be empty in
     this revision — NOTE(review): confirm against the repository copy.  */
}
2056 m32r_file_start (void)
2058 default_file_start ();
2060 if (flag_verbose_asm
)
2061 fprintf (asm_out_file
,
2062 "%s M32R/D special options: -G %d\n",
2063 ASM_COMMENT_START
, g_switch_value
);
2065 if (TARGET_LITTLE_ENDIAN
)
2066 fprintf (asm_out_file
, "\t.little\n");
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.  */

static void
m32r_print_operand (FILE * file, rtx x, int code)
{
  rtx addr;

  switch (code)
    {
      /* The 's' and 'p' codes are used by output_block_move() to
	 indicate post-increment 's'tores and 'p're-increment loads.  */
    case 's':
      if (REG_P (x))
	fprintf (file, "@+%s", reg_names [REGNO (x)]);
      else
	output_operand_lossage ("invalid operand to %%s code");
      return;

    case 'p':
      if (REG_P (x))
	fprintf (file, "@%s+", reg_names [REGNO (x)]);
      else
	output_operand_lossage ("invalid operand to %%p code");
      return;

    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (REG_P (x))
	fputs (reg_names[REGNO (x)+1], file);
      else if (MEM_P (x))
	{
	  fprintf (file, "@(");
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (Pmode, XEXP (x, 0), 4));
	  fputc (')', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;

    case 'H' : /* High word.  */
    case 'L' : /* Low word.  */
      if (REG_P (x))
	{
	  /* L = least significant word, H = most significant word.  */
	  if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (CONST_INT_P (x)
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		   code == 'L' ? INTVAL (first) : INTVAL (second));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;

    case 'A' :
      {
	char str[30];

	/* Only float CONST_DOUBLEs may be printed with 'A'.  */
	if (GET_CODE (x) != CONST_DOUBLE
	    || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
	  fatal_insn ("bad insn for 'A'", x);

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }

    case 'B' : /* Bottom half.  */
    case 'T' : /* Top half.  */
      /* Output the argument to a `seth' insn (sets the Top half-word).
	 For constants output arguments to a seth/or3 pair to set Top and
	 Bottom halves.  For symbols output arguments to a seth/add3 pair to
	 set Top and Bottom halves.  The difference exists because for
	 constants seth/or3 is more readable but for symbols we need to use
	 the same scheme as `ld' and `st' insns (16-bit addend is signed).  */
      switch (GET_CODE (x))
	{
	case CONST_INT :
	case CONST_DOUBLE :
	  {
	    rtx first, second;

	    split_double (x, &first, &second);
	    x = WORDS_BIG_ENDIAN ? second : first;
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (code == 'B'
		      ? INTVAL (x) & 0xffff
		      : (INTVAL (x) >> 16) & 0xffff));
	  }
	  return;

	case CONST :
	case SYMBOL_REF :
	  if (code == 'B'
	      && small_data_operand (x, VOIDmode))
	    {
	      fputs ("sda(", file);
	      output_addr_const (file, x);
	      fputc (')', file);
	    }
	  else
	    {
	      fputs (code == 'T' ? "shigh(" : "low(", file);
	      output_addr_const (file, x);
	      fputc (')', file);
	    }
	  return;

	default :
	  output_operand_lossage ("invalid operand to %%T/%%B code");
	  return;
	}
      break;

    case 'U' :
      /* ??? wip */
      /* Output a load/store with update indicator if appropriate.  */
      if (MEM_P (x))
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	  else
	    fputs (".b", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;

    case 'N' :
      /* Print a constant value negated.  */
      if (CONST_INT_P (x))
	output_addr_const (file, GEN_INT (- INTVAL (x)));
      else
	output_operand_lossage ("invalid operand to %%N code");
      return;

    case 'X' :
      /* Print a const_int in hex.  Used in comments.  */
      if (CONST_INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case '#' :
      fputs (IMMEDIATE_PREFIX, file);
      return;

    case 0 :
      /* Do nothing special.  */
      break;

    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;

    case MEM :
      addr = XEXP (x, 0);
      if (GET_CODE (addr) == PRE_INC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("pre-increment address is not a register", x);

	  fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else if (GET_CODE (addr) == PRE_DEC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("pre-decrement address is not a register", x);

	  fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else if (GET_CODE (addr) == POST_INC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("post-increment address is not a register", x);

	  fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else
	{
	  fputs ("@(", file);
	  output_address (XEXP (x, 0));
	  fputc (')', file);
	}
      break;

    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  long l;

	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}

      /* Fall through.  Let output_addr_const deal with it.  */

    default :
      output_addr_const (file, x);
      break;
    }
}
/* Print a memory address as an operand to reference that memory location.  */

static void
m32r_print_operand_address (FILE * file, rtx addr)
{
  rtx base;
  rtx index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;

    case PLUS :
      /* Split the PLUS into base / constant offset / index register.  */
      if (CONST_INT_P (XEXP (addr, 0)))
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (CONST_INT_P (XEXP (addr, 1)))
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (REG_P (base))
	{
	  /* Print the offset first (if present) to conform to the manual.  */
	  if (index == 0)
	    {
	      if (offset != 0)
		fprintf (file, "%d,", offset);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  /* The chip doesn't support this, but left in for generality.  */
	  else if (REG_P (index))
	    fprintf (file, "%s,%s",
		     reg_names[REGNO (base)], reg_names[REGNO (index)]);
	  /* Not sure this can happen, but leave in for now.  */
	  else if (GET_CODE (index) == SYMBOL_REF)
	    {
	      fprintf (file, "%d,", offset);
	      output_addr_const (file, index);
	      fputc (',', file);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  else
	    fatal_insn ("bad address", addr);
	}
      else if (GET_CODE (base) == LO_SUM)
	{
	  gcc_assert (!index && REG_P (XEXP (base, 0)));
	  if (small_data_operand (XEXP (base, 1), VOIDmode))
	    fputs ("sda(", file);
	  else
	    fputs ("low(", file);
	  output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
						  offset));
	  fputs ("),", file);
	  fputs (reg_names[REGNO (XEXP (base, 0))], file);
	}
      else
	fatal_insn ("bad address", addr);
      break;

    case LO_SUM :
      if (!REG_P (XEXP (addr, 0)))
	fatal_insn ("lo_sum not of register", addr);
      if (small_data_operand (XEXP (addr, 1), VOIDmode))
	fputs ("sda(", file);
      else
	fputs ("low(", file);
      output_addr_const (file, XEXP (addr, 1));
      fputs ("),", file);
      fputs (reg_names[REGNO (XEXP (addr, 0))], file);
      break;

    case PRE_INC :	/* Assume SImode.  */
      fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PRE_DEC :	/* Assume SImode.  */
      fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC :	/* Assume SImode.  */
      fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    default :
      output_addr_const (file, addr);
      break;
    }
}
2388 m32r_print_operand_punct_valid_p (unsigned char code
)
2390 return m32r_punct_chars
[code
];
2393 /* Return true if the operands are the constants 0 and 1. */
2396 zero_and_one (rtx operand1
, rtx operand2
)
2399 CONST_INT_P (operand1
)
2400 && CONST_INT_P (operand2
)
2401 && ( ((INTVAL (operand1
) == 0) && (INTVAL (operand2
) == 1))
2402 ||((INTVAL (operand1
) == 1) && (INTVAL (operand2
) == 0)));
/* Generate the correct assembler code to handle the conditional loading of a
   value into a register.  It is known that the operands satisfy the
   conditional_move_operand() function above.  The destination is operand[0].
   The condition is operand [1].  The 'true' value is operand [2] and the
   'false' value is operand [3].  */

char *
emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
{
  /* Assembled into here; returned to the caller, so it must be static.  */
  static char buffer [100];
  const char * dest = reg_names [REGNO (operands [0])];

  buffer [0] = 0;

  /* Destination must be a register.  */
  gcc_assert (REG_P (operands [0]));
  gcc_assert (conditional_move_operand (operands [2], SImode));
  gcc_assert (conditional_move_operand (operands [3], SImode));

  /* Check to see if the test is reversed.  */
  if (GET_CODE (operands [1]) == NE)
    {
      rtx tmp = operands [2];
      operands [2] = operands [3];
      operands [3] = tmp;
    }

  /* Copy the condition bit register into the destination.  */
  sprintf (buffer, "mvfc %s, cbr", dest);

  /* If the true value was '0' then we need to invert the results of the move.  */
  if (INTVAL (operands [2]) == 0)
    sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
	     dest, dest);

  return buffer;
}
/* Returns true if the registers contained in the two
   rtl expressions are different.  */

int
m32r_not_same_reg (rtx a, rtx b)
{
  /* Distinct sentinels so two non-registers always compare unequal.  */
  int reg_a = -1;
  int reg_b = -2;

  /* Strip any number of SUBREG wrappers to reach the underlying reg.  */
  while (GET_CODE (a) == SUBREG)
    a = SUBREG_REG (a);

  if (REG_P (a))
    reg_a = REGNO (a);

  while (GET_CODE (b) == SUBREG)
    b = SUBREG_REG (b);

  if (REG_P (b))
    reg_b = REGNO (b);

  return reg_a != reg_b;
}
/* Build a SYMBOL_REF for the function NAME, tagged with the code-model
   flags that the active -mmodel option requires.  */

rtx
m32r_function_symbol (const char *name)
{
  int extra_flags = 0;
  enum m32r_model model;
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);

  if (TARGET_MODEL_SMALL)
    model = M32R_MODEL_SMALL;
  else if (TARGET_MODEL_MEDIUM)
    model = M32R_MODEL_MEDIUM;
  else if (TARGET_MODEL_LARGE)
    model = M32R_MODEL_LARGE;
  else
    gcc_unreachable (); /* Shouldn't happen.  */
  /* Stash the model in the target-dependent symbol flag bits; decoded
     later by SYMBOL_REF_MODEL.  */
  extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;

  if (extra_flags)
    SYMBOL_REF_FLAGS (sym) |= extra_flags;

  return sym;
}
/* Use a library function to move some bytes.  Emits a call to memcpy
   with destination DEST_REG, source SRC_REG and byte count BYTES_RTX.  */

static void
block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
{
  /* We want to pass the size as Pmode, which will normally be SImode
     but will be DImode if we are using 64-bit longs and pointers.  */
  if (GET_MODE (bytes_rtx) != VOIDmode
      && GET_MODE (bytes_rtx) != Pmode)
    bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);

  emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
		     VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
		     convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
				      TYPE_UNSIGNED (sizetype)),
		     TYPE_MODE (sizetype));
}
/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.

   Returns 1 upon success, 0 otherwise.  */

int
m32r_expand_block_move (rtx operands[])
{
  rtx orig_dst = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = CONST_INT_P (bytes_rtx);
  HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
  int align = INTVAL (align_rtx);
  int leftover;
  rtx src_reg;
  rtx dst_reg;

  /* A known-zero (or negative) length needs no code at all.  */
  if (constp && bytes <= 0)
    return 1;

  /* Move the address into scratch registers.  */
  dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
  src_reg = copy_addr_to_reg (XEXP (orig_src, 0));

  if (align > UNITS_PER_WORD)
    align = UNITS_PER_WORD;

  /* If we prefer size over speed, always use a function call.
     If we do not know the size, use a function call.
     If the blocks are not word aligned, use a function call.  */
  if (optimize_size || ! constp || align != UNITS_PER_WORD)
    {
      block_move_call (dst_reg, src_reg, bytes_rtx);
      return 0;
    }

  leftover = bytes % MAX_MOVE_BYTES;
  bytes -= leftover;

  /* If necessary, generate a loop to handle the bulk of the copy.  */
  if (bytes)
    {
      rtx_code_label *label = NULL;
      rtx final_src = NULL_RTX;
      rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
      rtx rounded_total = GEN_INT (bytes);
      rtx new_dst_reg = gen_reg_rtx (SImode);
      rtx new_src_reg = gen_reg_rtx (SImode);

      /* If we are going to have to perform this loop more than
	 once, then generate a label and compute the address the
	 source register will contain upon completion of the final
	 iteration.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  final_src = gen_reg_rtx (Pmode);

	  if (INT16_P (bytes))
	    emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
	  else
	    {
	      emit_insn (gen_movsi (final_src, rounded_total));
	      emit_insn (gen_addsi3 (final_src, final_src, src_reg));
	    }

	  label = gen_label_rtx ();
	  emit_label (label);
	}

      /* It is known that output_block_move() will update src_reg to point
	 to the word after the end of the source block, and dst_reg to point
	 to the last word of the destination block, provided that the block
	 is MAX_MOVE_BYTES long.  */
      emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
					new_dst_reg, new_src_reg));
      emit_move_insn (dst_reg, new_dst_reg);
      emit_move_insn (src_reg, new_src_reg);
      emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));

      if (bytes > MAX_MOVE_BYTES)
	{
	  rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
	  emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
	}
    }

  /* Copy any tail that is smaller than one full loop iteration.  */
  if (leftover)
    emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
				      gen_reg_rtx (SImode),
				      gen_reg_rtx (SImode)));
  return 1;
}
/* NOTE(review): this region was mangled during extraction -- the embedded
   numbers (2608, 2617, ...) are stray original line numbers, and several
   source lines (the return type, local declarations such as `first_time',
   `got_extra', `last_shift' and `my_operands', and most of the loop/branch
   structure) are missing.  The code is kept byte-for-byte as found; only
   comments have been added.  Recover the missing lines from the pristine
   file before compiling.  */
2608 /* Emit load/stores for a small constant word aligned block_move.
2610 operands[0] is the memory address of the destination.
2611 operands[1] is the memory address of the source.
2612 operands[2] is the number of bytes to move.
2613 operands[3] is a temp register.
2614 operands[4] is a temp register. */
/* Output the assembly for one constant-size block copy of at most
   MAX_MOVE_BYTES bytes (enforced by the assertion below).  */
2617 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED
, rtx operands
[])
/* The byte count is a compile-time constant supplied as operands[2].  */
2619 HOST_WIDE_INT bytes
= INTVAL (operands
[2]);
2623 gcc_assert (bytes
>= 1 && bytes
<= MAX_MOVE_BYTES
);
2625 /* We do not have a post-increment store available, so the first set of
2626 stores are done without any increment, then the remaining ones can use
2627 the pre-increment addressing mode.
2629 Note: expand_block_move() also relies upon this behavior when building
2630 loops to copy large blocks. */
/* 8-byte chunk: two post-increment loads (%p1) into the temp registers,
   then two stores -- presumably the first store of the whole move uses
   plain @%0 and later ones the pre-increment %s0 form, per the comment
   above; the selecting `if' was lost in extraction.  */
2639 output_asm_insn ("ld\t%5, %p1", operands
);
2640 output_asm_insn ("ld\t%6, %p1", operands
);
2641 output_asm_insn ("st\t%5, @%0", operands
);
2642 output_asm_insn ("st\t%6, %s0", operands
);
2646 output_asm_insn ("ld\t%5, %p1", operands
);
2647 output_asm_insn ("ld\t%6, %p1", operands
);
2648 output_asm_insn ("st\t%5, %s0", operands
);
2649 output_asm_insn ("st\t%6, %s0", operands
);
/* 4-7 byte chunk: one full word is loaded and stored; a second load
   into %6 appears here too, presumably to prefetch the partial trailing
   word (the guarding conditions were lost in extraction).  */
2654 else if (bytes
>= 4)
2659 output_asm_insn ("ld\t%5, %p1", operands
);
2662 output_asm_insn ("ld\t%6, %p1", operands
);
2665 output_asm_insn ("st\t%5, @%0", operands
);
2667 output_asm_insn ("st\t%5, %s0", operands
);
/* Tail of 1-3 bytes, handled with halfword/byte stores.  */
2673 /* Get the entire next word, even though we do not want all of it.
2674 This saves us from doing several smaller loads, and we assume that
2675 we cannot cause a page fault when at least part of the word is in
2676 valid memory [since we don't get called if things aren't properly
2678 int dst_offset
= first_time
? 0 : 4;
2679 /* The amount of increment we have to make to the
2680 destination pointer. */
2681 int dst_inc_amount
= dst_offset
+ bytes
- 4;
2682 /* The same for the source pointer. */
2683 int src_inc_amount
= bytes
;
2687 /* If got_extra is true then we have already loaded
2688 the next word as part of loading and storing the previous word. */
2690 output_asm_insn ("ld\t%6, @%1", operands
);
/* Store the high halfword: arithmetic shift right by 16 into %5, then
   a store-halfword at (dst_offset,%0) via a scratch operand array.  */
2696 output_asm_insn ("sra3\t%5, %6, #16", operands
);
2697 my_operands
[0] = operands
[5];
2698 my_operands
[1] = GEN_INT (dst_offset
);
2699 my_operands
[2] = operands
[0];
2700 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands
);
2702 /* If there is a byte left to store then increment the
2703 destination address and shift the contents of the source
2704 register down by 8 bits. We could not do the address
2705 increment in the store half word instruction, because it does
2706 not have an auto increment mode. */
2707 if (bytes
> 0) /* assert (bytes == 1) */
/* Final byte: shift %6 down by last_shift, then store-byte at
   (dst_offset,%0).  */
2718 my_operands
[0] = operands
[6];
2719 my_operands
[1] = GEN_INT (last_shift
);
2720 output_asm_insn ("srai\t%0, #%1", my_operands
);
2721 my_operands
[0] = operands
[6];
2722 my_operands
[1] = GEN_INT (dst_offset
);
2723 my_operands
[2] = operands
[0];
2724 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands
);
2727 /* Update the destination pointer if needed. We have to do
2728 this so that the patterns matches what we output in this
2731 && !find_reg_note (insn
, REG_UNUSED
, operands
[0]))
2733 my_operands
[0] = operands
[0];
2734 my_operands
[1] = GEN_INT (dst_inc_amount
);
2735 output_asm_insn ("addi\t%0, #%1", my_operands
);
2738 /* Update the source pointer if needed. We have to do this
2739 so that the patterns matches what we output in this
2742 && !find_reg_note (insn
, REG_UNUSED
, operands
[1]))
2744 my_operands
[0] = operands
[1];
2745 my_operands
[1] = GEN_INT (src_inc_amount
);
2746 output_asm_insn ("addi\t%0, #%1", my_operands
);
2756 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2759 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
2760 unsigned int new_reg
)
2762 /* Interrupt routines can't clobber any register that isn't already used. */
2763 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl
))
2764 && !df_regs_ever_live_p (new_reg
))
2771 m32r_return_addr (int count
)
2776 return get_hard_reg_initial_val (Pmode
, RETURN_ADDR_REGNUM
);
2780 m32r_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
2782 emit_move_insn (adjust_address (m_tramp
, SImode
, 0),
2783 gen_int_mode (TARGET_LITTLE_ENDIAN
?
2784 0x017e8e17 : 0x178e7e01, SImode
));
2785 emit_move_insn (adjust_address (m_tramp
, SImode
, 4),
2786 gen_int_mode (TARGET_LITTLE_ENDIAN
?
2787 0x0c00ae86 : 0x86ae000c, SImode
));
2788 emit_move_insn (adjust_address (m_tramp
, SImode
, 8),
2789 gen_int_mode (TARGET_LITTLE_ENDIAN
?
2790 0xe627871e : 0x1e8727e6, SImode
));
2791 emit_move_insn (adjust_address (m_tramp
, SImode
, 12),
2792 gen_int_mode (TARGET_LITTLE_ENDIAN
?
2793 0xc616c626 : 0x26c61fc6, SImode
));
2794 emit_move_insn (adjust_address (m_tramp
, SImode
, 16),
2796 emit_move_insn (adjust_address (m_tramp
, SImode
, 20),
2797 XEXP (DECL_RTL (fndecl
), 0));
2799 if (m32r_cache_flush_trap
>= 0)
2800 emit_insn (gen_flush_icache
2801 (validize_mem (adjust_address (m_tramp
, SImode
, 0)),
2802 gen_int_mode (m32r_cache_flush_trap
, SImode
)));
2803 else if (m32r_cache_flush_func
&& m32r_cache_flush_func
[0])
2804 emit_library_call (m32r_function_symbol (m32r_cache_flush_func
),
2805 LCT_NORMAL
, VOIDmode
, 3, XEXP (m_tramp
, 0), Pmode
,
2806 gen_int_mode (TRAMPOLINE_SIZE
, SImode
), SImode
,
2807 GEN_INT (3), SImode
);
2810 /* True if X is a reg that can be used as a base reg. */
2813 m32r_rtx_ok_for_base_p (const_rtx x
, bool strict
)
2820 if (GPR_P (REGNO (x
)))
2825 if (GPR_P (REGNO (x
))
2826 || REGNO (x
) == ARG_POINTER_REGNUM
2827 || ! HARD_REGISTER_P (x
))
2835 m32r_rtx_ok_for_offset_p (const_rtx x
)
2837 return (CONST_INT_P (x
) && INT16_P (INTVAL (x
)));
2841 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED
,
2842 const_rtx x
, bool strict
)
2844 if (GET_CODE (x
) == PLUS
2845 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
)
2846 && m32r_rtx_ok_for_offset_p (XEXP (x
, 1)))
2852 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2853 since more than one instruction will be required. */
2856 m32r_legitimate_lo_sum_addres_p (machine_mode mode
, const_rtx x
,
2859 if (GET_CODE (x
) == LO_SUM
2860 && (mode
!= BLKmode
&& GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
)
2861 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
)
2862 && CONSTANT_P (XEXP (x
, 1)))
2868 /* Is this a load and increment operation. */
2871 m32r_load_postinc_p (machine_mode mode
, const_rtx x
, bool strict
)
2873 if ((mode
== SImode
|| mode
== SFmode
)
2874 && GET_CODE (x
) == POST_INC
2875 && REG_P (XEXP (x
, 0))
2876 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
))
2882 /* Is this an increment/decrement and store operation. */
2885 m32r_store_preinc_predec_p (machine_mode mode
, const_rtx x
, bool strict
)
2887 if ((mode
== SImode
|| mode
== SFmode
)
2888 && (GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2889 && REG_P (XEXP (x
, 0)) \
2890 && m32r_rtx_ok_for_base_p (XEXP (x
, 0), strict
))
2896 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2899 m32r_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2901 if (m32r_rtx_ok_for_base_p (x
, strict
)
2902 || m32r_legitimate_offset_addres_p (mode
, x
, strict
)
2903 || m32r_legitimate_lo_sum_addres_p (mode
, x
, strict
)
2904 || m32r_load_postinc_p (mode
, x
, strict
)
2905 || m32r_store_preinc_predec_p (mode
, x
, strict
))
2912 m32r_conditional_register_usage (void)
2916 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2917 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2921 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2923 We don't allow (plus symbol large-constant) as the relocations can't
2924 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2925 We allow all CONST_DOUBLE's as the md file patterns will force the
2926 constant to memory if they can't handle them. */
2929 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2931 return !(GET_CODE (x
) == CONST
2932 && GET_CODE (XEXP (x
, 0)) == PLUS
2933 && (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
2934 || GET_CODE (XEXP (XEXP (x
, 0), 0)) == LABEL_REF
)
2935 && CONST_INT_P (XEXP (XEXP (x
, 0), 1))
2936 && UINTVAL (XEXP (XEXP (x
, 0), 1)) > 32767);