1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "ggc.h"
39 #include "integrate.h"
40 #include "df.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "tm-constrs.h"
45 #include "opts.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Machine-specific symbol_ref flags. */
51 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
52 #define SYMBOL_REF_MODEL(X) \
53 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
54
55 /* For string literals, etc. */
56 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
57
58 /* Forward declarations.  */
59 static bool m32r_handle_option (struct gcc_options *, struct gcc_options *,
60 const struct cl_decoded_option *, location_t);
61 static void m32r_option_override (void);
62 static void init_reg_tables (void);
63 static void block_move_call (rtx, rtx, rtx);
64 static int m32r_is_insn (rtx);
65 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
66 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
67 static bool m32r_mode_dependent_address_p (const_rtx);
68 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
69 static void m32r_print_operand (FILE *, rtx, int);
70 static void m32r_print_operand_address (FILE *, rtx);
71 static bool m32r_print_operand_punct_valid_p (unsigned char code);
72 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
73 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
74
75 static void m32r_file_start (void);
76
77 static int m32r_adjust_priority (rtx, int);
78 static int m32r_issue_rate (void);
79
80 static void m32r_encode_section_info (tree, rtx, int);
81 static bool m32r_in_small_data_p (const_tree);
82 static bool m32r_return_in_memory (const_tree, const_tree);
83 static rtx m32r_function_value (const_tree, const_tree, bool);
84 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
85 static bool m32r_function_value_regno_p (const unsigned int);
86 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
87 tree, int *, int);
88 static void init_idents (void);
89 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
90 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
91 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
92 const_tree, bool);
93 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
94 tree, bool);
95 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
98 const_tree, bool);
99 static bool m32r_can_eliminate (const int, const int);
100 static void m32r_conditional_register_usage (void);
101 static void m32r_trampoline_init (rtx, tree, rtx);
102 static bool m32r_legitimate_constant_p (enum machine_mode, rtx);
103 \f
104 /* M32R specific attributes. */
105
106 static const struct attribute_spec m32r_attribute_table[] =
107 {
108 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
109 affects_type_identity } */
110 { "interrupt", 0, 0, true, false, false, NULL, false },
111 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
112 false },
113 { NULL, 0, 0, false, false, false, NULL, false }
114 };
115
116 static const struct default_options m32r_option_optimization_table[] =
117 {
118 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
119 { OPT_LEVELS_1_PLUS, OPT_fregmove, NULL, 1 },
120 { OPT_LEVELS_NONE, 0, NULL, 0 }
121 };
122 \f
123 /* Initialize the GCC target structure. */
124 #undef TARGET_ATTRIBUTE_TABLE
125 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
126
127 #undef TARGET_LEGITIMATE_ADDRESS_P
128 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
129 #undef TARGET_LEGITIMIZE_ADDRESS
130 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
131 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
132 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
133
134 #undef TARGET_ASM_ALIGNED_HI_OP
135 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
136 #undef TARGET_ASM_ALIGNED_SI_OP
137 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
138
139 #undef TARGET_PRINT_OPERAND
140 #define TARGET_PRINT_OPERAND m32r_print_operand
141 #undef TARGET_PRINT_OPERAND_ADDRESS
142 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
143 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
144 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
145
146 #undef TARGET_ASM_FUNCTION_PROLOGUE
147 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
148 #undef TARGET_ASM_FUNCTION_EPILOGUE
149 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
150
151 #undef TARGET_ASM_FILE_START
152 #define TARGET_ASM_FILE_START m32r_file_start
153
154 #undef TARGET_SCHED_ADJUST_PRIORITY
155 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
156 #undef TARGET_SCHED_ISSUE_RATE
157 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
158
159 #undef TARGET_DEFAULT_TARGET_FLAGS
160 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
161 #undef TARGET_HANDLE_OPTION
162 #define TARGET_HANDLE_OPTION m32r_handle_option
163 #undef TARGET_OPTION_OVERRIDE
164 #define TARGET_OPTION_OVERRIDE m32r_option_override
165 #undef TARGET_OPTION_OPTIMIZATION_TABLE
166 #define TARGET_OPTION_OPTIMIZATION_TABLE m32r_option_optimization_table
167
168 #undef TARGET_ENCODE_SECTION_INFO
169 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
170 #undef TARGET_IN_SMALL_DATA_P
171 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
172
173
174 #undef TARGET_MEMORY_MOVE_COST
175 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
176 #undef TARGET_RTX_COSTS
177 #define TARGET_RTX_COSTS m32r_rtx_costs
178 #undef TARGET_ADDRESS_COST
179 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
180
181 #undef TARGET_PROMOTE_PROTOTYPES
182 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
183 #undef TARGET_RETURN_IN_MEMORY
184 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
185
186 #undef TARGET_FUNCTION_VALUE
187 #define TARGET_FUNCTION_VALUE m32r_function_value
188 #undef TARGET_LIBCALL_VALUE
189 #define TARGET_LIBCALL_VALUE m32r_libcall_value
190 #undef TARGET_FUNCTION_VALUE_REGNO_P
191 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
192
193 #undef TARGET_SETUP_INCOMING_VARARGS
194 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
195 #undef TARGET_MUST_PASS_IN_STACK
196 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
197 #undef TARGET_PASS_BY_REFERENCE
198 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
199 #undef TARGET_ARG_PARTIAL_BYTES
200 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
201 #undef TARGET_FUNCTION_ARG
202 #define TARGET_FUNCTION_ARG m32r_function_arg
203 #undef TARGET_FUNCTION_ARG_ADVANCE
204 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
205
206 #undef TARGET_CAN_ELIMINATE
207 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
208
209 #undef TARGET_CONDITIONAL_REGISTER_USAGE
210 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
211
212 #undef TARGET_TRAMPOLINE_INIT
213 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
214
215 #undef TARGET_LEGITIMATE_CONSTANT_P
216 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
217
218 #undef TARGET_EXCEPT_UNWIND_INFO
219 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
220
221 struct gcc_target targetm = TARGET_INITIALIZER;
222 \f
223 /* Implement TARGET_HANDLE_OPTION. */
224
225 static bool
226 m32r_handle_option (struct gcc_options *opts,
227 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
228 const struct cl_decoded_option *decoded,
229 location_t loc ATTRIBUTE_UNUSED)
230 {
231 size_t code = decoded->opt_index;
232 int value = decoded->value;
233
234 switch (code)
235 {
236 case OPT_m32r:
237 opts->x_target_flags &= ~(MASK_M32R2 | MASK_M32RX);
238 return true;
239
240 case OPT_mno_flush_func:
241 opts->x_m32r_cache_flush_func = NULL;
242 return true;
243
244 case OPT_mflush_trap_:
245 return value <= 15;
246
247 default:
248 return true;
249 }
250 }
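/* Illustrative note (not part of the original sources): the handler above
   only validates or records a few options.  For example, under these
   definitions "-mflush-trap=12" is accepted (12 <= 15) while
   "-mflush-trap=20" is rejected, "-mno-flush-func" clears
   x_m32r_cache_flush_func, and "-m32r" simply clears the M32RX/M32R2 bits
   in x_target_flags.  Everything else falls through to the default case
   and returns true.  */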
251
252 /* Called by m32r_option_override to initialize various things. */
253
254 void
255 m32r_init (void)
256 {
257 init_reg_tables ();
258
259 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
260 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
261 m32r_punct_chars['#'] = 1;
262 m32r_punct_chars['@'] = 1; /* ??? no longer used */
263
264 /* Provide default value if not specified. */
265 if (!global_options_set.x_g_switch_value)
266 g_switch_value = SDATA_DEFAULT_SIZE;
267 }
268
269 static void
270 m32r_option_override (void)
271 {
272 /* These need to be done at start up.
273 It's convenient to do them here. */
274 m32r_init ();
275 SUBTARGET_OVERRIDE_OPTIONS;
276 }
277
278 /* Vectors to keep interesting information about registers where it can easily
279 be got.  We used to use the actual mode value as the bit number, but there
280 is (or may be) more than 32 modes now. Instead we use two tables: one
281 indexed by hard register number, and one indexed by mode. */
282
283 /* The purpose of m32r_mode_class is to shrink the range of modes so that
284 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
285 mapped into one m32r_mode_class mode. */
286
287 enum m32r_mode_class
288 {
289 C_MODE,
290 S_MODE, D_MODE, T_MODE, O_MODE,
291 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
292 };
293
294 /* Modes for condition codes. */
295 #define C_MODES (1 << (int) C_MODE)
296
297 /* Modes for single-word and smaller quantities. */
298 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
299
300 /* Modes for double-word and smaller quantities. */
301 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
302
303 /* Modes for quad-word and smaller quantities. */
304 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
305
306 /* Modes for accumulators. */
307 #define A_MODES (1 << (int) A_MODE)
308
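/* Illustrative note: the idea is that each machine mode is collapsed to one
   m32r_mode_class bit, so a register/mode check reduces to a mask test.
   For instance, SImode and SFmode both land in the S_MODES mask, so any
   register whose entry in the m32r_hard_regno_mode_ok table below includes
   S_MODES can hold either of them; the test itself is performed in m32r.h.  */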
309 /* Value is 1 if register/mode pair is acceptable on the m32r.  */
310
311 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
312 {
313 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
314 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
315 S_MODES, C_MODES, A_MODES, A_MODES
316 };
317
318 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
319
320 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
321
322 static void
323 init_reg_tables (void)
324 {
325 int i;
326
327 for (i = 0; i < NUM_MACHINE_MODES; i++)
328 {
329 switch (GET_MODE_CLASS (i))
330 {
331 case MODE_INT:
332 case MODE_PARTIAL_INT:
333 case MODE_COMPLEX_INT:
334 if (GET_MODE_SIZE (i) <= 4)
335 m32r_mode_class[i] = 1 << (int) S_MODE;
336 else if (GET_MODE_SIZE (i) == 8)
337 m32r_mode_class[i] = 1 << (int) D_MODE;
338 else if (GET_MODE_SIZE (i) == 16)
339 m32r_mode_class[i] = 1 << (int) T_MODE;
340 else if (GET_MODE_SIZE (i) == 32)
341 m32r_mode_class[i] = 1 << (int) O_MODE;
342 else
343 m32r_mode_class[i] = 0;
344 break;
345 case MODE_FLOAT:
346 case MODE_COMPLEX_FLOAT:
347 if (GET_MODE_SIZE (i) <= 4)
348 m32r_mode_class[i] = 1 << (int) SF_MODE;
349 else if (GET_MODE_SIZE (i) == 8)
350 m32r_mode_class[i] = 1 << (int) DF_MODE;
351 else if (GET_MODE_SIZE (i) == 16)
352 m32r_mode_class[i] = 1 << (int) TF_MODE;
353 else if (GET_MODE_SIZE (i) == 32)
354 m32r_mode_class[i] = 1 << (int) OF_MODE;
355 else
356 m32r_mode_class[i] = 0;
357 break;
358 case MODE_CC:
359 m32r_mode_class[i] = 1 << (int) C_MODE;
360 break;
361 default:
362 m32r_mode_class[i] = 0;
363 break;
364 }
365 }
366
367 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
368 {
369 if (GPR_P (i))
370 m32r_regno_reg_class[i] = GENERAL_REGS;
371 else if (i == ARG_POINTER_REGNUM)
372 m32r_regno_reg_class[i] = GENERAL_REGS;
373 else
374 m32r_regno_reg_class[i] = NO_REGS;
375 }
376 }
377 \f
378 /* M32R specific attribute support.
379
380 interrupt - for interrupt functions
381
382 model - select code model used to access object
383
384 small: addresses use 24 bits, use bl to make calls
385 medium: addresses use 32 bits, use bl to make calls
386 large: addresses use 32 bits, use seth/add3/jl to make calls
387
388 Grep for MODEL in m32r.h for more info. */
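/* Illustrative usage (a sketch based on the attribute handlers below; the
   exact spelling is documented with the M32R/D attributes in the GCC manual):

     void handler (void) __attribute__ ((interrupt));
     extern int big_table[] __attribute__ ((model (large)));

   "interrupt" takes no argument; "model" takes exactly one of small,
   medium or large (or the __small__/__medium__/__large__ forms).  */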
389
390 static tree small_ident1;
391 static tree small_ident2;
392 static tree medium_ident1;
393 static tree medium_ident2;
394 static tree large_ident1;
395 static tree large_ident2;
396
397 static void
398 init_idents (void)
399 {
400 if (small_ident1 == 0)
401 {
402 small_ident1 = get_identifier ("small");
403 small_ident2 = get_identifier ("__small__");
404 medium_ident1 = get_identifier ("medium");
405 medium_ident2 = get_identifier ("__medium__");
406 large_ident1 = get_identifier ("large");
407 large_ident2 = get_identifier ("__large__");
408 }
409 }
410
411 /* Handle an "model" attribute; arguments as in
412 struct attribute_spec.handler. */
413 static tree
414 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
415 tree args, int flags ATTRIBUTE_UNUSED,
416 bool *no_add_attrs)
417 {
418 tree arg;
419
420 init_idents ();
421 arg = TREE_VALUE (args);
422
423 if (arg != small_ident1
424 && arg != small_ident2
425 && arg != medium_ident1
426 && arg != medium_ident2
427 && arg != large_ident1
428 && arg != large_ident2)
429 {
430 warning (OPT_Wattributes, "invalid argument of %qs attribute",
431 IDENTIFIER_POINTER (name));
432 *no_add_attrs = true;
433 }
434
435 return NULL_TREE;
436 }
437 \f
438 /* Encode section information of DECL, which is either a VAR_DECL,
439 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
440
441 For the M32R we want to record:
442
443 - whether the object lives in .sdata/.sbss.
444 - what code model should be used to access the object
445 */
446
447 static void
448 m32r_encode_section_info (tree decl, rtx rtl, int first)
449 {
450 int extra_flags = 0;
451 tree model_attr;
452 enum m32r_model model;
453
454 default_encode_section_info (decl, rtl, first);
455
456 if (!DECL_P (decl))
457 return;
458
459 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
460 if (model_attr)
461 {
462 tree id;
463
464 init_idents ();
465
466 id = TREE_VALUE (TREE_VALUE (model_attr));
467
468 if (id == small_ident1 || id == small_ident2)
469 model = M32R_MODEL_SMALL;
470 else if (id == medium_ident1 || id == medium_ident2)
471 model = M32R_MODEL_MEDIUM;
472 else if (id == large_ident1 || id == large_ident2)
473 model = M32R_MODEL_LARGE;
474 else
475 gcc_unreachable (); /* shouldn't happen */
476 }
477 else
478 {
479 if (TARGET_MODEL_SMALL)
480 model = M32R_MODEL_SMALL;
481 else if (TARGET_MODEL_MEDIUM)
482 model = M32R_MODEL_MEDIUM;
483 else if (TARGET_MODEL_LARGE)
484 model = M32R_MODEL_LARGE;
485 else
486 gcc_unreachable (); /* shouldn't happen */
487 }
488 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
489
490 if (extra_flags)
491 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
492 }
493
494 /* Only mark the object as being small data area addressable if
495 it hasn't been explicitly marked with a code model.
496
497 The user can explicitly put an object in the small data area with the
498 section attribute. If the object is in sdata/sbss and marked with a
499 code model do both [put the object in .sdata and mark it as being
500 addressed with a specific code model - don't mark it as being addressed
501 with an SDA reloc though]. This is ok and might be useful at times. If
502 the object doesn't fit the linker will give an error. */
503
504 static bool
505 m32r_in_small_data_p (const_tree decl)
506 {
507 const_tree section;
508
509 if (TREE_CODE (decl) != VAR_DECL)
510 return false;
511
512 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
513 return false;
514
515 section = DECL_SECTION_NAME (decl);
516 if (section)
517 {
518 const char *const name = TREE_STRING_POINTER (section);
519 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
520 return true;
521 }
522 else
523 {
524 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
525 {
526 int size = int_size_in_bytes (TREE_TYPE (decl));
527
528 if (size > 0 && size <= g_switch_value)
529 return true;
530 }
531 }
532
533 return false;
534 }
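/* Illustrative note: with the logic above, a variable is treated as small
   data area addressable either because the user asked for it explicitly,
   e.g.

     int counter __attribute__ ((section (".sdata")));

   or because it is writable and small enough, i.e. its size is positive and
   no larger than g_switch_value (defaulting to SDATA_DEFAULT_SIZE), and
   TARGET_SDATA_NONE is not set.  Objects carrying a "model" attribute are
   never marked this way.  */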
535
536 /* Do anything needed before RTL is emitted for each function. */
537
538 void
539 m32r_init_expanders (void)
540 {
541 /* ??? At one point there was code here. The function is left in
542 to make it easy to experiment. */
543 }
544 \f
545 int
546 call_operand (rtx op, enum machine_mode mode)
547 {
548 if (!MEM_P (op))
549 return 0;
550 op = XEXP (op, 0);
551 return call_address_operand (op, mode);
552 }
553
554 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
555
556 int
557 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
558 {
559 if (! TARGET_SDATA_USE)
560 return 0;
561
562 if (GET_CODE (op) == SYMBOL_REF)
563 return SYMBOL_REF_SMALL_P (op);
564
565 if (GET_CODE (op) == CONST
566 && GET_CODE (XEXP (op, 0)) == PLUS
567 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
568 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
569 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
570
571 return 0;
572 }
573
574 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
575
576 int
577 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
578 {
579 rtx sym;
580
581 if (flag_pic)
582 return 0;
583
584 if (GET_CODE (op) == LABEL_REF)
585 return TARGET_ADDR24;
586
587 if (GET_CODE (op) == SYMBOL_REF)
588 sym = op;
589 else if (GET_CODE (op) == CONST
590 && GET_CODE (XEXP (op, 0)) == PLUS
591 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
592 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
593 sym = XEXP (XEXP (op, 0), 0);
594 else
595 return 0;
596
597 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
598 return 1;
599
600 if (TARGET_ADDR24
601 && (CONSTANT_POOL_ADDRESS_P (sym)
602 || LIT_NAME_P (XSTR (sym, 0))))
603 return 1;
604
605 return 0;
606 }
607
608 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
609
610 int
611 addr32_operand (rtx op, enum machine_mode mode)
612 {
613 rtx sym;
614
615 if (GET_CODE (op) == LABEL_REF)
616 return TARGET_ADDR32;
617
618 if (GET_CODE (op) == SYMBOL_REF)
619 sym = op;
620 else if (GET_CODE (op) == CONST
621 && GET_CODE (XEXP (op, 0)) == PLUS
622 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
623 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
624 && ! flag_pic)
625 sym = XEXP (XEXP (op, 0), 0);
626 else
627 return 0;
628
629 return (! addr24_operand (sym, mode)
630 && ! small_data_operand (sym, mode));
631 }
632
633 /* Return 1 if OP is a function that can be called with the `bl' insn. */
634
635 int
636 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
637 {
638 if (flag_pic)
639 return 1;
640
641 if (GET_CODE (op) == SYMBOL_REF)
642 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
643
644 return TARGET_CALL26;
645 }
646
647 /* Return 1 if OP is a DImode const we want to handle inline.
648 This must match the code in the movdi pattern.
649 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
650
651 int
652 easy_di_const (rtx op)
653 {
654 rtx high_rtx, low_rtx;
655 HOST_WIDE_INT high, low;
656
657 split_double (op, &high_rtx, &low_rtx);
658 high = INTVAL (high_rtx);
659 low = INTVAL (low_rtx);
660 /* Pick constants loadable with 2 16-bit `ldi' insns. */
661 if (high >= -128 && high <= 127
662 && low >= -128 && low <= 127)
663 return 1;
664 return 0;
665 }
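/* Worked example (illustrative): for the DImode constant 0x0000000500000007
   split_double yields high == 5 and low == 7, both within [-128, 127], so
   the constant is handled inline by the movdi pattern.  A value such as
   0x0000010000000000 (high == 256) fails the test and is not considered
   "easy".  */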
666
667 /* Return 1 if OP is a DFmode const we want to handle inline.
668 This must match the code in the movdf pattern.
669 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
670
671 int
672 easy_df_const (rtx op)
673 {
674 REAL_VALUE_TYPE r;
675 long l[2];
676
677 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
678 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
679 if (l[0] == 0 && l[1] == 0)
680 return 1;
681 if ((l[0] & 0xffff) == 0 && l[1] == 0)
682 return 1;
683 return 0;
684 }
685
686 /* Return 1 if OP is (mem (reg ...)).
687 This is used in insn length calcs. */
688
689 int
690 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
691 {
692 return MEM_P (op) && REG_P (XEXP (op, 0));
693 }
694
695 /* Return nonzero if TYPE must be passed by indirect reference. */
696
697 static bool
698 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
699 enum machine_mode mode, const_tree type,
700 bool named ATTRIBUTE_UNUSED)
701 {
702 int size;
703
704 if (type)
705 size = int_size_in_bytes (type);
706 else
707 size = GET_MODE_SIZE (mode);
708
709 return (size < 0 || size > 8);
710 }
711 \f
712 /* Comparisons. */
713
714 /* X and Y are two things to compare using CODE. Emit the compare insn and
715 return the rtx for compare [arg0 of the if_then_else].
716 If need_compare is true then the comparison insn must be generated, rather
717 than being subsumed into the following branch instruction. */
718
719 rtx
720 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
721 {
722 enum rtx_code compare_code;
723 enum rtx_code branch_code;
724 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
725 int must_swap = 0;
726
727 switch (code)
728 {
729 case EQ: compare_code = EQ; branch_code = NE; break;
730 case NE: compare_code = EQ; branch_code = EQ; break;
731 case LT: compare_code = LT; branch_code = NE; break;
732 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
733 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
734 case GE: compare_code = LT; branch_code = EQ; break;
735 case LTU: compare_code = LTU; branch_code = NE; break;
736 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
737 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
738 case GEU: compare_code = LTU; branch_code = EQ; break;
739
740 default:
741 gcc_unreachable ();
742 }
743
744 if (need_compare)
745 {
746 switch (compare_code)
747 {
748 case EQ:
749 if (satisfies_constraint_P (y) /* Reg equal to small const. */
750 && y != const0_rtx)
751 {
752 rtx tmp = gen_reg_rtx (SImode);
753
754 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
755 x = tmp;
756 y = const0_rtx;
757 }
758 else if (CONSTANT_P (y)) /* Reg equal to const. */
759 {
760 rtx tmp = force_reg (GET_MODE (x), y);
761 y = tmp;
762 }
763
764 if (register_operand (y, SImode) /* Reg equal to reg. */
765 || y == const0_rtx) /* Reg equal to zero. */
766 {
767 emit_insn (gen_cmp_eqsi_insn (x, y));
768
769 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
770 }
771 break;
772
773 case LT:
774 if (register_operand (y, SImode)
775 || satisfies_constraint_P (y))
776 {
777 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
778
779 switch (code)
780 {
781 case LT:
782 emit_insn (gen_cmp_ltsi_insn (x, y));
783 code = EQ;
784 break;
785 case LE:
786 if (y == const0_rtx)
787 tmp = const1_rtx;
788 else
789 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
790 emit_insn (gen_cmp_ltsi_insn (x, tmp));
791 code = EQ;
792 break;
793 case GT:
794 if (CONST_INT_P (y))
795 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
796 else
797 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
798 emit_insn (gen_cmp_ltsi_insn (x, tmp));
799 code = NE;
800 break;
801 case GE:
802 emit_insn (gen_cmp_ltsi_insn (x, y));
803 code = NE;
804 break;
805 default:
806 gcc_unreachable ();
807 }
808
809 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
810 }
811 break;
812
813 case LTU:
814 if (register_operand (y, SImode)
815 || satisfies_constraint_P (y))
816 {
817 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
818
819 switch (code)
820 {
821 case LTU:
822 emit_insn (gen_cmp_ltusi_insn (x, y));
823 code = EQ;
824 break;
825 case LEU:
826 if (y == const0_rtx)
827 tmp = const1_rtx;
828 else
829 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
830 emit_insn (gen_cmp_ltusi_insn (x, tmp));
831 code = EQ;
832 break;
833 case GTU:
834 if (CONST_INT_P (y))
835 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
836 else
837 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
838 emit_insn (gen_cmp_ltusi_insn (x, tmp));
839 code = NE;
840 break;
841 case GEU:
842 emit_insn (gen_cmp_ltusi_insn (x, y));
843 code = NE;
844 break;
845 default:
846 gcc_unreachable ();
847 }
848
849 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
850 }
851 break;
852
853 default:
854 gcc_unreachable ();
855 }
856 }
857 else
858 {
859 /* Reg/reg equal comparison. */
860 if (compare_code == EQ
861 && register_operand (y, SImode))
862 return gen_rtx_fmt_ee (code, CCmode, x, y);
863
864 /* Reg/zero signed comparison. */
865 if ((compare_code == EQ || compare_code == LT)
866 && y == const0_rtx)
867 return gen_rtx_fmt_ee (code, CCmode, x, y);
868
869 /* Reg/smallconst equal comparison. */
870 if (compare_code == EQ
871 && satisfies_constraint_P (y))
872 {
873 rtx tmp = gen_reg_rtx (SImode);
874
875 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
876 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
877 }
878
879 /* Reg/const equal comparison. */
880 if (compare_code == EQ
881 && CONSTANT_P (y))
882 {
883 rtx tmp = force_reg (GET_MODE (x), y);
884
885 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
886 }
887 }
888
889 if (CONSTANT_P (y))
890 {
891 if (must_swap)
892 y = force_reg (GET_MODE (x), y);
893 else
894 {
895 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
896
897 if (! ok_const)
898 y = force_reg (GET_MODE (x), y);
899 }
900 }
901
902 switch (compare_code)
903 {
904 case EQ :
905 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
906 break;
907 case LT :
908 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
909 break;
910 case LTU :
911 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
912 break;
913
914 default:
915 gcc_unreachable ();
916 }
917
918 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
919 }
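/* Illustrative note: the table at the top of gen_compare means that only
   EQ, LT and LTU compare instructions are ever emitted; the remaining
   relations are obtained by swapping the operands (must_swap) and/or
   inverting the branch condition.  For example, GT is handled as an LT
   compare with the operands swapped, branching when the condition bit is
   set (branch_code == NE).  */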
920
921 bool
922 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
923 {
924 enum machine_mode mode = GET_MODE (op0);
925
926 gcc_assert (mode == SImode);
927 switch (code)
928 {
929 case EQ:
930 if (!register_operand (op1, mode))
931 op1 = force_reg (mode, op1);
932
933 if (TARGET_M32RX || TARGET_M32R2)
934 {
935 if (!reg_or_zero_operand (op2, mode))
936 op2 = force_reg (mode, op2);
937
938 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
939 return true;
940 }
941 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
942 {
943 emit_insn (gen_seq_zero_insn (op0, op1));
944 return true;
945 }
946
947 if (!reg_or_eq_int16_operand (op2, mode))
948 op2 = force_reg (mode, op2);
949
950 emit_insn (gen_seq_insn (op0, op1, op2));
951 return true;
952
953 case NE:
954 if (!CONST_INT_P (op2)
955 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
956 {
957 rtx reg;
958
959 if (reload_completed || reload_in_progress)
960 return false;
961
962 reg = gen_reg_rtx (SImode);
963 emit_insn (gen_xorsi3 (reg, op1, op2));
964 op1 = reg;
965
966 if (!register_operand (op1, mode))
967 op1 = force_reg (mode, op1);
968
969 emit_insn (gen_sne_zero_insn (op0, op1));
970 return true;
971 }
972 return false;
973
974 case LT:
975 case GT:
976 if (code == GT)
977 {
978 rtx tmp = op2;
979 op2 = op1;
980 op1 = tmp;
981 code = LT;
982 }
983
984 if (!register_operand (op1, mode))
985 op1 = force_reg (mode, op1);
986
987 if (!reg_or_int16_operand (op2, mode))
988 op2 = force_reg (mode, op2);
989
990 emit_insn (gen_slt_insn (op0, op1, op2));
991 return true;
992
993 case LTU:
994 case GTU:
995 if (code == GTU)
996 {
997 rtx tmp = op2;
998 op2 = op1;
999 op1 = tmp;
1000 code = LTU;
1001 }
1002
1003 if (!register_operand (op1, mode))
1004 op1 = force_reg (mode, op1);
1005
1006 if (!reg_or_int16_operand (op2, mode))
1007 op2 = force_reg (mode, op2);
1008
1009 emit_insn (gen_sltu_insn (op0, op1, op2));
1010 return true;
1011
1012 case GE:
1013 case GEU:
1014 if (!register_operand (op1, mode))
1015 op1 = force_reg (mode, op1);
1016
1017 if (!reg_or_int16_operand (op2, mode))
1018 op2 = force_reg (mode, op2);
1019
1020 if (code == GE)
1021 emit_insn (gen_sge_insn (op0, op1, op2));
1022 else
1023 emit_insn (gen_sgeu_insn (op0, op1, op2));
1024 return true;
1025
1026 case LE:
1027 case LEU:
1028 if (!register_operand (op1, mode))
1029 op1 = force_reg (mode, op1);
1030
1031 if (CONST_INT_P (op2))
1032 {
1033 HOST_WIDE_INT value = INTVAL (op2);
1034 if (value >= 2147483647)
1035 {
1036 emit_move_insn (op0, const1_rtx);
1037 return true;
1038 }
1039
1040 op2 = GEN_INT (value + 1);
1041 if (value < -32768 || value >= 32767)
1042 op2 = force_reg (mode, op2);
1043
1044 if (code == LEU)
1045 emit_insn (gen_sltu_insn (op0, op1, op2));
1046 else
1047 emit_insn (gen_slt_insn (op0, op1, op2));
1048 return true;
1049 }
1050
1051 if (!register_operand (op2, mode))
1052 op2 = force_reg (mode, op2);
1053
1054 if (code == LEU)
1055 emit_insn (gen_sleu_insn (op0, op1, op2));
1056 else
1057 emit_insn (gen_sle_insn (op0, op1, op2));
1058 return true;
1059
1060 default:
1061 gcc_unreachable ();
1062 }
1063 }
1064
1065 \f
1066 /* Split a 2 word move (DI or DF) into component parts. */
1067
1068 rtx
1069 gen_split_move_double (rtx operands[])
1070 {
1071 enum machine_mode mode = GET_MODE (operands[0]);
1072 rtx dest = operands[0];
1073 rtx src = operands[1];
1074 rtx val;
1075
1076 /* We might have (SUBREG (MEM)) here, so just get rid of the
1077 subregs to make this code simpler. It is safe to call
1078 alter_subreg any time after reload. */
1079 if (GET_CODE (dest) == SUBREG)
1080 alter_subreg (&dest);
1081 if (GET_CODE (src) == SUBREG)
1082 alter_subreg (&src);
1083
1084 start_sequence ();
1085 if (REG_P (dest))
1086 {
1087 int dregno = REGNO (dest);
1088
1089 /* Reg = reg. */
1090 if (REG_P (src))
1091 {
1092 int sregno = REGNO (src);
1093
1094 int reverse = (dregno == sregno + 1);
1095
1096 /* We normally copy the low-numbered register first. However, if
1097 the first register of operand 0 is the same as the second register of
1098 operand 1, we must copy in the opposite order. */
1099 emit_insn (gen_rtx_SET (VOIDmode,
1100 operand_subword (dest, reverse, TRUE, mode),
1101 operand_subword (src, reverse, TRUE, mode)));
1102
1103 emit_insn (gen_rtx_SET (VOIDmode,
1104 operand_subword (dest, !reverse, TRUE, mode),
1105 operand_subword (src, !reverse, TRUE, mode)));
1106 }
1107
1108 /* Reg = constant. */
1109 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1110 {
1111 rtx words[2];
1112 split_double (src, &words[0], &words[1]);
1113 emit_insn (gen_rtx_SET (VOIDmode,
1114 operand_subword (dest, 0, TRUE, mode),
1115 words[0]));
1116
1117 emit_insn (gen_rtx_SET (VOIDmode,
1118 operand_subword (dest, 1, TRUE, mode),
1119 words[1]));
1120 }
1121
1122 /* Reg = mem. */
1123 else if (MEM_P (src))
1124 {
1125 /* If the high-address word is used in the address, we must load it
1126 last. Otherwise, load it first. */
1127 int reverse
1128 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1129
1130 /* We used to optimize loads from single registers as
1131
1132 ld r1,r3+; ld r2,r3
1133
1134 if r3 were not used subsequently. However, the REG_NOTES aren't
1135 propagated correctly by the reload phase, and it can cause bad
1136 code to be generated. We could still try:
1137
1138 ld r1,r3+; ld r2,r3; addi r3,-4
1139
1140 which saves 2 bytes and doesn't force longword alignment. */
1141 emit_insn (gen_rtx_SET (VOIDmode,
1142 operand_subword (dest, reverse, TRUE, mode),
1143 adjust_address (src, SImode,
1144 reverse * UNITS_PER_WORD)));
1145
1146 emit_insn (gen_rtx_SET (VOIDmode,
1147 operand_subword (dest, !reverse, TRUE, mode),
1148 adjust_address (src, SImode,
1149 !reverse * UNITS_PER_WORD)));
1150 }
1151 else
1152 gcc_unreachable ();
1153 }
1154
1155 /* Mem = reg. */
1156 /* We used to optimize stores through single registers as
1157
1158 st r1,r3; st r2,+r3
1159
1160 if r3 were not used subsequently. However, the REG_NOTES aren't
1161 propagated correctly by the reload phase, and it can cause bad
1162 code to be generated. We could still try:
1163
1164 st r1,r3; st r2,+r3; addi r3,-4
1165
1166 which saves 2 bytes and doesn't force longword alignment. */
1167 else if (MEM_P (dest) && REG_P (src))
1168 {
1169 emit_insn (gen_rtx_SET (VOIDmode,
1170 adjust_address (dest, SImode, 0),
1171 operand_subword (src, 0, TRUE, mode)));
1172
1173 emit_insn (gen_rtx_SET (VOIDmode,
1174 adjust_address (dest, SImode, UNITS_PER_WORD),
1175 operand_subword (src, 1, TRUE, mode)));
1176 }
1177
1178 else
1179 gcc_unreachable ();
1180
1181 val = get_insns ();
1182 end_sequence ();
1183 return val;
1184 }
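/* Worked example (illustrative): splitting (set (reg:DI 4) (reg:DI 6))
   produces two SImode sets, lower-numbered register first:
   (set (reg:SI 4) (reg:SI 6)) then (set (reg:SI 5) (reg:SI 7)).
   For (set (reg:DI 5) (reg:DI 4)) the destination's first register is the
   source's second register, so "reverse" is set and the pair is copied in
   the opposite order to avoid clobbering the source.  */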
1185
1186 \f
1187 static int
1188 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1189 tree type, bool named ATTRIBUTE_UNUSED)
1190 {
1191 int words;
1192 unsigned int size =
1193 (((mode == BLKmode && type)
1194 ? (unsigned int) int_size_in_bytes (type)
1195 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1196 / UNITS_PER_WORD;
1197
1198 if (*cum >= M32R_MAX_PARM_REGS)
1199 words = 0;
1200 else if (*cum + size > M32R_MAX_PARM_REGS)
1201 words = (*cum + size) - M32R_MAX_PARM_REGS;
1202 else
1203 words = 0;
1204
1205 return words * UNITS_PER_WORD;
1206 }
1207
1208 /* The ROUND_ADVANCE* macros are local to this file. */
1209 /* Round SIZE up to a word boundary. */
1210 #define ROUND_ADVANCE(SIZE) \
1211 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
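/* For example, with 4-byte words ROUND_ADVANCE (1) through ROUND_ADVANCE (4)
   all yield 1 word, while ROUND_ADVANCE (5) yields 2.  (Illustrative.)  */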
1212
1213 /* Round arg MODE/TYPE up to the next word boundary. */
1214 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1215 ((MODE) == BLKmode \
1216 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1217 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1218
1219 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1220 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1221
1222 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1223 a reg. This includes arguments that have to be passed by reference as the
1224 pointer to them is passed in a reg if one is available (and that is what
1225 we're given).
1226 This macro is only used in this file. */
1227 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1228 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1229
1230 /* Determine where to put an argument to a function.
1231 Value is zero to push the argument on the stack,
1232 or a hard register in which to store the argument.
1233
1234 MODE is the argument's machine mode.
1235 TYPE is the data type of the argument (as a tree).
1236 This is null for libcalls where that information may
1237 not be available.
1238 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1239 the preceding args and about the function being called.
1240 NAMED is nonzero if this argument is a named parameter
1241 (otherwise it is an extra parameter matching an ellipsis). */
1242 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1243 and the rest are pushed. */
1244
1245 static rtx
1246 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1247 const_tree type ATTRIBUTE_UNUSED,
1248 bool named ATTRIBUTE_UNUSED)
1249 {
1250 return (PASS_IN_REG_P (*cum, mode, type)
1251 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1252 : NULL_RTX);
1253 }
1254
1255 /* Update the data in CUM to advance over an argument
1256 of mode MODE and data type TYPE.
1257 (TYPE is null for libcalls where that information may not be available.) */
1258
1259 static void
1260 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1261 const_tree type, bool named ATTRIBUTE_UNUSED)
1262 {
1263 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1264 + ROUND_ADVANCE_ARG (mode, type));
1265 }
1266
1267 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1268
1269 static bool
1270 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1271 {
1272 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1273 }
1274
1275 /* Worker function for TARGET_FUNCTION_VALUE. */
1276
1277 static rtx
1278 m32r_function_value (const_tree valtype,
1279 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1280 bool outgoing ATTRIBUTE_UNUSED)
1281 {
1282 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1283 }
1284
1285 /* Worker function for TARGET_LIBCALL_VALUE. */
1286
1287 static rtx
1288 m32r_libcall_value (enum machine_mode mode,
1289 const_rtx fun ATTRIBUTE_UNUSED)
1290 {
1291 return gen_rtx_REG (mode, 0);
1292 }
1293
1294 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1295
1296 ??? What about r1 in DI/DF values. */
1297
1298 static bool
1299 m32r_function_value_regno_p (const unsigned int regno)
1300 {
1301 return (regno == 0);
1302 }
1303
1304 /* Do any needed setup for a variadic function. For the M32R, we must
1305 create a register parameter block, and then copy any anonymous arguments
1306 in registers to memory.
1307
1308 CUM has not been updated for the last named argument which has type TYPE
1309 and mode MODE, and we rely on this fact. */
1310
1311 static void
1312 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1313 tree type, int *pretend_size, int no_rtl)
1314 {
1315 int first_anon_arg;
1316
1317 if (no_rtl)
1318 return;
1319
1320 /* All BLKmode values are passed by reference. */
1321 gcc_assert (mode != BLKmode);
1322
1323 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1324 + ROUND_ADVANCE_ARG (mode, type));
1325
1326 if (first_anon_arg < M32R_MAX_PARM_REGS)
1327 {
1328 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1329 int first_reg_offset = first_anon_arg;
1330 /* Size in words to "pretend" allocate. */
1331 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1332 rtx regblock;
1333
1334 regblock = gen_frame_mem (BLKmode,
1335 plus_constant (arg_pointer_rtx,
1336 FIRST_PARM_OFFSET (0)));
1337 set_mem_alias_set (regblock, get_varargs_alias_set ());
1338 move_block_from_reg (first_reg_offset, regblock, size);
1339
1340 *pretend_size = (size * UNITS_PER_WORD);
1341 }
1342 }
1343
1344 \f
1345 /* Return true if INSN is a real, code-bearing instruction.  */
1346
1347 static int
1348 m32r_is_insn (rtx insn)
1349 {
1350 return (NONDEBUG_INSN_P (insn)
1351 && GET_CODE (PATTERN (insn)) != USE
1352 && GET_CODE (PATTERN (insn)) != CLOBBER
1353 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1354 }
1355
1356 /* Increase the priority of long instructions so that the
1357 short instructions are scheduled ahead of the long ones. */
1358
1359 static int
1360 m32r_adjust_priority (rtx insn, int priority)
1361 {
1362 if (m32r_is_insn (insn)
1363 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1364 priority <<= 3;
1365
1366 return priority;
1367 }
1368
1369 \f
1370 /* Indicate how many instructions can be issued at the same time.
1371 This is sort of a lie. The m32r can issue only 1 long insn at
1372 once, but it can issue 2 short insns. The default therefore is
1373 set at 2, but this can be overridden by the command line option
1374 -missue-rate=1. */
1375
1376 static int
1377 m32r_issue_rate (void)
1378 {
1379 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1380 }
1381 \f
1382 /* Cost functions. */
1383
1384 /* Implement TARGET_MEMORY_MOVE_COST.
1385
1386 Memory is 3 times as expensive as registers.
1387 ??? Is that the right way to look at it? */
1388
1389 static int
1390 m32r_memory_move_cost (enum machine_mode mode,
1391 reg_class_t rclass ATTRIBUTE_UNUSED,
1392 bool in ATTRIBUTE_UNUSED)
1393 {
1394 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1395 return 6;
1396 else
1397 return 12;
1398 }
1399
1400 static bool
1401 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1402 bool speed ATTRIBUTE_UNUSED)
1403 {
1404 switch (code)
1405 {
1406 /* Small integers are as cheap as registers. 4 byte values can be
1407 fetched as immediate constants - let's give that the cost of an
1408 extra insn. */
1409 case CONST_INT:
1410 if (INT16_P (INTVAL (x)))
1411 {
1412 *total = 0;
1413 return true;
1414 }
1415 /* FALLTHRU */
1416
1417 case CONST:
1418 case LABEL_REF:
1419 case SYMBOL_REF:
1420 *total = COSTS_N_INSNS (1);
1421 return true;
1422
1423 case CONST_DOUBLE:
1424 {
1425 rtx high, low;
1426
1427 split_double (x, &high, &low);
1428 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1429 + !INT16_P (INTVAL (low)));
1430 return true;
1431 }
1432
1433 case MULT:
1434 *total = COSTS_N_INSNS (3);
1435 return true;
1436
1437 case DIV:
1438 case UDIV:
1439 case MOD:
1440 case UMOD:
1441 *total = COSTS_N_INSNS (10);
1442 return true;
1443
1444 default:
1445 return false;
1446 }
1447 }
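/* Worked costs (illustrative, tracing the switch above): a CONST_INT such
   as 100 satisfies INT16_P and is free; a SYMBOL_REF or an out-of-range
   CONST_INT costs COSTS_N_INSNS (1); a CONST_DOUBLE is charged one insn per
   word that does not fit in 16 bits, so a DImode constant with both words
   out of range costs COSTS_N_INSNS (2); MULT costs 3 insns and the
   division/modulo codes cost 10.  */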
1448 \f
1449 /* Type of function DECL.
1450
1451 The result is cached. To reset the cache at the end of a function,
1452 call with DECL = NULL_TREE. */
1453
1454 enum m32r_function_type
1455 m32r_compute_function_type (tree decl)
1456 {
1457 /* Cached value. */
1458 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1459 /* Last function we were called for. */
1460 static tree last_fn = NULL_TREE;
1461
1462 /* Resetting the cached value? */
1463 if (decl == NULL_TREE)
1464 {
1465 fn_type = M32R_FUNCTION_UNKNOWN;
1466 last_fn = NULL_TREE;
1467 return fn_type;
1468 }
1469
1470 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1471 return fn_type;
1472
1473 /* Compute function type. */
1474 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1475 ? M32R_FUNCTION_INTERRUPT
1476 : M32R_FUNCTION_NORMAL);
1477
1478 last_fn = decl;
1479 return fn_type;
1480 }
1481 \f/* Function prologue/epilogue handlers. */
1482
1483 /* M32R stack frames look like:
1484
1485 Before call After call
1486 +-----------------------+ +-----------------------+
1487 | | | |
1488 high | local variables, | | local variables, |
1489 mem | reg save area, etc. | | reg save area, etc. |
1490 | | | |
1491 +-----------------------+ +-----------------------+
1492 | | | |
1493 | arguments on stack. | | arguments on stack. |
1494 | | | |
1495 SP+0->+-----------------------+ +-----------------------+
1496 | reg parm save area, |
1497 | only created for |
1498 | variable argument |
1499 | functions |
1500 +-----------------------+
1501 | previous frame ptr |
1502 +-----------------------+
1503 | |
1504 | register save area |
1505 | |
1506 +-----------------------+
1507 | return address |
1508 +-----------------------+
1509 | |
1510 | local variables |
1511 | |
1512 +-----------------------+
1513 | |
1514 | alloca allocations |
1515 | |
1516 +-----------------------+
1517 | |
1518 low | arguments on stack |
1519 memory | |
1520 SP+0->+-----------------------+
1521
1522 Notes:
1523 1) The "reg parm save area" does not exist for non variable argument fns.
1524 2) The "reg parm save area" can be eliminated completely if we saved regs
1525 containing anonymous args separately but that complicates things too
1526 much (so it's not done).
1527 3) The return address is saved after the register save area so as to have as
1528 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1529
1530 /* Structure to be filled in by m32r_compute_frame_size with register
1531 save masks, and offsets for the current function. */
1532 struct m32r_frame_info
1533 {
1534 unsigned int total_size; /* # bytes that the entire frame takes up. */
1535 unsigned int extra_size; /* # bytes of extra stuff. */
1536 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1537 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1538 unsigned int reg_size; /* # bytes needed to store regs. */
1539 unsigned int var_size; /* # bytes that variables take up. */
1540 unsigned int gmask; /* Mask of saved gp registers. */
1541 unsigned int save_fp; /* Nonzero if fp must be saved. */
1542 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1543 int initialized; /* Nonzero if frame size already calculated. */
1544 };
1545
1546 /* Current frame information calculated by m32r_compute_frame_size. */
1547 static struct m32r_frame_info current_frame_info;
1548
1549 /* Zero structure to initialize current_frame_info. */
1550 static struct m32r_frame_info zero_frame_info;
1551
1552 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1553 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1554
1555 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1556 The return address and frame pointer are treated separately.
1557 Don't consider them here. */
1558 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1559 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1560 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1561
1562 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1563 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1564
1565 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1566 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1567
1568 /* Return the bytes needed to compute the frame pointer from the current
1569 stack pointer.
1570
1571 SIZE is the size needed for local variables. */
1572
1573 unsigned int
1574 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1575 {
1576 unsigned int regno;
1577 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1578 unsigned int reg_size;
1579 unsigned int gmask;
1580 enum m32r_function_type fn_type;
1581 int interrupt_p;
1582 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1583 | crtl->profile);
1584
1585 var_size = M32R_STACK_ALIGN (size);
1586 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1587 pretend_size = crtl->args.pretend_args_size;
1588 extra_size = FIRST_PARM_OFFSET (0);
1589 total_size = extra_size + pretend_size + args_size + var_size;
1590 reg_size = 0;
1591 gmask = 0;
1592
1593 /* See if this is an interrupt handler. Call used registers must be saved
1594 for them too. */
1595 fn_type = m32r_compute_function_type (current_function_decl);
1596 interrupt_p = M32R_INTERRUPT_P (fn_type);
1597
1598 /* Calculate space needed for registers. */
1599 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1600 {
1601 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1602 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1603 {
1604 reg_size += UNITS_PER_WORD;
1605 gmask |= 1 << regno;
1606 }
1607 }
1608
1609 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1610 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1611
1612 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1613 * UNITS_PER_WORD);
1614 total_size += reg_size;
1615
1616 /* ??? Not sure this is necessary, and I don't think the epilogue
1617 handler will do the right thing if this changes total_size. */
1618 total_size = M32R_STACK_ALIGN (total_size);
1619
1620 /* frame_size = total_size - (pretend_size + reg_size); */
1621
1622 /* Save computed information. */
1623 current_frame_info.total_size = total_size;
1624 current_frame_info.extra_size = extra_size;
1625 current_frame_info.pretend_size = pretend_size;
1626 current_frame_info.var_size = var_size;
1627 current_frame_info.args_size = args_size;
1628 current_frame_info.reg_size = reg_size;
1629 current_frame_info.gmask = gmask;
1630 current_frame_info.initialized = reload_completed;
1631
1632 /* Ok, we're done. */
1633 return total_size;
1634 }
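/* Worked example (illustrative sketch, assuming 4-byte words, a 4-byte
   M32R_STACK_ALIGN and FIRST_PARM_OFFSET (0) == 0): a non-interrupt
   function with 8 bytes of locals, no outgoing arguments and no pretend
   args that must save only fp and lr gets var_size = 8, gmask = 0,
   reg_size = 2 * UNITS_PER_WORD = 8, and therefore
   total_size = 0 + 0 + 0 + 8 + 8 = 16, which the final alignment leaves
   unchanged.  */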
1635
1636 /* Worker function for TARGET_CAN_ELIMINATE. */
1637
1638 bool
1639 m32r_can_eliminate (const int from, const int to)
1640 {
1641 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1642 ? ! frame_pointer_needed
1643 : true);
1644 }
1645
1646 \f
1647 /* The table we use to reference PIC data. */
1648 static rtx global_offset_table;
1649
1650 static void
1651 m32r_reload_lr (rtx sp, int size)
1652 {
1653 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1654
1655 if (size == 0)
1656 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1657 else if (size < 32768)
1658 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1659 gen_rtx_PLUS (Pmode, sp,
1660 GEN_INT (size)))));
1661 else
1662 {
1663 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1664
1665 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1666 emit_insn (gen_addsi3 (tmp, tmp, sp));
1667 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1668 }
1669
1670 emit_use (lr);
1671 }
1672
1673 void
1674 m32r_load_pic_register (void)
1675 {
1676 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1677 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1678 GEN_INT (TARGET_MODEL_SMALL)));
1679
1680 /* Need to emit this whether or not we obey regdecls,
1681 since setjmp/longjmp can cause life info to screw up. */
1682 emit_use (pic_offset_table_rtx);
1683 }
1684
1685 /* Expand the m32r prologue as a series of insns. */
1686
1687 void
1688 m32r_expand_prologue (void)
1689 {
1690 int regno;
1691 int frame_size;
1692 unsigned int gmask;
1693 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1694 | crtl->profile);
1695
1696 if (! current_frame_info.initialized)
1697 m32r_compute_frame_size (get_frame_size ());
1698
1699 gmask = current_frame_info.gmask;
1700
1701 /* These cases shouldn't happen. Catch them now. */
1702 gcc_assert (current_frame_info.total_size || !gmask);
1703
1704 /* Allocate space for register arguments if this is a variadic function. */
1705 if (current_frame_info.pretend_size != 0)
1706 {
1707 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1708 the wrong result on a 64-bit host. */
1709 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1710 emit_insn (gen_addsi3 (stack_pointer_rtx,
1711 stack_pointer_rtx,
1712 GEN_INT (-pretend_size)));
1713 }
1714
1715 /* Save any registers we need to and set up fp. */
1716 if (current_frame_info.save_fp)
1717 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1718
1719 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1720
1721 /* Save any needed call-saved regs (and call-used if this is an
1722 interrupt handler). */
1723 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1724 {
1725 if ((gmask & (1 << regno)) != 0)
1726 emit_insn (gen_movsi_push (stack_pointer_rtx,
1727 gen_rtx_REG (Pmode, regno)));
1728 }
1729
1730 if (current_frame_info.save_lr)
1731 emit_insn (gen_movsi_push (stack_pointer_rtx,
1732 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1733
1734 /* Allocate the stack frame. */
1735 frame_size = (current_frame_info.total_size
1736 - (current_frame_info.pretend_size
1737 + current_frame_info.reg_size));
1738
1739 if (frame_size == 0)
1740 ; /* Nothing to do. */
1741 else if (frame_size <= 32768)
1742 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1743 GEN_INT (-frame_size)));
1744 else
1745 {
1746 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1747
1748 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1749 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1750 }
1751
1752 if (frame_pointer_needed)
1753 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1754
1755 if (crtl->profile)
1756 /* Push lr for mcount (form_pc, x). */
1757 emit_insn (gen_movsi_push (stack_pointer_rtx,
1758 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1759
1760 if (pic_reg_used)
1761 {
1762 m32r_load_pic_register ();
1763 m32r_reload_lr (stack_pointer_rtx,
1764 (crtl->profile ? 0 : frame_size));
1765 }
1766
1767 if (crtl->profile && !pic_reg_used)
1768 emit_insn (gen_blockage ());
1769 }
1770
1771 \f
1772 /* Set up the stack and frame pointer (if desired) for the function.
1773 Note, if this is changed, you need to mirror the changes in
1774 m32r_compute_frame_size which calculates the prolog size. */
1775
1776 static void
1777 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1778 {
1779 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1780
1781 /* If this is an interrupt handler, mark it as such. */
1782 if (M32R_INTERRUPT_P (fn_type))
1783 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1784
1785 if (! current_frame_info.initialized)
1786 m32r_compute_frame_size (size);
1787
1788 /* This is only for the human reader. */
1789 fprintf (file,
1790 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1791 ASM_COMMENT_START,
1792 current_frame_info.var_size,
1793 current_frame_info.reg_size / 4,
1794 current_frame_info.args_size,
1795 current_frame_info.extra_size);
1796 }
1797 \f
1798 /* Output RTL to pop register REGNO from the stack. */
1799
1800 static void
1801 pop (int regno)
1802 {
1803 rtx x;
1804
1805 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1806 stack_pointer_rtx));
1807 add_reg_note (x, REG_INC, stack_pointer_rtx);
1808 }
1809
1810 /* Expand the m32r epilogue as a series of insns. */
1811
1812 void
1813 m32r_expand_epilogue (void)
1814 {
1815 int regno;
1816 int noepilogue = FALSE;
1817 int total_size;
1818
1819 gcc_assert (current_frame_info.initialized);
1820 total_size = current_frame_info.total_size;
1821
1822 if (total_size == 0)
1823 {
1824 rtx insn = get_last_insn ();
1825
1826 /* If the last insn was a BARRIER, we don't have to write any code
1827 because a jump (aka return) was put there. */
1828 if (insn && NOTE_P (insn))
1829 insn = prev_nonnote_insn (insn);
1830 if (insn && BARRIER_P (insn))
1831 noepilogue = TRUE;
1832 }
1833
1834 if (!noepilogue)
1835 {
1836 unsigned int var_size = current_frame_info.var_size;
1837 unsigned int args_size = current_frame_info.args_size;
1838 unsigned int gmask = current_frame_info.gmask;
1839 int can_trust_sp_p = !cfun->calls_alloca;
1840
1841 if (flag_exceptions)
1842 emit_insn (gen_blockage ());
1843
1844 /* The first thing to do is point the sp at the bottom of the register
1845 save area. */
1846 if (can_trust_sp_p)
1847 {
1848 unsigned int reg_offset = var_size + args_size;
1849
1850 if (reg_offset == 0)
1851 ; /* Nothing to do. */
1852 else if (reg_offset < 32768)
1853 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1854 GEN_INT (reg_offset)));
1855 else
1856 {
1857 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1858
1859 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1860 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1861 tmp));
1862 }
1863 }
1864 else if (frame_pointer_needed)
1865 {
1866 unsigned int reg_offset = var_size + args_size;
1867
1868 if (reg_offset == 0)
1869 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1870 else if (reg_offset < 32768)
1871 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1872 GEN_INT (reg_offset)));
1873 else
1874 {
1875 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1876
1877 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1878 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1879 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1880 tmp));
1881 }
1882 }
1883 else
1884 gcc_unreachable ();
1885
1886 if (current_frame_info.save_lr)
1887 pop (RETURN_ADDR_REGNUM);
1888
1889 /* Restore any saved registers, in reverse order of course. */
1890 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1891 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1892 {
1893 if ((gmask & (1L << regno)) != 0)
1894 pop (regno);
1895 }
1896
1897 if (current_frame_info.save_fp)
1898 pop (FRAME_POINTER_REGNUM);
1899
1900 /* Remove varargs area if present. */
1901 if (current_frame_info.pretend_size != 0)
1902 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1903 GEN_INT (current_frame_info.pretend_size)));
1904
1905 emit_insn (gen_blockage ());
1906 }
1907 }
1908
1909 /* Do any necessary cleanup after a function to restore stack, frame,
1910 and regs. */
1911
1912 static void
1913 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1914 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1915 {
1916 /* Reset state info for each function. */
1917 current_frame_info = zero_frame_info;
1918 m32r_compute_function_type (NULL_TREE);
1919 }
1920 \f
1921 /* Return nonzero if this function is known to have a null or 1-instruction
1922 epilogue. */
1923
1924 int
1925 direct_return (void)
1926 {
1927 if (!reload_completed)
1928 return FALSE;
1929
1930 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1931 return FALSE;
1932
1933 if (! current_frame_info.initialized)
1934 m32r_compute_frame_size (get_frame_size ());
1935
1936 return current_frame_info.total_size == 0;
1937 }
1938
1939 \f
1940 /* PIC. */
1941
1942 int
1943 m32r_legitimate_pic_operand_p (rtx x)
1944 {
1945 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1946 return 0;
1947
1948 if (GET_CODE (x) == CONST
1949 && GET_CODE (XEXP (x, 0)) == PLUS
1950 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1951 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1952 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1953 return 0;
1954
1955 return 1;
1956 }
1957
1958 rtx
1959 m32r_legitimize_pic_address (rtx orig, rtx reg)
1960 {
1961 #ifdef DEBUG_PIC
1962 printf("m32r_legitimize_pic_address()\n");
1963 #endif
1964
1965 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1966 {
1967 rtx pic_ref, address;
1968 int subregs = 0;
1969
1970 if (reg == 0)
1971 {
1972 gcc_assert (!reload_in_progress && !reload_completed);
1973 reg = gen_reg_rtx (Pmode);
1974
1975 subregs = 1;
1976 }
1977
1978 if (subregs)
1979 address = gen_reg_rtx (Pmode);
1980 else
1981 address = reg;
1982
1983 crtl->uses_pic_offset_table = 1;
1984
1985 if (GET_CODE (orig) == LABEL_REF
1986 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1987 {
1988 emit_insn (gen_gotoff_load_addr (reg, orig));
1989 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1990 return reg;
1991 }
1992
1993 emit_insn (gen_pic_load_addr (address, orig));
1994
1995 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1996 pic_ref = gen_const_mem (Pmode, address);
1997 emit_move_insn (reg, pic_ref);
1998 return reg;
1999 }
2000 else if (GET_CODE (orig) == CONST)
2001 {
2002 rtx base, offset;
2003
2004 if (GET_CODE (XEXP (orig, 0)) == PLUS
2005 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2006 return orig;
2007
2008 if (reg == 0)
2009 {
2010 gcc_assert (!reload_in_progress && !reload_completed);
2011 reg = gen_reg_rtx (Pmode);
2012 }
2013
2014 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2015 {
2016 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2017 if (base == reg)
2018 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2019 else
2020 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2021 }
2022 else
2023 return orig;
2024
2025 if (CONST_INT_P (offset))
2026 {
2027 if (INT16_P (INTVAL (offset)))
2028 return plus_constant (base, INTVAL (offset));
2029 else
2030 {
2031 gcc_assert (! reload_in_progress && ! reload_completed);
2032 offset = force_reg (Pmode, offset);
2033 }
2034 }
2035
2036 return gen_rtx_PLUS (Pmode, base, offset);
2037 }
2038
2039 return orig;
2040 }
2041
2042 static rtx
2043 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2044 enum machine_mode mode ATTRIBUTE_UNUSED)
2045 {
2046 if (flag_pic)
2047 return m32r_legitimize_pic_address (x, NULL_RTX);
2048 else
2049 return x;
2050 }
2051
2052 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2053
2054 static bool
2055 m32r_mode_dependent_address_p (const_rtx addr)
2056 {
2057 if (GET_CODE (addr) == LO_SUM)
2058 return true;
2059
2060 return false;
2061 }
2062 \f
2063 /* Nested function support. */
2064
2065 /* Emit RTL insns to initialize the variable parts of a trampoline.
2066 FNADDR is an RTX for the address of the function's pure code.
2067 CXT is an RTX for the static chain value for the function. */
2068
2069 void
2070 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2071 rtx fnaddr ATTRIBUTE_UNUSED,
2072 rtx cxt ATTRIBUTE_UNUSED)
2073 {
2074 }
2075 \f
2076 static void
2077 m32r_file_start (void)
2078 {
2079 default_file_start ();
2080
2081 if (flag_verbose_asm)
2082 fprintf (asm_out_file,
2083 "%s M32R/D special options: -G %d\n",
2084 ASM_COMMENT_START, g_switch_value);
2085
2086 if (TARGET_LITTLE_ENDIAN)
2087 fprintf (asm_out_file, "\t.little\n");
2088 }
2089 \f
2090 /* Print operand X (an rtx) in assembler syntax to file FILE.
2091 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2092 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2093
2094 static void
2095 m32r_print_operand (FILE * file, rtx x, int code)
2096 {
2097 rtx addr;
2098
2099 switch (code)
2100 {
2101     /* The 's' and 'p' codes are used by output_block_move() to
2102        indicate pre-increment 's'tores and 'p'ost-increment loads.  */
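    /* For instance "st\t%5, %s0" comes out as "st r5, @+r0" and
       "ld\t%5, %p1" as "ld r5, @r1+" (register names illustrative,
       taken from the block-move templates later in this file).  */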
2103 case 's':
2104 if (REG_P (x))
2105 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2106 else
2107 output_operand_lossage ("invalid operand to %%s code");
2108 return;
2109
2110 case 'p':
2111 if (REG_P (x))
2112 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2113 else
2114 output_operand_lossage ("invalid operand to %%p code");
2115 return;
2116
2117 case 'R' :
2118 /* Write second word of DImode or DFmode reference,
2119 register or memory. */
2120 if (REG_P (x))
2121 fputs (reg_names[REGNO (x)+1], file);
2122 else if (MEM_P (x))
2123 {
2124 fprintf (file, "@(");
2125 /* Handle possible auto-increment. Since it is pre-increment and
2126 we have already done it, we can just use an offset of four. */
2127 /* ??? This is taken from rs6000.c I think. I don't think it is
2128 currently necessary, but keep it around. */
2129 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2130 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2131 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2132 else
2133 output_address (plus_constant (XEXP (x, 0), 4));
2134 fputc (')', file);
2135 }
2136 else
2137 output_operand_lossage ("invalid operand to %%R code");
2138 return;
2139
2140 case 'H' : /* High word. */
2141 case 'L' : /* Low word. */
2142 if (REG_P (x))
2143 {
2144 /* L = least significant word, H = most significant word. */
2145 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2146 fputs (reg_names[REGNO (x)], file);
2147 else
2148 fputs (reg_names[REGNO (x)+1], file);
2149 }
2150 else if (CONST_INT_P (x)
2151 || GET_CODE (x) == CONST_DOUBLE)
2152 {
2153 rtx first, second;
2154
2155 split_double (x, &first, &second);
2156 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2157 code == 'L' ? INTVAL (first) : INTVAL (second));
2158 }
2159 else
2160 output_operand_lossage ("invalid operand to %%H/%%L code");
2161 return;
2162
2163 case 'A' :
2164 {
2165 char str[30];
2166
2167 if (GET_CODE (x) != CONST_DOUBLE
2168 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2169 fatal_insn ("bad insn for 'A'", x);
2170
2171 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2172 fprintf (file, "%s", str);
2173 return;
2174 }
2175
2176 case 'B' : /* Bottom half. */
2177 case 'T' : /* Top half. */
2178 /* Output the argument to a `seth' insn (sets the Top half-word).
2179 For constants output arguments to a seth/or3 pair to set Top and
2180 Bottom halves. For symbols output arguments to a seth/add3 pair to
2181 set Top and Bottom halves. The difference exists because for
2182 constants seth/or3 is more readable but for symbols we need to use
2183 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
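      /* Purely illustrative: for the constant 0x12345678, '%T' prints 0x1234
	 and '%B' prints 0x5678, feeding a pair such as
	     seth r0, #0x1234
	     or3  r0, r0, #0x5678
	 while for a symbol '%T' prints shigh(sym) and '%B' prints low(sym)
	 (or sda(sym) for small data), feeding a seth/add3 pair.  */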
2184 switch (GET_CODE (x))
2185 {
2186 case CONST_INT :
2187 case CONST_DOUBLE :
2188 {
2189 rtx first, second;
2190
2191 split_double (x, &first, &second);
2192 x = WORDS_BIG_ENDIAN ? second : first;
2193 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2194 (code == 'B'
2195 ? INTVAL (x) & 0xffff
2196 : (INTVAL (x) >> 16) & 0xffff));
2197 }
2198 return;
2199 case CONST :
2200 case SYMBOL_REF :
2201 if (code == 'B'
2202 && small_data_operand (x, VOIDmode))
2203 {
2204 fputs ("sda(", file);
2205 output_addr_const (file, x);
2206 fputc (')', file);
2207 return;
2208 }
2209 /* fall through */
2210 case LABEL_REF :
2211 fputs (code == 'T' ? "shigh(" : "low(", file);
2212 output_addr_const (file, x);
2213 fputc (')', file);
2214 return;
2215 default :
2216 output_operand_lossage ("invalid operand to %%T/%%B code");
2217 return;
2218 }
2219 break;
2220
2221 case 'U' :
2222 /* ??? wip */
2223 /* Output a load/store with update indicator if appropriate. */
2224 if (MEM_P (x))
2225 {
2226 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2227 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2228 fputs (".a", file);
2229 }
2230 else
2231 output_operand_lossage ("invalid operand to %%U code");
2232 return;
2233
2234 case 'N' :
2235 /* Print a constant value negated. */
2236 if (CONST_INT_P (x))
2237 output_addr_const (file, GEN_INT (- INTVAL (x)));
2238 else
2239 output_operand_lossage ("invalid operand to %%N code");
2240 return;
2241
2242 case 'X' :
2243 /* Print a const_int in hex. Used in comments. */
2244 if (CONST_INT_P (x))
2245 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2246 return;
2247
2248 case '#' :
2249 fputs (IMMEDIATE_PREFIX, file);
2250 return;
2251
2252 case 0 :
2253 /* Do nothing special. */
2254 break;
2255
2256 default :
2257 /* Unknown flag. */
2258 output_operand_lossage ("invalid operand output code");
2259 }
2260
2261 switch (GET_CODE (x))
2262 {
2263 case REG :
2264 fputs (reg_names[REGNO (x)], file);
2265 break;
2266
2267 case MEM :
2268 addr = XEXP (x, 0);
2269 if (GET_CODE (addr) == PRE_INC)
2270 {
2271 if (!REG_P (XEXP (addr, 0)))
2272 fatal_insn ("pre-increment address is not a register", x);
2273
2274 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2275 }
2276 else if (GET_CODE (addr) == PRE_DEC)
2277 {
2278 if (!REG_P (XEXP (addr, 0)))
2279 fatal_insn ("pre-decrement address is not a register", x);
2280
2281 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2282 }
2283 else if (GET_CODE (addr) == POST_INC)
2284 {
2285 if (!REG_P (XEXP (addr, 0)))
2286 fatal_insn ("post-increment address is not a register", x);
2287
2288 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2289 }
2290 else
2291 {
2292 fputs ("@(", file);
2293 output_address (XEXP (x, 0));
2294 fputc (')', file);
2295 }
2296 break;
2297
2298 case CONST_DOUBLE :
2299 /* We handle SFmode constants here as output_addr_const doesn't. */
2300 if (GET_MODE (x) == SFmode)
2301 {
2302 REAL_VALUE_TYPE d;
2303 long l;
2304
2305 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2306 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2307 fprintf (file, "0x%08lx", l);
2308 break;
2309 }
2310
2311 /* Fall through. Let output_addr_const deal with it. */
2312
2313 default :
2314 output_addr_const (file, x);
2315 break;
2316 }
2317 }
2318
2319 /* Print a memory address as an operand to reference that memory location. */
2320
2321 static void
2322 m32r_print_operand_address (FILE * file, rtx addr)
2323 {
2324 rtx base;
2325 rtx index = 0;
2326 int offset = 0;
2327
2328 switch (GET_CODE (addr))
2329 {
2330 case REG :
2331 fputs (reg_names[REGNO (addr)], file);
2332 break;
2333
2334 case PLUS :
2335 if (CONST_INT_P (XEXP (addr, 0)))
2336 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2337 else if (CONST_INT_P (XEXP (addr, 1)))
2338 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2339 else
2340 base = XEXP (addr, 0), index = XEXP (addr, 1);
2341 if (REG_P (base))
2342 {
2343 /* Print the offset first (if present) to conform to the manual. */
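	  /* e.g. a base of fp with offset 4 prints here as "4,fp"; the MEM
	     case in m32r_print_operand() supplies the surrounding "@(...)",
	     giving "@(4,fp)".  */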
2344 if (index == 0)
2345 {
2346 if (offset != 0)
2347 fprintf (file, "%d,", offset);
2348 fputs (reg_names[REGNO (base)], file);
2349 }
2350 /* The chip doesn't support this, but left in for generality. */
2351 else if (REG_P (index))
2352 fprintf (file, "%s,%s",
2353 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2354 /* Not sure this can happen, but leave in for now. */
2355 else if (GET_CODE (index) == SYMBOL_REF)
2356 {
2357 output_addr_const (file, index);
2358 fputc (',', file);
2359 fputs (reg_names[REGNO (base)], file);
2360 }
2361 else
2362 fatal_insn ("bad address", addr);
2363 }
2364 else if (GET_CODE (base) == LO_SUM)
2365 {
2366 gcc_assert (!index && REG_P (XEXP (base, 0)));
2367 if (small_data_operand (XEXP (base, 1), VOIDmode))
2368 fputs ("sda(", file);
2369 else
2370 fputs ("low(", file);
2371 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2372 fputs ("),", file);
2373 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2374 }
2375 else
2376 fatal_insn ("bad address", addr);
2377 break;
2378
2379 case LO_SUM :
2380 if (!REG_P (XEXP (addr, 0)))
2381 fatal_insn ("lo_sum not of register", addr);
2382 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2383 fputs ("sda(", file);
2384 else
2385 fputs ("low(", file);
2386 output_addr_const (file, XEXP (addr, 1));
2387 fputs ("),", file);
2388 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2389 break;
2390
2391 case PRE_INC : /* Assume SImode. */
2392 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2393 break;
2394
2395 case PRE_DEC : /* Assume SImode. */
2396 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2397 break;
2398
2399 case POST_INC : /* Assume SImode. */
2400 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2401 break;
2402
2403 default :
2404 output_addr_const (file, addr);
2405 break;
2406 }
2407 }
2408
2409 static bool
2410 m32r_print_operand_punct_valid_p (unsigned char code)
2411 {
2412 return m32r_punct_chars[code];
2413 }
2414
2415 /* Return true if the operands are the constants 0 and 1. */
2416
2417 int
2418 zero_and_one (rtx operand1, rtx operand2)
2419 {
2420 return
2421 CONST_INT_P (operand1)
2422 && CONST_INT_P (operand2)
2423 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2424 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2425 }
2426
2427 /* Generate the correct assembler code to handle the conditional loading of a
2428 value into a register. It is known that the operands satisfy the
2429 conditional_move_operand() function above. The destination is operand[0].
2430 The condition is operand [1]. The 'true' value is operand [2] and the
2431 'false' value is operand [3]. */
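/* A sketch of the expected output (register name illustrative): the buffer
   always starts with "mvfc r4, cbr"; for an NE test the 'true' and 'false'
   values are first swapped, and if the (possibly swapped) 'true' value is 0
   then "xor3 r4, r4, #1" is appended to invert the copied condition bit.  */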
2432
2433 char *
2434 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2435 {
2436 static char buffer [100];
2437 const char * dest = reg_names [REGNO (operands [0])];
2438
2439 buffer [0] = 0;
2440
2441 /* Destination must be a register. */
2442 gcc_assert (REG_P (operands [0]));
2443 gcc_assert (conditional_move_operand (operands [2], SImode));
2444 gcc_assert (conditional_move_operand (operands [3], SImode));
2445
2446 /* Check to see if the test is reversed. */
2447 if (GET_CODE (operands [1]) == NE)
2448 {
2449 rtx tmp = operands [2];
2450 operands [2] = operands [3];
2451 operands [3] = tmp;
2452 }
2453
2454 sprintf (buffer, "mvfc %s, cbr", dest);
2455
2456 /* If the true value was '0' then we need to invert the results of the move. */
2457 if (INTVAL (operands [2]) == 0)
2458 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2459 dest, dest);
2460
2461 return buffer;
2462 }
2463
2464 /* Returns true if the registers contained in the two
2465 rtl expressions are different. */
2466
2467 int
2468 m32r_not_same_reg (rtx a, rtx b)
2469 {
2470 int reg_a = -1;
2471 int reg_b = -2;
2472
2473 while (GET_CODE (a) == SUBREG)
2474 a = SUBREG_REG (a);
2475
2476 if (REG_P (a))
2477 reg_a = REGNO (a);
2478
2479 while (GET_CODE (b) == SUBREG)
2480 b = SUBREG_REG (b);
2481
2482 if (REG_P (b))
2483 reg_b = REGNO (b);
2484
2485 return reg_a != reg_b;
2486 }
2487
2488 \f
2489 rtx
2490 m32r_function_symbol (const char *name)
2491 {
2492 int extra_flags = 0;
2493 enum m32r_model model;
2494 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2495
2496 if (TARGET_MODEL_SMALL)
2497 model = M32R_MODEL_SMALL;
2498 else if (TARGET_MODEL_MEDIUM)
2499 model = M32R_MODEL_MEDIUM;
2500 else if (TARGET_MODEL_LARGE)
2501 model = M32R_MODEL_LARGE;
2502 else
2503 gcc_unreachable (); /* Shouldn't happen. */
2504 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2505
2506 if (extra_flags)
2507 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2508
2509 return sym;
2510 }
2511
2512 /* Use a library function to move some bytes. */
2513
2514 static void
2515 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2516 {
2517 /* We want to pass the size as Pmode, which will normally be SImode
2518 but will be DImode if we are using 64-bit longs and pointers. */
2519 if (GET_MODE (bytes_rtx) != VOIDmode
2520 && GET_MODE (bytes_rtx) != Pmode)
2521 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2522
2523 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2524 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2525 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2526 TYPE_UNSIGNED (sizetype)),
2527 TYPE_MODE (sizetype));
2528 }
2529
2530 /* Expand string/block move operations.
2531
2532 operands[0] is the pointer to the destination.
2533 operands[1] is the pointer to the source.
2534 operands[2] is the number of bytes to move.
2535 operands[3] is the alignment.
2536
2537 Returns 1 upon success, 0 otherwise. */
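/* Illustrative walk-through, assuming MAX_MOVE_BYTES is 16 (4 words): a
   word-aligned constant copy of 35 bytes compiled for speed becomes a
   two-iteration loop, each pass moving 16 bytes via movmemsi_internal,
   followed by one 3-byte movmemsi_internal for the leftover.  A variable
   length, an unaligned block, or optimize_size falls back to
   block_move_call, i.e. a call to memcpy.  */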
2538
2539 int
2540 m32r_expand_block_move (rtx operands[])
2541 {
2542 rtx orig_dst = operands[0];
2543 rtx orig_src = operands[1];
2544 rtx bytes_rtx = operands[2];
2545 rtx align_rtx = operands[3];
2546 int constp = CONST_INT_P (bytes_rtx);
2547 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2548 int align = INTVAL (align_rtx);
2549 int leftover;
2550 rtx src_reg;
2551 rtx dst_reg;
2552
2553 if (constp && bytes <= 0)
2554 return 1;
2555
2556 /* Move the address into scratch registers. */
2557 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2558 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2559
2560 if (align > UNITS_PER_WORD)
2561 align = UNITS_PER_WORD;
2562
2563 /* If we prefer size over speed, always use a function call.
2564 If we do not know the size, use a function call.
2565 If the blocks are not word aligned, use a function call. */
2566 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2567 {
2568 block_move_call (dst_reg, src_reg, bytes_rtx);
2569 return 0;
2570 }
2571
2572 leftover = bytes % MAX_MOVE_BYTES;
2573 bytes -= leftover;
2574
2575 /* If necessary, generate a loop to handle the bulk of the copy. */
2576 if (bytes)
2577 {
2578 rtx label = NULL_RTX;
2579 rtx final_src = NULL_RTX;
2580 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2581 rtx rounded_total = GEN_INT (bytes);
2582 rtx new_dst_reg = gen_reg_rtx (SImode);
2583 rtx new_src_reg = gen_reg_rtx (SImode);
2584
2585 /* If we are going to have to perform this loop more than
2586 once, then generate a label and compute the address the
2587 source register will contain upon completion of the final
2588 iteration. */
2589 if (bytes > MAX_MOVE_BYTES)
2590 {
2591 final_src = gen_reg_rtx (Pmode);
2592
2593 if (INT16_P(bytes))
2594 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2595 else
2596 {
2597 emit_insn (gen_movsi (final_src, rounded_total));
2598 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2599 }
2600
2601 label = gen_label_rtx ();
2602 emit_label (label);
2603 }
2604
2605 /* It is known that output_block_move() will update src_reg to point
2606 to the word after the end of the source block, and dst_reg to point
2607 to the last word of the destination block, provided that the block
2608 is MAX_MOVE_BYTES long. */
2609 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2610 new_dst_reg, new_src_reg));
2611 emit_move_insn (dst_reg, new_dst_reg);
2612 emit_move_insn (src_reg, new_src_reg);
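      /* output_block_move() leaves dst_reg pointing at the last word it
	 stored (its stores pre-increment), so advance it past the block
	 ready for the next iteration or the leftover copy below.  */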
2613 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2614
2615 if (bytes > MAX_MOVE_BYTES)
2616 {
2617 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2618 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2619 }
2620 }
2621
2622 if (leftover)
2623 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2624 gen_reg_rtx (SImode),
2625 gen_reg_rtx (SImode)));
2626 return 1;
2627 }
2628
2629 \f
2630 /* Emit load/stores for a small constant word aligned block_move.
2631
2632 operands[0] is the memory address of the destination.
2633 operands[1] is the memory address of the source.
2634 operands[2] is the number of bytes to move.
2635    operands[3] and operands[4] receive the updated dest and src pointers.
2636    operands[5] and operands[6] are temp registers.  */
2637
2638 void
2639 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2640 {
2641 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2642 int first_time;
2643 int got_extra = 0;
2644
2645 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2646
2647 /* We do not have a post-increment store available, so the first set of
2648 stores are done without any increment, then the remaining ones can use
2649 the pre-increment addressing mode.
2650
2651 Note: expand_block_move() also relies upon this behavior when building
2652 loops to copy large blocks. */
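  /* Illustrative only (register choice depends on the insn's scratch
     operands): with %5/%6 in r5/r6 and the pointers in r0/r1, one 8-byte
     chunk on the first pass emits
	 ld  r5, @r1+
	 ld  r6, @r1+
	 st  r5, @r0
	 st  r6, @+r0
     and later passes use the pre-increment form for both stores.  */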
2653 first_time = 1;
2654
2655 while (bytes > 0)
2656 {
2657 if (bytes >= 8)
2658 {
2659 if (first_time)
2660 {
2661 output_asm_insn ("ld\t%5, %p1", operands);
2662 output_asm_insn ("ld\t%6, %p1", operands);
2663 output_asm_insn ("st\t%5, @%0", operands);
2664 output_asm_insn ("st\t%6, %s0", operands);
2665 }
2666 else
2667 {
2668 output_asm_insn ("ld\t%5, %p1", operands);
2669 output_asm_insn ("ld\t%6, %p1", operands);
2670 output_asm_insn ("st\t%5, %s0", operands);
2671 output_asm_insn ("st\t%6, %s0", operands);
2672 }
2673
2674 bytes -= 8;
2675 }
2676 else if (bytes >= 4)
2677 {
2678 if (bytes > 4)
2679 got_extra = 1;
2680
2681 output_asm_insn ("ld\t%5, %p1", operands);
2682
2683 if (got_extra)
2684 output_asm_insn ("ld\t%6, %p1", operands);
2685
2686 if (first_time)
2687 output_asm_insn ("st\t%5, @%0", operands);
2688 else
2689 output_asm_insn ("st\t%5, %s0", operands);
2690
2691 bytes -= 4;
2692 }
2693 else
2694 {
2695 /* Get the entire next word, even though we do not want all of it.
2696 	     This saves us from doing several smaller loads, and we assume that
2697 we cannot cause a page fault when at least part of the word is in
2698 valid memory [since we don't get called if things aren't properly
2699 aligned]. */
2700 int dst_offset = first_time ? 0 : 4;
2701 	  /* The amount by which we have to increment the
2702 	     destination pointer.  */
2703 int dst_inc_amount = dst_offset + bytes - 4;
2704 /* The same for the source pointer. */
2705 int src_inc_amount = bytes;
2706 int last_shift;
2707 rtx my_operands[3];
2708
2709 /* If got_extra is true then we have already loaded
2710 the next word as part of loading and storing the previous word. */
2711 if (! got_extra)
2712 output_asm_insn ("ld\t%6, @%1", operands);
2713
2714 if (bytes >= 2)
2715 {
2716 bytes -= 2;
2717
2718 output_asm_insn ("sra3\t%5, %6, #16", operands);
2719 my_operands[0] = operands[5];
2720 my_operands[1] = GEN_INT (dst_offset);
2721 my_operands[2] = operands[0];
2722 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2723
2724 /* If there is a byte left to store then increment the
2725 destination address and shift the contents of the source
2726 register down by 8 bits. We could not do the address
2727 increment in the store half word instruction, because it does
2728 not have an auto increment mode. */
2729 if (bytes > 0) /* assert (bytes == 1) */
2730 {
2731 dst_offset += 2;
2732 last_shift = 8;
2733 }
2734 }
2735 else
2736 last_shift = 24;
2737
2738 if (bytes > 0)
2739 {
2740 my_operands[0] = operands[6];
2741 my_operands[1] = GEN_INT (last_shift);
2742 output_asm_insn ("srai\t%0, #%1", my_operands);
2743 my_operands[0] = operands[6];
2744 my_operands[1] = GEN_INT (dst_offset);
2745 my_operands[2] = operands[0];
2746 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2747 }
2748
2749 /* Update the destination pointer if needed. We have to do
2750 	     this so that the pattern matches what we output in this
2751 function. */
2752 if (dst_inc_amount
2753 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2754 {
2755 my_operands[0] = operands[0];
2756 my_operands[1] = GEN_INT (dst_inc_amount);
2757 output_asm_insn ("addi\t%0, #%1", my_operands);
2758 }
2759
2760 /* Update the source pointer if needed. We have to do this
2761 	     so that the pattern matches what we output in this
2762 function. */
2763 if (src_inc_amount
2764 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2765 {
2766 my_operands[0] = operands[1];
2767 my_operands[1] = GEN_INT (src_inc_amount);
2768 output_asm_insn ("addi\t%0, #%1", my_operands);
2769 }
2770
2771 bytes = 0;
2772 }
2773
2774 first_time = 0;
2775 }
2776 }
2777
2778 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2779
2780 int
2781 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2782 unsigned int new_reg)
2783 {
2784 /* Interrupt routines can't clobber any register that isn't already used. */
2785 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2786 && !df_regs_ever_live_p (new_reg))
2787 return 0;
2788
2789 return 1;
2790 }
2791
2792 rtx
2793 m32r_return_addr (int count)
2794 {
2795 if (count != 0)
2796 return const0_rtx;
2797
2798 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2799 }
2800
2801 static void
2802 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2803 {
2804 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2805 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2806 0x017e8e17 : 0x178e7e01, SImode));
2807 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2808 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2809 0x0c00ae86 : 0x86ae000c, SImode));
2810 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2811 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2812 0xe627871e : 0x1e8727e6, SImode));
2813 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2814 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2815 0xc616c626 : 0x26c61fc6, SImode));
2816 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2817 chain_value);
2818 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2819 XEXP (DECL_RTL (fndecl), 0));
2820
2821 if (m32r_cache_flush_trap >= 0)
2822 emit_insn (gen_flush_icache
2823 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2824 gen_int_mode (m32r_cache_flush_trap, SImode)));
2825 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2826 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2827 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2828 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2829 GEN_INT (3), SImode);
2830 }
2831
2832 /* True if X is a reg that can be used as a base reg. */
2833
2834 static bool
2835 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2836 {
2837 if (! REG_P (x))
2838 return false;
2839
2840 if (strict)
2841 {
2842 if (GPR_P (REGNO (x)))
2843 return true;
2844 }
2845 else
2846 {
2847 if (GPR_P (REGNO (x))
2848 || REGNO (x) == ARG_POINTER_REGNUM
2849 || ! HARD_REGISTER_P (x))
2850 return true;
2851 }
2852
2853 return false;
2854 }
2855
2856 static inline bool
2857 m32r_rtx_ok_for_offset_p (const_rtx x)
2858 {
2859 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2860 }
2861
2862 static inline bool
2863 m32r_legitimate_offset_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2864 const_rtx x, bool strict)
2865 {
2866 if (GET_CODE (x) == PLUS
2867 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2868 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2869 return true;
2870
2871 return false;
2872 }
2873
2874 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2875 since more than one instruction will be required. */
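/* e.g. (lo_sum (reg) (symbol_ref "x")) is accepted for SImode and SFmode
   accesses but rejected for DImode/DFmode (and BLKmode), whose size exceeds
   a single word.  */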
2876
2877 static inline bool
2878 m32r_legitimate_lo_sum_address_p (enum machine_mode mode, const_rtx x,
2879 bool strict)
2880 {
2881 if (GET_CODE (x) == LO_SUM
2882 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2883 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2884 && CONSTANT_P (XEXP (x, 1)))
2885 return true;
2886
2887 return false;
2888 }
2889
2890 /* Is this a load and increment operation?  */
2891
2892 static inline bool
2893 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2894 {
2895 if ((mode == SImode || mode == SFmode)
2896 && GET_CODE (x) == POST_INC
2897 && REG_P (XEXP (x, 0))
2898 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2899 return true;
2900
2901 return false;
2902 }
2903
2904 /* Is this an increment/decrement and store operation?  */
2905
2906 static inline bool
2907 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2908 {
2909 if ((mode == SImode || mode == SFmode)
2910 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2911       && REG_P (XEXP (x, 0))
2912 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2913 return true;
2914
2915 return false;
2916 }
2917
2918 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2919
2920 static bool
2921 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2922 {
2923 if (m32r_rtx_ok_for_base_p (x, strict)
2924       || m32r_legitimate_offset_address_p (mode, x, strict)
2925       || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2926 || m32r_load_postinc_p (mode, x, strict)
2927 || m32r_store_preinc_predec_p (mode, x, strict))
2928 return true;
2929
2930 return false;
2931 }
2932
2933 static void
2934 m32r_conditional_register_usage (void)
2935 {
2936 if (flag_pic)
2937 {
2938 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2939 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2940 }
2941 }
2942
2943 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2944
2945 We don't allow (plus symbol large-constant) as the relocations can't
2946 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2947 We allow all CONST_DOUBLE's as the md file patterns will force the
2948 constant to memory if they can't handle them. */
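/* For example, (const (plus (symbol_ref "x") (const_int 4))) remains
   legitimate, while (const (plus (symbol_ref "x") (const_int 0x10000))) is
   rejected and must be forced to memory or built piecewise (illustrative
   RTL; the 32767 cutoff above is what decides).  */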
2949
2950 static bool
2951 m32r_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2952 {
2953 return !(GET_CODE (x) == CONST
2954 && GET_CODE (XEXP (x, 0)) == PLUS
2955 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2956 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2957 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2958 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2959 }