1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "ggc.h"
39 #include "integrate.h"
40 #include "df.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "tm-constrs.h"
45 #include "opts.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Selected code model. */
51 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
52
53 /* Selected SDA support. */
54 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
55
56 /* Machine-specific symbol_ref flags. */
57 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
58 #define SYMBOL_REF_MODEL(X) \
59 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
60
61 /* For string literals, etc. */
62 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63
64 /* Forward declarations.  */
65 static bool m32r_handle_option (struct gcc_options *, struct gcc_options *,
66 const struct cl_decoded_option *, location_t);
67 static void m32r_option_override (void);
68 static void init_reg_tables (void);
69 static void block_move_call (rtx, rtx, rtx);
70 static int m32r_is_insn (rtx);
71 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
72 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
73 static bool m32r_mode_dependent_address_p (const_rtx);
74 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
75 static void m32r_print_operand (FILE *, rtx, int);
76 static void m32r_print_operand_address (FILE *, rtx);
77 static bool m32r_print_operand_punct_valid_p (unsigned char code);
78 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
79 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
80
81 static void m32r_file_start (void);
82
83 static int m32r_adjust_priority (rtx, int);
84 static int m32r_issue_rate (void);
85
86 static void m32r_encode_section_info (tree, rtx, int);
87 static bool m32r_in_small_data_p (const_tree);
88 static bool m32r_return_in_memory (const_tree, const_tree);
89 static rtx m32r_function_value (const_tree, const_tree, bool);
90 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
91 static bool m32r_function_value_regno_p (const unsigned int);
92 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
93 tree, int *, int);
94 static void init_idents (void);
95 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
96 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
97 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
98 const_tree, bool);
99 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
100 tree, bool);
101 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
102 const_tree, bool);
103 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
104 const_tree, bool);
105 static bool m32r_can_eliminate (const int, const int);
106 static void m32r_conditional_register_usage (void);
107 static void m32r_trampoline_init (rtx, tree, rtx);
108 \f
109 /* M32R specific attributes. */
110
111 static const struct attribute_spec m32r_attribute_table[] =
112 {
113 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
114 affects_type_identity } */
115 { "interrupt", 0, 0, true, false, false, NULL, false },
116 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
117 false },
118 { NULL, 0, 0, false, false, false, NULL, false }
119 };
120
121 static const struct default_options m32r_option_optimization_table[] =
122 {
123 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
124 { OPT_LEVELS_1_PLUS, OPT_fregmove, NULL, 1 },
125 { OPT_LEVELS_NONE, 0, NULL, 0 }
126 };
127 \f
128 /* Initialize the GCC target structure. */
129 #undef TARGET_ATTRIBUTE_TABLE
130 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
131
132 #undef TARGET_LEGITIMATE_ADDRESS_P
133 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
134 #undef TARGET_LEGITIMIZE_ADDRESS
135 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
136 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
137 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
138
139 #undef TARGET_ASM_ALIGNED_HI_OP
140 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
141 #undef TARGET_ASM_ALIGNED_SI_OP
142 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
143
144 #undef TARGET_PRINT_OPERAND
145 #define TARGET_PRINT_OPERAND m32r_print_operand
146 #undef TARGET_PRINT_OPERAND_ADDRESS
147 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
148 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
149 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
150
151 #undef TARGET_ASM_FUNCTION_PROLOGUE
152 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
153 #undef TARGET_ASM_FUNCTION_EPILOGUE
154 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
155
156 #undef TARGET_ASM_FILE_START
157 #define TARGET_ASM_FILE_START m32r_file_start
158
159 #undef TARGET_SCHED_ADJUST_PRIORITY
160 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
161 #undef TARGET_SCHED_ISSUE_RATE
162 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
163
164 #undef TARGET_DEFAULT_TARGET_FLAGS
165 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
166 #undef TARGET_HANDLE_OPTION
167 #define TARGET_HANDLE_OPTION m32r_handle_option
168 #undef TARGET_OPTION_OVERRIDE
169 #define TARGET_OPTION_OVERRIDE m32r_option_override
170 #undef TARGET_OPTION_OPTIMIZATION_TABLE
171 #define TARGET_OPTION_OPTIMIZATION_TABLE m32r_option_optimization_table
172
173 #undef TARGET_ENCODE_SECTION_INFO
174 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
175 #undef TARGET_IN_SMALL_DATA_P
176 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
177
178
179 #undef TARGET_MEMORY_MOVE_COST
180 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
181 #undef TARGET_RTX_COSTS
182 #define TARGET_RTX_COSTS m32r_rtx_costs
183 #undef TARGET_ADDRESS_COST
184 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
185
186 #undef TARGET_PROMOTE_PROTOTYPES
187 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
188 #undef TARGET_RETURN_IN_MEMORY
189 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
190
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE m32r_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE m32r_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
197
198 #undef TARGET_SETUP_INCOMING_VARARGS
199 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
200 #undef TARGET_MUST_PASS_IN_STACK
201 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
202 #undef TARGET_PASS_BY_REFERENCE
203 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
204 #undef TARGET_ARG_PARTIAL_BYTES
205 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
206 #undef TARGET_FUNCTION_ARG
207 #define TARGET_FUNCTION_ARG m32r_function_arg
208 #undef TARGET_FUNCTION_ARG_ADVANCE
209 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
210
211 #undef TARGET_CAN_ELIMINATE
212 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
213
214 #undef TARGET_CONDITIONAL_REGISTER_USAGE
215 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
216
217 #undef TARGET_TRAMPOLINE_INIT
218 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
219
220 #undef TARGET_EXCEPT_UNWIND_INFO
221 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
222
223 struct gcc_target targetm = TARGET_INITIALIZER;
224 \f
225 /* Implement TARGET_HANDLE_OPTION. */
226
227 static bool
228 m32r_handle_option (struct gcc_options *opts, struct gcc_options *opts_set,
229 const struct cl_decoded_option *decoded,
230 location_t loc ATTRIBUTE_UNUSED)
231 {
232 size_t code = decoded->opt_index;
233 const char *arg = decoded->arg;
234 int value = decoded->value;
235
236 gcc_assert (opts == &global_options);
237 gcc_assert (opts_set == &global_options_set);
238
239 switch (code)
240 {
241 case OPT_m32r:
242 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
243 return true;
244
245 case OPT_mmodel_:
246 if (strcmp (arg, "small") == 0)
247 m32r_model = M32R_MODEL_SMALL;
248 else if (strcmp (arg, "medium") == 0)
249 m32r_model = M32R_MODEL_MEDIUM;
250 else if (strcmp (arg, "large") == 0)
251 m32r_model = M32R_MODEL_LARGE;
252 else
253 return false;
254 return true;
255
256 case OPT_msdata_:
257 if (strcmp (arg, "none") == 0)
258 m32r_sdata = M32R_SDATA_NONE;
259 else if (strcmp (arg, "sdata") == 0)
260 m32r_sdata = M32R_SDATA_SDATA;
261 else if (strcmp (arg, "use") == 0)
262 m32r_sdata = M32R_SDATA_USE;
263 else
264 return false;
265 return true;
266
267 case OPT_mno_flush_func:
268 m32r_cache_flush_func = NULL;
269 return true;
270
271 case OPT_mflush_trap_:
272 return value <= 15;
273
274 case OPT_mno_flush_trap:
275 m32r_cache_flush_trap = -1;
276 return true;
277
278 default:
279 return true;
280 }
281 }
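/* For illustration (hypothetical command lines, not part of this file):
   -mmodel=medium selects M32R_MODEL_MEDIUM, -msdata=use selects
   M32R_SDATA_USE, -mno-flush-func clears the cache flush function, and
   -mflush-trap=N is accepted only for trap numbers N <= 15.  */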
282
283 /* Called by m32r_option_override to initialize various things. */
284
285 void
286 m32r_init (void)
287 {
288 init_reg_tables ();
289
290 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
291 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
292 m32r_punct_chars['#'] = 1;
293 m32r_punct_chars['@'] = 1; /* ??? no longer used */
294
295 /* Provide default value if not specified. */
296 if (!global_options_set.x_g_switch_value)
297 g_switch_value = SDATA_DEFAULT_SIZE;
298 }
299
300 static void
301 m32r_option_override (void)
302 {
303 /* These need to be done at start up.
304 It's convenient to do them here. */
305 m32r_init ();
306 SUBTARGET_OVERRIDE_OPTIONS;
307 }
308
309 /* Vectors to keep interesting information about registers where it can easily
310 be obtained.  We used to use the actual mode value as the bit number, but there
311 is (or may be) more than 32 modes now. Instead we use two tables: one
312 indexed by hard register number, and one indexed by mode. */
313
314 /* The purpose of m32r_mode_class is to shrink the range of modes so that
315 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
316 mapped into one m32r_mode_class mode. */
317
318 enum m32r_mode_class
319 {
320 C_MODE,
321 S_MODE, D_MODE, T_MODE, O_MODE,
322 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
323 };
324
325 /* Modes for condition codes. */
326 #define C_MODES (1 << (int) C_MODE)
327
328 /* Modes for single-word and smaller quantities. */
329 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
330
331 /* Modes for double-word and smaller quantities. */
332 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
333
334 /* Modes for quad-word and smaller quantities. */
335 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
336
337 /* Modes for accumulators. */
338 #define A_MODES (1 << (int) A_MODE)
339
340 /* Value is 1 if register/mode pair is acceptable on the M32R.  */
341
342 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
343 {
344 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
345 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
346 S_MODES, C_MODES, A_MODES, A_MODES
347 };
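/* Sketch of how the two tables are meant to be combined (the actual test
   is HARD_REGNO_MODE_OK in m32r.h; it is assumed here to AND the two
   entries):

       (m32r_hard_regno_mode_ok[REGNO] & m32r_mode_class[MODE]) != 0

   For example, register 13 above only allows S_MODES, so it may hold an
   SImode or SFmode value but not an 8-byte DImode one.  */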
348
349 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
350
351 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
352
353 static void
354 init_reg_tables (void)
355 {
356 int i;
357
358 for (i = 0; i < NUM_MACHINE_MODES; i++)
359 {
360 switch (GET_MODE_CLASS (i))
361 {
362 case MODE_INT:
363 case MODE_PARTIAL_INT:
364 case MODE_COMPLEX_INT:
365 if (GET_MODE_SIZE (i) <= 4)
366 m32r_mode_class[i] = 1 << (int) S_MODE;
367 else if (GET_MODE_SIZE (i) == 8)
368 m32r_mode_class[i] = 1 << (int) D_MODE;
369 else if (GET_MODE_SIZE (i) == 16)
370 m32r_mode_class[i] = 1 << (int) T_MODE;
371 else if (GET_MODE_SIZE (i) == 32)
372 m32r_mode_class[i] = 1 << (int) O_MODE;
373 else
374 m32r_mode_class[i] = 0;
375 break;
376 case MODE_FLOAT:
377 case MODE_COMPLEX_FLOAT:
378 if (GET_MODE_SIZE (i) <= 4)
379 m32r_mode_class[i] = 1 << (int) SF_MODE;
380 else if (GET_MODE_SIZE (i) == 8)
381 m32r_mode_class[i] = 1 << (int) DF_MODE;
382 else if (GET_MODE_SIZE (i) == 16)
383 m32r_mode_class[i] = 1 << (int) TF_MODE;
384 else if (GET_MODE_SIZE (i) == 32)
385 m32r_mode_class[i] = 1 << (int) OF_MODE;
386 else
387 m32r_mode_class[i] = 0;
388 break;
389 case MODE_CC:
390 m32r_mode_class[i] = 1 << (int) C_MODE;
391 break;
392 default:
393 m32r_mode_class[i] = 0;
394 break;
395 }
396 }
397
398 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
399 {
400 if (GPR_P (i))
401 m32r_regno_reg_class[i] = GENERAL_REGS;
402 else if (i == ARG_POINTER_REGNUM)
403 m32r_regno_reg_class[i] = GENERAL_REGS;
404 else
405 m32r_regno_reg_class[i] = NO_REGS;
406 }
407 }
408 \f
409 /* M32R specific attribute support.
410
411 interrupt - for interrupt functions
412
413 model - select code model used to access object
414
415 small: addresses use 24 bits, use bl to make calls
416 medium: addresses use 32 bits, use bl to make calls
417 large: addresses use 32 bits, use seth/add3/jl to make calls
418
419 Grep for MODEL in m32r.h for more info. */
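/* For illustration (hypothetical user code, not part of this file), the
   attribute is attached to a declaration such as

       extern int big_table[] __attribute__ ((model ("large")));

   which forces 32-bit addressing for big_table regardless of the
   -mmodel= option in effect.  */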
420
421 static tree small_ident1;
422 static tree small_ident2;
423 static tree medium_ident1;
424 static tree medium_ident2;
425 static tree large_ident1;
426 static tree large_ident2;
427
428 static void
429 init_idents (void)
430 {
431 if (small_ident1 == 0)
432 {
433 small_ident1 = get_identifier ("small");
434 small_ident2 = get_identifier ("__small__");
435 medium_ident1 = get_identifier ("medium");
436 medium_ident2 = get_identifier ("__medium__");
437 large_ident1 = get_identifier ("large");
438 large_ident2 = get_identifier ("__large__");
439 }
440 }
441
442 /* Handle a "model" attribute; arguments as in
443 struct attribute_spec.handler. */
444 static tree
445 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
446 tree args, int flags ATTRIBUTE_UNUSED,
447 bool *no_add_attrs)
448 {
449 tree arg;
450
451 init_idents ();
452 arg = TREE_VALUE (args);
453
454 if (arg != small_ident1
455 && arg != small_ident2
456 && arg != medium_ident1
457 && arg != medium_ident2
458 && arg != large_ident1
459 && arg != large_ident2)
460 {
461 warning (OPT_Wattributes, "invalid argument of %qs attribute",
462 IDENTIFIER_POINTER (name));
463 *no_add_attrs = true;
464 }
465
466 return NULL_TREE;
467 }
468 \f
469 /* Encode section information of DECL, which is either a VAR_DECL,
470 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
471
472 For the M32R we want to record:
473
474 - whether the object lives in .sdata/.sbss.
475 - what code model should be used to access the object
476 */
477
478 static void
479 m32r_encode_section_info (tree decl, rtx rtl, int first)
480 {
481 int extra_flags = 0;
482 tree model_attr;
483 enum m32r_model model;
484
485 default_encode_section_info (decl, rtl, first);
486
487 if (!DECL_P (decl))
488 return;
489
490 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
491 if (model_attr)
492 {
493 tree id;
494
495 init_idents ();
496
497 id = TREE_VALUE (TREE_VALUE (model_attr));
498
499 if (id == small_ident1 || id == small_ident2)
500 model = M32R_MODEL_SMALL;
501 else if (id == medium_ident1 || id == medium_ident2)
502 model = M32R_MODEL_MEDIUM;
503 else if (id == large_ident1 || id == large_ident2)
504 model = M32R_MODEL_LARGE;
505 else
506 gcc_unreachable (); /* shouldn't happen */
507 }
508 else
509 {
510 if (TARGET_MODEL_SMALL)
511 model = M32R_MODEL_SMALL;
512 else if (TARGET_MODEL_MEDIUM)
513 model = M32R_MODEL_MEDIUM;
514 else if (TARGET_MODEL_LARGE)
515 model = M32R_MODEL_LARGE;
516 else
517 gcc_unreachable (); /* shouldn't happen */
518 }
519 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
520
521 if (extra_flags)
522 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
523 }
524
525 /* Only mark the object as being small data area addressable if
526 it hasn't been explicitly marked with a code model.
527
528 The user can explicitly put an object in the small data area with the
529 section attribute. If the object is in sdata/sbss and marked with a
530 code model do both [put the object in .sdata and mark it as being
531 addressed with a specific code model - don't mark it as being addressed
532 with an SDA reloc though]. This is ok and might be useful at times. If
533 the object doesn't fit the linker will give an error. */
534
535 static bool
536 m32r_in_small_data_p (const_tree decl)
537 {
538 const_tree section;
539
540 if (TREE_CODE (decl) != VAR_DECL)
541 return false;
542
543 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
544 return false;
545
546 section = DECL_SECTION_NAME (decl);
547 if (section)
548 {
549 const char *const name = TREE_STRING_POINTER (section);
550 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
551 return true;
552 }
553 else
554 {
555 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
556 {
557 int size = int_size_in_bytes (TREE_TYPE (decl));
558
559 if (size > 0 && size <= g_switch_value)
560 return true;
561 }
562 }
563
564 return false;
565 }
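/* Illustrative examples (hypothetical user code): with -msdata=sdata or
   -msdata=use, a writable object no larger than the -G threshold, e.g.

       static int counter;

   is treated as small data by the size test above, while

       int fixed __attribute__ ((section (".sdata")));

   is placed there explicitly through the section attribute.  */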
566
567 /* Do anything needed before RTL is emitted for each function. */
568
569 void
570 m32r_init_expanders (void)
571 {
572 /* ??? At one point there was code here. The function is left in
573 to make it easy to experiment. */
574 }
575 \f
576 int
577 call_operand (rtx op, enum machine_mode mode)
578 {
579 if (!MEM_P (op))
580 return 0;
581 op = XEXP (op, 0);
582 return call_address_operand (op, mode);
583 }
584
585 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
586
587 int
588 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
589 {
590 if (! TARGET_SDATA_USE)
591 return 0;
592
593 if (GET_CODE (op) == SYMBOL_REF)
594 return SYMBOL_REF_SMALL_P (op);
595
596 if (GET_CODE (op) == CONST
597 && GET_CODE (XEXP (op, 0)) == PLUS
598 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
599 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
600 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
601
602 return 0;
603 }
604
605 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
606
607 int
608 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
609 {
610 rtx sym;
611
612 if (flag_pic)
613 return 0;
614
615 if (GET_CODE (op) == LABEL_REF)
616 return TARGET_ADDR24;
617
618 if (GET_CODE (op) == SYMBOL_REF)
619 sym = op;
620 else if (GET_CODE (op) == CONST
621 && GET_CODE (XEXP (op, 0)) == PLUS
622 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
623 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
624 sym = XEXP (XEXP (op, 0), 0);
625 else
626 return 0;
627
628 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
629 return 1;
630
631 if (TARGET_ADDR24
632 && (CONSTANT_POOL_ADDRESS_P (sym)
633 || LIT_NAME_P (XSTR (sym, 0))))
634 return 1;
635
636 return 0;
637 }
638
639 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
640
641 int
642 addr32_operand (rtx op, enum machine_mode mode)
643 {
644 rtx sym;
645
646 if (GET_CODE (op) == LABEL_REF)
647 return TARGET_ADDR32;
648
649 if (GET_CODE (op) == SYMBOL_REF)
650 sym = op;
651 else if (GET_CODE (op) == CONST
652 && GET_CODE (XEXP (op, 0)) == PLUS
653 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
654 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
655 && ! flag_pic)
656 sym = XEXP (XEXP (op, 0), 0);
657 else
658 return 0;
659
660 return (! addr24_operand (sym, mode)
661 && ! small_data_operand (sym, mode));
662 }
663
664 /* Return 1 if OP is a function that can be called with the `bl' insn. */
665
666 int
667 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
668 {
669 if (flag_pic)
670 return 1;
671
672 if (GET_CODE (op) == SYMBOL_REF)
673 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
674
675 return TARGET_CALL26;
676 }
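/* The `bl' instruction has a 24-bit pc-relative displacement that is
   shifted left by two, i.e. roughly a +/- 32MB reach (hence "call26"),
   so only functions known to use the large model are excluded above.  */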
677
678 /* Return 1 if OP is a DImode const we want to handle inline.
679 This must match the code in the movdi pattern.
680 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
681
682 int
683 easy_di_const (rtx op)
684 {
685 rtx high_rtx, low_rtx;
686 HOST_WIDE_INT high, low;
687
688 split_double (op, &high_rtx, &low_rtx);
689 high = INTVAL (high_rtx);
690 low = INTVAL (low_rtx);
691 /* Pick constants loadable with 2 16-bit `ldi' insns. */
692 if (high >= -128 && high <= 127
693 && low >= -128 && low <= 127)
694 return 1;
695 return 0;
696 }
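/* For example, the DImode constant 0x0000000500000003 splits into
   high = 5 and low = 3, both within [-128, 127], so it is handled
   inline; a constant whose low word is 0x10000 is not.  */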
697
698 /* Return 1 if OP is a DFmode const we want to handle inline.
699 This must match the code in the movdf pattern.
700 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
701
702 int
703 easy_df_const (rtx op)
704 {
705 REAL_VALUE_TYPE r;
706 long l[2];
707
708 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
709 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
710 if (l[0] == 0 && l[1] == 0)
711 return 1;
712 if ((l[0] & 0xffff) == 0 && l[1] == 0)
713 return 1;
714 return 0;
715 }
716
717 /* Return 1 if OP is (mem (reg ...)).
718 This is used in insn length calcs. */
719
720 int
721 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
722 {
723 return MEM_P (op) && REG_P (XEXP (op, 0));
724 }
725
726 /* Return nonzero if TYPE must be passed by indirect reference. */
727
728 static bool
729 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
730 enum machine_mode mode, const_tree type,
731 bool named ATTRIBUTE_UNUSED)
732 {
733 int size;
734
735 if (type)
736 size = int_size_in_bytes (type);
737 else
738 size = GET_MODE_SIZE (mode);
739
740 return (size < 0 || size > 8);
741 }
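/* Consequently an 8-byte aggregate (or a DImode/DFmode scalar) is still
   passed by value, while anything larger, or of variable size
   (int_size_in_bytes returns -1), is passed by reference.  */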
742 \f
743 /* Comparisons. */
744
745 /* X and Y are two things to compare using CODE. Emit the compare insn and
746 return the rtx for compare [arg0 of the if_then_else].
747 If need_compare is true then the comparison insn must be generated, rather
748 than being subsumed into the following branch instruction. */
749
750 rtx
751 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
752 {
753 enum rtx_code compare_code;
754 enum rtx_code branch_code;
755 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
756 int must_swap = 0;
757
758 switch (code)
759 {
760 case EQ: compare_code = EQ; branch_code = NE; break;
761 case NE: compare_code = EQ; branch_code = EQ; break;
762 case LT: compare_code = LT; branch_code = NE; break;
763 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
764 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
765 case GE: compare_code = LT; branch_code = EQ; break;
766 case LTU: compare_code = LTU; branch_code = NE; break;
767 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
768 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
769 case GEU: compare_code = LTU; branch_code = EQ; break;
770
771 default:
772 gcc_unreachable ();
773 }
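/* Worked example of the mapping above: the hardware compare only records
   a (signed or unsigned) "less than" result in the condition bit, so
   e.g. (le x y) is mapped to compare_code LT with must_swap set; the
   fall-through code at the end of this function then emits cmp y,x and
   branch_code EQ tests the condition bit being clear, i.e. !(y < x),
   which is exactly x <= y.  */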
774
775 if (need_compare)
776 {
777 switch (compare_code)
778 {
779 case EQ:
780 if (satisfies_constraint_P (y) /* Reg equal to small const. */
781 && y != const0_rtx)
782 {
783 rtx tmp = gen_reg_rtx (SImode);
784
785 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
786 x = tmp;
787 y = const0_rtx;
788 }
789 else if (CONSTANT_P (y)) /* Reg equal to const. */
790 {
791 rtx tmp = force_reg (GET_MODE (x), y);
792 y = tmp;
793 }
794
795 if (register_operand (y, SImode) /* Reg equal to reg. */
796 || y == const0_rtx) /* Reg equal to zero. */
797 {
798 emit_insn (gen_cmp_eqsi_insn (x, y));
799
800 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
801 }
802 break;
803
804 case LT:
805 if (register_operand (y, SImode)
806 || satisfies_constraint_P (y))
807 {
808 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
809
810 switch (code)
811 {
812 case LT:
813 emit_insn (gen_cmp_ltsi_insn (x, y));
814 code = EQ;
815 break;
816 case LE:
817 if (y == const0_rtx)
818 tmp = const1_rtx;
819 else
820 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
821 emit_insn (gen_cmp_ltsi_insn (x, tmp));
822 code = EQ;
823 break;
824 case GT:
825 if (CONST_INT_P (y))
826 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
827 else
828 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
829 emit_insn (gen_cmp_ltsi_insn (x, tmp));
830 code = NE;
831 break;
832 case GE:
833 emit_insn (gen_cmp_ltsi_insn (x, y));
834 code = NE;
835 break;
836 default:
837 gcc_unreachable ();
838 }
839
840 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
841 }
842 break;
843
844 case LTU:
845 if (register_operand (y, SImode)
846 || satisfies_constraint_P (y))
847 {
848 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
849
850 switch (code)
851 {
852 case LTU:
853 emit_insn (gen_cmp_ltusi_insn (x, y));
854 code = EQ;
855 break;
856 case LEU:
857 if (y == const0_rtx)
858 tmp = const1_rtx;
859 else
860 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
861 emit_insn (gen_cmp_ltusi_insn (x, tmp));
862 code = EQ;
863 break;
864 case GTU:
865 if (CONST_INT_P (y))
866 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
867 else
868 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
869 emit_insn (gen_cmp_ltusi_insn (x, tmp));
870 code = NE;
871 break;
872 case GEU:
873 emit_insn (gen_cmp_ltusi_insn (x, y));
874 code = NE;
875 break;
876 default:
877 gcc_unreachable ();
878 }
879
880 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
881 }
882 break;
883
884 default:
885 gcc_unreachable ();
886 }
887 }
888 else
889 {
890 /* Reg/reg equal comparison. */
891 if (compare_code == EQ
892 && register_operand (y, SImode))
893 return gen_rtx_fmt_ee (code, CCmode, x, y);
894
895 /* Reg/zero signed comparison. */
896 if ((compare_code == EQ || compare_code == LT)
897 && y == const0_rtx)
898 return gen_rtx_fmt_ee (code, CCmode, x, y);
899
900 /* Reg/smallconst equal comparison. */
901 if (compare_code == EQ
902 && satisfies_constraint_P (y))
903 {
904 rtx tmp = gen_reg_rtx (SImode);
905
906 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
907 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
908 }
909
910 /* Reg/const equal comparison. */
911 if (compare_code == EQ
912 && CONSTANT_P (y))
913 {
914 rtx tmp = force_reg (GET_MODE (x), y);
915
916 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
917 }
918 }
919
920 if (CONSTANT_P (y))
921 {
922 if (must_swap)
923 y = force_reg (GET_MODE (x), y);
924 else
925 {
926 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
927
928 if (! ok_const)
929 y = force_reg (GET_MODE (x), y);
930 }
931 }
932
933 switch (compare_code)
934 {
935 case EQ :
936 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
937 break;
938 case LT :
939 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
940 break;
941 case LTU :
942 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
943 break;
944
945 default:
946 gcc_unreachable ();
947 }
948
949 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
950 }
951
952 bool
953 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
954 {
955 enum machine_mode mode = GET_MODE (op0);
956
957 gcc_assert (mode == SImode);
958 switch (code)
959 {
960 case EQ:
961 if (!register_operand (op1, mode))
962 op1 = force_reg (mode, op1);
963
964 if (TARGET_M32RX || TARGET_M32R2)
965 {
966 if (!reg_or_zero_operand (op2, mode))
967 op2 = force_reg (mode, op2);
968
969 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
970 return true;
971 }
972 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
973 {
974 emit_insn (gen_seq_zero_insn (op0, op1));
975 return true;
976 }
977
978 if (!reg_or_eq_int16_operand (op2, mode))
979 op2 = force_reg (mode, op2);
980
981 emit_insn (gen_seq_insn (op0, op1, op2));
982 return true;
983
984 case NE:
985 if (!CONST_INT_P (op2)
986 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
987 {
988 rtx reg;
989
990 if (reload_completed || reload_in_progress)
991 return false;
992
993 reg = gen_reg_rtx (SImode);
994 emit_insn (gen_xorsi3 (reg, op1, op2));
995 op1 = reg;
996
997 if (!register_operand (op1, mode))
998 op1 = force_reg (mode, op1);
999
1000 emit_insn (gen_sne_zero_insn (op0, op1));
1001 return true;
1002 }
1003 return false;
1004
1005 case LT:
1006 case GT:
1007 if (code == GT)
1008 {
1009 rtx tmp = op2;
1010 op2 = op1;
1011 op1 = tmp;
1012 code = LT;
1013 }
1014
1015 if (!register_operand (op1, mode))
1016 op1 = force_reg (mode, op1);
1017
1018 if (!reg_or_int16_operand (op2, mode))
1019 op2 = force_reg (mode, op2);
1020
1021 emit_insn (gen_slt_insn (op0, op1, op2));
1022 return true;
1023
1024 case LTU:
1025 case GTU:
1026 if (code == GTU)
1027 {
1028 rtx tmp = op2;
1029 op2 = op1;
1030 op1 = tmp;
1031 code = LTU;
1032 }
1033
1034 if (!register_operand (op1, mode))
1035 op1 = force_reg (mode, op1);
1036
1037 if (!reg_or_int16_operand (op2, mode))
1038 op2 = force_reg (mode, op2);
1039
1040 emit_insn (gen_sltu_insn (op0, op1, op2));
1041 return true;
1042
1043 case GE:
1044 case GEU:
1045 if (!register_operand (op1, mode))
1046 op1 = force_reg (mode, op1);
1047
1048 if (!reg_or_int16_operand (op2, mode))
1049 op2 = force_reg (mode, op2);
1050
1051 if (code == GE)
1052 emit_insn (gen_sge_insn (op0, op1, op2));
1053 else
1054 emit_insn (gen_sgeu_insn (op0, op1, op2));
1055 return true;
1056
1057 case LE:
1058 case LEU:
1059 if (!register_operand (op1, mode))
1060 op1 = force_reg (mode, op1);
1061
1062 if (CONST_INT_P (op2))
1063 {
1064 HOST_WIDE_INT value = INTVAL (op2);
1065 if (value >= 2147483647)
1066 {
1067 emit_move_insn (op0, const1_rtx);
1068 return true;
1069 }
1070
1071 op2 = GEN_INT (value + 1);
1072 if (value < -32768 || value >= 32767)
1073 op2 = force_reg (mode, op2);
1074
1075 if (code == LEU)
1076 emit_insn (gen_sltu_insn (op0, op1, op2));
1077 else
1078 emit_insn (gen_slt_insn (op0, op1, op2));
1079 return true;
1080 }
1081
1082 if (!register_operand (op2, mode))
1083 op2 = force_reg (mode, op2);
1084
1085 if (code == LEU)
1086 emit_insn (gen_sleu_insn (op0, op1, op2));
1087 else
1088 emit_insn (gen_sle_insn (op0, op1, op2));
1089 return true;
1090
1091 default:
1092 gcc_unreachable ();
1093 }
1094 }
1095
1096 \f
1097 /* Split a 2 word move (DI or DF) into component parts. */
1098
1099 rtx
1100 gen_split_move_double (rtx operands[])
1101 {
1102 enum machine_mode mode = GET_MODE (operands[0]);
1103 rtx dest = operands[0];
1104 rtx src = operands[1];
1105 rtx val;
1106
1107 /* We might have (SUBREG (MEM)) here, so just get rid of the
1108 subregs to make this code simpler. It is safe to call
1109 alter_subreg any time after reload. */
1110 if (GET_CODE (dest) == SUBREG)
1111 alter_subreg (&dest);
1112 if (GET_CODE (src) == SUBREG)
1113 alter_subreg (&src);
1114
1115 start_sequence ();
1116 if (REG_P (dest))
1117 {
1118 int dregno = REGNO (dest);
1119
1120 /* Reg = reg. */
1121 if (REG_P (src))
1122 {
1123 int sregno = REGNO (src);
1124
1125 int reverse = (dregno == sregno + 1);
1126
1127 /* We normally copy the low-numbered register first. However, if
1128 the first register of operand 0 is the same as the second register of
1129 operand 1, we must copy in the opposite order. */
1130 emit_insn (gen_rtx_SET (VOIDmode,
1131 operand_subword (dest, reverse, TRUE, mode),
1132 operand_subword (src, reverse, TRUE, mode)));
1133
1134 emit_insn (gen_rtx_SET (VOIDmode,
1135 operand_subword (dest, !reverse, TRUE, mode),
1136 operand_subword (src, !reverse, TRUE, mode)));
1137 }
1138
1139 /* Reg = constant. */
1140 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1141 {
1142 rtx words[2];
1143 split_double (src, &words[0], &words[1]);
1144 emit_insn (gen_rtx_SET (VOIDmode,
1145 operand_subword (dest, 0, TRUE, mode),
1146 words[0]));
1147
1148 emit_insn (gen_rtx_SET (VOIDmode,
1149 operand_subword (dest, 1, TRUE, mode),
1150 words[1]));
1151 }
1152
1153 /* Reg = mem. */
1154 else if (MEM_P (src))
1155 {
1156 /* If the high-address word is used in the address, we must load it
1157 last. Otherwise, load it first. */
1158 int reverse
1159 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1160
1161 /* We used to optimize loads from single registers as
1162
1163 ld r1,r3+; ld r2,r3
1164
1165 if r3 were not used subsequently. However, the REG_NOTES aren't
1166 propagated correctly by the reload phase, and it can cause bad
1167 code to be generated. We could still try:
1168
1169 ld r1,r3+; ld r2,r3; addi r3,-4
1170
1171 which saves 2 bytes and doesn't force longword alignment. */
1172 emit_insn (gen_rtx_SET (VOIDmode,
1173 operand_subword (dest, reverse, TRUE, mode),
1174 adjust_address (src, SImode,
1175 reverse * UNITS_PER_WORD)));
1176
1177 emit_insn (gen_rtx_SET (VOIDmode,
1178 operand_subword (dest, !reverse, TRUE, mode),
1179 adjust_address (src, SImode,
1180 !reverse * UNITS_PER_WORD)));
1181 }
1182 else
1183 gcc_unreachable ();
1184 }
1185
1186 /* Mem = reg. */
1187 /* We used to optimize stores through single registers as
1188
1189 st r1,r3; st r2,+r3
1190
1191 if r3 were not used subsequently. However, the REG_NOTES aren't
1192 propagated correctly by the reload phase, and it can cause bad
1193 code to be generated. We could still try:
1194
1195 st r1,r3; st r2,+r3; addi r3,-4
1196
1197 which saves 2 bytes and doesn't force longword alignment. */
1198 else if (MEM_P (dest) && REG_P (src))
1199 {
1200 emit_insn (gen_rtx_SET (VOIDmode,
1201 adjust_address (dest, SImode, 0),
1202 operand_subword (src, 0, TRUE, mode)));
1203
1204 emit_insn (gen_rtx_SET (VOIDmode,
1205 adjust_address (dest, SImode, UNITS_PER_WORD),
1206 operand_subword (src, 1, TRUE, mode)));
1207 }
1208
1209 else
1210 gcc_unreachable ();
1211
1212 val = get_insns ();
1213 end_sequence ();
1214 return val;
1215 }
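/* For example, copying (reg:DI 4) [r4/r5] into (reg:DI 5) [r5/r6] has
   dregno == sregno + 1, so the high word copy r5 -> r6 is emitted first;
   copying the low word first would overwrite r5 before it is read.  */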
1216
1217 \f
1218 static int
1219 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1220 tree type, bool named ATTRIBUTE_UNUSED)
1221 {
1222 int words;
1223 unsigned int size =
1224 (((mode == BLKmode && type)
1225 ? (unsigned int) int_size_in_bytes (type)
1226 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1227 / UNITS_PER_WORD;
1228
1229 if (*cum >= M32R_MAX_PARM_REGS)
1230 words = 0;
1231 else if (*cum + size > M32R_MAX_PARM_REGS)
1232 words = (*cum + size) - M32R_MAX_PARM_REGS;
1233 else
1234 words = 0;
1235
1236 return words * UNITS_PER_WORD;
1237 }
1238
1239 /* The ROUND_ADVANCE* macros are local to this file. */
1240 /* Round SIZE up to a word boundary. */
1241 #define ROUND_ADVANCE(SIZE) \
1242 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1243
1244 /* Round arg MODE/TYPE up to the next word boundary. */
1245 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1246 ((MODE) == BLKmode \
1247 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1248 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
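/* For example, with 4-byte words ROUND_ADVANCE (1) through
   ROUND_ADVANCE (4) are all 1, and a 6-byte BLKmode argument gives
   ROUND_ADVANCE_ARG == 2.  */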
1249
1250 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1251 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1252
1253 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1254 a reg. This includes arguments that have to be passed by reference as the
1255 pointer to them is passed in a reg if one is available (and that is what
1256 we're given).
1257 This macro is only used in this file. */
1258 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1259 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1260
1261 /* Determine where to put an argument to a function.
1262 Value is zero to push the argument on the stack,
1263 or a hard register in which to store the argument.
1264
1265 MODE is the argument's machine mode.
1266 TYPE is the data type of the argument (as a tree).
1267 This is null for libcalls where that information may
1268 not be available.
1269 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1270 the preceding args and about the function being called.
1271 NAMED is nonzero if this argument is a named parameter
1272 (otherwise it is an extra parameter matching an ellipsis). */
1273 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1274 and the rest are pushed. */
1275
1276 static rtx
1277 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1278 const_tree type ATTRIBUTE_UNUSED,
1279 bool named ATTRIBUTE_UNUSED)
1280 {
1281 return (PASS_IN_REG_P (*cum, mode, type)
1282 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1283 : NULL_RTX);
1284 }
1285
1286 /* Update the data in CUM to advance over an argument
1287 of mode MODE and data type TYPE.
1288 (TYPE is null for libcalls where that information may not be available.) */
1289
1290 static void
1291 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1292 const_tree type, bool named ATTRIBUTE_UNUSED)
1293 {
1294 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1295 + ROUND_ADVANCE_ARG (mode, type));
1296 }
1297
1298 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1299
1300 static bool
1301 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1302 {
1303 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1304 }
1305
1306 /* Worker function for TARGET_FUNCTION_VALUE. */
1307
1308 static rtx
1309 m32r_function_value (const_tree valtype,
1310 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1311 bool outgoing ATTRIBUTE_UNUSED)
1312 {
1313 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1314 }
1315
1316 /* Worker function for TARGET_LIBCALL_VALUE. */
1317
1318 static rtx
1319 m32r_libcall_value (enum machine_mode mode,
1320 const_rtx fun ATTRIBUTE_UNUSED)
1321 {
1322 return gen_rtx_REG (mode, 0);
1323 }
1324
1325 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1326
1327 ??? What about r1 in DI/DF values. */
1328
1329 static bool
1330 m32r_function_value_regno_p (const unsigned int regno)
1331 {
1332 return (regno == 0);
1333 }
1334
1335 /* Do any needed setup for a variadic function. For the M32R, we must
1336 create a register parameter block, and then copy any anonymous arguments
1337 in registers to memory.
1338
1339 CUM has not been updated for the last named argument which has type TYPE
1340 and mode MODE, and we rely on this fact. */
1341
1342 static void
1343 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1344 tree type, int *pretend_size, int no_rtl)
1345 {
1346 int first_anon_arg;
1347
1348 if (no_rtl)
1349 return;
1350
1351 /* All BLKmode values are passed by reference. */
1352 gcc_assert (mode != BLKmode);
1353
1354 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1355 + ROUND_ADVANCE_ARG (mode, type));
1356
1357 if (first_anon_arg < M32R_MAX_PARM_REGS)
1358 {
1359 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1360 int first_reg_offset = first_anon_arg;
1361 /* Size in words to "pretend" allocate. */
1362 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1363 rtx regblock;
1364
1365 regblock = gen_frame_mem (BLKmode,
1366 plus_constant (arg_pointer_rtx,
1367 FIRST_PARM_OFFSET (0)));
1368 set_mem_alias_set (regblock, get_varargs_alias_set ());
1369 move_block_from_reg (first_reg_offset, regblock, size);
1370
1371 *pretend_size = (size * UNITS_PER_WORD);
1372 }
1373 }
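/* Sketch of the effect, assuming the usual M32R_MAX_PARM_REGS of 4: for
   "int f (int a, ...)" CUM is still 0 here (it excludes the last named
   argument), so first_anon_arg is 1 and registers 1..3 are dumped into
   the register parameter block, giving *pretend_size == 12.  */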
1374
1375 \f
1376 /* Return true if INSN is a real, instruction-bearing insn.  */
1377
1378 static int
1379 m32r_is_insn (rtx insn)
1380 {
1381 return (NONDEBUG_INSN_P (insn)
1382 && GET_CODE (PATTERN (insn)) != USE
1383 && GET_CODE (PATTERN (insn)) != CLOBBER
1384 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1385 }
1386
1387 /* Increase the priority of long instructions so that the
1388 short instructions are scheduled ahead of the long ones. */
1389
1390 static int
1391 m32r_adjust_priority (rtx insn, int priority)
1392 {
1393 if (m32r_is_insn (insn)
1394 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1395 priority <<= 3;
1396
1397 return priority;
1398 }
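/* For example, a long (32-bit) insn whose priority was 4 is boosted to
   32 (a shift by 3); short (16-bit) insns keep their original priority.  */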
1399
1400 \f
1401 /* Indicate how many instructions can be issued at the same time.
1402 This is sort of a lie. The m32r can issue only 1 long insn at
1403 once, but it can issue 2 short insns. The default therefore is
1404 set at 2, but this can be overridden by the command line option
1405 -missue-rate=1. */
1406
1407 static int
1408 m32r_issue_rate (void)
1409 {
1410 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1411 }
1412 \f
1413 /* Cost functions. */
1414
1415 /* Implement TARGET_MEMORY_MOVE_COST.
1416
1417 Memory is 3 times as expensive as registers.
1418 ??? Is that the right way to look at it? */
1419
1420 static int
1421 m32r_memory_move_cost (enum machine_mode mode,
1422 reg_class_t rclass ATTRIBUTE_UNUSED,
1423 bool in ATTRIBUTE_UNUSED)
1424 {
1425 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1426 return 6;
1427 else
1428 return 12;
1429 }
1430
1431 static bool
1432 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1433 bool speed ATTRIBUTE_UNUSED)
1434 {
1435 switch (code)
1436 {
1437 /* Small integers are as cheap as registers. 4 byte values can be
1438 fetched as immediate constants - let's give that the cost of an
1439 extra insn. */
1440 case CONST_INT:
1441 if (INT16_P (INTVAL (x)))
1442 {
1443 *total = 0;
1444 return true;
1445 }
1446 /* FALLTHRU */
1447
1448 case CONST:
1449 case LABEL_REF:
1450 case SYMBOL_REF:
1451 *total = COSTS_N_INSNS (1);
1452 return true;
1453
1454 case CONST_DOUBLE:
1455 {
1456 rtx high, low;
1457
1458 split_double (x, &high, &low);
1459 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1460 + !INT16_P (INTVAL (low)));
1461 return true;
1462 }
1463
1464 case MULT:
1465 *total = COSTS_N_INSNS (3);
1466 return true;
1467
1468 case DIV:
1469 case UDIV:
1470 case MOD:
1471 case UMOD:
1472 *total = COSTS_N_INSNS (10);
1473 return true;
1474
1475 default:
1476 return false;
1477 }
1478 }
1479 \f
1480 /* Type of function DECL.
1481
1482 The result is cached. To reset the cache at the end of a function,
1483 call with DECL = NULL_TREE. */
1484
1485 enum m32r_function_type
1486 m32r_compute_function_type (tree decl)
1487 {
1488 /* Cached value. */
1489 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1490 /* Last function we were called for. */
1491 static tree last_fn = NULL_TREE;
1492
1493 /* Resetting the cached value? */
1494 if (decl == NULL_TREE)
1495 {
1496 fn_type = M32R_FUNCTION_UNKNOWN;
1497 last_fn = NULL_TREE;
1498 return fn_type;
1499 }
1500
1501 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1502 return fn_type;
1503
1504 /* Compute function type. */
1505 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1506 ? M32R_FUNCTION_INTERRUPT
1507 : M32R_FUNCTION_NORMAL);
1508
1509 last_fn = decl;
1510 return fn_type;
1511 }
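/* For illustration (hypothetical user code): a function declared as

       void handler (void) __attribute__ ((interrupt));

   is classified as M32R_FUNCTION_INTERRUPT, which also makes the prologue
   save any call-clobbered registers the handler uses (see
   MUST_SAVE_REGISTER below).  */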
1512 \f/* Function prologue/epilogue handlers. */
1513
1514 /* M32R stack frames look like:
1515
1516 Before call After call
1517 +-----------------------+ +-----------------------+
1518 | | | |
1519 high | local variables, | | local variables, |
1520 mem | reg save area, etc. | | reg save area, etc. |
1521 | | | |
1522 +-----------------------+ +-----------------------+
1523 | | | |
1524 | arguments on stack. | | arguments on stack. |
1525 | | | |
1526 SP+0->+-----------------------+ +-----------------------+
1527 | reg parm save area, |
1528 | only created for |
1529 | variable argument |
1530 | functions |
1531 +-----------------------+
1532 | previous frame ptr |
1533 +-----------------------+
1534 | |
1535 | register save area |
1536 | |
1537 +-----------------------+
1538 | return address |
1539 +-----------------------+
1540 | |
1541 | local variables |
1542 | |
1543 +-----------------------+
1544 | |
1545 | alloca allocations |
1546 | |
1547 +-----------------------+
1548 | |
1549 low | arguments on stack |
1550 memory | |
1551 SP+0->+-----------------------+
1552
1553 Notes:
1554 1) The "reg parm save area" does not exist for non variable argument fns.
1555 2) The "reg parm save area" can be eliminated completely if we saved regs
1556 containing anonymous args separately but that complicates things too
1557 much (so it's not done).
1558 3) The return address is saved after the register save area so as to have as
1559 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1560
1561 /* Structure to be filled in by m32r_compute_frame_size with register
1562 save masks, and offsets for the current function. */
1563 struct m32r_frame_info
1564 {
1565 unsigned int total_size; /* # bytes that the entire frame takes up. */
1566 unsigned int extra_size; /* # bytes of extra stuff. */
1567 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1568 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1569 unsigned int reg_size; /* # bytes needed to store regs. */
1570 unsigned int var_size; /* # bytes that variables take up. */
1571 unsigned int gmask; /* Mask of saved gp registers. */
1572 unsigned int save_fp; /* Nonzero if fp must be saved. */
1573 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1574 int initialized; /* Nonzero if frame size already calculated. */
1575 };
1576
1577 /* Current frame information calculated by m32r_compute_frame_size. */
1578 static struct m32r_frame_info current_frame_info;
1579
1580 /* Zero structure to initialize current_frame_info. */
1581 static struct m32r_frame_info zero_frame_info;
1582
1583 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1584 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1585
1586 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1587 The return address and frame pointer are treated separately.
1588 Don't consider them here. */
1589 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1590 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1591 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1592
1593 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1594 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1595
1596 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1597 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1598
1599 /* Return the bytes needed to compute the frame pointer from the current
1600 stack pointer.
1601
1602 SIZE is the size needed for local variables. */
1603
1604 unsigned int
1605 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1606 {
1607 unsigned int regno;
1608 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1609 unsigned int reg_size;
1610 unsigned int gmask;
1611 enum m32r_function_type fn_type;
1612 int interrupt_p;
1613 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1614 | crtl->profile);
1615
1616 var_size = M32R_STACK_ALIGN (size);
1617 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1618 pretend_size = crtl->args.pretend_args_size;
1619 extra_size = FIRST_PARM_OFFSET (0);
1620 total_size = extra_size + pretend_size + args_size + var_size;
1621 reg_size = 0;
1622 gmask = 0;
1623
1624 /* See if this is an interrupt handler. Call used registers must be saved
1625 for them too. */
1626 fn_type = m32r_compute_function_type (current_function_decl);
1627 interrupt_p = M32R_INTERRUPT_P (fn_type);
1628
1629 /* Calculate space needed for registers. */
1630 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1631 {
1632 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1633 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1634 {
1635 reg_size += UNITS_PER_WORD;
1636 gmask |= 1 << regno;
1637 }
1638 }
1639
1640 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1641 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1642
1643 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1644 * UNITS_PER_WORD);
1645 total_size += reg_size;
1646
1647 /* ??? Not sure this is necessary, and I don't think the epilogue
1648 handler will do the right thing if this changes total_size. */
1649 total_size = M32R_STACK_ALIGN (total_size);
1650
1651 /* frame_size = total_size - (pretend_size + reg_size); */
1652
1653 /* Save computed information. */
1654 current_frame_info.total_size = total_size;
1655 current_frame_info.extra_size = extra_size;
1656 current_frame_info.pretend_size = pretend_size;
1657 current_frame_info.var_size = var_size;
1658 current_frame_info.args_size = args_size;
1659 current_frame_info.reg_size = reg_size;
1660 current_frame_info.gmask = gmask;
1661 current_frame_info.initialized = reload_completed;
1662
1663 /* Ok, we're done. */
1664 return total_size;
1665 }
1666
1667 /* Worker function for TARGET_CAN_ELIMINATE. */
1668
1669 bool
1670 m32r_can_eliminate (const int from, const int to)
1671 {
1672 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1673 ? ! frame_pointer_needed
1674 : true);
1675 }
1676
1677 \f
1678 /* The table we use to reference PIC data. */
1679 static rtx global_offset_table;
1680
1681 static void
1682 m32r_reload_lr (rtx sp, int size)
1683 {
1684 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1685
1686 if (size == 0)
1687 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1688 else if (size < 32768)
1689 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1690 gen_rtx_PLUS (Pmode, sp,
1691 GEN_INT (size)))));
1692 else
1693 {
1694 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1695
1696 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1697 emit_insn (gen_addsi3 (tmp, tmp, sp));
1698 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1699 }
1700
1701 emit_use (lr);
1702 }
1703
1704 void
1705 m32r_load_pic_register (void)
1706 {
1707 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1708 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1709 GEN_INT (TARGET_MODEL_SMALL)));
1710
1711 /* Need to emit this whether or not we obey regdecls,
1712 since setjmp/longjmp can cause life info to screw up. */
1713 emit_use (pic_offset_table_rtx);
1714 }
1715
1716 /* Expand the m32r prologue as a series of insns. */
1717
1718 void
1719 m32r_expand_prologue (void)
1720 {
1721 int regno;
1722 int frame_size;
1723 unsigned int gmask;
1724 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1725 | crtl->profile);
1726
1727 if (! current_frame_info.initialized)
1728 m32r_compute_frame_size (get_frame_size ());
1729
1730 gmask = current_frame_info.gmask;
1731
1732 /* These cases shouldn't happen. Catch them now. */
1733 gcc_assert (current_frame_info.total_size || !gmask);
1734
1735 /* Allocate space for register arguments if this is a variadic function. */
1736 if (current_frame_info.pretend_size != 0)
1737 {
1738 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1739 the wrong result on a 64-bit host. */
1740 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1741 emit_insn (gen_addsi3 (stack_pointer_rtx,
1742 stack_pointer_rtx,
1743 GEN_INT (-pretend_size)));
1744 }
1745
1746 /* Save any registers we need to and set up fp. */
1747 if (current_frame_info.save_fp)
1748 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1749
1750 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1751
1752 /* Save any needed call-saved regs (and call-used if this is an
1753 interrupt handler). */
1754 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1755 {
1756 if ((gmask & (1 << regno)) != 0)
1757 emit_insn (gen_movsi_push (stack_pointer_rtx,
1758 gen_rtx_REG (Pmode, regno)));
1759 }
1760
1761 if (current_frame_info.save_lr)
1762 emit_insn (gen_movsi_push (stack_pointer_rtx,
1763 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1764
1765 /* Allocate the stack frame. */
1766 frame_size = (current_frame_info.total_size
1767 - (current_frame_info.pretend_size
1768 + current_frame_info.reg_size));
1769
1770 if (frame_size == 0)
1771 ; /* Nothing to do. */
1772 else if (frame_size <= 32768)
1773 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1774 GEN_INT (-frame_size)));
1775 else
1776 {
1777 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1778
1779 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1780 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1781 }
1782
1783 if (frame_pointer_needed)
1784 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1785
1786 if (crtl->profile)
1787 /* Push lr for mcount (form_pc, x). */
1788 emit_insn (gen_movsi_push (stack_pointer_rtx,
1789 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1790
1791 if (pic_reg_used)
1792 {
1793 m32r_load_pic_register ();
1794 m32r_reload_lr (stack_pointer_rtx,
1795 (crtl->profile ? 0 : frame_size));
1796 }
1797
1798 if (crtl->profile && !pic_reg_used)
1799 emit_insn (gen_blockage ());
1800 }
1801
1802 \f
1803 /* Set up the stack and frame pointer (if desired) for the function.
1804 Note, if this is changed, you need to mirror the changes in
1805 m32r_compute_frame_size which calculates the prolog size. */
1806
1807 static void
1808 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1809 {
1810 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1811
1812 /* If this is an interrupt handler, mark it as such. */
1813 if (M32R_INTERRUPT_P (fn_type))
1814 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1815
1816 if (! current_frame_info.initialized)
1817 m32r_compute_frame_size (size);
1818
1819 /* This is only for the human reader. */
1820 fprintf (file,
1821 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1822 ASM_COMMENT_START,
1823 current_frame_info.var_size,
1824 current_frame_info.reg_size / 4,
1825 current_frame_info.args_size,
1826 current_frame_info.extra_size);
1827 }
1828 \f
1829 /* Output RTL to pop register REGNO from the stack. */
1830
1831 static void
1832 pop (int regno)
1833 {
1834 rtx x;
1835
1836 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1837 stack_pointer_rtx));
1838 add_reg_note (x, REG_INC, stack_pointer_rtx);
1839 }
1840
1841 /* Expand the m32r epilogue as a series of insns. */
1842
1843 void
1844 m32r_expand_epilogue (void)
1845 {
1846 int regno;
1847 int noepilogue = FALSE;
1848 int total_size;
1849
1850 gcc_assert (current_frame_info.initialized);
1851 total_size = current_frame_info.total_size;
1852
1853 if (total_size == 0)
1854 {
1855 rtx insn = get_last_insn ();
1856
1857 /* If the last insn was a BARRIER, we don't have to write any code
1858 because a jump (aka return) was put there. */
1859 if (insn && NOTE_P (insn))
1860 insn = prev_nonnote_insn (insn);
1861 if (insn && BARRIER_P (insn))
1862 noepilogue = TRUE;
1863 }
1864
1865 if (!noepilogue)
1866 {
1867 unsigned int var_size = current_frame_info.var_size;
1868 unsigned int args_size = current_frame_info.args_size;
1869 unsigned int gmask = current_frame_info.gmask;
1870 int can_trust_sp_p = !cfun->calls_alloca;
1871
1872 if (flag_exceptions)
1873 emit_insn (gen_blockage ());
1874
1875 /* The first thing to do is point the sp at the bottom of the register
1876 save area. */
1877 if (can_trust_sp_p)
1878 {
1879 unsigned int reg_offset = var_size + args_size;
1880
1881 if (reg_offset == 0)
1882 ; /* Nothing to do. */
1883 else if (reg_offset < 32768)
1884 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1885 GEN_INT (reg_offset)));
1886 else
1887 {
1888 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1889
1890 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1891 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1892 tmp));
1893 }
1894 }
1895 else if (frame_pointer_needed)
1896 {
1897 unsigned int reg_offset = var_size + args_size;
1898
1899 if (reg_offset == 0)
1900 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1901 else if (reg_offset < 32768)
1902 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1903 GEN_INT (reg_offset)));
1904 else
1905 {
1906 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1907
1908 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1909 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1910 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1911 tmp));
1912 }
1913 }
1914 else
1915 gcc_unreachable ();
1916
1917 if (current_frame_info.save_lr)
1918 pop (RETURN_ADDR_REGNUM);
1919
1920 /* Restore any saved registers, in reverse order of course. */
1921 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1922 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1923 {
1924 if ((gmask & (1L << regno)) != 0)
1925 pop (regno);
1926 }
1927
1928 if (current_frame_info.save_fp)
1929 pop (FRAME_POINTER_REGNUM);
1930
1931 /* Remove varargs area if present. */
1932 if (current_frame_info.pretend_size != 0)
1933 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1934 GEN_INT (current_frame_info.pretend_size)));
1935
1936 emit_insn (gen_blockage ());
1937 }
1938 }
1939
1940 /* Do any necessary cleanup after a function to restore stack, frame,
1941 and regs. */
1942
1943 static void
1944 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1945 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1946 {
1947 /* Reset state info for each function. */
1948 current_frame_info = zero_frame_info;
1949 m32r_compute_function_type (NULL_TREE);
1950 }
1951 \f
1952 /* Return nonzero if this function is known to have a null or 1 instruction
1953 epilogue. */
1954
1955 int
1956 direct_return (void)
1957 {
1958 if (!reload_completed)
1959 return FALSE;
1960
1961 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1962 return FALSE;
1963
1964 if (! current_frame_info.initialized)
1965 m32r_compute_frame_size (get_frame_size ());
1966
1967 return current_frame_info.total_size == 0;
1968 }
1969
1970 \f
1971 /* PIC. */
1972
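/* Return 1 if X needs no PIC-specific legitimization, i.e. X is not a
   SYMBOL_REF, LABEL_REF, or a CONST of the form (symbol-or-label + integer);
   those forms must first go through m32r_legitimize_pic_address.  */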
1973 int
1974 m32r_legitimate_pic_operand_p (rtx x)
1975 {
1976 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1977 return 0;
1978
1979 if (GET_CODE (x) == CONST
1980 && GET_CODE (XEXP (x, 0)) == PLUS
1981 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1982 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1983 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1984 return 0;
1985
1986 return 1;
1987 }
1988
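/* Legitimize address ORIG for PIC code, using REG (or a fresh pseudo when
   REG is zero) to hold intermediate values.  Judging from the patterns used
   below: labels and local symbols are computed as a pic-register-relative
   offset (gen_gotoff_load_addr plus an add of pic_offset_table_rtx), other
   symbols are loaded indirectly through the table addressed by the pic
   register (gen_pic_load_addr plus a memory load), and CONST plus-expressions
   are split, legitimized piecewise, and recombined.  */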
1989 rtx
1990 m32r_legitimize_pic_address (rtx orig, rtx reg)
1991 {
1992 #ifdef DEBUG_PIC
1993 printf ("m32r_legitimize_pic_address()\n");
1994 #endif
1995
1996 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1997 {
1998 rtx pic_ref, address;
1999 int subregs = 0;
2000
2001 if (reg == 0)
2002 {
2003 gcc_assert (!reload_in_progress && !reload_completed);
2004 reg = gen_reg_rtx (Pmode);
2005
2006 subregs = 1;
2007 }
2008
2009 if (subregs)
2010 address = gen_reg_rtx (Pmode);
2011 else
2012 address = reg;
2013
2014 crtl->uses_pic_offset_table = 1;
2015
2016 if (GET_CODE (orig) == LABEL_REF
2017 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
2018 {
2019 emit_insn (gen_gotoff_load_addr (reg, orig));
2020 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
2021 return reg;
2022 }
2023
2024 emit_insn (gen_pic_load_addr (address, orig));
2025
2026 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2027 pic_ref = gen_const_mem (Pmode, address);
2028 emit_move_insn (reg, pic_ref);
2029 return reg;
2030 }
2031 else if (GET_CODE (orig) == CONST)
2032 {
2033 rtx base, offset;
2034
2035 if (GET_CODE (XEXP (orig, 0)) == PLUS
2036 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2037 return orig;
2038
2039 if (reg == 0)
2040 {
2041 gcc_assert (!reload_in_progress && !reload_completed);
2042 reg = gen_reg_rtx (Pmode);
2043 }
2044
2045 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2046 {
2047 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2048 if (base == reg)
2049 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2050 else
2051 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2052 }
2053 else
2054 return orig;
2055
2056 if (CONST_INT_P (offset))
2057 {
2058 if (INT16_P (INTVAL (offset)))
2059 return plus_constant (base, INTVAL (offset));
2060 else
2061 {
2062 gcc_assert (! reload_in_progress && ! reload_completed);
2063 offset = force_reg (Pmode, offset);
2064 }
2065 }
2066
2067 return gen_rtx_PLUS (Pmode, base, offset);
2068 }
2069
2070 return orig;
2071 }
2072
2073 static rtx
2074 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2075 enum machine_mode mode ATTRIBUTE_UNUSED)
2076 {
2077 if (flag_pic)
2078 return m32r_legitimize_pic_address (x, NULL_RTX);
2079 else
2080 return x;
2081 }
2082
2083 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2084
2085 static bool
2086 m32r_mode_dependent_address_p (const_rtx addr)
2087 {
2088 if (GET_CODE (addr) == LO_SUM)
2089 return true;
2090
2091 return false;
2092 }
2093 \f
2094 /* Nested function support. */
2095
2096 /* Emit RTL insns to initialize the variable parts of a trampoline.
2097 FNADDR is an RTX for the address of the function's pure code.
2098 CXT is an RTX for the static chain value for the function. */
2099
2100 void
2101 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2102 rtx fnaddr ATTRIBUTE_UNUSED,
2103 rtx cxt ATTRIBUTE_UNUSED)
2104 {
2105 }
2106 \f
2107 static void
2108 m32r_file_start (void)
2109 {
2110 default_file_start ();
2111
2112 if (flag_verbose_asm)
2113 fprintf (asm_out_file,
2114 "%s M32R/D special options: -G %d\n",
2115 ASM_COMMENT_START, g_switch_value);
2116
2117 if (TARGET_LITTLE_ENDIAN)
2118 fprintf (asm_out_file, "\t.little\n");
2119 }
2120 \f
2121 /* Print operand X (an rtx) in assembler syntax to file FILE.
2122 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2123 For `%' followed by punctuation, CODE is the punctuation and X is null. */
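/* The target-specific codes handled below are: 's' and 'p' (block-move store
   and load addresses), 'R' (second word of a DImode/DFmode operand), 'H' and
   'L' (high and low words), 'A' (decimal form of a float constant), 'B' and
   'T' (bottom and top 16 bits, for seth/or3/add3 pairs), 'U' (update suffix
   for pre-increment/decrement memory), 'N' (negated constant), 'X' (constant
   printed in hex) and '#' (the immediate prefix).  */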
2124
2125 static void
2126 m32r_print_operand (FILE * file, rtx x, int code)
2127 {
2128 rtx addr;
2129
2130 switch (code)
2131 {
2132 /* The 's' and 'p' codes are used by m32r_output_block_move() to print
2133 pre-increment 's'tore addresses (@+Rn) and 'p'ost-increment load addresses (@Rn+). */
2134 case 's':
2135 if (REG_P (x))
2136 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2137 else
2138 output_operand_lossage ("invalid operand to %%s code");
2139 return;
2140
2141 case 'p':
2142 if (REG_P (x))
2143 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2144 else
2145 output_operand_lossage ("invalid operand to %%p code");
2146 return;
2147
2148 case 'R' :
2149 /* Write second word of DImode or DFmode reference,
2150 register or memory. */
2151 if (REG_P (x))
2152 fputs (reg_names[REGNO (x)+1], file);
2153 else if (MEM_P (x))
2154 {
2155 fprintf (file, "@(");
2156 /* Handle possible auto-increment. Since it is pre-increment and
2157 we have already done it, we can just use an offset of four. */
2158 /* ??? This is taken from rs6000.c I think. I don't think it is
2159 currently necessary, but keep it around. */
2160 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2161 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2162 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2163 else
2164 output_address (plus_constant (XEXP (x, 0), 4));
2165 fputc (')', file);
2166 }
2167 else
2168 output_operand_lossage ("invalid operand to %%R code");
2169 return;
2170
2171 case 'H' : /* High word. */
2172 case 'L' : /* Low word. */
2173 if (REG_P (x))
2174 {
2175 /* L = least significant word, H = most significant word. */
2176 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2177 fputs (reg_names[REGNO (x)], file);
2178 else
2179 fputs (reg_names[REGNO (x)+1], file);
2180 }
2181 else if (CONST_INT_P (x)
2182 || GET_CODE (x) == CONST_DOUBLE)
2183 {
2184 rtx first, second;
2185
2186 split_double (x, &first, &second);
2187 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2188 code == 'L' ? INTVAL (first) : INTVAL (second));
2189 }
2190 else
2191 output_operand_lossage ("invalid operand to %%H/%%L code");
2192 return;
2193
2194 case 'A' :
2195 {
2196 char str[30];
2197
2198 if (GET_CODE (x) != CONST_DOUBLE
2199 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2200 fatal_insn ("bad insn for 'A'", x);
2201
2202 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2203 fprintf (file, "%s", str);
2204 return;
2205 }
2206
2207 case 'B' : /* Bottom half. */
2208 case 'T' : /* Top half. */
2209 /* Output the argument to a `seth' insn (sets the Top half-word).
2210 For constants output arguments to a seth/or3 pair to set Top and
2211 Bottom halves. For symbols output arguments to a seth/add3 pair to
2212 set Top and Bottom halves. The difference exists because for
2213 constants seth/or3 is more readable but for symbols we need to use
2214 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
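      /* For illustration only (register choice and exact assembler spelling
	 are examples, not verbatim compiler output): the constant 0x12345678
	 would be built as "seth r0,#0x1234" / "or3 r0,r0,#0x5678" (%T / %B),
	 while a symbol would be built as "seth r0,#shigh(sym)" /
	 "add3 r0,r0,#low(sym)".  */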
2215 switch (GET_CODE (x))
2216 {
2217 case CONST_INT :
2218 case CONST_DOUBLE :
2219 {
2220 rtx first, second;
2221
2222 split_double (x, &first, &second);
2223 x = WORDS_BIG_ENDIAN ? second : first;
2224 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2225 (code == 'B'
2226 ? INTVAL (x) & 0xffff
2227 : (INTVAL (x) >> 16) & 0xffff));
2228 }
2229 return;
2230 case CONST :
2231 case SYMBOL_REF :
2232 if (code == 'B'
2233 && small_data_operand (x, VOIDmode))
2234 {
2235 fputs ("sda(", file);
2236 output_addr_const (file, x);
2237 fputc (')', file);
2238 return;
2239 }
2240 /* fall through */
2241 case LABEL_REF :
2242 fputs (code == 'T' ? "shigh(" : "low(", file);
2243 output_addr_const (file, x);
2244 fputc (')', file);
2245 return;
2246 default :
2247 output_operand_lossage ("invalid operand to %%T/%%B code");
2248 return;
2249 }
2250 break;
2251
2252 case 'U' :
2253 /* ??? wip */
2254 /* Output a load/store with update indicator if appropriate. */
2255 if (MEM_P (x))
2256 {
2257 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2258 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2259 fputs (".a", file);
2260 }
2261 else
2262 output_operand_lossage ("invalid operand to %%U code");
2263 return;
2264
2265 case 'N' :
2266 /* Print a constant value negated. */
2267 if (CONST_INT_P (x))
2268 output_addr_const (file, GEN_INT (- INTVAL (x)));
2269 else
2270 output_operand_lossage ("invalid operand to %%N code");
2271 return;
2272
2273 case 'X' :
2274 /* Print a const_int in hex. Used in comments. */
2275 if (CONST_INT_P (x))
2276 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2277 return;
2278
2279 case '#' :
2280 fputs (IMMEDIATE_PREFIX, file);
2281 return;
2282
2283 case 0 :
2284 /* Do nothing special. */
2285 break;
2286
2287 default :
2288 /* Unknown flag. */
2289 output_operand_lossage ("invalid operand output code");
2290 }
2291
2292 switch (GET_CODE (x))
2293 {
2294 case REG :
2295 fputs (reg_names[REGNO (x)], file);
2296 break;
2297
2298 case MEM :
2299 addr = XEXP (x, 0);
2300 if (GET_CODE (addr) == PRE_INC)
2301 {
2302 if (!REG_P (XEXP (addr, 0)))
2303 fatal_insn ("pre-increment address is not a register", x);
2304
2305 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2306 }
2307 else if (GET_CODE (addr) == PRE_DEC)
2308 {
2309 if (!REG_P (XEXP (addr, 0)))
2310 fatal_insn ("pre-decrement address is not a register", x);
2311
2312 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2313 }
2314 else if (GET_CODE (addr) == POST_INC)
2315 {
2316 if (!REG_P (XEXP (addr, 0)))
2317 fatal_insn ("post-increment address is not a register", x);
2318
2319 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2320 }
2321 else
2322 {
2323 fputs ("@(", file);
2324 output_address (XEXP (x, 0));
2325 fputc (')', file);
2326 }
2327 break;
2328
2329 case CONST_DOUBLE :
2330 /* We handle SFmode constants here as output_addr_const doesn't. */
2331 if (GET_MODE (x) == SFmode)
2332 {
2333 REAL_VALUE_TYPE d;
2334 long l;
2335
2336 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2337 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2338 fprintf (file, "0x%08lx", l);
2339 break;
2340 }
2341
2342 /* Fall through. Let output_addr_const deal with it. */
2343
2344 default :
2345 output_addr_const (file, x);
2346 break;
2347 }
2348 }
2349
2350 /* Print a memory address as an operand to reference that memory location. */
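/* For illustration, the forms printed below are: a bare register ("r4"),
   register plus constant offset ("4,r4", with the offset printed first as
   noted in the code), a LO_SUM ("low(sym),r4" or "sda(sym),r4"), and the
   side-effect forms "+r4", "-r4" and "r4+" for PRE_INC, PRE_DEC and
   POST_INC.  */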
2351
2352 static void
2353 m32r_print_operand_address (FILE * file, rtx addr)
2354 {
2355 rtx base;
2356 rtx index = 0;
2357 int offset = 0;
2358
2359 switch (GET_CODE (addr))
2360 {
2361 case REG :
2362 fputs (reg_names[REGNO (addr)], file);
2363 break;
2364
2365 case PLUS :
2366 if (CONST_INT_P (XEXP (addr, 0)))
2367 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2368 else if (CONST_INT_P (XEXP (addr, 1)))
2369 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2370 else
2371 base = XEXP (addr, 0), index = XEXP (addr, 1);
2372 if (REG_P (base))
2373 {
2374 /* Print the offset first (if present) to conform to the manual. */
2375 if (index == 0)
2376 {
2377 if (offset != 0)
2378 fprintf (file, "%d,", offset);
2379 fputs (reg_names[REGNO (base)], file);
2380 }
2381 /* The chip doesn't support this, but left in for generality. */
2382 else if (REG_P (index))
2383 fprintf (file, "%s,%s",
2384 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2385 /* Not sure this can happen, but leave in for now. */
2386 else if (GET_CODE (index) == SYMBOL_REF)
2387 {
2388 output_addr_const (file, index);
2389 fputc (',', file);
2390 fputs (reg_names[REGNO (base)], file);
2391 }
2392 else
2393 fatal_insn ("bad address", addr);
2394 }
2395 else if (GET_CODE (base) == LO_SUM)
2396 {
2397 gcc_assert (!index && REG_P (XEXP (base, 0)));
2398 if (small_data_operand (XEXP (base, 1), VOIDmode))
2399 fputs ("sda(", file);
2400 else
2401 fputs ("low(", file);
2402 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2403 fputs ("),", file);
2404 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2405 }
2406 else
2407 fatal_insn ("bad address", addr);
2408 break;
2409
2410 case LO_SUM :
2411 if (!REG_P (XEXP (addr, 0)))
2412 fatal_insn ("lo_sum not of register", addr);
2413 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2414 fputs ("sda(", file);
2415 else
2416 fputs ("low(", file);
2417 output_addr_const (file, XEXP (addr, 1));
2418 fputs ("),", file);
2419 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2420 break;
2421
2422 case PRE_INC : /* Assume SImode. */
2423 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2424 break;
2425
2426 case PRE_DEC : /* Assume SImode. */
2427 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2428 break;
2429
2430 case POST_INC : /* Assume SImode. */
2431 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2432 break;
2433
2434 default :
2435 output_addr_const (file, addr);
2436 break;
2437 }
2438 }
2439
2440 static bool
2441 m32r_print_operand_punct_valid_p (unsigned char code)
2442 {
2443 return m32r_punct_chars[code];
2444 }
2445
2446 /* Return true if the operands are the constants 0 and 1. */
2447
2448 int
2449 zero_and_one (rtx operand1, rtx operand2)
2450 {
2451 return
2452 CONST_INT_P (operand1)
2453 && CONST_INT_P (operand2)
2454 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2455 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2456 }
2457
2458 /* Generate the correct assembler code to handle the conditional loading of a
2459 value into a register. It is known that the operands satisfy the
2460 conditional_move_operand() function above. The destination is operand[0].
2461 The condition is operand [1]. The 'true' value is operand [2] and the
2462 'false' value is operand [3]. */
2463
2464 char *
2465 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2466 {
2467 static char buffer [100];
2468 const char * dest = reg_names [REGNO (operands [0])];
2469
2470 buffer [0] = 0;
2471
2472 /* Destination must be a register. */
2473 gcc_assert (REG_P (operands [0]));
2474 gcc_assert (conditional_move_operand (operands [2], SImode));
2475 gcc_assert (conditional_move_operand (operands [3], SImode));
2476
2477 /* Check to see if the test is reversed. */
2478 if (GET_CODE (operands [1]) == NE)
2479 {
2480 rtx tmp = operands [2];
2481 operands [2] = operands [3];
2482 operands [3] = tmp;
2483 }
2484
2485 sprintf (buffer, "mvfc %s, cbr", dest);
2486
2487 /* If the true value was '0' then we need to invert the results of the move. */
2488 if (INTVAL (operands [2]) == 0)
2489 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2490 dest, dest);
2491
2492 return buffer;
2493 }
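
/* For example, the sequence returned above is "mvfc rD, cbr", optionally
   followed by "\n\txor3 rD, rD, #1" when the value wanted for a set
   condition bit is 0 (rD stands for the destination register).  */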
2494
2495 /* Returns true if the registers contained in the two
2496 rtl expressions are different. */
2497
2498 int
2499 m32r_not_same_reg (rtx a, rtx b)
2500 {
2501 int reg_a = -1;
2502 int reg_b = -2;
2503
2504 while (GET_CODE (a) == SUBREG)
2505 a = SUBREG_REG (a);
2506
2507 if (REG_P (a))
2508 reg_a = REGNO (a);
2509
2510 while (GET_CODE (b) == SUBREG)
2511 b = SUBREG_REG (b);
2512
2513 if (REG_P (b))
2514 reg_b = REGNO (b);
2515
2516 return reg_a != reg_b;
2517 }
2518
2519 \f
2520 rtx
2521 m32r_function_symbol (const char *name)
2522 {
2523 int extra_flags = 0;
2524 enum m32r_model model;
2525 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2526
2527 if (TARGET_MODEL_SMALL)
2528 model = M32R_MODEL_SMALL;
2529 else if (TARGET_MODEL_MEDIUM)
2530 model = M32R_MODEL_MEDIUM;
2531 else if (TARGET_MODEL_LARGE)
2532 model = M32R_MODEL_LARGE;
2533 else
2534 gcc_unreachable (); /* Shouldn't happen. */
2535 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2536
2537 if (extra_flags)
2538 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2539
2540 return sym;
2541 }
2542
2543 /* Use a library function to move some bytes. */
2544
2545 static void
2546 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2547 {
2548 /* We want to pass the size as Pmode, which will normally be SImode
2549 but will be DImode if we are using 64-bit longs and pointers. */
2550 if (GET_MODE (bytes_rtx) != VOIDmode
2551 && GET_MODE (bytes_rtx) != Pmode)
2552 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2553
2554 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2555 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2556 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2557 TYPE_UNSIGNED (sizetype)),
2558 TYPE_MODE (sizetype));
2559 }
2560
2561 /* Expand string/block move operations.
2562
2563 operands[0] is the pointer to the destination.
2564 operands[1] is the pointer to the source.
2565 operands[2] is the number of bytes to move.
2566 operands[3] is the alignment.
2567
2568 Returns 1 if the copy was expanded inline, 0 if a library call was emitted. */
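
/* In outline (see the code below): when optimizing for size, when the length
   is not a compile-time constant, or when the operands are not word aligned,
   the copy is delegated to block_move_call (memcpy).  Otherwise the bulk is
   copied MAX_MOVE_BYTES at a time via movmemsi_internal, wrapped in a loop
   when more than one chunk is needed, and a final movmemsi_internal handles
   any leftover bytes.  */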
2569
2570 int
2571 m32r_expand_block_move (rtx operands[])
2572 {
2573 rtx orig_dst = operands[0];
2574 rtx orig_src = operands[1];
2575 rtx bytes_rtx = operands[2];
2576 rtx align_rtx = operands[3];
2577 int constp = CONST_INT_P (bytes_rtx);
2578 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2579 int align = INTVAL (align_rtx);
2580 int leftover;
2581 rtx src_reg;
2582 rtx dst_reg;
2583
2584 if (constp && bytes <= 0)
2585 return 1;
2586
2587 /* Move the address into scratch registers. */
2588 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2589 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2590
2591 if (align > UNITS_PER_WORD)
2592 align = UNITS_PER_WORD;
2593
2594 /* If we prefer size over speed, always use a function call.
2595 If we do not know the size, use a function call.
2596 If the blocks are not word aligned, use a function call. */
2597 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2598 {
2599 block_move_call (dst_reg, src_reg, bytes_rtx);
2600 return 0;
2601 }
2602
2603 leftover = bytes % MAX_MOVE_BYTES;
2604 bytes -= leftover;
2605
2606 /* If necessary, generate a loop to handle the bulk of the copy. */
2607 if (bytes)
2608 {
2609 rtx label = NULL_RTX;
2610 rtx final_src = NULL_RTX;
2611 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2612 rtx rounded_total = GEN_INT (bytes);
2613 rtx new_dst_reg = gen_reg_rtx (SImode);
2614 rtx new_src_reg = gen_reg_rtx (SImode);
2615
2616 /* If we are going to have to perform this loop more than
2617 once, then generate a label and compute the address the
2618 source register will contain upon completion of the final
2619 iteration. */
2620 if (bytes > MAX_MOVE_BYTES)
2621 {
2622 final_src = gen_reg_rtx (Pmode);
2623
2624 if (INT16_P(bytes))
2625 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2626 else
2627 {
2628 emit_insn (gen_movsi (final_src, rounded_total));
2629 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2630 }
2631
2632 label = gen_label_rtx ();
2633 emit_label (label);
2634 }
2635
2636 /* It is known that output_block_move() will update src_reg to point
2637 to the word after the end of the source block, and dst_reg to point
2638 to the last word of the destination block, provided that the block
2639 is MAX_MOVE_BYTES long. */
2640 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2641 new_dst_reg, new_src_reg));
2642 emit_move_insn (dst_reg, new_dst_reg);
2643 emit_move_insn (src_reg, new_src_reg);
2644 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2645
2646 if (bytes > MAX_MOVE_BYTES)
2647 {
2648 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2649 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2650 }
2651 }
2652
2653 if (leftover)
2654 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2655 gen_reg_rtx (SImode),
2656 gen_reg_rtx (SImode)));
2657 return 1;
2658 }
2659
2660 \f
2661 /* Emit load/stores for a small constant word aligned block_move.
2662
2663 operands[0] is the memory address of the destination.
2664 operands[1] is the memory address of the source.
2665 operands[2] is the number of bytes to move.
2666 operands[3] and operands[4] receive the updated destination and source addresses.
2667 operands[5] and operands[6] are temp registers. */
2668
2669 void
2670 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2671 {
2672 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2673 int first_time;
2674 int got_extra = 0;
2675
2676 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2677
2678 /* We do not have a post-increment store available, so the first set of
2679 stores are done without any increment, then the remaining ones can use
2680 the pre-increment addressing mode.
2681
2682 Note: expand_block_move() also relies upon this behavior when building
2683 loops to copy large blocks. */
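
  /* For illustration (register numbers are examples), a word-aligned 8-byte
     chunk comes out as:

	 ld r5, @r1+
	 ld r6, @r1+
	 st r5, @r0
	 st r6, @+r0

     on the first iteration, with later chunks using "st ..., @+r0" for both
     stores.  */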
2684 first_time = 1;
2685
2686 while (bytes > 0)
2687 {
2688 if (bytes >= 8)
2689 {
2690 if (first_time)
2691 {
2692 output_asm_insn ("ld\t%5, %p1", operands);
2693 output_asm_insn ("ld\t%6, %p1", operands);
2694 output_asm_insn ("st\t%5, @%0", operands);
2695 output_asm_insn ("st\t%6, %s0", operands);
2696 }
2697 else
2698 {
2699 output_asm_insn ("ld\t%5, %p1", operands);
2700 output_asm_insn ("ld\t%6, %p1", operands);
2701 output_asm_insn ("st\t%5, %s0", operands);
2702 output_asm_insn ("st\t%6, %s0", operands);
2703 }
2704
2705 bytes -= 8;
2706 }
2707 else if (bytes >= 4)
2708 {
2709 if (bytes > 4)
2710 got_extra = 1;
2711
2712 output_asm_insn ("ld\t%5, %p1", operands);
2713
2714 if (got_extra)
2715 output_asm_insn ("ld\t%6, %p1", operands);
2716
2717 if (first_time)
2718 output_asm_insn ("st\t%5, @%0", operands);
2719 else
2720 output_asm_insn ("st\t%5, %s0", operands);
2721
2722 bytes -= 4;
2723 }
2724 else
2725 {
2726 /* Get the entire next word, even though we do not want all of it.
2727 This saves us from doing several smaller loads, and we assume that
2728 we cannot cause a page fault when at least part of the word is in
2729 valid memory [since we don't get called if things aren't properly
2730 aligned]. */
2731 int dst_offset = first_time ? 0 : 4;
2732 /* The amount of increment we have to make to the
2733 destination pointer. */
2734 int dst_inc_amount = dst_offset + bytes - 4;
2735 /* The same for the source pointer. */
2736 int src_inc_amount = bytes;
2737 int last_shift;
2738 rtx my_operands[3];
2739
2740 /* If got_extra is true then we have already loaded
2741 the next word as part of loading and storing the previous word. */
2742 if (! got_extra)
2743 output_asm_insn ("ld\t%6, @%1", operands);
2744
2745 if (bytes >= 2)
2746 {
2747 bytes -= 2;
2748
2749 output_asm_insn ("sra3\t%5, %6, #16", operands);
2750 my_operands[0] = operands[5];
2751 my_operands[1] = GEN_INT (dst_offset);
2752 my_operands[2] = operands[0];
2753 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2754
2755 /* If there is a byte left to store then increment the
2756 destination address and shift the contents of the source
2757 register down by 8 bits. We could not do the address
2758 increment in the store half word instruction, because it does
2759 not have an auto increment mode. */
2760 if (bytes > 0) /* assert (bytes == 1) */
2761 {
2762 dst_offset += 2;
2763 last_shift = 8;
2764 }
2765 }
2766 else
2767 last_shift = 24;
2768
2769 if (bytes > 0)
2770 {
2771 my_operands[0] = operands[6];
2772 my_operands[1] = GEN_INT (last_shift);
2773 output_asm_insn ("srai\t%0, #%1", my_operands);
2774 my_operands[0] = operands[6];
2775 my_operands[1] = GEN_INT (dst_offset);
2776 my_operands[2] = operands[0];
2777 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2778 }
2779
2780 /* Update the destination pointer if needed. We have to do
2781 this so that the pattern matches what we output in this
2782 function. */
2783 if (dst_inc_amount
2784 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2785 {
2786 my_operands[0] = operands[0];
2787 my_operands[1] = GEN_INT (dst_inc_amount);
2788 output_asm_insn ("addi\t%0, #%1", my_operands);
2789 }
2790
2791 /* Update the source pointer if needed. We have to do this
2792 so that the pattern matches what we output in this
2793 function. */
2794 if (src_inc_amount
2795 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2796 {
2797 my_operands[0] = operands[1];
2798 my_operands[1] = GEN_INT (src_inc_amount);
2799 output_asm_insn ("addi\t%0, #%1", my_operands);
2800 }
2801
2802 bytes = 0;
2803 }
2804
2805 first_time = 0;
2806 }
2807 }
2808
2809 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2810
2811 int
2812 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2813 unsigned int new_reg)
2814 {
2815 /* Interrupt routines can't clobber any register that isn't already used. */
2816 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2817 && !df_regs_ever_live_p (new_reg))
2818 return 0;
2819
2820 return 1;
2821 }
2822
2823 rtx
2824 m32r_return_addr (int count)
2825 {
2826 if (count != 0)
2827 return const0_rtx;
2828
2829 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2830 }
2831
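/* Worker for the trampoline initialization hook.  The first four SImode
   stores lay down the fixed code of the trampoline; the constants appear to
   be chosen so that the same instruction bytes land in memory for either
   endianness (the little-endian values are the byte-reversed big-endian
   ones).  The last two words hold the static chain value and the target
   function's address, which that code loads before jumping to the function.
   Finally the instruction cache is flushed, either with a trap or through
   the configured cache-flush library function.  */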
2832 static void
2833 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2834 {
2835 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2836 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2837 0x017e8e17 : 0x178e7e01, SImode));
2838 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2839 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2840 0x0c00ae86 : 0x86ae000c, SImode));
2841 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2842 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2843 0xe627871e : 0x1e8727e6, SImode));
2844 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2845 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2846 0xc61fc626 : 0x26c61fc6, SImode));
2847 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2848 chain_value);
2849 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2850 XEXP (DECL_RTL (fndecl), 0));
2851
2852 if (m32r_cache_flush_trap >= 0)
2853 emit_insn (gen_flush_icache
2854 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2855 gen_int_mode (m32r_cache_flush_trap, SImode)));
2856 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2857 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2858 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2859 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2860 GEN_INT (3), SImode);
2861 }
2862
2863 /* True if X is a reg that can be used as a base reg. */
2864
2865 static bool
2866 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2867 {
2868 if (! REG_P (x))
2869 return false;
2870
2871 if (strict)
2872 {
2873 if (GPR_P (REGNO (x)))
2874 return true;
2875 }
2876 else
2877 {
2878 if (GPR_P (REGNO (x))
2879 || REGNO (x) == ARG_POINTER_REGNUM
2880 || ! HARD_REGISTER_P (x))
2881 return true;
2882 }
2883
2884 return false;
2885 }
2886
2887 static inline bool
2888 m32r_rtx_ok_for_offset_p (const_rtx x)
2889 {
2890 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2891 }
2892
2893 static inline bool
2894 m32r_legitimate_offset_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2895 const_rtx x, bool strict)
2896 {
2897 if (GET_CODE (x) == PLUS
2898 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2899 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2900 return true;
2901
2902 return false;
2903 }
2904
2905 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2906 since more than one instruction will be required. */
2907
2908 static inline bool
2909 m32r_legitimate_lo_sum_address_p (enum machine_mode mode, const_rtx x,
2910 bool strict)
2911 {
2912 if (GET_CODE (x) == LO_SUM
2913 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2914 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2915 && CONSTANT_P (XEXP (x, 1)))
2916 return true;
2917
2918 return false;
2919 }
2920
2921 /* Return true if X is a post-increment address (SImode/SFmode loads only). */
2922
2923 static inline bool
2924 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2925 {
2926 if ((mode == SImode || mode == SFmode)
2927 && GET_CODE (x) == POST_INC
2928 && REG_P (XEXP (x, 0))
2929 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2930 return true;
2931
2932 return false;
2933 }
2934
2935 /* Return true if X is a pre-increment/pre-decrement address (SImode/SFmode stores only). */
2936
2937 static inline bool
2938 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2939 {
2940 if ((mode == SImode || mode == SFmode)
2941 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2942 && REG_P (XEXP (x, 0))
2943 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2944 return true;
2945
2946 return false;
2947 }
2948
2949 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
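/* Roughly, in assembler terms the accepted forms are: a base register
   (@Rb), base plus 16-bit signed displacement (@(disp16,Rb)), a LO_SUM for
   at-most-word-sized modes (low(sym),Rb or sda(sym),Rb), post-increment
   loads (@Rb+), and pre-increment/pre-decrement stores (@+Rb / @-Rb), the
   latter two for SImode and SFmode only.  */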
2950
2951 static bool
2952 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2953 {
2954 if (m32r_rtx_ok_for_base_p (x, strict)
2955 || m32r_legitimate_offset_address_p (mode, x, strict)
2956 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2957 || m32r_load_postinc_p (mode, x, strict)
2958 || m32r_store_preinc_predec_p (mode, x, strict))
2959 return true;
2960
2961 return false;
2962 }
2963
2964 static void
2965 m32r_conditional_register_usage (void)
2966 {
2967 if (flag_pic)
2968 {
2969 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2970 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2971 }
2972 }