1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "toplev.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "tm-constrs.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Selected code model. */
51 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
52
53 /* Selected SDA support. */
54 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
55
56 /* Machine-specific symbol_ref flags. */
57 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
58 #define SYMBOL_REF_MODEL(X) \
59 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
60
61 /* For string literals, etc. */
62 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63
  64 /* Forward declarations.  */
65 static bool m32r_handle_option (size_t, const char *, int);
66 static void m32r_option_override (void);
67 static void init_reg_tables (void);
68 static void block_move_call (rtx, rtx, rtx);
69 static int m32r_is_insn (rtx);
70 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
71 static bool m32r_mode_dependent_address_p (const_rtx);
72 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
73 static void m32r_print_operand (FILE *, rtx, int);
74 static void m32r_print_operand_address (FILE *, rtx);
75 static bool m32r_print_operand_punct_valid_p (unsigned char code);
76 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
77 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
78
79 static void m32r_file_start (void);
80
81 static int m32r_adjust_priority (rtx, int);
82 static int m32r_issue_rate (void);
83
84 static void m32r_encode_section_info (tree, rtx, int);
85 static bool m32r_in_small_data_p (const_tree);
86 static bool m32r_return_in_memory (const_tree, const_tree);
87 static rtx m32r_function_value (const_tree, const_tree, bool);
88 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
89 static bool m32r_function_value_regno_p (const unsigned int);
90 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
91 tree, int *, int);
92 static void init_idents (void);
93 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
94 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
98 tree, bool);
99 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
100 const_tree, bool);
101 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
102 const_tree, bool);
103 static bool m32r_can_eliminate (const int, const int);
104 static void m32r_trampoline_init (rtx, tree, rtx);
105 \f
106 /* M32R specific attributes. */
107
108 static const struct attribute_spec m32r_attribute_table[] =
109 {
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
111 { "interrupt", 0, 0, true, false, false, NULL },
112 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
113 { NULL, 0, 0, false, false, false, NULL }
114 };
115
116 static const struct default_options m32r_option_optimization_table[] =
117 {
118 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
119 { OPT_LEVELS_1_PLUS, OPT_fregmove, NULL, 1 },
120 { OPT_LEVELS_NONE, 0, NULL, 0 }
121 };
122 \f
123 /* Initialize the GCC target structure. */
124 #undef TARGET_ATTRIBUTE_TABLE
125 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
126
127 #undef TARGET_LEGITIMIZE_ADDRESS
128 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
129 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
130 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
131
132 #undef TARGET_ASM_ALIGNED_HI_OP
133 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
134 #undef TARGET_ASM_ALIGNED_SI_OP
135 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
136
137 #undef TARGET_PRINT_OPERAND
138 #define TARGET_PRINT_OPERAND m32r_print_operand
139 #undef TARGET_PRINT_OPERAND_ADDRESS
140 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
141 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
142 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
143
144 #undef TARGET_ASM_FUNCTION_PROLOGUE
145 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
146 #undef TARGET_ASM_FUNCTION_EPILOGUE
147 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
148
149 #undef TARGET_ASM_FILE_START
150 #define TARGET_ASM_FILE_START m32r_file_start
151
152 #undef TARGET_SCHED_ADJUST_PRIORITY
153 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
154 #undef TARGET_SCHED_ISSUE_RATE
155 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
156
157 #undef TARGET_DEFAULT_TARGET_FLAGS
158 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
159 #undef TARGET_HANDLE_OPTION
160 #define TARGET_HANDLE_OPTION m32r_handle_option
161 #undef TARGET_OPTION_OVERRIDE
162 #define TARGET_OPTION_OVERRIDE m32r_option_override
163 #undef TARGET_OPTION_OPTIMIZATION_TABLE
164 #define TARGET_OPTION_OPTIMIZATION_TABLE m32r_option_optimization_table
165
166 #undef TARGET_ENCODE_SECTION_INFO
167 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
168 #undef TARGET_IN_SMALL_DATA_P
169 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
170
171
172 #undef TARGET_MEMORY_MOVE_COST
173 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
174 #undef TARGET_RTX_COSTS
175 #define TARGET_RTX_COSTS m32r_rtx_costs
176 #undef TARGET_ADDRESS_COST
177 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
178
179 #undef TARGET_PROMOTE_PROTOTYPES
180 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
181 #undef TARGET_RETURN_IN_MEMORY
182 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
183
184 #undef TARGET_FUNCTION_VALUE
185 #define TARGET_FUNCTION_VALUE m32r_function_value
186 #undef TARGET_LIBCALL_VALUE
187 #define TARGET_LIBCALL_VALUE m32r_libcall_value
188 #undef TARGET_FUNCTION_VALUE_REGNO_P
189 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
190
191 #undef TARGET_SETUP_INCOMING_VARARGS
192 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
193 #undef TARGET_MUST_PASS_IN_STACK
194 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
195 #undef TARGET_PASS_BY_REFERENCE
196 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
197 #undef TARGET_ARG_PARTIAL_BYTES
198 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
199 #undef TARGET_FUNCTION_ARG
200 #define TARGET_FUNCTION_ARG m32r_function_arg
201 #undef TARGET_FUNCTION_ARG_ADVANCE
202 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
203
204 #undef TARGET_CAN_ELIMINATE
205 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
206
207 #undef TARGET_TRAMPOLINE_INIT
208 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
209
210 struct gcc_target targetm = TARGET_INITIALIZER;
211 \f
212 /* Implement TARGET_HANDLE_OPTION. */
213
214 static bool
215 m32r_handle_option (size_t code, const char *arg, int value)
216 {
217 switch (code)
218 {
219 case OPT_m32r:
220 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
221 return true;
222
223 case OPT_mmodel_:
224 if (strcmp (arg, "small") == 0)
225 m32r_model = M32R_MODEL_SMALL;
226 else if (strcmp (arg, "medium") == 0)
227 m32r_model = M32R_MODEL_MEDIUM;
228 else if (strcmp (arg, "large") == 0)
229 m32r_model = M32R_MODEL_LARGE;
230 else
231 return false;
232 return true;
233
234 case OPT_msdata_:
235 if (strcmp (arg, "none") == 0)
236 m32r_sdata = M32R_SDATA_NONE;
237 else if (strcmp (arg, "sdata") == 0)
238 m32r_sdata = M32R_SDATA_SDATA;
239 else if (strcmp (arg, "use") == 0)
240 m32r_sdata = M32R_SDATA_USE;
241 else
242 return false;
243 return true;
244
245 case OPT_mno_flush_func:
246 m32r_cache_flush_func = NULL;
247 return true;
248
249 case OPT_mflush_trap_:
250 return value <= 15;
251
252 case OPT_mno_flush_trap:
253 m32r_cache_flush_trap = -1;
254 return true;
255
256 default:
257 return true;
258 }
259 }
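
/* For illustration (a sketch, not taken from this file -- the user-level
   spellings below correspond to the OPT_ codes handled above; see m32r.opt
   for the authoritative list):

     -mmodel=medium    selects M32R_MODEL_MEDIUM
     -msdata=use       selects M32R_SDATA_USE
     -mflush-trap=12   is accepted (the trap number must be <= 15)
     -mno-flush-trap   sets m32r_cache_flush_trap to -1
     -m32r             clears the M32RX/M32R2 cpu bits.  */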
260
261 /* Called by m32r_option_override to initialize various things. */
262
263 void
264 m32r_init (void)
265 {
266 init_reg_tables ();
267
268 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
269 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
270 m32r_punct_chars['#'] = 1;
271 m32r_punct_chars['@'] = 1; /* ??? no longer used */
272
273 /* Provide default value if not specified. */
274 if (!global_options_set.x_g_switch_value)
275 g_switch_value = SDATA_DEFAULT_SIZE;
276 }
277
278 static void
279 m32r_option_override (void)
280 {
281 /* These need to be done at start up.
282 It's convenient to do them here. */
283 m32r_init ();
284 SUBTARGET_OVERRIDE_OPTIONS;
285 }
286
   287 /* Vectors to keep interesting information about registers where it can easily
   288    be found.  We used to use the actual mode value as the bit number, but there
   289    are (or may be) more than 32 modes now.  Instead we use two tables: one
290 indexed by hard register number, and one indexed by mode. */
291
292 /* The purpose of m32r_mode_class is to shrink the range of modes so that
293 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
294 mapped into one m32r_mode_class mode. */
295
296 enum m32r_mode_class
297 {
298 C_MODE,
299 S_MODE, D_MODE, T_MODE, O_MODE,
300 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
301 };
302
303 /* Modes for condition codes. */
304 #define C_MODES (1 << (int) C_MODE)
305
306 /* Modes for single-word and smaller quantities. */
307 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
308
309 /* Modes for double-word and smaller quantities. */
310 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
311
312 /* Modes for quad-word and smaller quantities. */
313 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
314
315 /* Modes for accumulators. */
316 #define A_MODES (1 << (int) A_MODE)
317
   318 /* Value is 1 if register/mode pair is acceptable on the M32R.  */
319
320 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
321 {
322 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
323 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
324 S_MODES, C_MODES, A_MODES, A_MODES
325 };
326
327 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
328
329 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
330
331 static void
332 init_reg_tables (void)
333 {
334 int i;
335
336 for (i = 0; i < NUM_MACHINE_MODES; i++)
337 {
338 switch (GET_MODE_CLASS (i))
339 {
340 case MODE_INT:
341 case MODE_PARTIAL_INT:
342 case MODE_COMPLEX_INT:
343 if (GET_MODE_SIZE (i) <= 4)
344 m32r_mode_class[i] = 1 << (int) S_MODE;
345 else if (GET_MODE_SIZE (i) == 8)
346 m32r_mode_class[i] = 1 << (int) D_MODE;
347 else if (GET_MODE_SIZE (i) == 16)
348 m32r_mode_class[i] = 1 << (int) T_MODE;
349 else if (GET_MODE_SIZE (i) == 32)
350 m32r_mode_class[i] = 1 << (int) O_MODE;
351 else
352 m32r_mode_class[i] = 0;
353 break;
354 case MODE_FLOAT:
355 case MODE_COMPLEX_FLOAT:
356 if (GET_MODE_SIZE (i) <= 4)
357 m32r_mode_class[i] = 1 << (int) SF_MODE;
358 else if (GET_MODE_SIZE (i) == 8)
359 m32r_mode_class[i] = 1 << (int) DF_MODE;
360 else if (GET_MODE_SIZE (i) == 16)
361 m32r_mode_class[i] = 1 << (int) TF_MODE;
362 else if (GET_MODE_SIZE (i) == 32)
363 m32r_mode_class[i] = 1 << (int) OF_MODE;
364 else
365 m32r_mode_class[i] = 0;
366 break;
367 case MODE_CC:
368 m32r_mode_class[i] = 1 << (int) C_MODE;
369 break;
370 default:
371 m32r_mode_class[i] = 0;
372 break;
373 }
374 }
375
376 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
377 {
378 if (GPR_P (i))
379 m32r_regno_reg_class[i] = GENERAL_REGS;
380 else if (i == ARG_POINTER_REGNUM)
381 m32r_regno_reg_class[i] = GENERAL_REGS;
382 else
383 m32r_regno_reg_class[i] = NO_REGS;
384 }
385 }
386 \f
387 /* M32R specific attribute support.
388
389 interrupt - for interrupt functions
390
391 model - select code model used to access object
392
393 small: addresses use 24 bits, use bl to make calls
394 medium: addresses use 32 bits, use bl to make calls
395 large: addresses use 32 bits, use seth/add3/jl to make calls
396
397 Grep for MODEL in m32r.h for more info. */
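
/* A usage sketch (user-level C, not part of this file); the spellings
   follow m32r_attribute_table and m32r_handle_model_attribute below:

     void irq_handler (void) __attribute__ ((interrupt));
     extern int big_table[] __attribute__ ((model (large)));
     static int near_counter __attribute__ ((__model__ (__small__)));

   "interrupt" takes no argument; "model" takes exactly one of small,
   medium or large (the double-underscore spellings are also accepted).  */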
398
399 static tree small_ident1;
400 static tree small_ident2;
401 static tree medium_ident1;
402 static tree medium_ident2;
403 static tree large_ident1;
404 static tree large_ident2;
405
406 static void
407 init_idents (void)
408 {
409 if (small_ident1 == 0)
410 {
411 small_ident1 = get_identifier ("small");
412 small_ident2 = get_identifier ("__small__");
413 medium_ident1 = get_identifier ("medium");
414 medium_ident2 = get_identifier ("__medium__");
415 large_ident1 = get_identifier ("large");
416 large_ident2 = get_identifier ("__large__");
417 }
418 }
419
420 /* Handle an "model" attribute; arguments as in
421 struct attribute_spec.handler. */
422 static tree
423 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
424 tree args, int flags ATTRIBUTE_UNUSED,
425 bool *no_add_attrs)
426 {
427 tree arg;
428
429 init_idents ();
430 arg = TREE_VALUE (args);
431
432 if (arg != small_ident1
433 && arg != small_ident2
434 && arg != medium_ident1
435 && arg != medium_ident2
436 && arg != large_ident1
437 && arg != large_ident2)
438 {
439 warning (OPT_Wattributes, "invalid argument of %qs attribute",
440 IDENTIFIER_POINTER (name));
441 *no_add_attrs = true;
442 }
443
444 return NULL_TREE;
445 }
446 \f
447 /* Encode section information of DECL, which is either a VAR_DECL,
448 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
449
450 For the M32R we want to record:
451
452 - whether the object lives in .sdata/.sbss.
453 - what code model should be used to access the object
454 */
455
456 static void
457 m32r_encode_section_info (tree decl, rtx rtl, int first)
458 {
459 int extra_flags = 0;
460 tree model_attr;
461 enum m32r_model model;
462
463 default_encode_section_info (decl, rtl, first);
464
465 if (!DECL_P (decl))
466 return;
467
468 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
469 if (model_attr)
470 {
471 tree id;
472
473 init_idents ();
474
475 id = TREE_VALUE (TREE_VALUE (model_attr));
476
477 if (id == small_ident1 || id == small_ident2)
478 model = M32R_MODEL_SMALL;
479 else if (id == medium_ident1 || id == medium_ident2)
480 model = M32R_MODEL_MEDIUM;
481 else if (id == large_ident1 || id == large_ident2)
482 model = M32R_MODEL_LARGE;
483 else
484 gcc_unreachable (); /* shouldn't happen */
485 }
486 else
487 {
488 if (TARGET_MODEL_SMALL)
489 model = M32R_MODEL_SMALL;
490 else if (TARGET_MODEL_MEDIUM)
491 model = M32R_MODEL_MEDIUM;
492 else if (TARGET_MODEL_LARGE)
493 model = M32R_MODEL_LARGE;
494 else
495 gcc_unreachable (); /* shouldn't happen */
496 }
497 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
498
499 if (extra_flags)
500 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
501 }
502
503 /* Only mark the object as being small data area addressable if
504 it hasn't been explicitly marked with a code model.
505
506 The user can explicitly put an object in the small data area with the
507 section attribute. If the object is in sdata/sbss and marked with a
508 code model do both [put the object in .sdata and mark it as being
509 addressed with a specific code model - don't mark it as being addressed
510 with an SDA reloc though]. This is ok and might be useful at times. If
511 the object doesn't fit the linker will give an error. */
512
513 static bool
514 m32r_in_small_data_p (const_tree decl)
515 {
516 const_tree section;
517
518 if (TREE_CODE (decl) != VAR_DECL)
519 return false;
520
521 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
522 return false;
523
524 section = DECL_SECTION_NAME (decl);
525 if (section)
526 {
527 const char *const name = TREE_STRING_POINTER (section);
528 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
529 return true;
530 }
531 else
532 {
533 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
534 {
535 int size = int_size_in_bytes (TREE_TYPE (decl));
536
537 if (size > 0 && size <= g_switch_value)
538 return true;
539 }
540 }
541
542 return false;
543 }
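
/* Illustrative behaviour (a sketch, assuming -msdata=use and the default
   -G threshold):

     int counter;                                      -> small data (writable, small)
     static char big_buf[4096];                        -> not small data (exceeds -G)
     int pinned __attribute__ ((section (".sdata")));  -> small data (explicit section)
     int far_var __attribute__ ((model (large)));      -> rejected here (has a model attribute)  */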
544
545 /* Do anything needed before RTL is emitted for each function. */
546
547 void
548 m32r_init_expanders (void)
549 {
550 /* ??? At one point there was code here. The function is left in
551 to make it easy to experiment. */
552 }
553 \f
554 int
555 call_operand (rtx op, enum machine_mode mode)
556 {
557 if (!MEM_P (op))
558 return 0;
559 op = XEXP (op, 0);
560 return call_address_operand (op, mode);
561 }
562
563 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
564
565 int
566 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
567 {
568 if (! TARGET_SDATA_USE)
569 return 0;
570
571 if (GET_CODE (op) == SYMBOL_REF)
572 return SYMBOL_REF_SMALL_P (op);
573
574 if (GET_CODE (op) == CONST
575 && GET_CODE (XEXP (op, 0)) == PLUS
576 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
577 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
578 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
579
580 return 0;
581 }
582
583 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
584
585 int
586 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
587 {
588 rtx sym;
589
590 if (flag_pic)
591 return 0;
592
593 if (GET_CODE (op) == LABEL_REF)
594 return TARGET_ADDR24;
595
596 if (GET_CODE (op) == SYMBOL_REF)
597 sym = op;
598 else if (GET_CODE (op) == CONST
599 && GET_CODE (XEXP (op, 0)) == PLUS
600 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
601 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
602 sym = XEXP (XEXP (op, 0), 0);
603 else
604 return 0;
605
606 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
607 return 1;
608
609 if (TARGET_ADDR24
610 && (CONSTANT_POOL_ADDRESS_P (sym)
611 || LIT_NAME_P (XSTR (sym, 0))))
612 return 1;
613
614 return 0;
615 }
616
617 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
618
619 int
620 addr32_operand (rtx op, enum machine_mode mode)
621 {
622 rtx sym;
623
624 if (GET_CODE (op) == LABEL_REF)
625 return TARGET_ADDR32;
626
627 if (GET_CODE (op) == SYMBOL_REF)
628 sym = op;
629 else if (GET_CODE (op) == CONST
630 && GET_CODE (XEXP (op, 0)) == PLUS
631 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
632 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
633 && ! flag_pic)
634 sym = XEXP (XEXP (op, 0), 0);
635 else
636 return 0;
637
638 return (! addr24_operand (sym, mode)
639 && ! small_data_operand (sym, mode));
640 }
641
642 /* Return 1 if OP is a function that can be called with the `bl' insn. */
643
644 int
645 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
646 {
647 if (flag_pic)
648 return 1;
649
650 if (GET_CODE (op) == SYMBOL_REF)
651 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
652
653 return TARGET_CALL26;
654 }
655
656 /* Return 1 if OP is a DImode const we want to handle inline.
657 This must match the code in the movdi pattern.
658 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
659
660 int
661 easy_di_const (rtx op)
662 {
663 rtx high_rtx, low_rtx;
664 HOST_WIDE_INT high, low;
665
666 split_double (op, &high_rtx, &low_rtx);
667 high = INTVAL (high_rtx);
668 low = INTVAL (low_rtx);
669 /* Pick constants loadable with 2 16-bit `ldi' insns. */
670 if (high >= -128 && high <= 127
671 && low >= -128 && low <= 127)
672 return 1;
673 return 0;
674 }
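
/* For example, the DImode constant 0x0000000300000050 splits into
   high = 3 and low = 0x50; both lie in [-128, 127], so it is "easy"
   and can be built with two ldi insns.  0x0001000000000000 is not
   (its high word, 0x10000, is out of range).  */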
675
676 /* Return 1 if OP is a DFmode const we want to handle inline.
677 This must match the code in the movdf pattern.
678 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
679
680 int
681 easy_df_const (rtx op)
682 {
683 REAL_VALUE_TYPE r;
684 long l[2];
685
686 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
687 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
688 if (l[0] == 0 && l[1] == 0)
689 return 1;
690 if ((l[0] & 0xffff) == 0 && l[1] == 0)
691 return 1;
692 return 0;
693 }
694
695 /* Return 1 if OP is (mem (reg ...)).
696 This is used in insn length calcs. */
697
698 int
699 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
700 {
701 return MEM_P (op) && REG_P (XEXP (op, 0));
702 }
703
704 /* Return nonzero if TYPE must be passed by indirect reference. */
705
706 static bool
707 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
708 enum machine_mode mode, const_tree type,
709 bool named ATTRIBUTE_UNUSED)
710 {
711 int size;
712
713 if (type)
714 size = int_size_in_bytes (type);
715 else
716 size = GET_MODE_SIZE (mode);
717
718 return (size < 0 || size > 8);
719 }
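
/* Concretely: an 8-byte struct (or a DImode/DFmode scalar) is still
   passed by value, while a 12-byte struct, or any type whose size is
   not known at compile time (int_size_in_bytes returns -1), is passed
   by reference.  */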
720 \f
721 /* Comparisons. */
722
723 /* X and Y are two things to compare using CODE. Emit the compare insn and
724 return the rtx for compare [arg0 of the if_then_else].
725 If need_compare is true then the comparison insn must be generated, rather
726 than being subsumed into the following branch instruction. */
727
728 rtx
729 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
730 {
731 enum rtx_code compare_code;
732 enum rtx_code branch_code;
733 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
734 int must_swap = 0;
735
736 switch (code)
737 {
738 case EQ: compare_code = EQ; branch_code = NE; break;
739 case NE: compare_code = EQ; branch_code = EQ; break;
740 case LT: compare_code = LT; branch_code = NE; break;
741 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
742 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
743 case GE: compare_code = LT; branch_code = EQ; break;
744 case LTU: compare_code = LTU; branch_code = NE; break;
745 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
746 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
747 case GEU: compare_code = LTU; branch_code = EQ; break;
748
749 default:
750 gcc_unreachable ();
751 }
752
753 if (need_compare)
754 {
755 switch (compare_code)
756 {
757 case EQ:
758 if (satisfies_constraint_P (y) /* Reg equal to small const. */
759 && y != const0_rtx)
760 {
761 rtx tmp = gen_reg_rtx (SImode);
762
763 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
764 x = tmp;
765 y = const0_rtx;
766 }
767 else if (CONSTANT_P (y)) /* Reg equal to const. */
768 {
769 rtx tmp = force_reg (GET_MODE (x), y);
770 y = tmp;
771 }
772
773 if (register_operand (y, SImode) /* Reg equal to reg. */
774 || y == const0_rtx) /* Reg equal to zero. */
775 {
776 emit_insn (gen_cmp_eqsi_insn (x, y));
777
778 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
779 }
780 break;
781
782 case LT:
783 if (register_operand (y, SImode)
784 || satisfies_constraint_P (y))
785 {
786 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
787
788 switch (code)
789 {
790 case LT:
791 emit_insn (gen_cmp_ltsi_insn (x, y));
792 code = EQ;
793 break;
794 case LE:
795 if (y == const0_rtx)
796 tmp = const1_rtx;
797 else
798 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
799 emit_insn (gen_cmp_ltsi_insn (x, tmp));
800 code = EQ;
801 break;
802 case GT:
803 if (CONST_INT_P (y))
804 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
805 else
806 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
807 emit_insn (gen_cmp_ltsi_insn (x, tmp));
808 code = NE;
809 break;
810 case GE:
811 emit_insn (gen_cmp_ltsi_insn (x, y));
812 code = NE;
813 break;
814 default:
815 gcc_unreachable ();
816 }
817
818 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
819 }
820 break;
821
822 case LTU:
823 if (register_operand (y, SImode)
824 || satisfies_constraint_P (y))
825 {
826 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
827
828 switch (code)
829 {
830 case LTU:
831 emit_insn (gen_cmp_ltusi_insn (x, y));
832 code = EQ;
833 break;
834 case LEU:
835 if (y == const0_rtx)
836 tmp = const1_rtx;
837 else
838 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
839 emit_insn (gen_cmp_ltusi_insn (x, tmp));
840 code = EQ;
841 break;
842 case GTU:
843 if (CONST_INT_P (y))
844 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
845 else
846 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
847 emit_insn (gen_cmp_ltusi_insn (x, tmp));
848 code = NE;
849 break;
850 case GEU:
851 emit_insn (gen_cmp_ltusi_insn (x, y));
852 code = NE;
853 break;
854 default:
855 gcc_unreachable ();
856 }
857
858 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
859 }
860 break;
861
862 default:
863 gcc_unreachable ();
864 }
865 }
866 else
867 {
868 /* Reg/reg equal comparison. */
869 if (compare_code == EQ
870 && register_operand (y, SImode))
871 return gen_rtx_fmt_ee (code, CCmode, x, y);
872
873 /* Reg/zero signed comparison. */
874 if ((compare_code == EQ || compare_code == LT)
875 && y == const0_rtx)
876 return gen_rtx_fmt_ee (code, CCmode, x, y);
877
878 /* Reg/smallconst equal comparison. */
879 if (compare_code == EQ
880 && satisfies_constraint_P (y))
881 {
882 rtx tmp = gen_reg_rtx (SImode);
883
884 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
885 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
886 }
887
888 /* Reg/const equal comparison. */
889 if (compare_code == EQ
890 && CONSTANT_P (y))
891 {
892 rtx tmp = force_reg (GET_MODE (x), y);
893
894 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
895 }
896 }
897
898 if (CONSTANT_P (y))
899 {
900 if (must_swap)
901 y = force_reg (GET_MODE (x), y);
902 else
903 {
904 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
905
906 if (! ok_const)
907 y = force_reg (GET_MODE (x), y);
908 }
909 }
910
911 switch (compare_code)
912 {
913 case EQ :
914 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
915 break;
916 case LT :
917 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
918 break;
919 case LTU :
920 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
921 break;
922
923 default:
924 gcc_unreachable ();
925 }
926
927 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
928 }
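
/* A worked example of the table above: for (gt x y) with
   need_compare == 0 and Y in a register, compare_code is LT,
   branch_code is NE and must_swap is set, so the code at the bottom
   emits cmp_ltsi_insn (y, x) -- i.e. it tests y < x -- and returns
   an NE test of the condition-bit register against zero for the
   branch pattern to consume.  */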
929
930 bool
931 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
932 {
933 enum machine_mode mode = GET_MODE (op0);
934
935 gcc_assert (mode == SImode);
936 switch (code)
937 {
938 case EQ:
939 if (!register_operand (op1, mode))
940 op1 = force_reg (mode, op1);
941
942 if (TARGET_M32RX || TARGET_M32R2)
943 {
944 if (!reg_or_zero_operand (op2, mode))
945 op2 = force_reg (mode, op2);
946
947 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
948 return true;
949 }
950 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
951 {
952 emit_insn (gen_seq_zero_insn (op0, op1));
953 return true;
954 }
955
956 if (!reg_or_eq_int16_operand (op2, mode))
957 op2 = force_reg (mode, op2);
958
959 emit_insn (gen_seq_insn (op0, op1, op2));
960 return true;
961
962 case NE:
963 if (!CONST_INT_P (op2)
964 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
965 {
966 rtx reg;
967
968 if (reload_completed || reload_in_progress)
969 return false;
970
971 reg = gen_reg_rtx (SImode);
972 emit_insn (gen_xorsi3 (reg, op1, op2));
973 op1 = reg;
974
975 if (!register_operand (op1, mode))
976 op1 = force_reg (mode, op1);
977
978 emit_insn (gen_sne_zero_insn (op0, op1));
979 return true;
980 }
981 return false;
982
983 case LT:
984 case GT:
985 if (code == GT)
986 {
987 rtx tmp = op2;
988 op2 = op1;
989 op1 = tmp;
990 code = LT;
991 }
992
993 if (!register_operand (op1, mode))
994 op1 = force_reg (mode, op1);
995
996 if (!reg_or_int16_operand (op2, mode))
997 op2 = force_reg (mode, op2);
998
999 emit_insn (gen_slt_insn (op0, op1, op2));
1000 return true;
1001
1002 case LTU:
1003 case GTU:
1004 if (code == GTU)
1005 {
1006 rtx tmp = op2;
1007 op2 = op1;
1008 op1 = tmp;
1009 code = LTU;
1010 }
1011
1012 if (!register_operand (op1, mode))
1013 op1 = force_reg (mode, op1);
1014
1015 if (!reg_or_int16_operand (op2, mode))
1016 op2 = force_reg (mode, op2);
1017
1018 emit_insn (gen_sltu_insn (op0, op1, op2));
1019 return true;
1020
1021 case GE:
1022 case GEU:
1023 if (!register_operand (op1, mode))
1024 op1 = force_reg (mode, op1);
1025
1026 if (!reg_or_int16_operand (op2, mode))
1027 op2 = force_reg (mode, op2);
1028
1029 if (code == GE)
1030 emit_insn (gen_sge_insn (op0, op1, op2));
1031 else
1032 emit_insn (gen_sgeu_insn (op0, op1, op2));
1033 return true;
1034
1035 case LE:
1036 case LEU:
1037 if (!register_operand (op1, mode))
1038 op1 = force_reg (mode, op1);
1039
1040 if (CONST_INT_P (op2))
1041 {
1042 HOST_WIDE_INT value = INTVAL (op2);
1043 if (value >= 2147483647)
1044 {
1045 emit_move_insn (op0, const1_rtx);
1046 return true;
1047 }
1048
1049 op2 = GEN_INT (value + 1);
1050 if (value < -32768 || value >= 32767)
1051 op2 = force_reg (mode, op2);
1052
1053 if (code == LEU)
1054 emit_insn (gen_sltu_insn (op0, op1, op2));
1055 else
1056 emit_insn (gen_slt_insn (op0, op1, op2));
1057 return true;
1058 }
1059
1060 if (!register_operand (op2, mode))
1061 op2 = force_reg (mode, op2);
1062
1063 if (code == LEU)
1064 emit_insn (gen_sleu_insn (op0, op1, op2));
1065 else
1066 emit_insn (gen_sle_insn (op0, op1, op2));
1067 return true;
1068
1069 default:
1070 gcc_unreachable ();
1071 }
1072 }
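
/* Example of the LE/LEU handling above: for op0 = (op1 <= 5), op2 is
   rewritten to 6 and slt_insn (op0, op1, 6) is emitted, i.e. op1 < 6,
   which is equivalent for integers.  The early-out for values at or
   above 2147483647 and the force_reg for constants outside the 16-bit
   range guard the "+ 1" rewrite.  */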
1073
1074 \f
1075 /* Split a 2 word move (DI or DF) into component parts. */
1076
1077 rtx
1078 gen_split_move_double (rtx operands[])
1079 {
1080 enum machine_mode mode = GET_MODE (operands[0]);
1081 rtx dest = operands[0];
1082 rtx src = operands[1];
1083 rtx val;
1084
1085 /* We might have (SUBREG (MEM)) here, so just get rid of the
1086 subregs to make this code simpler. It is safe to call
1087 alter_subreg any time after reload. */
1088 if (GET_CODE (dest) == SUBREG)
1089 alter_subreg (&dest);
1090 if (GET_CODE (src) == SUBREG)
1091 alter_subreg (&src);
1092
1093 start_sequence ();
1094 if (REG_P (dest))
1095 {
1096 int dregno = REGNO (dest);
1097
1098 /* Reg = reg. */
1099 if (REG_P (src))
1100 {
1101 int sregno = REGNO (src);
1102
1103 int reverse = (dregno == sregno + 1);
1104
1105 /* We normally copy the low-numbered register first. However, if
  1106            the first register of operand 0 is the same as the second register of
1107 operand 1, we must copy in the opposite order. */
1108 emit_insn (gen_rtx_SET (VOIDmode,
1109 operand_subword (dest, reverse, TRUE, mode),
1110 operand_subword (src, reverse, TRUE, mode)));
1111
1112 emit_insn (gen_rtx_SET (VOIDmode,
1113 operand_subword (dest, !reverse, TRUE, mode),
1114 operand_subword (src, !reverse, TRUE, mode)));
1115 }
1116
1117 /* Reg = constant. */
1118 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1119 {
1120 rtx words[2];
1121 split_double (src, &words[0], &words[1]);
1122 emit_insn (gen_rtx_SET (VOIDmode,
1123 operand_subword (dest, 0, TRUE, mode),
1124 words[0]));
1125
1126 emit_insn (gen_rtx_SET (VOIDmode,
1127 operand_subword (dest, 1, TRUE, mode),
1128 words[1]));
1129 }
1130
1131 /* Reg = mem. */
1132 else if (MEM_P (src))
1133 {
1134 /* If the high-address word is used in the address, we must load it
1135 last. Otherwise, load it first. */
1136 int reverse
1137 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1138
1139 /* We used to optimize loads from single registers as
1140
1141 ld r1,r3+; ld r2,r3
1142
1143 if r3 were not used subsequently. However, the REG_NOTES aren't
1144 propagated correctly by the reload phase, and it can cause bad
1145 code to be generated. We could still try:
1146
1147 ld r1,r3+; ld r2,r3; addi r3,-4
1148
1149 which saves 2 bytes and doesn't force longword alignment. */
1150 emit_insn (gen_rtx_SET (VOIDmode,
1151 operand_subword (dest, reverse, TRUE, mode),
1152 adjust_address (src, SImode,
1153 reverse * UNITS_PER_WORD)));
1154
1155 emit_insn (gen_rtx_SET (VOIDmode,
1156 operand_subword (dest, !reverse, TRUE, mode),
1157 adjust_address (src, SImode,
1158 !reverse * UNITS_PER_WORD)));
1159 }
1160 else
1161 gcc_unreachable ();
1162 }
1163
1164 /* Mem = reg. */
  1165   /* We used to optimize stores through single registers as
1166
1167 st r1,r3; st r2,+r3
1168
1169 if r3 were not used subsequently. However, the REG_NOTES aren't
1170 propagated correctly by the reload phase, and it can cause bad
1171 code to be generated. We could still try:
1172
1173 st r1,r3; st r2,+r3; addi r3,-4
1174
1175 which saves 2 bytes and doesn't force longword alignment. */
1176 else if (MEM_P (dest) && REG_P (src))
1177 {
1178 emit_insn (gen_rtx_SET (VOIDmode,
1179 adjust_address (dest, SImode, 0),
1180 operand_subword (src, 0, TRUE, mode)));
1181
1182 emit_insn (gen_rtx_SET (VOIDmode,
1183 adjust_address (dest, SImode, UNITS_PER_WORD),
1184 operand_subword (src, 1, TRUE, mode)));
1185 }
1186
1187 else
1188 gcc_unreachable ();
1189
1190 val = get_insns ();
1191 end_sequence ();
1192 return val;
1193 }
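
/* For instance, a DImode register-to-register copy r4:r5 = r6:r7
   becomes (set r4 r6) followed by (set r5 r7).  If the pairs overlap
   as in r7:r8 = r6:r7 (dregno == sregno + 1), the order is reversed
   so that r8 is written from r7 before r7 is overwritten by r6.  */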
1194
1195 \f
1196 static int
1197 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1198 tree type, bool named ATTRIBUTE_UNUSED)
1199 {
1200 int words;
1201 unsigned int size =
1202 (((mode == BLKmode && type)
1203 ? (unsigned int) int_size_in_bytes (type)
1204 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1205 / UNITS_PER_WORD;
1206
1207 if (*cum >= M32R_MAX_PARM_REGS)
1208 words = 0;
1209 else if (*cum + size > M32R_MAX_PARM_REGS)
1210 words = (*cum + size) - M32R_MAX_PARM_REGS;
1211 else
1212 words = 0;
1213
1214 return words * UNITS_PER_WORD;
1215 }
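
/* A worked example, assuming M32R_MAX_PARM_REGS is 4 (its value lives
   in m32r.h): with three argument words already used (*cum == 3) and a
   two-word argument next, *cum + size == 5, so one word spills and the
   function returns 1 * UNITS_PER_WORD == 4 -- four bytes travel in the
   last parameter register and the rest on the stack.  */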
1216
1217 /* The ROUND_ADVANCE* macros are local to this file. */
1218 /* Round SIZE up to a word boundary. */
1219 #define ROUND_ADVANCE(SIZE) \
1220 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1221
1222 /* Round arg MODE/TYPE up to the next word boundary. */
1223 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1224 ((MODE) == BLKmode \
1225 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1226 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1227
1228 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1229 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1230
1231 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1232 a reg. This includes arguments that have to be passed by reference as the
1233 pointer to them is passed in a reg if one is available (and that is what
1234 we're given).
1235 This macro is only used in this file. */
1236 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1237 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1238
1239 /* Determine where to put an argument to a function.
1240 Value is zero to push the argument on the stack,
1241 or a hard register in which to store the argument.
1242
1243 MODE is the argument's machine mode.
1244 TYPE is the data type of the argument (as a tree).
1245 This is null for libcalls where that information may
1246 not be available.
1247 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1248 the preceding args and about the function being called.
1249 NAMED is nonzero if this argument is a named parameter
1250 (otherwise it is an extra parameter matching an ellipsis). */
1251 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1252 and the rest are pushed. */
1253
1254 static rtx
1255 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1256 const_tree type ATTRIBUTE_UNUSED,
1257 bool named ATTRIBUTE_UNUSED)
1258 {
1259 return (PASS_IN_REG_P (*cum, mode, type)
1260 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1261 : NULL_RTX);
1262 }
1263
1264 /* Update the data in CUM to advance over an argument
1265 of mode MODE and data type TYPE.
1266 (TYPE is null for libcalls where that information may not be available.) */
1267
1268 static void
1269 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1270 const_tree type, bool named ATTRIBUTE_UNUSED)
1271 {
1272 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1273 + ROUND_ADVANCE_ARG (mode, type));
1274 }
1275
1276 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1277
1278 static bool
1279 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1280 {
1281 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1282 }
1283
1284 /* Worker function for TARGET_FUNCTION_VALUE. */
1285
1286 static rtx
1287 m32r_function_value (const_tree valtype,
1288 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1289 bool outgoing ATTRIBUTE_UNUSED)
1290 {
1291 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1292 }
1293
1294 /* Worker function for TARGET_LIBCALL_VALUE. */
1295
1296 static rtx
1297 m32r_libcall_value (enum machine_mode mode,
1298 const_rtx fun ATTRIBUTE_UNUSED)
1299 {
1300 return gen_rtx_REG (mode, 0);
1301 }
1302
1303 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1304
1305 ??? What about r1 in DI/DF values. */
1306
1307 static bool
1308 m32r_function_value_regno_p (const unsigned int regno)
1309 {
1310 return (regno == 0);
1311 }
1312
1313 /* Do any needed setup for a variadic function. For the M32R, we must
1314 create a register parameter block, and then copy any anonymous arguments
1315 in registers to memory.
1316
1317 CUM has not been updated for the last named argument which has type TYPE
1318 and mode MODE, and we rely on this fact. */
1319
1320 static void
1321 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1322 tree type, int *pretend_size, int no_rtl)
1323 {
1324 int first_anon_arg;
1325
1326 if (no_rtl)
1327 return;
1328
1329 /* All BLKmode values are passed by reference. */
1330 gcc_assert (mode != BLKmode);
1331
1332 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1333 + ROUND_ADVANCE_ARG (mode, type));
1334
1335 if (first_anon_arg < M32R_MAX_PARM_REGS)
1336 {
1337 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1338 int first_reg_offset = first_anon_arg;
1339 /* Size in words to "pretend" allocate. */
1340 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1341 rtx regblock;
1342
1343 regblock = gen_frame_mem (BLKmode,
1344 plus_constant (arg_pointer_rtx,
1345 FIRST_PARM_OFFSET (0)));
1346 set_mem_alias_set (regblock, get_varargs_alias_set ());
1347 move_block_from_reg (first_reg_offset, regblock, size);
1348
1349 *pretend_size = (size * UNITS_PER_WORD);
1350 }
1351 }
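
/* Sketch of the effect, again assuming M32R_MAX_PARM_REGS is 4: for
   int f (int a, ...), CUM covers only the named argument, so
   first_anon_arg == 1; the function then pretends to have pushed
   3 * UNITS_PER_WORD bytes and copies parameter registers 1..3 into
   that block, so va_arg can walk every argument in memory.  */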
1352
1353 \f
  1354 /* Return true if INSN is a real, instruction-bearing insn.  */
1355
1356 static int
1357 m32r_is_insn (rtx insn)
1358 {
1359 return (NONDEBUG_INSN_P (insn)
1360 && GET_CODE (PATTERN (insn)) != USE
1361 && GET_CODE (PATTERN (insn)) != CLOBBER
1362 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1363 }
1364
1365 /* Increase the priority of long instructions so that the
1366 short instructions are scheduled ahead of the long ones. */
1367
1368 static int
1369 m32r_adjust_priority (rtx insn, int priority)
1370 {
1371 if (m32r_is_insn (insn)
1372 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1373 priority <<= 3;
1374
1375 return priority;
1376 }
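
/* E.g. an insn whose size attribute is not INSN_SIZE_SHORT and whose
   priority was 2 is given priority 2 << 3 == 16; short insns keep
   their original priority.  */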
1377
1378 \f
1379 /* Indicate how many instructions can be issued at the same time.
1380 This is sort of a lie. The m32r can issue only 1 long insn at
1381 once, but it can issue 2 short insns. The default therefore is
1382 set at 2, but this can be overridden by the command line option
1383 -missue-rate=1. */
1384
1385 static int
1386 m32r_issue_rate (void)
1387 {
1388 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1389 }
1390 \f
1391 /* Cost functions. */
1392
  1393 /* Implement TARGET_MEMORY_MOVE_COST.
1394
1395 Memory is 3 times as expensive as registers.
1396 ??? Is that the right way to look at it? */
1397
1398 static int
1399 m32r_memory_move_cost (enum machine_mode mode,
1400 reg_class_t rclass ATTRIBUTE_UNUSED,
1401 bool in ATTRIBUTE_UNUSED)
1402 {
1403 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1404 return 6;
1405 else
1406 return 12;
1407 }
1408
1409 static bool
1410 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1411 bool speed ATTRIBUTE_UNUSED)
1412 {
1413 switch (code)
1414 {
1415 /* Small integers are as cheap as registers. 4 byte values can be
1416 fetched as immediate constants - let's give that the cost of an
1417 extra insn. */
1418 case CONST_INT:
1419 if (INT16_P (INTVAL (x)))
1420 {
1421 *total = 0;
1422 return true;
1423 }
1424 /* FALLTHRU */
1425
1426 case CONST:
1427 case LABEL_REF:
1428 case SYMBOL_REF:
1429 *total = COSTS_N_INSNS (1);
1430 return true;
1431
1432 case CONST_DOUBLE:
1433 {
1434 rtx high, low;
1435
1436 split_double (x, &high, &low);
1437 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1438 + !INT16_P (INTVAL (low)));
1439 return true;
1440 }
1441
1442 case MULT:
1443 *total = COSTS_N_INSNS (3);
1444 return true;
1445
1446 case DIV:
1447 case UDIV:
1448 case MOD:
1449 case UMOD:
1450 *total = COSTS_N_INSNS (10);
1451 return true;
1452
1453 default:
1454 return false;
1455 }
1456 }
1457 \f
1458 /* Type of function DECL.
1459
1460 The result is cached. To reset the cache at the end of a function,
1461 call with DECL = NULL_TREE. */
1462
1463 enum m32r_function_type
1464 m32r_compute_function_type (tree decl)
1465 {
1466 /* Cached value. */
1467 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1468 /* Last function we were called for. */
1469 static tree last_fn = NULL_TREE;
1470
1471 /* Resetting the cached value? */
1472 if (decl == NULL_TREE)
1473 {
1474 fn_type = M32R_FUNCTION_UNKNOWN;
1475 last_fn = NULL_TREE;
1476 return fn_type;
1477 }
1478
1479 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1480 return fn_type;
1481
1482 /* Compute function type. */
1483 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1484 ? M32R_FUNCTION_INTERRUPT
1485 : M32R_FUNCTION_NORMAL);
1486
1487 last_fn = decl;
1488 return fn_type;
1489 }
1490 \f/* Function prologue/epilogue handlers. */
1491
1492 /* M32R stack frames look like:
1493
1494 Before call After call
1495 +-----------------------+ +-----------------------+
1496 | | | |
1497 high | local variables, | | local variables, |
1498 mem | reg save area, etc. | | reg save area, etc. |
1499 | | | |
1500 +-----------------------+ +-----------------------+
1501 | | | |
1502 | arguments on stack. | | arguments on stack. |
1503 | | | |
1504 SP+0->+-----------------------+ +-----------------------+
1505 | reg parm save area, |
1506 | only created for |
1507 | variable argument |
1508 | functions |
1509 +-----------------------+
1510 | previous frame ptr |
1511 +-----------------------+
1512 | |
1513 | register save area |
1514 | |
1515 +-----------------------+
1516 | return address |
1517 +-----------------------+
1518 | |
1519 | local variables |
1520 | |
1521 +-----------------------+
1522 | |
1523 | alloca allocations |
1524 | |
1525 +-----------------------+
1526 | |
1527 low | arguments on stack |
1528 memory | |
1529 SP+0->+-----------------------+
1530
1531 Notes:
1532 1) The "reg parm save area" does not exist for non variable argument fns.
1533 2) The "reg parm save area" can be eliminated completely if we saved regs
1534 containing anonymous args separately but that complicates things too
1535 much (so it's not done).
1536 3) The return address is saved after the register save area so as to have as
1537 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1538
1539 /* Structure to be filled in by m32r_compute_frame_size with register
1540 save masks, and offsets for the current function. */
1541 struct m32r_frame_info
1542 {
1543 unsigned int total_size; /* # bytes that the entire frame takes up. */
1544 unsigned int extra_size; /* # bytes of extra stuff. */
1545 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1546 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1547 unsigned int reg_size; /* # bytes needed to store regs. */
1548 unsigned int var_size; /* # bytes that variables take up. */
1549 unsigned int gmask; /* Mask of saved gp registers. */
1550 unsigned int save_fp; /* Nonzero if fp must be saved. */
1551 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1552 int initialized; /* Nonzero if frame size already calculated. */
1553 };
1554
1555 /* Current frame information calculated by m32r_compute_frame_size. */
1556 static struct m32r_frame_info current_frame_info;
1557
1558 /* Zero structure to initialize current_frame_info. */
1559 static struct m32r_frame_info zero_frame_info;
1560
1561 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1562 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1563
1564 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1565 The return address and frame pointer are treated separately.
1566 Don't consider them here. */
1567 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1568 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1569 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1570
1571 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1572 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1573
1574 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1575 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1576
1577 /* Return the bytes needed to compute the frame pointer from the current
1578 stack pointer.
1579
1580 SIZE is the size needed for local variables. */
1581
1582 unsigned int
1583 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1584 {
1585 unsigned int regno;
1586 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1587 unsigned int reg_size;
1588 unsigned int gmask;
1589 enum m32r_function_type fn_type;
1590 int interrupt_p;
1591 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1592 | crtl->profile);
1593
1594 var_size = M32R_STACK_ALIGN (size);
1595 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1596 pretend_size = crtl->args.pretend_args_size;
1597 extra_size = FIRST_PARM_OFFSET (0);
1598 total_size = extra_size + pretend_size + args_size + var_size;
1599 reg_size = 0;
1600 gmask = 0;
1601
1602 /* See if this is an interrupt handler. Call used registers must be saved
1603 for them too. */
1604 fn_type = m32r_compute_function_type (current_function_decl);
1605 interrupt_p = M32R_INTERRUPT_P (fn_type);
1606
1607 /* Calculate space needed for registers. */
1608 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1609 {
1610 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1611 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1612 {
1613 reg_size += UNITS_PER_WORD;
1614 gmask |= 1 << regno;
1615 }
1616 }
1617
1618 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1619 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1620
1621 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1622 * UNITS_PER_WORD);
1623 total_size += reg_size;
1624
1625 /* ??? Not sure this is necessary, and I don't think the epilogue
1626 handler will do the right thing if this changes total_size. */
1627 total_size = M32R_STACK_ALIGN (total_size);
1628
1629 /* frame_size = total_size - (pretend_size + reg_size); */
1630
1631 /* Save computed information. */
1632 current_frame_info.total_size = total_size;
1633 current_frame_info.extra_size = extra_size;
1634 current_frame_info.pretend_size = pretend_size;
1635 current_frame_info.var_size = var_size;
1636 current_frame_info.args_size = args_size;
1637 current_frame_info.reg_size = reg_size;
1638 current_frame_info.gmask = gmask;
1639 current_frame_info.initialized = reload_completed;
1640
1641 /* Ok, we're done. */
1642 return total_size;
1643 }
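
/* A worked example (a sketch: FIRST_PARM_OFFSET (0) is assumed to be 0
   and M32R_STACK_ALIGN to round up to a multiple of 4).  A function
   with 20 bytes of locals, no outgoing arguments, no pretend args,
   that must save fp, lr and one other call-saved register gets

     var_size = 20, args_size = 0, extra_size = 0,
     reg_size  = 3 * UNITS_PER_WORD = 12,
     total_size = 32,

   with gmask holding only the bit for the extra register (fp and lr
   are tracked by save_fp / save_lr, not by gmask).  */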
1644
1645 /* Worker function for TARGET_CAN_ELIMINATE. */
1646
1647 bool
1648 m32r_can_eliminate (const int from, const int to)
1649 {
1650 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1651 ? ! frame_pointer_needed
1652 : true);
1653 }
1654
1655 \f
1656 /* The table we use to reference PIC data. */
1657 static rtx global_offset_table;
1658
1659 static void
1660 m32r_reload_lr (rtx sp, int size)
1661 {
1662 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1663
1664 if (size == 0)
1665 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1666 else if (size < 32768)
1667 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1668 gen_rtx_PLUS (Pmode, sp,
1669 GEN_INT (size)))));
1670 else
1671 {
1672 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1673
1674 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1675 emit_insn (gen_addsi3 (tmp, tmp, sp));
1676 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1677 }
1678
1679 emit_use (lr);
1680 }
1681
1682 void
1683 m32r_load_pic_register (void)
1684 {
1685 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1686 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1687 GEN_INT (TARGET_MODEL_SMALL)));
1688
1689 /* Need to emit this whether or not we obey regdecls,
1690 since setjmp/longjmp can cause life info to screw up. */
1691 emit_use (pic_offset_table_rtx);
1692 }
1693
1694 /* Expand the m32r prologue as a series of insns. */
1695
1696 void
1697 m32r_expand_prologue (void)
1698 {
1699 int regno;
1700 int frame_size;
1701 unsigned int gmask;
1702 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1703 | crtl->profile);
1704
1705 if (! current_frame_info.initialized)
1706 m32r_compute_frame_size (get_frame_size ());
1707
1708 gmask = current_frame_info.gmask;
1709
1710 /* These cases shouldn't happen. Catch them now. */
1711 gcc_assert (current_frame_info.total_size || !gmask);
1712
1713 /* Allocate space for register arguments if this is a variadic function. */
1714 if (current_frame_info.pretend_size != 0)
1715 {
1716 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1717 the wrong result on a 64-bit host. */
1718 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1719 emit_insn (gen_addsi3 (stack_pointer_rtx,
1720 stack_pointer_rtx,
1721 GEN_INT (-pretend_size)));
1722 }
1723
1724 /* Save any registers we need to and set up fp. */
1725 if (current_frame_info.save_fp)
1726 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1727
1728 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1729
1730 /* Save any needed call-saved regs (and call-used if this is an
1731 interrupt handler). */
1732 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1733 {
1734 if ((gmask & (1 << regno)) != 0)
1735 emit_insn (gen_movsi_push (stack_pointer_rtx,
1736 gen_rtx_REG (Pmode, regno)));
1737 }
1738
1739 if (current_frame_info.save_lr)
1740 emit_insn (gen_movsi_push (stack_pointer_rtx,
1741 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1742
1743 /* Allocate the stack frame. */
1744 frame_size = (current_frame_info.total_size
1745 - (current_frame_info.pretend_size
1746 + current_frame_info.reg_size));
1747
1748 if (frame_size == 0)
1749 ; /* Nothing to do. */
1750 else if (frame_size <= 32768)
1751 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1752 GEN_INT (-frame_size)));
1753 else
1754 {
1755 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1756
1757 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1758 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1759 }
1760
1761 if (frame_pointer_needed)
1762 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1763
1764 if (crtl->profile)
1765 /* Push lr for mcount (form_pc, x). */
1766 emit_insn (gen_movsi_push (stack_pointer_rtx,
1767 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1768
1769 if (pic_reg_used)
1770 {
1771 m32r_load_pic_register ();
1772 m32r_reload_lr (stack_pointer_rtx,
1773 (crtl->profile ? 0 : frame_size));
1774 }
1775
1776 if (crtl->profile && !pic_reg_used)
1777 emit_insn (gen_blockage ());
1778 }
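
/* Putting the pieces together: for a non-interrupt, non-PIC,
   non-variadic function that must save fp and lr and needs a 16-byte
   frame, the expander emits, in this order,

     (movsi_push sp, fp)
     (movsi_push sp, lr)
     (addsi3 sp, sp, -16)
     (movsi fp, sp)        only if frame_pointer_needed

   -- a sketch of the insn sequence, not literal assembly.  */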
1779
1780 \f
1781 /* Set up the stack and frame pointer (if desired) for the function.
1782 Note, if this is changed, you need to mirror the changes in
1783 m32r_compute_frame_size which calculates the prolog size. */
1784
1785 static void
1786 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1787 {
1788 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1789
1790 /* If this is an interrupt handler, mark it as such. */
1791 if (M32R_INTERRUPT_P (fn_type))
1792 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1793
1794 if (! current_frame_info.initialized)
1795 m32r_compute_frame_size (size);
1796
1797 /* This is only for the human reader. */
1798 fprintf (file,
1799 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1800 ASM_COMMENT_START,
1801 current_frame_info.var_size,
1802 current_frame_info.reg_size / 4,
1803 current_frame_info.args_size,
1804 current_frame_info.extra_size);
1805 }
1806 \f
1807 /* Output RTL to pop register REGNO from the stack. */
1808
1809 static void
1810 pop (int regno)
1811 {
1812 rtx x;
1813
1814 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1815 stack_pointer_rtx));
1816 add_reg_note (x, REG_INC, stack_pointer_rtx);
1817 }
1818
1819 /* Expand the m32r epilogue as a series of insns. */
1820
1821 void
1822 m32r_expand_epilogue (void)
1823 {
1824 int regno;
1825 int noepilogue = FALSE;
1826 int total_size;
1827
1828 gcc_assert (current_frame_info.initialized);
1829 total_size = current_frame_info.total_size;
1830
1831 if (total_size == 0)
1832 {
1833 rtx insn = get_last_insn ();
1834
1835 /* If the last insn was a BARRIER, we don't have to write any code
1836 because a jump (aka return) was put there. */
1837 if (insn && NOTE_P (insn))
1838 insn = prev_nonnote_insn (insn);
1839 if (insn && BARRIER_P (insn))
1840 noepilogue = TRUE;
1841 }
1842
1843 if (!noepilogue)
1844 {
1845 unsigned int var_size = current_frame_info.var_size;
1846 unsigned int args_size = current_frame_info.args_size;
1847 unsigned int gmask = current_frame_info.gmask;
1848 int can_trust_sp_p = !cfun->calls_alloca;
1849
1850 if (flag_exceptions)
1851 emit_insn (gen_blockage ());
1852
1853 /* The first thing to do is point the sp at the bottom of the register
1854 save area. */
1855 if (can_trust_sp_p)
1856 {
1857 unsigned int reg_offset = var_size + args_size;
1858
1859 if (reg_offset == 0)
1860 ; /* Nothing to do. */
1861 else if (reg_offset < 32768)
1862 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1863 GEN_INT (reg_offset)));
1864 else
1865 {
1866 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1867
1868 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1869 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1870 tmp));
1871 }
1872 }
1873 else if (frame_pointer_needed)
1874 {
1875 unsigned int reg_offset = var_size + args_size;
1876
1877 if (reg_offset == 0)
1878 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1879 else if (reg_offset < 32768)
1880 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1881 GEN_INT (reg_offset)));
1882 else
1883 {
1884 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1885
1886 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1887 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1888 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1889 tmp));
1890 }
1891 }
1892 else
1893 gcc_unreachable ();
1894
1895 if (current_frame_info.save_lr)
1896 pop (RETURN_ADDR_REGNUM);
1897
1898 /* Restore any saved registers, in reverse order of course. */
1899 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1900 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1901 {
1902 if ((gmask & (1L << regno)) != 0)
1903 pop (regno);
1904 }
1905
1906 if (current_frame_info.save_fp)
1907 pop (FRAME_POINTER_REGNUM);
1908
1909 /* Remove varargs area if present. */
1910 if (current_frame_info.pretend_size != 0)
1911 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1912 GEN_INT (current_frame_info.pretend_size)));
1913
1914 emit_insn (gen_blockage ());
1915 }
1916 }
1917
1918 /* Do any necessary cleanup after a function to restore stack, frame,
1919 and regs. */
1920
1921 static void
1922 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1923 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1924 {
1925 /* Reset state info for each function. */
1926 current_frame_info = zero_frame_info;
1927 m32r_compute_function_type (NULL_TREE);
1928 }
1929 \f
1930 /* Return nonzero if this function is known to have a null or
1931 one-instruction epilogue. */
1932
1933 int
1934 direct_return (void)
1935 {
1936 if (!reload_completed)
1937 return FALSE;
1938
1939 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1940 return FALSE;
1941
1942 if (! current_frame_info.initialized)
1943 m32r_compute_frame_size (get_frame_size ());
1944
1945 return current_frame_info.total_size == 0;
1946 }
1947
1948 \f
1949 /* PIC. */
1950
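/* Return 1 if X is already legitimate as an operand when generating
   PIC code.  Symbol and label references, and sums of those with a
   constant, return 0; they must first be legitimized by
   m32r_legitimize_pic_address.  */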
1951 int
1952 m32r_legitimate_pic_operand_p (rtx x)
1953 {
1954 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1955 return 0;
1956
1957 if (GET_CODE (x) == CONST
1958 && GET_CODE (XEXP (x, 0)) == PLUS
1959 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1960 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1961 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1962 return 0;
1963
1964 return 1;
1965 }
1966
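/* Legitimize address ORIG for use when generating PIC code.  Local
   symbols and labels are computed as an offset from the PIC register;
   other symbols are loaded from their GOT slot.  REG, if non-null, is
   a register that may be used to hold the result.  */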
1967 rtx
1968 m32r_legitimize_pic_address (rtx orig, rtx reg)
1969 {
1970 #ifdef DEBUG_PIC
1971 printf ("m32r_legitimize_pic_address()\n");
1972 #endif
1973
1974 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1975 {
1976 rtx pic_ref, address;
1977 int subregs = 0;
1978
1979 if (reg == 0)
1980 {
1981 gcc_assert (!reload_in_progress && !reload_completed);
1982 reg = gen_reg_rtx (Pmode);
1983
1984 subregs = 1;
1985 }
1986
1987 if (subregs)
1988 address = gen_reg_rtx (Pmode);
1989 else
1990 address = reg;
1991
1992 crtl->uses_pic_offset_table = 1;
1993
1994 if (GET_CODE (orig) == LABEL_REF
1995 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1996 {
1997 emit_insn (gen_gotoff_load_addr (reg, orig));
1998 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1999 return reg;
2000 }
2001
2002 emit_insn (gen_pic_load_addr (address, orig));
2003
2004 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2005 pic_ref = gen_const_mem (Pmode, address);
2006 emit_move_insn (reg, pic_ref);
2007 return reg;
2008 }
2009 else if (GET_CODE (orig) == CONST)
2010 {
2011 rtx base, offset;
2012
2013 if (GET_CODE (XEXP (orig, 0)) == PLUS
2014 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2015 return orig;
2016
2017 if (reg == 0)
2018 {
2019 gcc_assert (!reload_in_progress && !reload_completed);
2020 reg = gen_reg_rtx (Pmode);
2021 }
2022
2023 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2024 {
2025 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2026 if (base == reg)
2027 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2028 else
2029 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2030 }
2031 else
2032 return orig;
2033
2034 if (CONST_INT_P (offset))
2035 {
2036 if (INT16_P (INTVAL (offset)))
2037 return plus_constant (base, INTVAL (offset));
2038 else
2039 {
2040 gcc_assert (! reload_in_progress && ! reload_completed);
2041 offset = force_reg (Pmode, offset);
2042 }
2043 }
2044
2045 return gen_rtx_PLUS (Pmode, base, offset);
2046 }
2047
2048 return orig;
2049 }
2050
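/* Worker function for TARGET_LEGITIMIZE_ADDRESS.  */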
2051 static rtx
2052 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2053 enum machine_mode mode ATTRIBUTE_UNUSED)
2054 {
2055 if (flag_pic)
2056 return m32r_legitimize_pic_address (x, NULL_RTX);
2057 else
2058 return x;
2059 }
2060
2061 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2062
2063 static bool
2064 m32r_mode_dependent_address_p (const_rtx addr)
2065 {
2066 if (GET_CODE (addr) == LO_SUM)
2067 return true;
2068
2069 return false;
2070 }
2071 \f
2072 /* Nested function support. */
2073
2074 /* Emit RTL insns to initialize the variable parts of a trampoline.
2075 FNADDR is an RTX for the address of the function's pure code.
2076 CXT is an RTX for the static chain value for the function. */
2077
2078 void
2079 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2080 rtx fnaddr ATTRIBUTE_UNUSED,
2081 rtx cxt ATTRIBUTE_UNUSED)
2082 {
2083 }
2084 \f
2085 static void
2086 m32r_file_start (void)
2087 {
2088 default_file_start ();
2089
2090 if (flag_verbose_asm)
2091 fprintf (asm_out_file,
2092 "%s M32R/D special options: -G %d\n",
2093 ASM_COMMENT_START, g_switch_value);
2094
2095 if (TARGET_LITTLE_ENDIAN)
2096 fprintf (asm_out_file, "\t.little\n");
2097 }
2098 \f
2099 /* Print operand X (an rtx) in assembler syntax to file FILE.
2100 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2101 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2102
2103 static void
2104 m32r_print_operand (FILE * file, rtx x, int code)
2105 {
2106 rtx addr;
2107
2108 switch (code)
2109 {
2110 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2111 indicate pre-increment 's'tores and 'p'ost-increment loads. */
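/* E.g. with r4 as the operand, `%s' prints `@+r4' (store address with
   pre-increment) and `%p' prints `@r4+' (load address with
   post-increment).  */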
2112 case 's':
2113 if (REG_P (x))
2114 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2115 else
2116 output_operand_lossage ("invalid operand to %%s code");
2117 return;
2118
2119 case 'p':
2120 if (REG_P (x))
2121 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2122 else
2123 output_operand_lossage ("invalid operand to %%p code");
2124 return;
2125
2126 case 'R' :
2127 /* Write second word of DImode or DFmode reference,
2128 register or memory. */
2129 if (REG_P (x))
2130 fputs (reg_names[REGNO (x)+1], file);
2131 else if (MEM_P (x))
2132 {
2133 fprintf (file, "@(");
2134 /* Handle possible auto-increment. Since it is pre-increment and
2135 we have already done it, we can just use an offset of four. */
2136 /* ??? This is taken from rs6000.c I think. I don't think it is
2137 currently necessary, but keep it around. */
2138 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2139 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2140 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2141 else
2142 output_address (plus_constant (XEXP (x, 0), 4));
2143 fputc (')', file);
2144 }
2145 else
2146 output_operand_lossage ("invalid operand to %%R code");
2147 return;
2148
2149 case 'H' : /* High word. */
2150 case 'L' : /* Low word. */
2151 if (REG_P (x))
2152 {
2153 /* L = least significant word, H = most significant word. */
2154 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2155 fputs (reg_names[REGNO (x)], file);
2156 else
2157 fputs (reg_names[REGNO (x)+1], file);
2158 }
2159 else if (CONST_INT_P (x)
2160 || GET_CODE (x) == CONST_DOUBLE)
2161 {
2162 rtx first, second;
2163
2164 split_double (x, &first, &second);
2165 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2166 code == 'L' ? INTVAL (first) : INTVAL (second));
2167 }
2168 else
2169 output_operand_lossage ("invalid operand to %%H/%%L code");
2170 return;
2171
2172 case 'A' :
2173 {
2174 char str[30];
2175
2176 if (GET_CODE (x) != CONST_DOUBLE
2177 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2178 fatal_insn ("bad insn for 'A'", x);
2179
2180 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2181 fprintf (file, "%s", str);
2182 return;
2183 }
2184
2185 case 'B' : /* Bottom half. */
2186 case 'T' : /* Top half. */
2187 /* Output the argument to a `seth' insn (sets the Top half-word).
2188 For constants output arguments to a seth/or3 pair to set Top and
2189 Bottom halves. For symbols output arguments to a seth/add3 pair to
2190 set Top and Bottom halves. The difference exists because for
2191 constants seth/or3 is more readable but for symbols we need to use
2192 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
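/* For example, for the constant 0x12345678, `%T' prints 0x1234 and
   `%B' prints 0x5678; for a symbol SYM, `%T' prints shigh(SYM) and
   `%B' prints low(SYM), or sda(SYM) if SYM is in small data.  */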
2193 switch (GET_CODE (x))
2194 {
2195 case CONST_INT :
2196 case CONST_DOUBLE :
2197 {
2198 rtx first, second;
2199
2200 split_double (x, &first, &second);
2201 x = WORDS_BIG_ENDIAN ? second : first;
2202 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2203 (code == 'B'
2204 ? INTVAL (x) & 0xffff
2205 : (INTVAL (x) >> 16) & 0xffff));
2206 }
2207 return;
2208 case CONST :
2209 case SYMBOL_REF :
2210 if (code == 'B'
2211 && small_data_operand (x, VOIDmode))
2212 {
2213 fputs ("sda(", file);
2214 output_addr_const (file, x);
2215 fputc (')', file);
2216 return;
2217 }
2218 /* fall through */
2219 case LABEL_REF :
2220 fputs (code == 'T' ? "shigh(" : "low(", file);
2221 output_addr_const (file, x);
2222 fputc (')', file);
2223 return;
2224 default :
2225 output_operand_lossage ("invalid operand to %%T/%%B code");
2226 return;
2227 }
2228 break;
2229
2230 case 'U' :
2231 /* ??? wip */
2232 /* Output a load/store with update indicator if appropriate. */
2233 if (MEM_P (x))
2234 {
2235 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2236 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2237 fputs (".a", file);
2238 }
2239 else
2240 output_operand_lossage ("invalid operand to %%U code");
2241 return;
2242
2243 case 'N' :
2244 /* Print a constant value negated. */
2245 if (CONST_INT_P (x))
2246 output_addr_const (file, GEN_INT (- INTVAL (x)));
2247 else
2248 output_operand_lossage ("invalid operand to %%N code");
2249 return;
2250
2251 case 'X' :
2252 /* Print a const_int in hex. Used in comments. */
2253 if (CONST_INT_P (x))
2254 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2255 return;
2256
2257 case '#' :
2258 fputs (IMMEDIATE_PREFIX, file);
2259 return;
2260
2261 case 0 :
2262 /* Do nothing special. */
2263 break;
2264
2265 default :
2266 /* Unknown flag. */
2267 output_operand_lossage ("invalid operand output code");
2268 }
2269
2270 switch (GET_CODE (x))
2271 {
2272 case REG :
2273 fputs (reg_names[REGNO (x)], file);
2274 break;
2275
2276 case MEM :
2277 addr = XEXP (x, 0);
2278 if (GET_CODE (addr) == PRE_INC)
2279 {
2280 if (!REG_P (XEXP (addr, 0)))
2281 fatal_insn ("pre-increment address is not a register", x);
2282
2283 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2284 }
2285 else if (GET_CODE (addr) == PRE_DEC)
2286 {
2287 if (!REG_P (XEXP (addr, 0)))
2288 fatal_insn ("pre-decrement address is not a register", x);
2289
2290 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2291 }
2292 else if (GET_CODE (addr) == POST_INC)
2293 {
2294 if (!REG_P (XEXP (addr, 0)))
2295 fatal_insn ("post-increment address is not a register", x);
2296
2297 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2298 }
2299 else
2300 {
2301 fputs ("@(", file);
2302 output_address (XEXP (x, 0));
2303 fputc (')', file);
2304 }
2305 break;
2306
2307 case CONST_DOUBLE :
2308 /* We handle SFmode constants here as output_addr_const doesn't. */
2309 if (GET_MODE (x) == SFmode)
2310 {
2311 REAL_VALUE_TYPE d;
2312 long l;
2313
2314 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2315 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2316 fprintf (file, "0x%08lx", l);
2317 break;
2318 }
2319
2320 /* Fall through. Let output_addr_const deal with it. */
2321
2322 default :
2323 output_addr_const (file, x);
2324 break;
2325 }
2326 }
2327
2328 /* Print a memory address as an operand to reference that memory location. */
2329
2330 static void
2331 m32r_print_operand_address (FILE * file, rtx addr)
2332 {
2333 rtx base;
2334 rtx index = 0;
2335 int offset = 0;
2336
2337 switch (GET_CODE (addr))
2338 {
2339 case REG :
2340 fputs (reg_names[REGNO (addr)], file);
2341 break;
2342
2343 case PLUS :
2344 if (CONST_INT_P (XEXP (addr, 0)))
2345 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2346 else if (CONST_INT_P (XEXP (addr, 1)))
2347 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2348 else
2349 base = XEXP (addr, 0), index = XEXP (addr, 1);
2350 if (REG_P (base))
2351 {
2352 /* Print the offset first (if present) to conform to the manual. */
2353 if (index == 0)
2354 {
2355 if (offset != 0)
2356 fprintf (file, "%d,", offset);
2357 fputs (reg_names[REGNO (base)], file);
2358 }
2359 /* The chip doesn't support this, but it is left in for generality. */
2360 else if (REG_P (index))
2361 fprintf (file, "%s,%s",
2362 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2363 /* Not sure this can happen, but leave in for now. */
2364 else if (GET_CODE (index) == SYMBOL_REF)
2365 {
2366 output_addr_const (file, index);
2367 fputc (',', file);
2368 fputs (reg_names[REGNO (base)], file);
2369 }
2370 else
2371 fatal_insn ("bad address", addr);
2372 }
2373 else if (GET_CODE (base) == LO_SUM)
2374 {
2375 gcc_assert (!index && REG_P (XEXP (base, 0)));
2376 if (small_data_operand (XEXP (base, 1), VOIDmode))
2377 fputs ("sda(", file);
2378 else
2379 fputs ("low(", file);
2380 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2381 fputs ("),", file);
2382 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2383 }
2384 else
2385 fatal_insn ("bad address", addr);
2386 break;
2387
2388 case LO_SUM :
2389 if (!REG_P (XEXP (addr, 0)))
2390 fatal_insn ("lo_sum not of register", addr);
2391 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2392 fputs ("sda(", file);
2393 else
2394 fputs ("low(", file);
2395 output_addr_const (file, XEXP (addr, 1));
2396 fputs ("),", file);
2397 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2398 break;
2399
2400 case PRE_INC : /* Assume SImode. */
2401 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2402 break;
2403
2404 case PRE_DEC : /* Assume SImode. */
2405 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2406 break;
2407
2408 case POST_INC : /* Assume SImode. */
2409 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2410 break;
2411
2412 default :
2413 output_addr_const (file, addr);
2414 break;
2415 }
2416 }
2417
2418 static bool
2419 m32r_print_operand_punct_valid_p (unsigned char code)
2420 {
2421 return m32r_punct_chars[code];
2422 }
2423
2424 /* Return true if the operands are the constants 0 and 1. */
2425
2426 int
2427 zero_and_one (rtx operand1, rtx operand2)
2428 {
2429 return
2430 CONST_INT_P (operand1)
2431 && CONST_INT_P (operand2)
2432 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2433 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2434 }
2435
2436 /* Generate the correct assembler code to handle the conditional loading of a
2437 value into a register. It is known that the operands satisfy the
2438 conditional_move_operand() function above. The destination is operand[0].
2439 The condition is operand[1]. The 'true' value is operand[2] and the
2440 'false' value is operand[3]. */
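/* For example, with r4 as the destination this returns "mvfc r4, cbr",
   with "\n\txor3 r4, r4, #1" appended when the result of the move has
   to be inverted (r4 is purely illustrative here).  */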
2441
2442 char *
2443 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2444 {
2445 static char buffer [100];
2446 const char * dest = reg_names [REGNO (operands [0])];
2447
2448 buffer [0] = 0;
2449
2450 /* Destination must be a register. */
2451 gcc_assert (REG_P (operands [0]));
2452 gcc_assert (conditional_move_operand (operands [2], SImode));
2453 gcc_assert (conditional_move_operand (operands [3], SImode));
2454
2455 /* Check to see if the test is reversed. */
2456 if (GET_CODE (operands [1]) == NE)
2457 {
2458 rtx tmp = operands [2];
2459 operands [2] = operands [3];
2460 operands [3] = tmp;
2461 }
2462
2463 sprintf (buffer, "mvfc %s, cbr", dest);
2464
2465 /* If the true value was '0' then we need to invert the result of the move. */
2466 if (INTVAL (operands [2]) == 0)
2467 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2468 dest, dest);
2469
2470 return buffer;
2471 }
2472
2473 /* Returns true if the registers contained in the two
2474 rtl expressions are different. */
2475
2476 int
2477 m32r_not_same_reg (rtx a, rtx b)
2478 {
2479 int reg_a = -1;
2480 int reg_b = -2;
2481
2482 while (GET_CODE (a) == SUBREG)
2483 a = SUBREG_REG (a);
2484
2485 if (REG_P (a))
2486 reg_a = REGNO (a);
2487
2488 while (GET_CODE (b) == SUBREG)
2489 b = SUBREG_REG (b);
2490
2491 if (REG_P (b))
2492 reg_b = REGNO (b);
2493
2494 return reg_a != reg_b;
2495 }
2496
2497 \f
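/* Build a SYMBOL_REF for the library function NAME and record the
   selected code model in its SYMBOL_REF flags.  */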
2498 rtx
2499 m32r_function_symbol (const char *name)
2500 {
2501 int extra_flags = 0;
2502 enum m32r_model model;
2503 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2504
2505 if (TARGET_MODEL_SMALL)
2506 model = M32R_MODEL_SMALL;
2507 else if (TARGET_MODEL_MEDIUM)
2508 model = M32R_MODEL_MEDIUM;
2509 else if (TARGET_MODEL_LARGE)
2510 model = M32R_MODEL_LARGE;
2511 else
2512 gcc_unreachable (); /* Shouldn't happen. */
2513 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2514
2515 if (extra_flags)
2516 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2517
2518 return sym;
2519 }
2520
2521 /* Use a library function to move some bytes. */
2522
2523 static void
2524 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2525 {
2526 /* We want to pass the size as Pmode, which will normally be SImode
2527 but will be DImode if we are using 64-bit longs and pointers. */
2528 if (GET_MODE (bytes_rtx) != VOIDmode
2529 && GET_MODE (bytes_rtx) != Pmode)
2530 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2531
2532 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2533 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2534 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2535 TYPE_UNSIGNED (sizetype)),
2536 TYPE_MODE (sizetype));
2537 }
2538
2539 /* Expand string/block move operations.
2540
2541 operands[0] is the pointer to the destination.
2542 operands[1] is the pointer to the source.
2543 operands[2] is the number of bytes to move.
2544 operands[3] is the alignment.
2545
2546 Returns 1 upon success, 0 otherwise. */
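/* Strategy: when the size is a compile-time constant, the copy is word
   aligned and we are not optimizing for size, the block is copied
   inline in MAX_MOVE_BYTES chunks, with a loop if more than one chunk
   is needed; otherwise the copy is handed to memcpy via
   block_move_call.  */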
2547
2548 int
2549 m32r_expand_block_move (rtx operands[])
2550 {
2551 rtx orig_dst = operands[0];
2552 rtx orig_src = operands[1];
2553 rtx bytes_rtx = operands[2];
2554 rtx align_rtx = operands[3];
2555 int constp = CONST_INT_P (bytes_rtx);
2556 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2557 int align = INTVAL (align_rtx);
2558 int leftover;
2559 rtx src_reg;
2560 rtx dst_reg;
2561
2562 if (constp && bytes <= 0)
2563 return 1;
2564
2565 /* Move the address into scratch registers. */
2566 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2567 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2568
2569 if (align > UNITS_PER_WORD)
2570 align = UNITS_PER_WORD;
2571
2572 /* If we prefer size over speed, always use a function call.
2573 If we do not know the size, use a function call.
2574 If the blocks are not word aligned, use a function call. */
2575 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2576 {
2577 block_move_call (dst_reg, src_reg, bytes_rtx);
2578 return 0;
2579 }
2580
2581 leftover = bytes % MAX_MOVE_BYTES;
2582 bytes -= leftover;
2583
2584 /* If necessary, generate a loop to handle the bulk of the copy. */
2585 if (bytes)
2586 {
2587 rtx label = NULL_RTX;
2588 rtx final_src = NULL_RTX;
2589 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2590 rtx rounded_total = GEN_INT (bytes);
2591 rtx new_dst_reg = gen_reg_rtx (SImode);
2592 rtx new_src_reg = gen_reg_rtx (SImode);
2593
2594 /* If we are going to have to perform this loop more than
2595 once, then generate a label and compute the address the
2596 source register will contain upon completion of the final
2597 iteration. */
2598 if (bytes > MAX_MOVE_BYTES)
2599 {
2600 final_src = gen_reg_rtx (Pmode);
2601
2602 if (INT16_P (bytes))
2603 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2604 else
2605 {
2606 emit_insn (gen_movsi (final_src, rounded_total));
2607 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2608 }
2609
2610 label = gen_label_rtx ();
2611 emit_label (label);
2612 }
2613
2614 /* It is known that m32r_output_block_move() will update src_reg to point
2615 to the word after the end of the source block, and dst_reg to point
2616 to the last word of the destination block, provided that the block
2617 is MAX_MOVE_BYTES long. */
2618 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2619 new_dst_reg, new_src_reg));
2620 emit_move_insn (dst_reg, new_dst_reg);
2621 emit_move_insn (src_reg, new_src_reg);
2622 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2623
2624 if (bytes > MAX_MOVE_BYTES)
2625 {
2626 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2627 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2628 }
2629 }
2630
2631 if (leftover)
2632 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2633 gen_reg_rtx (SImode),
2634 gen_reg_rtx (SImode)));
2635 return 1;
2636 }
2637
2638 \f
2639 /* Emit load/stores for a small constant word aligned block_move.
2640
2641 operands[0] is the memory address of the destination.
2642 operands[1] is the memory address of the source.
2643 operands[2] is the number of bytes to move.
2644 operands[3] and operands[4] receive the updated destination and source addresses.
2645 operands[5] and operands[6] are temp registers. */
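/* For illustration, an aligned 8 byte copy emits a sequence of the
   form

	ld	r5, @r1+
	ld	r6, @r1+
	st	r5, @r2
	st	r6, @+r2

   where r1/r2 are the source/destination pointers and r5/r6 the temp
   registers; the register numbers are only illustrative.  */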
2646
2647 void
2648 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2649 {
2650 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2651 int first_time;
2652 int got_extra = 0;
2653
2654 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2655
2656 /* We do not have a post-increment store available, so the first set of
2657 stores is done without any increment; the remaining ones can then use
2658 the pre-increment addressing mode.
2659
2660 Note: m32r_expand_block_move() also relies upon this behavior when
2661 building loops to copy large blocks. */
2662 first_time = 1;
2663
2664 while (bytes > 0)
2665 {
2666 if (bytes >= 8)
2667 {
2668 if (first_time)
2669 {
2670 output_asm_insn ("ld\t%5, %p1", operands);
2671 output_asm_insn ("ld\t%6, %p1", operands);
2672 output_asm_insn ("st\t%5, @%0", operands);
2673 output_asm_insn ("st\t%6, %s0", operands);
2674 }
2675 else
2676 {
2677 output_asm_insn ("ld\t%5, %p1", operands);
2678 output_asm_insn ("ld\t%6, %p1", operands);
2679 output_asm_insn ("st\t%5, %s0", operands);
2680 output_asm_insn ("st\t%6, %s0", operands);
2681 }
2682
2683 bytes -= 8;
2684 }
2685 else if (bytes >= 4)
2686 {
2687 if (bytes > 4)
2688 got_extra = 1;
2689
2690 output_asm_insn ("ld\t%5, %p1", operands);
2691
2692 if (got_extra)
2693 output_asm_insn ("ld\t%6, %p1", operands);
2694
2695 if (first_time)
2696 output_asm_insn ("st\t%5, @%0", operands);
2697 else
2698 output_asm_insn ("st\t%5, %s0", operands);
2699
2700 bytes -= 4;
2701 }
2702 else
2703 {
2704 /* Get the entire next word, even though we do not want all of it.
2705 This saves us from doing several smaller loads, and we assume that
2706 we cannot cause a page fault when at least part of the word is in
2707 valid memory [since we don't get called if things aren't properly
2708 aligned]. */
2709 int dst_offset = first_time ? 0 : 4;
2710 /* The amount of increment we have to make to the
2711 destination pointer. */
2712 int dst_inc_amount = dst_offset + bytes - 4;
2713 /* The same for the source pointer. */
2714 int src_inc_amount = bytes;
2715 int last_shift;
2716 rtx my_operands[3];
2717
2718 /* If got_extra is true then we have already loaded
2719 the next word as part of loading and storing the previous word. */
2720 if (! got_extra)
2721 output_asm_insn ("ld\t%6, @%1", operands);
2722
2723 if (bytes >= 2)
2724 {
2725 bytes -= 2;
2726
2727 output_asm_insn ("sra3\t%5, %6, #16", operands);
2728 my_operands[0] = operands[5];
2729 my_operands[1] = GEN_INT (dst_offset);
2730 my_operands[2] = operands[0];
2731 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2732
2733 /* If there is a byte left to store then increment the
2734 destination address and shift the contents of the source
2735 register down by 8 bits. We could not do the address
2736 increment in the store half word instruction, because it does
2737 not have an auto increment mode. */
2738 if (bytes > 0) /* assert (bytes == 1) */
2739 {
2740 dst_offset += 2;
2741 last_shift = 8;
2742 }
2743 }
2744 else
2745 last_shift = 24;
2746
2747 if (bytes > 0)
2748 {
2749 my_operands[0] = operands[6];
2750 my_operands[1] = GEN_INT (last_shift);
2751 output_asm_insn ("srai\t%0, #%1", my_operands);
2752 my_operands[0] = operands[6];
2753 my_operands[1] = GEN_INT (dst_offset);
2754 my_operands[2] = operands[0];
2755 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2756 }
2757
2758 /* Update the destination pointer if needed. We have to do
2759 this so that the pattern matches what we output in this
2760 function. */
2761 if (dst_inc_amount
2762 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2763 {
2764 my_operands[0] = operands[0];
2765 my_operands[1] = GEN_INT (dst_inc_amount);
2766 output_asm_insn ("addi\t%0, #%1", my_operands);
2767 }
2768
2769 /* Update the source pointer if needed. We have to do this
2770 so that the pattern matches what we output in this
2771 function. */
2772 if (src_inc_amount
2773 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2774 {
2775 my_operands[0] = operands[1];
2776 my_operands[1] = GEN_INT (src_inc_amount);
2777 output_asm_insn ("addi\t%0, #%1", my_operands);
2778 }
2779
2780 bytes = 0;
2781 }
2782
2783 first_time = 0;
2784 }
2785 }
2786
2787 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2788
2789 int
2790 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2791 unsigned int new_reg)
2792 {
2793 /* Interrupt routines can't clobber any register that isn't already used. */
2794 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2795 && !df_regs_ever_live_p (new_reg))
2796 return 0;
2797
2798 return 1;
2799 }
2800
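/* Return the RTL for the return address of the frame COUNT levels up
   from the current one.  Only the current frame (COUNT == 0) is
   supported; its return address is the value the link register held on
   entry, obtained via get_hard_reg_initial_val.  */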
2801 rtx
2802 m32r_return_addr (int count)
2803 {
2804 if (count != 0)
2805 return const0_rtx;
2806
2807 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2808 }
2809
2810 static void
2811 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2812 {
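/* The first four words written below are the trampoline's fixed
   instruction template, with the encoding chosen to match the target
   endianness; the last two words are the variable parts: the static
   chain value and the address of the nested function.  */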
2813 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2814 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2815 0x017e8e17 : 0x178e7e01, SImode));
2816 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2817 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2818 0x0c00ae86 : 0x86ae000c, SImode));
2819 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2820 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2821 0xe627871e : 0x1e8727e6, SImode));
2822 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2823 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2824 0xc616c626 : 0x26c61fc6, SImode));
2825 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2826 chain_value);
2827 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2828 XEXP (DECL_RTL (fndecl), 0));
2829
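/* The trampoline is executed as code, so flush the instruction cache
   over it: via the cache-flush trap if one is configured, otherwise
   through the user-supplied cache flush function, if any.  */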
2830 if (m32r_cache_flush_trap >= 0)
2831 emit_insn (gen_flush_icache
2832 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2833 gen_int_mode (m32r_cache_flush_trap, SImode)));
2834 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2835 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2836 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2837 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2838 GEN_INT (3), SImode);
2839 }