/* gcc/config/m32r/m32r.c -- mirrored from the GCC repository.  */
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "ggc.h"
39 #include "integrate.h"
40 #include "df.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "tm-constrs.h"
45 #include "opts.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Machine-specific symbol_ref flags. */
51 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
52 #define SYMBOL_REF_MODEL(X) \
53 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
54
55 /* For string literals, etc. */
56 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
57
58 /* Forward declaration. */
59 static bool m32r_handle_option (struct gcc_options *, struct gcc_options *,
60 const struct cl_decoded_option *, location_t);
61 static void m32r_option_override (void);
62 static void init_reg_tables (void);
63 static void block_move_call (rtx, rtx, rtx);
64 static int m32r_is_insn (rtx);
65 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
66 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
67 static bool m32r_mode_dependent_address_p (const_rtx);
68 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
69 static void m32r_print_operand (FILE *, rtx, int);
70 static void m32r_print_operand_address (FILE *, rtx);
71 static bool m32r_print_operand_punct_valid_p (unsigned char code);
72 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
73 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
74
75 static void m32r_file_start (void);
76
77 static int m32r_adjust_priority (rtx, int);
78 static int m32r_issue_rate (void);
79
80 static void m32r_encode_section_info (tree, rtx, int);
81 static bool m32r_in_small_data_p (const_tree);
82 static bool m32r_return_in_memory (const_tree, const_tree);
83 static rtx m32r_function_value (const_tree, const_tree, bool);
84 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
85 static bool m32r_function_value_regno_p (const unsigned int);
86 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
87 tree, int *, int);
88 static void init_idents (void);
89 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
90 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
91 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
92 const_tree, bool);
93 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
94 tree, bool);
95 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
98 const_tree, bool);
99 static bool m32r_can_eliminate (const int, const int);
100 static void m32r_conditional_register_usage (void);
101 static void m32r_trampoline_init (rtx, tree, rtx);
102 \f
/* M32R specific attributes.

   "interrupt": no arguments, must appear on a declaration.
   "model": exactly one argument (small/medium/large identifier, validated
   by m32r_handle_model_attribute), must appear on a declaration.  */

static const struct attribute_spec m32r_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt", 0, 0, true, false, false, NULL, false },
  { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
    false },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL, false }
};
114
/* Per-optimization-level option defaults: -O1 and above turn on
   -fomit-frame-pointer and -fregmove for this target.  */
static const struct default_options m32r_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_1_PLUS, OPT_fregmove, NULL, 1 },
    /* Sentinel.  */
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
121 \f
122 /* Initialize the GCC target structure. */
123 #undef TARGET_ATTRIBUTE_TABLE
124 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
125
126 #undef TARGET_LEGITIMATE_ADDRESS_P
127 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
128 #undef TARGET_LEGITIMIZE_ADDRESS
129 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
130 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
131 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
132
133 #undef TARGET_ASM_ALIGNED_HI_OP
134 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
135 #undef TARGET_ASM_ALIGNED_SI_OP
136 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
137
138 #undef TARGET_PRINT_OPERAND
139 #define TARGET_PRINT_OPERAND m32r_print_operand
140 #undef TARGET_PRINT_OPERAND_ADDRESS
141 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
142 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
143 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
144
145 #undef TARGET_ASM_FUNCTION_PROLOGUE
146 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
147 #undef TARGET_ASM_FUNCTION_EPILOGUE
148 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
149
150 #undef TARGET_ASM_FILE_START
151 #define TARGET_ASM_FILE_START m32r_file_start
152
153 #undef TARGET_SCHED_ADJUST_PRIORITY
154 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
155 #undef TARGET_SCHED_ISSUE_RATE
156 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
157
158 #undef TARGET_DEFAULT_TARGET_FLAGS
159 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
160 #undef TARGET_HANDLE_OPTION
161 #define TARGET_HANDLE_OPTION m32r_handle_option
162 #undef TARGET_OPTION_OVERRIDE
163 #define TARGET_OPTION_OVERRIDE m32r_option_override
164 #undef TARGET_OPTION_OPTIMIZATION_TABLE
165 #define TARGET_OPTION_OPTIMIZATION_TABLE m32r_option_optimization_table
166
167 #undef TARGET_ENCODE_SECTION_INFO
168 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
169 #undef TARGET_IN_SMALL_DATA_P
170 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
171
172
173 #undef TARGET_MEMORY_MOVE_COST
174 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
175 #undef TARGET_RTX_COSTS
176 #define TARGET_RTX_COSTS m32r_rtx_costs
177 #undef TARGET_ADDRESS_COST
178 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
179
180 #undef TARGET_PROMOTE_PROTOTYPES
181 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
182 #undef TARGET_RETURN_IN_MEMORY
183 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
184
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE m32r_function_value
187 #undef TARGET_LIBCALL_VALUE
188 #define TARGET_LIBCALL_VALUE m32r_libcall_value
189 #undef TARGET_FUNCTION_VALUE_REGNO_P
190 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
191
192 #undef TARGET_SETUP_INCOMING_VARARGS
193 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
194 #undef TARGET_MUST_PASS_IN_STACK
195 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
196 #undef TARGET_PASS_BY_REFERENCE
197 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
198 #undef TARGET_ARG_PARTIAL_BYTES
199 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
200 #undef TARGET_FUNCTION_ARG
201 #define TARGET_FUNCTION_ARG m32r_function_arg
202 #undef TARGET_FUNCTION_ARG_ADVANCE
203 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
204
205 #undef TARGET_CAN_ELIMINATE
206 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
207
208 #undef TARGET_CONDITIONAL_REGISTER_USAGE
209 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
210
211 #undef TARGET_TRAMPOLINE_INIT
212 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
213
214 #undef TARGET_EXCEPT_UNWIND_INFO
215 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
216
217 struct gcc_target targetm = TARGET_INITIALIZER;
218 \f
/* Implement TARGET_HANDLE_OPTION.

   Handle the target-specific command-line options this backend cares
   about; return true to accept the option, false to reject it.  */

static bool
m32r_handle_option (struct gcc_options *opts,
		    struct gcc_options *opts_set ATTRIBUTE_UNUSED,
		    const struct cl_decoded_option *decoded,
		    location_t loc ATTRIBUTE_UNUSED)
{
  size_t code = decoded->opt_index;
  int value = decoded->value;

  switch (code)
    {
    case OPT_m32r:
      /* Plain -m32r deselects the M32RX/M32R2 ISA extensions.  */
      opts->x_target_flags &= ~(MASK_M32R2 | MASK_M32RX);
      return true;

    case OPT_mno_flush_func:
      /* No cache-flush helper function will be called.  */
      opts->x_m32r_cache_flush_func = NULL;
      return true;

    case OPT_mflush_trap_:
      /* Only trap numbers 0..15 are valid; reject anything larger.  */
      return value <= 15;

    default:
      /* Every other option is accepted unchanged.  */
      return true;
    }
}
247
/* Called by m32r_option_override to initialize various things.
   Sets up the mode/register class tables, the operand-punctuation
   table, and the default -G (small-data) threshold.  */

void
m32r_init (void)
{
  init_reg_tables ();

  /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
  m32r_punct_chars['#'] = 1;
  m32r_punct_chars['@'] = 1; /* ??? no longer used */

  /* Provide default value if not specified.  */
  if (!global_options_set.x_g_switch_value)
    g_switch_value = SDATA_DEFAULT_SIZE;
}
264
/* Implement TARGET_OPTION_OVERRIDE.  Runs once after option parsing.  */
static void
m32r_option_override (void)
{
  /* These need to be done at start up.
     It's convenient to do them here.  */
  m32r_init ();
  /* Hook for OS/subtarget-specific option adjustments (see m32r.h).  */
  SUBTARGET_OVERRIDE_OPTIONS;
}
273
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,				/* condition-code modes */
  S_MODE, D_MODE, T_MODE, O_MODE,	/* 1/2/4/8-word integer modes */
  SF_MODE, DF_MODE, TF_MODE, OF_MODE,	/* float modes by size */
  A_MODE				/* accumulator */
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)
/* Value is 1 if register/mode pair is acceptable on the M32R.
   (The old comment said "arc": this file was derived from the ARC port.)  */

const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
{
  /* Regs 0-12 accept modes up to quad-word; 13-16 single-word only;
     17 condition codes; 18-19 accumulator modes.  */
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, C_MODES, A_MODES, A_MODES
};

/* Map from machine mode to m32r_mode_class bit; filled by init_reg_tables.  */
unsigned int m32r_mode_class [NUM_MACHINE_MODES];

/* Register class of each hard register; filled by init_reg_tables.  */
enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
317
/* Fill in m32r_mode_class (mode -> m32r_mode_class bit, keyed by the
   mode's class and byte size) and m32r_regno_reg_class (hard register
   -> register class).  Called once from m32r_init.  */
static void
init_reg_tables (void)
{
  int i;

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_INT:
	case MODE_PARTIAL_INT:
	case MODE_COMPLEX_INT:
	  /* Bucket integer modes by size: <=4, 8, 16 or 32 bytes.  */
	  if (GET_MODE_SIZE (i) <= 4)
	    m32r_mode_class[i] = 1 << (int) S_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    m32r_mode_class[i] = 1 << (int) D_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    m32r_mode_class[i] = 1 << (int) T_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    m32r_mode_class[i] = 1 << (int) O_MODE;
	  else
	    m32r_mode_class[i] = 0;
	  break;
	case MODE_FLOAT:
	case MODE_COMPLEX_FLOAT:
	  /* Same bucketing for float modes, using the *F classes.  */
	  if (GET_MODE_SIZE (i) <= 4)
	    m32r_mode_class[i] = 1 << (int) SF_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    m32r_mode_class[i] = 1 << (int) DF_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    m32r_mode_class[i] = 1 << (int) TF_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    m32r_mode_class[i] = 1 << (int) OF_MODE;
	  else
	    m32r_mode_class[i] = 0;
	  break;
	case MODE_CC:
	  m32r_mode_class[i] = 1 << (int) C_MODE;
	  break;
	default:
	  /* Any other mode class (vectors, etc.) is unsupported here.  */
	  m32r_mode_class[i] = 0;
	  break;
	}
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* GPRs and the (eliminable) arg pointer are GENERAL_REGS;
	 everything else gets NO_REGS.  */
      if (GPR_P (i))
	m32r_regno_reg_class[i] = GENERAL_REGS;
      else if (i == ARG_POINTER_REGNUM)
	m32r_regno_reg_class[i] = GENERAL_REGS;
      else
	m32r_regno_reg_class[i] = NO_REGS;
    }
}
373 \f
374 /* M32R specific attribute support.
375
376 interrupt - for interrupt functions
377
378 model - select code model used to access object
379
380 small: addresses use 24 bits, use bl to make calls
381 medium: addresses use 32 bits, use bl to make calls
382 large: addresses use 32 bits, use seth/add3/jl to make calls
383
384 Grep for MODEL in m32r.h for more info. */
385
386 static tree small_ident1;
387 static tree small_ident2;
388 static tree medium_ident1;
389 static tree medium_ident2;
390 static tree large_ident1;
391 static tree large_ident2;
392
393 static void
394 init_idents (void)
395 {
396 if (small_ident1 == 0)
397 {
398 small_ident1 = get_identifier ("small");
399 small_ident2 = get_identifier ("__small__");
400 medium_ident1 = get_identifier ("medium");
401 medium_ident2 = get_identifier ("__medium__");
402 large_ident1 = get_identifier ("large");
403 large_ident2 = get_identifier ("__large__");
404 }
405 }
406
407 /* Handle an "model" attribute; arguments as in
408 struct attribute_spec.handler. */
409 static tree
410 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
411 tree args, int flags ATTRIBUTE_UNUSED,
412 bool *no_add_attrs)
413 {
414 tree arg;
415
416 init_idents ();
417 arg = TREE_VALUE (args);
418
419 if (arg != small_ident1
420 && arg != small_ident2
421 && arg != medium_ident1
422 && arg != medium_ident2
423 && arg != large_ident1
424 && arg != large_ident2)
425 {
426 warning (OPT_Wattributes, "invalid argument of %qs attribute",
427 IDENTIFIER_POINTER (name));
428 *no_add_attrs = true;
429 }
430
431 return NULL_TREE;
432 }
433 \f
/* Encode section information of DECL, which is either a VAR_DECL,
   FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.

   For the M32R we want to record:

   - whether the object lives in .sdata/.sbss.
   - what code model should be used to access the object

   The code model (small/medium/large) is stored in the two
   SYMBOL_FLAG_MODEL bits of the symbol's SYMBOL_REF_FLAGS; it comes
   either from a "model" attribute or from the -mmodel= default.  */

static void
m32r_encode_section_info (tree decl, rtx rtl, int first)
{
  int extra_flags = 0;
  tree model_attr;
  enum m32r_model model;

  /* Let the generic code set SYMBOL_FLAG_SMALL etc. first.  */
  default_encode_section_info (decl, rtl, first);

  if (!DECL_P (decl))
    return;

  model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
  if (model_attr)
    {
      tree id;

      init_idents ();

      /* The argument was already validated by
	 m32r_handle_model_attribute, so it must be one of the six
	 known identifiers.  */
      id = TREE_VALUE (TREE_VALUE (model_attr));

      if (id == small_ident1 || id == small_ident2)
	model = M32R_MODEL_SMALL;
      else if (id == medium_ident1 || id == medium_ident2)
	model = M32R_MODEL_MEDIUM;
      else if (id == large_ident1 || id == large_ident2)
	model = M32R_MODEL_LARGE;
      else
	gcc_unreachable (); /* shouldn't happen */
    }
  else
    {
      /* No attribute: fall back to the -mmodel= command-line default.  */
      if (TARGET_MODEL_SMALL)
	model = M32R_MODEL_SMALL;
      else if (TARGET_MODEL_MEDIUM)
	model = M32R_MODEL_MEDIUM;
      else if (TARGET_MODEL_LARGE)
	model = M32R_MODEL_LARGE;
      else
	gcc_unreachable (); /* shouldn't happen */
    }
  extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;

  if (extra_flags)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
}
489
/* Only mark the object as being small data area addressable if
   it hasn't been explicitly marked with a code model.

   The user can explicitly put an object in the small data area with the
   section attribute.  If the object is in sdata/sbss and marked with a
   code model do both [put the object in .sdata and mark it as being
   addressed with a specific code model - don't mark it as being addressed
   with an SDA reloc though].  This is ok and might be useful at times.  If
   the object doesn't fit the linker will give an error.  */

static bool
m32r_in_small_data_p (const_tree decl)
{
  const_tree section;

  /* Only variables can go in small data.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return false;

  /* An explicit code model overrides SDA placement (see above).  */
  if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
    return false;

  section = DECL_SECTION_NAME (decl);
  if (section)
    {
      /* Honor an explicit section attribute naming .sdata/.sbss.  */
      const char *const name = TREE_STRING_POINTER (section);
      if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
	return true;
    }
  else
    {
      /* Otherwise: writable data small enough for the -G threshold,
	 unless small data is disabled entirely.  */
      if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));

	  /* size <= 0 means variable-sized or error; exclude it.  */
	  if (size > 0 && size <= g_switch_value)
	    return true;
	}
    }

  return false;
}
531
/* Hook called before RTL is emitted for each function.  Currently a
   deliberate no-op, kept so per-function setup can be added easily.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
540 \f
541 int
542 call_operand (rtx op, enum machine_mode mode)
543 {
544 if (!MEM_P (op))
545 return 0;
546 op = XEXP (op, 0);
547 return call_address_operand (op, mode);
548 }
549
/* Return 1 if OP is a reference to an object in .sdata/.sbss.
   Accepts a bare SYMBOL_REF marked small, or (const (plus sym J-const))
   where J is the small-offset constraint.  */

int
small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Small-data addressing must be enabled (-msdata=sdata/use).  */
  if (! TARGET_SDATA_USE)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  /* symbol+offset, with the offset small enough (constraint J).  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
      && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
    return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));

  return 0;
}
569
/* Return 1 if OP is a symbol that can use 24-bit addressing.
   OP may be a LABEL_REF, SYMBOL_REF, or (const (plus sym M-const)).  */

int
addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx sym;

  /* PIC addresses are never 24-bit absolute.  */
  if (flag_pic)
    return 0;

  if (GET_CODE (op) == LABEL_REF)
    return TARGET_ADDR24;

  if (GET_CODE (op) == SYMBOL_REF)
    sym = op;
  else if (GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
    sym = XEXP (XEXP (op, 0), 0);
  else
    return 0;

  /* Small-model symbols always fit in 24 bits.  */
  if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
    return 1;

  /* Under -maddr24, constant-pool entries and literal names
     (see LIT_NAME_P) are also reachable.  */
  if (TARGET_ADDR24
      && (CONSTANT_POOL_ADDRESS_P (sym)
	  || LIT_NAME_P (XSTR (sym, 0))))
    return 1;

  return 0;
}
603
/* Return 1 if OP is a symbol that needs 32-bit addressing, i.e. is
   neither 24-bit addressable nor a small-data reference.  */

int
addr32_operand (rtx op, enum machine_mode mode)
{
  rtx sym;

  if (GET_CODE (op) == LABEL_REF)
    return TARGET_ADDR32;

  if (GET_CODE (op) == SYMBOL_REF)
    sym = op;
  else if (GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && CONST_INT_P (XEXP (XEXP (op, 0), 1))
	   && ! flag_pic)
    sym = XEXP (XEXP (op, 0), 0);
  else
    return 0;

  /* 32-bit addressing is the fallback for everything the shorter
     forms cannot reach.  */
  return (! addr24_operand (sym, mode)
	  && ! small_data_operand (sym, mode));
}
628
629 /* Return 1 if OP is a function that can be called with the `bl' insn. */
630
631 int
632 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
633 {
634 if (flag_pic)
635 return 1;
636
637 if (GET_CODE (op) == SYMBOL_REF)
638 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
639
640 return TARGET_CALL26;
641 }
642
643 /* Return 1 if OP is a DImode const we want to handle inline.
644 This must match the code in the movdi pattern.
645 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
646
647 int
648 easy_di_const (rtx op)
649 {
650 rtx high_rtx, low_rtx;
651 HOST_WIDE_INT high, low;
652
653 split_double (op, &high_rtx, &low_rtx);
654 high = INTVAL (high_rtx);
655 low = INTVAL (low_rtx);
656 /* Pick constants loadable with 2 16-bit `ldi' insns. */
657 if (high >= -128 && high <= 127
658 && low >= -128 && low <= 127)
659 return 1;
660 return 0;
661 }
662
663 /* Return 1 if OP is a DFmode const we want to handle inline.
664 This must match the code in the movdf pattern.
665 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
666
667 int
668 easy_df_const (rtx op)
669 {
670 REAL_VALUE_TYPE r;
671 long l[2];
672
673 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
674 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
675 if (l[0] == 0 && l[1] == 0)
676 return 1;
677 if ((l[0] & 0xffff) == 0 && l[1] == 0)
678 return 1;
679 return 0;
680 }
681
682 /* Return 1 if OP is (mem (reg ...)).
683 This is used in insn length calcs. */
684
685 int
686 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
687 {
688 return MEM_P (op) && REG_P (XEXP (op, 0));
689 }
690
691 /* Return nonzero if TYPE must be passed by indirect reference. */
692
693 static bool
694 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
695 enum machine_mode mode, const_tree type,
696 bool named ATTRIBUTE_UNUSED)
697 {
698 int size;
699
700 if (type)
701 size = int_size_in_bytes (type);
702 else
703 size = GET_MODE_SIZE (mode);
704
705 return (size < 0 || size > 8);
706 }
707 \f
/* Comparisons.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for compare [arg0 of the if_then_else].
   If need_compare is true then the comparison insn must be generated, rather
   than being subsumed into the following branch instruction.

   The M32R only has EQ, LT and LTU compare insns (which set the condition
   bit), so LE/GT and LEU/GTU are handled by swapping the operands
   (must_swap) and/or inverting the branch condition (branch_code).  */

rtx
gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
{
  enum rtx_code compare_code;
  enum rtx_code branch_code;
  rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
  int must_swap = 0;

  /* Reduce CODE to a hardware compare (compare_code), the condition to
     branch on afterwards (branch_code), and whether x/y must swap.  */
  switch (code)
    {
    case EQ: compare_code = EQ; branch_code = NE; break;
    case NE: compare_code = EQ; branch_code = EQ; break;
    case LT: compare_code = LT; branch_code = NE; break;
    case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
    case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
    case GE: compare_code = LT; branch_code = EQ; break;
    case LTU: compare_code = LTU; branch_code = NE; break;
    case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
    case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
    case GEU: compare_code = LTU; branch_code = EQ; break;

    default:
      gcc_unreachable ();
    }

  if (need_compare)
    {
      /* Emit an explicit compare insn now; each case tries to return a
	 (CODE cc_reg 0) rtx and falls through to the common code below
	 only when the operands do not fit the special forms.  */
      switch (compare_code)
	{
	case EQ:
	  if (satisfies_constraint_P (y) /* Reg equal to small const.  */
	      && y != const0_rtx)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      /* Turn x == y into (x - y) == 0 via an add of -y.  */
	      emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	      x = tmp;
	      y = const0_rtx;
	    }
	  else if (CONSTANT_P (y)) /* Reg equal to const.  */
	    {
	      rtx tmp = force_reg (GET_MODE (x), y);
	      y = tmp;
	    }

	  if (register_operand (y, SImode) /* Reg equal to reg.  */
	      || y == const0_rtx) /* Reg equal to zero.  */
	    {
	      emit_insn (gen_cmp_eqsi_insn (x, y));

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LT:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg.  */

	      /* LE/GT are rewritten as LT against y-1 / y+1 so only the
		 cmp (LT) insn is needed; CODE becomes the condition to
		 test on the condition bit afterwards.  */
	      switch (code)
		{
		case LT:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = EQ;
		  break;
		case LE:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = EQ;
		  break;
		case GT:
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = NE;
		  break;
		case GE:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LTU:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg.  */

	      /* Unsigned analogue of the LT case above.  */
	      switch (code)
		{
		case LTU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = EQ;
		  break;
		case LEU:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = EQ;
		  break;
		case GTU:
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = NE;
		  break;
		case GEU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* The compare may be folded into the branch; return a direct
	 (CODE x y) comparison when the operands fit.  */

      /* Reg/reg equal comparison.  */
      if (compare_code == EQ
	  && register_operand (y, SImode))
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/zero signed comparison.  */
      if ((compare_code == EQ || compare_code == LT)
	  && y == const0_rtx)
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/smallconst equal comparison.  */
      if (compare_code == EQ
	  && satisfies_constraint_P (y))
	{
	  rtx tmp = gen_reg_rtx (SImode);

	  emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	  return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
	}

      /* Reg/const equal comparison.  */
      if (compare_code == EQ
	  && CONSTANT_P (y))
	{
	  rtx tmp = force_reg (GET_MODE (x), y);

	  return gen_rtx_fmt_ee (code, CCmode, x, tmp);
	}
    }

  /* Fallback: legitimize y, emit the hardware compare (possibly with
     swapped operands), and return a test on the condition bit.  */
  if (CONSTANT_P (y))
    {
      if (must_swap)
	y = force_reg (GET_MODE (x), y);
      else
	{
	  int ok_const = reg_or_int16_operand (y, GET_MODE (y));

	  if (! ok_const)
	    y = force_reg (GET_MODE (x), y);
	}
    }

  switch (compare_code)
    {
    case EQ :
      emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LT :
      emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LTU :
      emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
}
916
/* Expand a conditional store OP0 = (OP1 CODE OP2) for SImode operands.
   Return true if an insn sequence was emitted, false if the expander
   should fall back to the generic code.  GE/GT/GEU/GTU are reduced to
   the LE/LT/LEU/LTU insns by swapping or adjusting operands.  */

bool
gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
{
  enum machine_mode mode = GET_MODE (op0);

  gcc_assert (mode == SImode);
  switch (code)
    {
    case EQ:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      /* M32RX/M32R2 have a direct seq insn taking reg-or-zero.  */
      if (TARGET_M32RX || TARGET_M32R2)
	{
	  if (!reg_or_zero_operand (op2, mode))
	    op2 = force_reg (mode, op2);

	  emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
	  return true;
	}
      /* Base M32R: special-case comparison against zero.  */
      if (CONST_INT_P (op2) && INTVAL (op2) == 0)
	{
	  emit_insn (gen_seq_zero_insn (op0, op1));
	  return true;
	}

      if (!reg_or_eq_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_seq_insn (op0, op1, op2));
      return true;

    case NE:
      /* x != y is computed as (x ^ y) != 0; needs a fresh pseudo, so
	 bail out during/after reload and for non-K constants.  */
      if (!CONST_INT_P (op2)
	  || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
	{
	  rtx reg;

	  if (reload_completed || reload_in_progress)
	    return false;

	  reg = gen_reg_rtx (SImode);
	  emit_insn (gen_xorsi3 (reg, op1, op2));
	  op1 = reg;

	  if (!register_operand (op1, mode))
	    op1 = force_reg (mode, op1);

	  emit_insn (gen_sne_zero_insn (op0, op1));
	  return true;
	}
      return false;

    case LT:
    case GT:
      /* x > y  ==  y < x.  */
      if (code == GT)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LT;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_slt_insn (op0, op1, op2));
      return true;

    case LTU:
    case GTU:
      /* Unsigned analogue of the LT/GT case.  */
      if (code == GTU)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LTU;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_sltu_insn (op0, op1, op2));
      return true;

    case GE:
    case GEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == GE)
	emit_insn (gen_sge_insn (op0, op1, op2));
      else
	emit_insn (gen_sgeu_insn (op0, op1, op2));
      return true;

    case LE:
    case LEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      /* For a constant op2, rewrite x <= c as x < c+1.  */
      if (CONST_INT_P (op2))
	{
	  HOST_WIDE_INT value = INTVAL (op2);
	  /* x <= INT_MAX is always true (signed); c+1 would overflow.
	     NOTE(review): for LEU an op2 of -1 (0xffffffff) skips this
	     branch and emits x <u 0, which is never true even though
	     x <=u 0xffffffff always is; presumably such comparisons are
	     folded before expansion reaches here -- verify.  */
	  if (value >= 2147483647)
	    {
	      emit_move_insn (op0, const1_rtx);
	      return true;
	    }

	  op2 = GEN_INT (value + 1);
	  /* c+1 outside the 16-bit immediate range must be in a reg.  */
	  if (value < -32768 || value >= 32767)
	    op2 = force_reg (mode, op2);

	  if (code == LEU)
	    emit_insn (gen_sltu_insn (op0, op1, op2));
	  else
	    emit_insn (gen_slt_insn (op0, op1, op2));
	  return true;
	}

      if (!register_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == LEU)
	emit_insn (gen_sleu_insn (op0, op1, op2));
      else
	emit_insn (gen_sle_insn (op0, op1, op2));
      return true;

    default:
      gcc_unreachable ();
    }
}
1060
1061 \f
/* Split a 2 word move (DI or DF) into component parts.
   Returns the emitted insn sequence (two single-word SETs), taking care
   to order the word moves so neither clobbers data the other still
   needs (overlapping registers, or an address register being loaded).  */

rtx
gen_split_move_double (rtx operands[])
{
  enum machine_mode mode = GET_MODE (operands[0]);
  rtx dest = operands[0];
  rtx src  = operands[1];
  rtx val;

  /* We might have (SUBREG (MEM)) here, so just get rid of the
     subregs to make this code simpler.  It is safe to call
     alter_subreg any time after reload.  */
  if (GET_CODE (dest) == SUBREG)
    alter_subreg (&dest);
  if (GET_CODE (src) == SUBREG)
    alter_subreg (&src);

  start_sequence ();
  if (REG_P (dest))
    {
      int dregno = REGNO (dest);

      /* Reg = reg.  */
      if (REG_P (src))
	{
	  int sregno = REGNO (src);

	  int reverse = (dregno == sregno + 1);

	  /* We normally copy the low-numbered register first.  However, if
	     the first register operand 0 is the same as the second register of
	     operand 1, we must copy in the opposite order.  */
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, reverse, TRUE, mode),
				  operand_subword (src,  reverse, TRUE, mode)));

	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, !reverse, TRUE, mode),
				  operand_subword (src,  !reverse, TRUE, mode)));
	}

      /* Reg = constant.  */
      else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
	{
	  rtx words[2];
	  /* Load each 32-bit half of the constant separately.  */
	  split_double (src, &words[0], &words[1]);
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, 0, TRUE, mode),
				  words[0]));

	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, 1, TRUE, mode),
				  words[1]));
	}

      /* Reg = mem.  */
      else if (MEM_P (src))
	{
	  /* If the high-address word is used in the address, we must load it
	     last.  Otherwise, load it first.  */
	  int reverse
	    = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);

	  /* We used to optimize loads from single registers as

		ld r1,r3+; ld r2,r3

	     if r3 were not used subsequently.  However, the REG_NOTES aren't
	     propagated correctly by the reload phase, and it can cause bad
	     code to be generated.  We could still try:

		ld r1,r3+; ld r2,r3; addi r3,-4

	     which saves 2 bytes and doesn't force longword alignment.  */
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  reverse * UNITS_PER_WORD)));

	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, !reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  !reverse * UNITS_PER_WORD)));
	}
      else
	gcc_unreachable ();
    }

  /* Mem = reg.  */
  /* We used to optimize loads from single registers as

	st r1,r3; st r2,+r3

     if r3 were not used subsequently.  However, the REG_NOTES aren't
     propagated correctly by the reload phase, and it can cause bad
     code to be generated.  We could still try:

	st r1,r3; st r2,+r3; addi r3,-4

     which saves 2 bytes and doesn't force longword alignment.  */
  else if (MEM_P (dest) && REG_P (src))
    {
      emit_insn (gen_rtx_SET (VOIDmode,
			      adjust_address (dest, SImode, 0),
			      operand_subword (src, 0, TRUE, mode)));

      emit_insn (gen_rtx_SET (VOIDmode,
			      adjust_address (dest, SImode, UNITS_PER_WORD),
			      operand_subword (src, 1, TRUE, mode)));
    }

  else
    gcc_unreachable ();

  /* Collect the sequence built above and hand it back to the caller.  */
  val = get_insns ();
  end_sequence ();
  return val;
}
1181
1182 \f
1183 static int
1184 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1185 tree type, bool named ATTRIBUTE_UNUSED)
1186 {
1187 int words;
1188 unsigned int size =
1189 (((mode == BLKmode && type)
1190 ? (unsigned int) int_size_in_bytes (type)
1191 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1192 / UNITS_PER_WORD;
1193
1194 if (*cum >= M32R_MAX_PARM_REGS)
1195 words = 0;
1196 else if (*cum + size > M32R_MAX_PARM_REGS)
1197 words = (*cum + size) - M32R_MAX_PARM_REGS;
1198 else
1199 words = 0;
1200
1201 return words * UNITS_PER_WORD;
1202 }
1203
/* The ROUND_ADVANCE* macros are local to this file.  */
/* Round SIZE up to a word boundary.  */
#define ROUND_ADVANCE(SIZE) \
  (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)

/* Round arg MODE/TYPE up to the next word boundary.
   BLKmode arguments take their size from the tree TYPE; everything else
   from the machine mode.  */
#define ROUND_ADVANCE_ARG(MODE, TYPE) \
  ((MODE) == BLKmode \
   ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
   : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))

/* Round CUM up to the necessary point for argument MODE/TYPE.
   On the M32R every argument slot is word aligned, so no rounding is
   ever needed: CUM is returned unchanged and MODE/TYPE are ignored.  */
#define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)

/* Return boolean indicating arg of type TYPE and mode MODE will be passed in
   a reg.  This includes arguments that have to be passed by reference as the
   pointer to them is passed in a reg if one is available (and that is what
   we're given).
   This macro is only used in this file.  */
#define PASS_IN_REG_P(CUM, MODE, TYPE) \
  (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1225
1226 /* Determine where to put an argument to a function.
1227 Value is zero to push the argument on the stack,
1228 or a hard register in which to store the argument.
1229
1230 MODE is the argument's machine mode.
1231 TYPE is the data type of the argument (as a tree).
1232 This is null for libcalls where that information may
1233 not be available.
1234 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1235 the preceding args and about the function being called.
1236 NAMED is nonzero if this argument is a named parameter
1237 (otherwise it is an extra parameter matching an ellipsis). */
1238 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1239 and the rest are pushed. */
1240
1241 static rtx
1242 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1243 const_tree type ATTRIBUTE_UNUSED,
1244 bool named ATTRIBUTE_UNUSED)
1245 {
1246 return (PASS_IN_REG_P (*cum, mode, type)
1247 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1248 : NULL_RTX);
1249 }
1250
1251 /* Update the data in CUM to advance over an argument
1252 of mode MODE and data type TYPE.
1253 (TYPE is null for libcalls where that information may not be available.) */
1254
1255 static void
1256 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1257 const_tree type, bool named ATTRIBUTE_UNUSED)
1258 {
1259 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1260 + ROUND_ADVANCE_ARG (mode, type));
1261 }
1262
1263 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1264
1265 static bool
1266 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1267 {
1268 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1269 }
1270
1271 /* Worker function for TARGET_FUNCTION_VALUE. */
1272
1273 static rtx
1274 m32r_function_value (const_tree valtype,
1275 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1276 bool outgoing ATTRIBUTE_UNUSED)
1277 {
1278 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1279 }
1280
1281 /* Worker function for TARGET_LIBCALL_VALUE. */
1282
static rtx
m32r_libcall_value (enum machine_mode mode,
		    const_rtx fun ATTRIBUTE_UNUSED)
{
  /* Libcall results are returned in r0, like ordinary function values.  */
  return gen_rtx_REG (mode, 0);
}
1289
1290 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1291
1292 ??? What about r1 in DI/DF values. */
1293
static bool
m32r_function_value_regno_p (const unsigned int regno)
{
  /* Only r0 is reported as a value register (r1, used for the high word
     of DI/DF values, is not -- see the ??? above).  */
  return (regno == 0);
}
1299
1300 /* Do any needed setup for a variadic function. For the M32R, we must
1301 create a register parameter block, and then copy any anonymous arguments
1302 in registers to memory.
1303
1304 CUM has not been updated for the last named argument which has type TYPE
1305 and mode MODE, and we rely on this fact. */
1306
static void
m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;

  /* First pass of the argument scanner: sizing only, emit nothing.  */
  if (no_rtl)
    return;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  /* CUM has not yet been advanced past the last named argument (see the
     function comment), so advance it here to find the first register
     that holds anonymous arguments.  */
  first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
		    + ROUND_ADVANCE_ARG (mode, type));

  if (first_anon_arg < M32R_MAX_PARM_REGS)
    {
      /* Note that first_reg_offset < M32R_MAX_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = M32R_MAX_PARM_REGS - first_reg_offset;
      rtx regblock;

      /* Spill the remaining parameter registers into the incoming
	 argument area so va_arg can walk all anonymous args in memory.  */
      regblock = gen_frame_mem (BLKmode,
				plus_constant (arg_pointer_rtx,
					       FIRST_PARM_OFFSET (0)));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = (size * UNITS_PER_WORD);
    }
}
1339
1340 \f
1341 /* Return true if INSN is real instruction bearing insn. */
1342
1343 static int
1344 m32r_is_insn (rtx insn)
1345 {
1346 return (NONDEBUG_INSN_P (insn)
1347 && GET_CODE (PATTERN (insn)) != USE
1348 && GET_CODE (PATTERN (insn)) != CLOBBER
1349 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1350 }
1351
1352 /* Increase the priority of long instructions so that the
1353 short instructions are scheduled ahead of the long ones. */
1354
1355 static int
1356 m32r_adjust_priority (rtx insn, int priority)
1357 {
1358 if (m32r_is_insn (insn)
1359 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1360 priority <<= 3;
1361
1362 return priority;
1363 }
1364
1365 \f
1366 /* Indicate how many instructions can be issued at the same time.
1367 This is sort of a lie. The m32r can issue only 1 long insn at
1368 once, but it can issue 2 short insns. The default therefore is
1369 set at 2, but this can be overridden by the command line option
1370 -missue-rate=1. */
1371
1372 static int
1373 m32r_issue_rate (void)
1374 {
1375 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1376 }
1377 \f
1378 /* Cost functions. */
1379
1380 /* Implement TARGET_HANDLE_OPTION.
1381
1382 Memory is 3 times as expensive as registers.
1383 ??? Is that the right way to look at it? */
1384
1385 static int
1386 m32r_memory_move_cost (enum machine_mode mode,
1387 reg_class_t rclass ATTRIBUTE_UNUSED,
1388 bool in ATTRIBUTE_UNUSED)
1389 {
1390 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1391 return 6;
1392 else
1393 return 12;
1394 }
1395
1396 static bool
1397 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1398 bool speed ATTRIBUTE_UNUSED)
1399 {
1400 switch (code)
1401 {
1402 /* Small integers are as cheap as registers. 4 byte values can be
1403 fetched as immediate constants - let's give that the cost of an
1404 extra insn. */
1405 case CONST_INT:
1406 if (INT16_P (INTVAL (x)))
1407 {
1408 *total = 0;
1409 return true;
1410 }
1411 /* FALLTHRU */
1412
1413 case CONST:
1414 case LABEL_REF:
1415 case SYMBOL_REF:
1416 *total = COSTS_N_INSNS (1);
1417 return true;
1418
1419 case CONST_DOUBLE:
1420 {
1421 rtx high, low;
1422
1423 split_double (x, &high, &low);
1424 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1425 + !INT16_P (INTVAL (low)));
1426 return true;
1427 }
1428
1429 case MULT:
1430 *total = COSTS_N_INSNS (3);
1431 return true;
1432
1433 case DIV:
1434 case UDIV:
1435 case MOD:
1436 case UMOD:
1437 *total = COSTS_N_INSNS (10);
1438 return true;
1439
1440 default:
1441 return false;
1442 }
1443 }
1444 \f
1445 /* Type of function DECL.
1446
1447 The result is cached. To reset the cache at the end of a function,
1448 call with DECL = NULL_TREE. */
1449
1450 enum m32r_function_type
1451 m32r_compute_function_type (tree decl)
1452 {
1453 /* Cached value. */
1454 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1455 /* Last function we were called for. */
1456 static tree last_fn = NULL_TREE;
1457
1458 /* Resetting the cached value? */
1459 if (decl == NULL_TREE)
1460 {
1461 fn_type = M32R_FUNCTION_UNKNOWN;
1462 last_fn = NULL_TREE;
1463 return fn_type;
1464 }
1465
1466 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1467 return fn_type;
1468
1469 /* Compute function type. */
1470 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1471 ? M32R_FUNCTION_INTERRUPT
1472 : M32R_FUNCTION_NORMAL);
1473
1474 last_fn = decl;
1475 return fn_type;
1476 }
1477 \f/* Function prologue/epilogue handlers. */
1478
1479 /* M32R stack frames look like:
1480
1481 Before call After call
1482 +-----------------------+ +-----------------------+
1483 | | | |
1484 high | local variables, | | local variables, |
1485 mem | reg save area, etc. | | reg save area, etc. |
1486 | | | |
1487 +-----------------------+ +-----------------------+
1488 | | | |
1489 | arguments on stack. | | arguments on stack. |
1490 | | | |
1491 SP+0->+-----------------------+ +-----------------------+
1492 | reg parm save area, |
1493 | only created for |
1494 | variable argument |
1495 | functions |
1496 +-----------------------+
1497 | previous frame ptr |
1498 +-----------------------+
1499 | |
1500 | register save area |
1501 | |
1502 +-----------------------+
1503 | return address |
1504 +-----------------------+
1505 | |
1506 | local variables |
1507 | |
1508 +-----------------------+
1509 | |
1510 | alloca allocations |
1511 | |
1512 +-----------------------+
1513 | |
1514 low | arguments on stack |
1515 memory | |
1516 SP+0->+-----------------------+
1517
1518 Notes:
1519 1) The "reg parm save area" does not exist for non variable argument fns.
1520 2) The "reg parm save area" can be eliminated completely if we saved regs
1521 containing anonymous args separately but that complicates things too
1522 much (so it's not done).
1523 3) The return address is saved after the register save area so as to have as
1524 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1525
/* Structure to be filled in by m32r_compute_frame_size with register
   save masks, and offsets for the current function.
   The single instance (current_frame_info) is cleared for the next
   function by m32r_output_function_epilogue.  */
struct m32r_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  unsigned int save_fp;		/* Nonzero if fp must be saved.  */
  unsigned int save_lr;		/* Nonzero if lr (return addr) must be saved.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};
1541
/* Current frame information calculated by m32r_compute_frame_size.  */
static struct m32r_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct m32r_frame_info zero_frame_info;

#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.
   A register must be saved if it is ever live in this function and is
   either call-saved, or call-clobbered but we are compiling an
   interrupt handler (which must preserve every register it touches).  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
  ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
   && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)

#define SHORT_INSN_SIZE 2	/* Size of small instructions.  */
#define LONG_INSN_SIZE 4	/* Size of long instructions.  */
1563
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.

   SIZE is the size needed for local variables.
   Fills in current_frame_info as a side effect.  */

unsigned int
m32r_compute_frame_size (int size)	/* # of var. bytes allocated.  */
{
  unsigned int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size;
  unsigned int gmask;
  enum m32r_function_type fn_type;
  int interrupt_p;
  /* The PIC register is also needed when profiling (mcount is called
     through the PLT).  */
  int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
				  | crtl->profile);

  var_size = M32R_STACK_ALIGN (size);
  args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
  pretend_size = crtl->args.pretend_args_size;
  extra_size = FIRST_PARM_OFFSET (0);
  total_size = extra_size + pretend_size + args_size + var_size;
  reg_size = 0;
  gmask = 0;

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */
  fn_type = m32r_compute_function_type (current_function_decl);
  interrupt_p = M32R_INTERRUPT_P (fn_type);

  /* Calculate space needed for registers.  */
  for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
    {
      if (MUST_SAVE_REGISTER (regno, interrupt_p)
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  reg_size += UNITS_PER_WORD;
	  gmask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
  current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;

  /* fp and lr are saved outside of GMASK; account for them here.  */
  reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
	       * UNITS_PER_WORD);
  total_size += reg_size;

  /* ??? Not sure this is necessary, and I don't think the epilogue
     handler will do the right thing if this changes total_size.  */
  total_size = M32R_STACK_ALIGN (total_size);

  /* frame_size = total_size - (pretend_size + reg_size);  */

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.extra_size = extra_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size = var_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_size = reg_size;
  current_frame_info.gmask = gmask;
  /* Only trust the result once reload has settled register usage.  */
  current_frame_info.initialized = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}
1631
1632 /* Worker function for TARGET_CAN_ELIMINATE. */
1633
1634 bool
1635 m32r_can_eliminate (const int from, const int to)
1636 {
1637 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1638 ? ! frame_pointer_needed
1639 : true);
1640 }
1641
1642 \f
1643 /* The table we use to reference PIC data. */
1644 static rtx global_offset_table;
1645
/* Reload the link register from its stack save slot.  SP is the stack
   pointer; SIZE is the byte offset from SP to the slot holding the
   saved lr (0 means lr sits right at the top of stack).  */

static void
m32r_reload_lr (rtx sp, int size)
{
  rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);

  if (size == 0)
    emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
  else if (size < 32768)
    emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
					     gen_rtx_PLUS (Pmode, sp,
							   GEN_INT (size)))));
  else
    {
      /* Offset too large for a 16-bit displacement; compute the slot
	 address in a temporary register first.  */
      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

      emit_insn (gen_movsi (tmp, GEN_INT (size)));
      emit_insn (gen_addsi3 (tmp, tmp, sp));
      emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
    }

  /* Keep the load alive even though lr has no explicit later use.  */
  emit_use (lr);
}
1668
/* Emit insns that load the PIC register with the GOT base address.
   Called from the prologue when this function needs PIC addressing.  */

void
m32r_load_pic_register (void)
{
  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
			 GEN_INT (TARGET_MODEL_SMALL)));

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_use (pic_offset_table_rtx);
}
1680
1681 /* Expand the m32r prologue as a series of insns. */
1682
1683 void
1684 m32r_expand_prologue (void)
1685 {
1686 int regno;
1687 int frame_size;
1688 unsigned int gmask;
1689 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1690 | crtl->profile);
1691
1692 if (! current_frame_info.initialized)
1693 m32r_compute_frame_size (get_frame_size ());
1694
1695 gmask = current_frame_info.gmask;
1696
1697 /* These cases shouldn't happen. Catch them now. */
1698 gcc_assert (current_frame_info.total_size || !gmask);
1699
1700 /* Allocate space for register arguments if this is a variadic function. */
1701 if (current_frame_info.pretend_size != 0)
1702 {
1703 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1704 the wrong result on a 64-bit host. */
1705 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1706 emit_insn (gen_addsi3 (stack_pointer_rtx,
1707 stack_pointer_rtx,
1708 GEN_INT (-pretend_size)));
1709 }
1710
1711 /* Save any registers we need to and set up fp. */
1712 if (current_frame_info.save_fp)
1713 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1714
1715 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1716
1717 /* Save any needed call-saved regs (and call-used if this is an
1718 interrupt handler). */
1719 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1720 {
1721 if ((gmask & (1 << regno)) != 0)
1722 emit_insn (gen_movsi_push (stack_pointer_rtx,
1723 gen_rtx_REG (Pmode, regno)));
1724 }
1725
1726 if (current_frame_info.save_lr)
1727 emit_insn (gen_movsi_push (stack_pointer_rtx,
1728 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1729
1730 /* Allocate the stack frame. */
1731 frame_size = (current_frame_info.total_size
1732 - (current_frame_info.pretend_size
1733 + current_frame_info.reg_size));
1734
1735 if (frame_size == 0)
1736 ; /* Nothing to do. */
1737 else if (frame_size <= 32768)
1738 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1739 GEN_INT (-frame_size)));
1740 else
1741 {
1742 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1743
1744 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1745 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1746 }
1747
1748 if (frame_pointer_needed)
1749 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1750
1751 if (crtl->profile)
1752 /* Push lr for mcount (form_pc, x). */
1753 emit_insn (gen_movsi_push (stack_pointer_rtx,
1754 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1755
1756 if (pic_reg_used)
1757 {
1758 m32r_load_pic_register ();
1759 m32r_reload_lr (stack_pointer_rtx,
1760 (crtl->profile ? 0 : frame_size));
1761 }
1762
1763 if (crtl->profile && !pic_reg_used)
1764 emit_insn (gen_blockage ());
1765 }
1766
1767 \f
/* Set up the stack and frame pointer (if desired) for the function.
   Note, if this is changed, you need to mirror the changes in
   m32r_compute_frame_size which calculates the prolog size.  */

static void
m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
{
  enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, mark it as such.  */
  if (M32R_INTERRUPT_P (fn_type))
    fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);

  if (! current_frame_info.initialized)
    m32r_compute_frame_size (size);

  /* This is only for the human reader.  reg_size is in bytes; report it
     as a count of 4-byte registers.  */
  fprintf (file,
	   "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);
}
1793 \f
1794 /* Output RTL to pop register REGNO from the stack. */
1795
1796 static void
1797 pop (int regno)
1798 {
1799 rtx x;
1800
1801 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1802 stack_pointer_rtx));
1803 add_reg_note (x, REG_INC, stack_pointer_rtx);
1804 }
1805
/* Expand the m32r epilogue as a series of insns.
   Undoes the frame built by m32r_expand_prologue, in reverse order.  */

void
m32r_expand_epilogue (void)
{
  int regno;
  int noepilogue = FALSE;
  int total_size;

  gcc_assert (current_frame_info.initialized);
  total_size = current_frame_info.total_size;

  if (total_size == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (insn && NOTE_P (insn))
	insn = prev_nonnote_insn (insn);
      if (insn && BARRIER_P (insn))
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int var_size = current_frame_info.var_size;
      unsigned int args_size = current_frame_info.args_size;
      unsigned int gmask = current_frame_info.gmask;
      /* alloca may have moved sp; only trust it if there was none.  */
      int can_trust_sp_p = !cfun->calls_alloca;

      if (flag_exceptions)
	emit_insn (gen_blockage ());

      /* The first thing to do is point the sp at the bottom of the register
	 save area.  */
      if (can_trust_sp_p)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    ; /* Nothing to do.  */
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      /* Offset too large for a 16-bit immediate; use a temp.  */
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else if (frame_pointer_needed)
	{
	  /* sp cannot be trusted; recompute it from fp instead.  */
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else
	gcc_unreachable ();

      if (current_frame_info.save_lr)
	pop (RETURN_ADDR_REGNUM);

      /* Restore any saved registers, in reverse order of course.  */
      gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
      for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
	{
	  if ((gmask & (1L << regno)) != 0)
	    pop (regno);
	}

      if (current_frame_info.save_fp)
	pop (FRAME_POINTER_REGNUM);

      /* Remove varargs area if present.  */
      if (current_frame_info.pretend_size != 0)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (current_frame_info.pretend_size)));

      emit_insn (gen_blockage ());
    }
}
1904
/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  */

static void
m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* Reset state info for each function: discard the cached frame layout
     and the cached function type so the next function starts clean.  */
  current_frame_info = zero_frame_info;
  m32r_compute_function_type (NULL_TREE);
}
1916 \f
1917 /* Return nonzero if this function is known to have a null or 1 instruction
1918 epilogue. */
1919
1920 int
1921 direct_return (void)
1922 {
1923 if (!reload_completed)
1924 return FALSE;
1925
1926 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1927 return FALSE;
1928
1929 if (! current_frame_info.initialized)
1930 m32r_compute_frame_size (get_frame_size ());
1931
1932 return current_frame_info.total_size == 0;
1933 }
1934
1935 \f
1936 /* PIC. */
1937
1938 int
1939 m32r_legitimate_pic_operand_p (rtx x)
1940 {
1941 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1942 return 0;
1943
1944 if (GET_CODE (x) == CONST
1945 && GET_CODE (XEXP (x, 0)) == PLUS
1946 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1947 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1948 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1949 return 0;
1950
1951 return 1;
1952 }
1953
/* Convert ORIG -- a SYMBOL_REF, LABEL_REF or CONST address -- into a
   form that is legitimate under PIC.  REG, if nonnull, receives the
   result; if null a fresh pseudo is allocated (only legal before
   reload).  Any other ORIG is returned unchanged.  */

rtx
m32r_legitimize_pic_address (rtx orig, rtx reg)
{
#ifdef DEBUG_PIC
  printf("m32r_legitimize_pic_address()\n");
#endif

  if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
    {
      rtx pic_ref, address;
      int subregs = 0;

      if (reg == 0)
	{
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      crtl->uses_pic_offset_table = 1;

      /* Labels and local symbols can be addressed GOT-relative directly;
	 no GOT entry load is needed.  */
      if (GET_CODE (orig) == LABEL_REF
	  || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
	{
	  emit_insn (gen_gotoff_load_addr (reg, orig));
	  emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
	  return reg;
	}

      /* Global symbol: load its address out of the GOT entry.  */
      emit_insn (gen_pic_load_addr (address, orig));

      emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
      pic_ref = gen_const_mem (Pmode, address);
      emit_move_insn (reg, pic_ref);
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already rewritten relative to the PIC register?  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Recursively legitimize both halves of (base + offset).  */
	  base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
	  if (base == reg)
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
	  else
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
	}
      else
	return orig;

      if (CONST_INT_P (offset))
	{
	  /* A 16-bit offset can be folded into the address; anything
	     wider must live in a register.  */
	  if (INT16_P (INTVAL (offset)))
	    return plus_constant (base, INTVAL (offset));
	  else
	    {
	      gcc_assert (! reload_in_progress && ! reload_completed);
	      offset = force_reg (Pmode, offset);
	    }
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
2037
2038 static rtx
2039 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2040 enum machine_mode mode ATTRIBUTE_UNUSED)
2041 {
2042 if (flag_pic)
2043 return m32r_legitimize_pic_address (x, NULL_RTX);
2044 else
2045 return x;
2046 }
2047
2048 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2049
2050 static bool
2051 m32r_mode_dependent_address_p (const_rtx addr)
2052 {
2053 if (GET_CODE (addr) == LO_SUM)
2054 return true;
2055
2056 return false;
2057 }
2058 \f
2059 /* Nested function support. */
2060
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   Deliberately a no-op here: all parameters are marked unused and no
   code is emitted.  NOTE(review): presumably trampoline setup happens
   elsewhere (or nested functions are unsupported) -- confirm before
   relying on this.  */

void
m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			    rtx fnaddr ATTRIBUTE_UNUSED,
			    rtx cxt ATTRIBUTE_UNUSED)
{
}
2071 \f
/* Worker for TARGET_ASM_FILE_START: emit directives at the top of the
   assembly output file.  */

static void
m32r_file_start (void)
{
  default_file_start ();

  if (flag_verbose_asm)
    fprintf (asm_out_file,
	     "%s M32R/D special options: -G %d\n",
	     ASM_COMMENT_START, g_switch_value);

  /* Tell the assembler we are generating little-endian code.  */
  if (TARGET_LITTLE_ENDIAN)
    fprintf (asm_out_file, "\t.little\n");
}
2085 \f
2086 /* Print operand X (an rtx) in assembler syntax to file FILE.
2087 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2088 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2089
2090 static void
2091 m32r_print_operand (FILE * file, rtx x, int code)
2092 {
2093 rtx addr;
2094
2095 switch (code)
2096 {
2097 /* The 's' and 'p' codes are used by output_block_move() to
2098 indicate post-increment 's'tores and 'p're-increment loads. */
2099 case 's':
2100 if (REG_P (x))
2101 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2102 else
2103 output_operand_lossage ("invalid operand to %%s code");
2104 return;
2105
2106 case 'p':
2107 if (REG_P (x))
2108 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2109 else
2110 output_operand_lossage ("invalid operand to %%p code");
2111 return;
2112
2113 case 'R' :
2114 /* Write second word of DImode or DFmode reference,
2115 register or memory. */
2116 if (REG_P (x))
2117 fputs (reg_names[REGNO (x)+1], file);
2118 else if (MEM_P (x))
2119 {
2120 fprintf (file, "@(");
2121 /* Handle possible auto-increment. Since it is pre-increment and
2122 we have already done it, we can just use an offset of four. */
2123 /* ??? This is taken from rs6000.c I think. I don't think it is
2124 currently necessary, but keep it around. */
2125 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2126 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2127 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2128 else
2129 output_address (plus_constant (XEXP (x, 0), 4));
2130 fputc (')', file);
2131 }
2132 else
2133 output_operand_lossage ("invalid operand to %%R code");
2134 return;
2135
2136 case 'H' : /* High word. */
2137 case 'L' : /* Low word. */
2138 if (REG_P (x))
2139 {
2140 /* L = least significant word, H = most significant word. */
2141 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2142 fputs (reg_names[REGNO (x)], file);
2143 else
2144 fputs (reg_names[REGNO (x)+1], file);
2145 }
2146 else if (CONST_INT_P (x)
2147 || GET_CODE (x) == CONST_DOUBLE)
2148 {
2149 rtx first, second;
2150
2151 split_double (x, &first, &second);
2152 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2153 code == 'L' ? INTVAL (first) : INTVAL (second));
2154 }
2155 else
2156 output_operand_lossage ("invalid operand to %%H/%%L code");
2157 return;
2158
2159 case 'A' :
2160 {
2161 char str[30];
2162
2163 if (GET_CODE (x) != CONST_DOUBLE
2164 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2165 fatal_insn ("bad insn for 'A'", x);
2166
2167 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2168 fprintf (file, "%s", str);
2169 return;
2170 }
2171
2172 case 'B' : /* Bottom half. */
2173 case 'T' : /* Top half. */
2174 /* Output the argument to a `seth' insn (sets the Top half-word).
2175 For constants output arguments to a seth/or3 pair to set Top and
2176 Bottom halves. For symbols output arguments to a seth/add3 pair to
2177 set Top and Bottom halves. The difference exists because for
2178 constants seth/or3 is more readable but for symbols we need to use
2179 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2180 switch (GET_CODE (x))
2181 {
2182 case CONST_INT :
2183 case CONST_DOUBLE :
2184 {
2185 rtx first, second;
2186
2187 split_double (x, &first, &second);
2188 x = WORDS_BIG_ENDIAN ? second : first;
2189 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2190 (code == 'B'
2191 ? INTVAL (x) & 0xffff
2192 : (INTVAL (x) >> 16) & 0xffff));
2193 }
2194 return;
2195 case CONST :
2196 case SYMBOL_REF :
2197 if (code == 'B'
2198 && small_data_operand (x, VOIDmode))
2199 {
2200 fputs ("sda(", file);
2201 output_addr_const (file, x);
2202 fputc (')', file);
2203 return;
2204 }
2205 /* fall through */
2206 case LABEL_REF :
2207 fputs (code == 'T' ? "shigh(" : "low(", file);
2208 output_addr_const (file, x);
2209 fputc (')', file);
2210 return;
2211 default :
2212 output_operand_lossage ("invalid operand to %%T/%%B code");
2213 return;
2214 }
2215 break;
2216
2217 case 'U' :
2218 /* ??? wip */
2219 /* Output a load/store with update indicator if appropriate. */
2220 if (MEM_P (x))
2221 {
2222 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2223 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2224 fputs (".a", file);
2225 }
2226 else
2227 output_operand_lossage ("invalid operand to %%U code");
2228 return;
2229
2230 case 'N' :
2231 /* Print a constant value negated. */
2232 if (CONST_INT_P (x))
2233 output_addr_const (file, GEN_INT (- INTVAL (x)));
2234 else
2235 output_operand_lossage ("invalid operand to %%N code");
2236 return;
2237
2238 case 'X' :
2239 /* Print a const_int in hex. Used in comments. */
2240 if (CONST_INT_P (x))
2241 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2242 return;
2243
2244 case '#' :
2245 fputs (IMMEDIATE_PREFIX, file);
2246 return;
2247
2248 case 0 :
2249 /* Do nothing special. */
2250 break;
2251
2252 default :
2253 /* Unknown flag. */
2254 output_operand_lossage ("invalid operand output code");
2255 }
2256
2257 switch (GET_CODE (x))
2258 {
2259 case REG :
2260 fputs (reg_names[REGNO (x)], file);
2261 break;
2262
2263 case MEM :
2264 addr = XEXP (x, 0);
2265 if (GET_CODE (addr) == PRE_INC)
2266 {
2267 if (!REG_P (XEXP (addr, 0)))
2268 fatal_insn ("pre-increment address is not a register", x);
2269
2270 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2271 }
2272 else if (GET_CODE (addr) == PRE_DEC)
2273 {
2274 if (!REG_P (XEXP (addr, 0)))
2275 fatal_insn ("pre-decrement address is not a register", x);
2276
2277 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2278 }
2279 else if (GET_CODE (addr) == POST_INC)
2280 {
2281 if (!REG_P (XEXP (addr, 0)))
2282 fatal_insn ("post-increment address is not a register", x);
2283
2284 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2285 }
2286 else
2287 {
2288 fputs ("@(", file);
2289 output_address (XEXP (x, 0));
2290 fputc (')', file);
2291 }
2292 break;
2293
2294 case CONST_DOUBLE :
2295 /* We handle SFmode constants here as output_addr_const doesn't. */
2296 if (GET_MODE (x) == SFmode)
2297 {
2298 REAL_VALUE_TYPE d;
2299 long l;
2300
2301 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2302 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2303 fprintf (file, "0x%08lx", l);
2304 break;
2305 }
2306
2307 /* Fall through. Let output_addr_const deal with it. */
2308
2309 default :
2310 output_addr_const (file, x);
2311 break;
2312 }
2313 }
2314
/* Print a memory address as an operand to reference that memory location.
   Implements TARGET_PRINT_OPERAND_ADDRESS for the M32R.  Output goes to
   FILE in M32R assembler syntax, e.g. "4,r1" for a reg+offset address,
   "low(sym),r1" or "sda(sym),r1" for a LO_SUM, and "+r1"/"-r1"/"r1+" for
   pre-increment/pre-decrement/post-increment addresses.  */

static void
m32r_print_operand_address (FILE * file, rtx addr)
{
  rtx base;
  rtx index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;

    case PLUS :
      /* Split the PLUS into a base plus either a constant offset or an
	 index expression (whichever operand is the CONST_INT).  */
      if (CONST_INT_P (XEXP (addr, 0)))
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (CONST_INT_P (XEXP (addr, 1)))
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (REG_P (base))
	{
	  /* Print the offset first (if present) to conform to the manual.  */
	  if (index == 0)
	    {
	      if (offset != 0)
		fprintf (file, "%d,", offset);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  /* The chip doesn't support this, but left in for generality.  */
	  else if (REG_P (index))
	    fprintf (file, "%s,%s",
		     reg_names[REGNO (base)], reg_names[REGNO (index)]);
	  /* Not sure this can happen, but leave in for now.  */
	  else if (GET_CODE (index) == SYMBOL_REF)
	    {
	      output_addr_const (file, index);
	      fputc (',', file);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  else
	    fatal_insn ("bad address", addr);
	}
      else if (GET_CODE (base) == LO_SUM)
	{
	  gcc_assert (!index && REG_P (XEXP (base, 0)));
	  /* Small-data symbols are addressed relative to the sda base
	     register; anything else uses the low-16-bit relocation.  */
	  if (small_data_operand (XEXP (base, 1), VOIDmode))
	    fputs ("sda(", file);
	  else
	    fputs ("low(", file);
	  output_addr_const (file, plus_constant (XEXP (base, 1), offset));
	  fputs ("),", file);
	  fputs (reg_names[REGNO (XEXP (base, 0))], file);
	}
      else
	fatal_insn ("bad address", addr);
      break;

    case LO_SUM :
      if (!REG_P (XEXP (addr, 0)))
	fatal_insn ("lo_sum not of register", addr);
      if (small_data_operand (XEXP (addr, 1), VOIDmode))
	fputs ("sda(", file);
      else
	fputs ("low(", file);
      output_addr_const (file, XEXP (addr, 1));
      fputs ("),", file);
      fputs (reg_names[REGNO (XEXP (addr, 0))], file);
      break;

    case PRE_INC :	/* Assume SImode.  */
      fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PRE_DEC :	/* Assume SImode.  */
      fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC :	/* Assume SImode.  */
      fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    default :
      output_addr_const (file, addr);
      break;
    }
}
2404
2405 static bool
2406 m32r_print_operand_punct_valid_p (unsigned char code)
2407 {
2408 return m32r_punct_chars[code];
2409 }
2410
2411 /* Return true if the operands are the constants 0 and 1. */
2412
2413 int
2414 zero_and_one (rtx operand1, rtx operand2)
2415 {
2416 return
2417 CONST_INT_P (operand1)
2418 && CONST_INT_P (operand2)
2419 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2420 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2421 }
2422
2423 /* Generate the correct assembler code to handle the conditional loading of a
2424 value into a register. It is known that the operands satisfy the
2425 conditional_move_operand() function above. The destination is operand[0].
2426 The condition is operand [1]. The 'true' value is operand [2] and the
2427 'false' value is operand [3]. */
2428
2429 char *
2430 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2431 {
2432 static char buffer [100];
2433 const char * dest = reg_names [REGNO (operands [0])];
2434
2435 buffer [0] = 0;
2436
2437 /* Destination must be a register. */
2438 gcc_assert (REG_P (operands [0]));
2439 gcc_assert (conditional_move_operand (operands [2], SImode));
2440 gcc_assert (conditional_move_operand (operands [3], SImode));
2441
2442 /* Check to see if the test is reversed. */
2443 if (GET_CODE (operands [1]) == NE)
2444 {
2445 rtx tmp = operands [2];
2446 operands [2] = operands [3];
2447 operands [3] = tmp;
2448 }
2449
2450 sprintf (buffer, "mvfc %s, cbr", dest);
2451
2452 /* If the true value was '0' then we need to invert the results of the move. */
2453 if (INTVAL (operands [2]) == 0)
2454 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2455 dest, dest);
2456
2457 return buffer;
2458 }
2459
2460 /* Returns true if the registers contained in the two
2461 rtl expressions are different. */
2462
2463 int
2464 m32r_not_same_reg (rtx a, rtx b)
2465 {
2466 int reg_a = -1;
2467 int reg_b = -2;
2468
2469 while (GET_CODE (a) == SUBREG)
2470 a = SUBREG_REG (a);
2471
2472 if (REG_P (a))
2473 reg_a = REGNO (a);
2474
2475 while (GET_CODE (b) == SUBREG)
2476 b = SUBREG_REG (b);
2477
2478 if (REG_P (b))
2479 reg_b = REGNO (b);
2480
2481 return reg_a != reg_b;
2482 }
2483
2484 \f
2485 rtx
2486 m32r_function_symbol (const char *name)
2487 {
2488 int extra_flags = 0;
2489 enum m32r_model model;
2490 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2491
2492 if (TARGET_MODEL_SMALL)
2493 model = M32R_MODEL_SMALL;
2494 else if (TARGET_MODEL_MEDIUM)
2495 model = M32R_MODEL_MEDIUM;
2496 else if (TARGET_MODEL_LARGE)
2497 model = M32R_MODEL_LARGE;
2498 else
2499 gcc_unreachable (); /* Shouldn't happen. */
2500 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2501
2502 if (extra_flags)
2503 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2504
2505 return sym;
2506 }
2507
/* Use a library function to move some bytes.  Emits a call to memcpy
   (resolved through m32r_function_symbol so the correct code-model flags
   are attached) copying BYTES_RTX bytes from SRC_REG to DEST_REG.  */

static void
block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
{
  /* We want to pass the size as Pmode, which will normally be SImode
     but will be DImode if we are using 64-bit longs and pointers.  */
  if (GET_MODE (bytes_rtx) != VOIDmode
      && GET_MODE (bytes_rtx) != Pmode)
    bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);

  /* The length argument itself is passed in sizetype's mode, converting
     again if that differs from what we have.  */
  emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
		     VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
		     convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
				      TYPE_UNSIGNED (sizetype)),
		     TYPE_MODE (sizetype));
}
2525
/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.

   Returns 1 upon success, 0 otherwise (0 meaning the copy was emitted as
   a library call instead of inline code).  */

int
m32r_expand_block_move (rtx operands[])
{
  rtx orig_dst = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = CONST_INT_P (bytes_rtx);
  HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
  int align = INTVAL (align_rtx);
  int leftover;
  rtx src_reg;
  rtx dst_reg;

  /* A constant zero (or negative) length is a no-op.  */
  if (constp && bytes <= 0)
    return 1;

  /* Move the address into scratch registers.  */
  dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
  src_reg = copy_addr_to_reg (XEXP (orig_src, 0));

  if (align > UNITS_PER_WORD)
    align = UNITS_PER_WORD;

  /* If we prefer size over speed, always use a function call.
     If we do not know the size, use a function call.
     If the blocks are not word aligned, use a function call.  */
  if (optimize_size || ! constp || align != UNITS_PER_WORD)
    {
      block_move_call (dst_reg, src_reg, bytes_rtx);
      return 0;
    }

  /* Split the copy into MAX_MOVE_BYTES-sized chunks plus a tail.  */
  leftover = bytes % MAX_MOVE_BYTES;
  bytes -= leftover;

  /* If necessary, generate a loop to handle the bulk of the copy.  */
  if (bytes)
    {
      rtx label = NULL_RTX;
      rtx final_src = NULL_RTX;
      rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
      rtx rounded_total = GEN_INT (bytes);
      rtx new_dst_reg = gen_reg_rtx (SImode);
      rtx new_src_reg = gen_reg_rtx (SImode);

      /* If we are going to have to perform this loop more than
	 once, then generate a label and compute the address the
	 source register will contain upon completion of the final
	 iteration.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  final_src = gen_reg_rtx (Pmode);

	  if (INT16_P(bytes))
	    emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
	  else
	    {
	      /* Total doesn't fit in a 16-bit immediate: load it into a
		 register first, then add.  */
	      emit_insn (gen_movsi (final_src, rounded_total));
	      emit_insn (gen_addsi3 (final_src, final_src, src_reg));
	    }

	  label = gen_label_rtx ();
	  emit_label (label);
	}

      /* It is known that output_block_move() will update src_reg to point
	 to the word after the end of the source block, and dst_reg to point
	 to the last word of the destination block, provided that the block
	 is MAX_MOVE_BYTES long.  */
      emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
					new_dst_reg, new_src_reg));
      emit_move_insn (dst_reg, new_dst_reg);
      emit_move_insn (src_reg, new_src_reg);
      emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));

      /* Loop back until the source pointer reaches its final value.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
	  emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
	}
    }

  /* Copy any remaining tail of fewer than MAX_MOVE_BYTES bytes.  */
  if (leftover)
    emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
				      gen_reg_rtx (SImode),
				      gen_reg_rtx (SImode)));
  return 1;
}
2624
2625 \f
2626 /* Emit load/stores for a small constant word aligned block_move.
2627
2628 operands[0] is the memory address of the destination.
2629 operands[1] is the memory address of the source.
2630 operands[2] is the number of bytes to move.
2631 operands[3] is a temp register.
2632 operands[4] is a temp register. */
2633
2634 void
2635 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2636 {
2637 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2638 int first_time;
2639 int got_extra = 0;
2640
2641 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2642
2643 /* We do not have a post-increment store available, so the first set of
2644 stores are done without any increment, then the remaining ones can use
2645 the pre-increment addressing mode.
2646
2647 Note: expand_block_move() also relies upon this behavior when building
2648 loops to copy large blocks. */
2649 first_time = 1;
2650
2651 while (bytes > 0)
2652 {
2653 if (bytes >= 8)
2654 {
2655 if (first_time)
2656 {
2657 output_asm_insn ("ld\t%5, %p1", operands);
2658 output_asm_insn ("ld\t%6, %p1", operands);
2659 output_asm_insn ("st\t%5, @%0", operands);
2660 output_asm_insn ("st\t%6, %s0", operands);
2661 }
2662 else
2663 {
2664 output_asm_insn ("ld\t%5, %p1", operands);
2665 output_asm_insn ("ld\t%6, %p1", operands);
2666 output_asm_insn ("st\t%5, %s0", operands);
2667 output_asm_insn ("st\t%6, %s0", operands);
2668 }
2669
2670 bytes -= 8;
2671 }
2672 else if (bytes >= 4)
2673 {
2674 if (bytes > 4)
2675 got_extra = 1;
2676
2677 output_asm_insn ("ld\t%5, %p1", operands);
2678
2679 if (got_extra)
2680 output_asm_insn ("ld\t%6, %p1", operands);
2681
2682 if (first_time)
2683 output_asm_insn ("st\t%5, @%0", operands);
2684 else
2685 output_asm_insn ("st\t%5, %s0", operands);
2686
2687 bytes -= 4;
2688 }
2689 else
2690 {
2691 /* Get the entire next word, even though we do not want all of it.
2692 The saves us from doing several smaller loads, and we assume that
2693 we cannot cause a page fault when at least part of the word is in
2694 valid memory [since we don't get called if things aren't properly
2695 aligned]. */
2696 int dst_offset = first_time ? 0 : 4;
2697 /* The amount of increment we have to make to the
2698 destination pointer. */
2699 int dst_inc_amount = dst_offset + bytes - 4;
2700 /* The same for the source pointer. */
2701 int src_inc_amount = bytes;
2702 int last_shift;
2703 rtx my_operands[3];
2704
2705 /* If got_extra is true then we have already loaded
2706 the next word as part of loading and storing the previous word. */
2707 if (! got_extra)
2708 output_asm_insn ("ld\t%6, @%1", operands);
2709
2710 if (bytes >= 2)
2711 {
2712 bytes -= 2;
2713
2714 output_asm_insn ("sra3\t%5, %6, #16", operands);
2715 my_operands[0] = operands[5];
2716 my_operands[1] = GEN_INT (dst_offset);
2717 my_operands[2] = operands[0];
2718 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2719
2720 /* If there is a byte left to store then increment the
2721 destination address and shift the contents of the source
2722 register down by 8 bits. We could not do the address
2723 increment in the store half word instruction, because it does
2724 not have an auto increment mode. */
2725 if (bytes > 0) /* assert (bytes == 1) */
2726 {
2727 dst_offset += 2;
2728 last_shift = 8;
2729 }
2730 }
2731 else
2732 last_shift = 24;
2733
2734 if (bytes > 0)
2735 {
2736 my_operands[0] = operands[6];
2737 my_operands[1] = GEN_INT (last_shift);
2738 output_asm_insn ("srai\t%0, #%1", my_operands);
2739 my_operands[0] = operands[6];
2740 my_operands[1] = GEN_INT (dst_offset);
2741 my_operands[2] = operands[0];
2742 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2743 }
2744
2745 /* Update the destination pointer if needed. We have to do
2746 this so that the patterns matches what we output in this
2747 function. */
2748 if (dst_inc_amount
2749 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2750 {
2751 my_operands[0] = operands[0];
2752 my_operands[1] = GEN_INT (dst_inc_amount);
2753 output_asm_insn ("addi\t%0, #%1", my_operands);
2754 }
2755
2756 /* Update the source pointer if needed. We have to do this
2757 so that the patterns matches what we output in this
2758 function. */
2759 if (src_inc_amount
2760 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2761 {
2762 my_operands[0] = operands[1];
2763 my_operands[1] = GEN_INT (src_inc_amount);
2764 output_asm_insn ("addi\t%0, #%1", my_operands);
2765 }
2766
2767 bytes = 0;
2768 }
2769
2770 first_time = 0;
2771 }
2772 }
2773
2774 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2775
2776 int
2777 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2778 unsigned int new_reg)
2779 {
2780 /* Interrupt routines can't clobber any register that isn't already used. */
2781 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2782 && !df_regs_ever_live_p (new_reg))
2783 return 0;
2784
2785 return 1;
2786 }
2787
2788 rtx
2789 m32r_return_addr (int count)
2790 {
2791 if (count != 0)
2792 return const0_rtx;
2793
2794 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2795 }
2796
2797 static void
2798 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2799 {
2800 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2801 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2802 0x017e8e17 : 0x178e7e01, SImode));
2803 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2804 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2805 0x0c00ae86 : 0x86ae000c, SImode));
2806 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2807 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2808 0xe627871e : 0x1e8727e6, SImode));
2809 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2810 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2811 0xc616c626 : 0x26c61fc6, SImode));
2812 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2813 chain_value);
2814 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2815 XEXP (DECL_RTL (fndecl), 0));
2816
2817 if (m32r_cache_flush_trap >= 0)
2818 emit_insn (gen_flush_icache
2819 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2820 gen_int_mode (m32r_cache_flush_trap, SImode)));
2821 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2822 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2823 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2824 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2825 GEN_INT (3), SImode);
2826 }
2827
2828 /* True if X is a reg that can be used as a base reg. */
2829
2830 static bool
2831 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2832 {
2833 if (! REG_P (x))
2834 return false;
2835
2836 if (strict)
2837 {
2838 if (GPR_P (REGNO (x)))
2839 return true;
2840 }
2841 else
2842 {
2843 if (GPR_P (REGNO (x))
2844 || REGNO (x) == ARG_POINTER_REGNUM
2845 || ! HARD_REGISTER_P (x))
2846 return true;
2847 }
2848
2849 return false;
2850 }
2851
2852 static inline bool
2853 m32r_rtx_ok_for_offset_p (const_rtx x)
2854 {
2855 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2856 }
2857
2858 static inline bool
2859 m32r_legitimate_offset_addres_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2860 const_rtx x, bool strict)
2861 {
2862 if (GET_CODE (x) == PLUS
2863 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2864 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2865 return true;
2866
2867 return false;
2868 }
2869
2870 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2871 since more than one instruction will be required. */
2872
2873 static inline bool
2874 m32r_legitimate_lo_sum_addres_p (enum machine_mode mode, const_rtx x,
2875 bool strict)
2876 {
2877 if (GET_CODE (x) == LO_SUM
2878 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2879 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2880 && CONSTANT_P (XEXP (x, 1)))
2881 return true;
2882
2883 return false;
2884 }
2885
2886 /* Is this a load and increment operation. */
2887
2888 static inline bool
2889 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2890 {
2891 if ((mode == SImode || mode == SFmode)
2892 && GET_CODE (x) == POST_INC
2893 && REG_P (XEXP (x, 0))
2894 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2895 return true;
2896
2897 return false;
2898 }
2899
2900 /* Is this an increment/decrement and store operation. */
2901
2902 static inline bool
2903 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2904 {
2905 if ((mode == SImode || mode == SFmode)
2906 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2907 && REG_P (XEXP (x, 0)) \
2908 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2909 return true;
2910
2911 return false;
2912 }
2913
2914 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2915
2916 static bool
2917 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2918 {
2919 if (m32r_rtx_ok_for_base_p (x, strict)
2920 || m32r_legitimate_offset_addres_p (mode, x, strict)
2921 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2922 || m32r_load_postinc_p (mode, x, strict)
2923 || m32r_store_preinc_predec_p (mode, x, strict))
2924 return true;
2925
2926 return false;
2927 }
2928
2929 static void
2930 m32r_conditional_register_usage (void)
2931 {
2932 if (flag_pic)
2933 {
2934 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2935 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2936 }
2937 }