gcc/config/m32r/m32r.c
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "hash-set.h"
25 #include "vec.h"
26 #include "input.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "inchash.h"
30 #include "tree.h"
31 #include "stor-layout.h"
32 #include "varasm.h"
33 #include "stringpool.h"
34 #include "calls.h"
35 #include "rtl.h"
36 #include "regs.h"
37 #include "hard-reg-set.h"
38 #include "insn-config.h"
39 #include "conditions.h"
40 #include "output.h"
41 #include "dbxout.h"
42 #include "insn-attr.h"
43 #include "flags.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "statistics.h"
47 #include "expmed.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "emit-rtl.h"
51 #include "stmt.h"
52 #include "expr.h"
53 #include "recog.h"
54 #include "diagnostic-core.h"
55 #include "ggc.h"
56 #include "dominance.h"
57 #include "cfg.h"
58 #include "cfgrtl.h"
59 #include "cfganal.h"
60 #include "lcm.h"
61 #include "cfgbuild.h"
62 #include "cfgcleanup.h"
63 #include "predict.h"
64 #include "basic-block.h"
65 #include "df.h"
66 #include "tm_p.h"
67 #include "target.h"
68 #include "target-def.h"
69 #include "tm-constrs.h"
70 #include "opts.h"
71 #include "builtins.h"
72
73 /* Array of valid operand punctuation characters. */
74 static char m32r_punct_chars[256];
75
76 /* Machine-specific symbol_ref flags. */
77 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
78 #define SYMBOL_REF_MODEL(X) \
79 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
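
/* For illustration: the two SYMBOL_FLAG_MODEL bits hold an enum m32r_model
   value; m32r_encode_section_info below effectively does

     SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= model << SYMBOL_FLAG_MODEL_SHIFT;

   and SYMBOL_REF_MODEL extracts that value again. */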
80
81 /* For string literals, etc. */
82 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
83
84 /* Forward declarations. */
85 static void m32r_option_override (void);
86 static void init_reg_tables (void);
87 static void block_move_call (rtx, rtx, rtx);
88 static int m32r_is_insn (rtx);
89 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
90 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
91 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
92 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
93 static void m32r_print_operand (FILE *, rtx, int);
94 static void m32r_print_operand_address (FILE *, rtx);
95 static bool m32r_print_operand_punct_valid_p (unsigned char code);
96 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
97 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
98
99 static void m32r_file_start (void);
100
101 static int m32r_adjust_priority (rtx_insn *, int);
102 static int m32r_issue_rate (void);
103
104 static void m32r_encode_section_info (tree, rtx, int);
105 static bool m32r_in_small_data_p (const_tree);
106 static bool m32r_return_in_memory (const_tree, const_tree);
107 static rtx m32r_function_value (const_tree, const_tree, bool);
108 static rtx m32r_libcall_value (machine_mode, const_rtx);
109 static bool m32r_function_value_regno_p (const unsigned int);
110 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
111 tree, int *, int);
112 static void init_idents (void);
113 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
114 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
115 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
116 const_tree, bool);
117 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
118 tree, bool);
119 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
120 const_tree, bool);
121 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
122 const_tree, bool);
123 static bool m32r_can_eliminate (const int, const int);
124 static void m32r_conditional_register_usage (void);
125 static void m32r_trampoline_init (rtx, tree, rtx);
126 static bool m32r_legitimate_constant_p (machine_mode, rtx);
127 \f
128 /* M32R specific attributes. */
129
130 static const struct attribute_spec m32r_attribute_table[] =
131 {
132 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
133 affects_type_identity } */
134 { "interrupt", 0, 0, true, false, false, NULL, false },
135 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
136 false },
137 { NULL, 0, 0, false, false, false, NULL, false }
138 };
139 \f
140 /* Initialize the GCC target structure. */
141 #undef TARGET_ATTRIBUTE_TABLE
142 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
143
144 #undef TARGET_LEGITIMATE_ADDRESS_P
145 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
146 #undef TARGET_LEGITIMIZE_ADDRESS
147 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
148 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
149 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
150
151 #undef TARGET_ASM_ALIGNED_HI_OP
152 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
153 #undef TARGET_ASM_ALIGNED_SI_OP
154 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
155
156 #undef TARGET_PRINT_OPERAND
157 #define TARGET_PRINT_OPERAND m32r_print_operand
158 #undef TARGET_PRINT_OPERAND_ADDRESS
159 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
160 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
161 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
162
163 #undef TARGET_ASM_FUNCTION_PROLOGUE
164 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
165 #undef TARGET_ASM_FUNCTION_EPILOGUE
166 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
167
168 #undef TARGET_ASM_FILE_START
169 #define TARGET_ASM_FILE_START m32r_file_start
170
171 #undef TARGET_SCHED_ADJUST_PRIORITY
172 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
173 #undef TARGET_SCHED_ISSUE_RATE
174 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
175
176 #undef TARGET_OPTION_OVERRIDE
177 #define TARGET_OPTION_OVERRIDE m32r_option_override
178
179 #undef TARGET_ENCODE_SECTION_INFO
180 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
181 #undef TARGET_IN_SMALL_DATA_P
182 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
183
184
185 #undef TARGET_MEMORY_MOVE_COST
186 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
187 #undef TARGET_RTX_COSTS
188 #define TARGET_RTX_COSTS m32r_rtx_costs
189 #undef TARGET_ADDRESS_COST
190 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
191
192 #undef TARGET_PROMOTE_PROTOTYPES
193 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
194 #undef TARGET_RETURN_IN_MEMORY
195 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
196
197 #undef TARGET_FUNCTION_VALUE
198 #define TARGET_FUNCTION_VALUE m32r_function_value
199 #undef TARGET_LIBCALL_VALUE
200 #define TARGET_LIBCALL_VALUE m32r_libcall_value
201 #undef TARGET_FUNCTION_VALUE_REGNO_P
202 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
203
204 #undef TARGET_SETUP_INCOMING_VARARGS
205 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
206 #undef TARGET_MUST_PASS_IN_STACK
207 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
208 #undef TARGET_PASS_BY_REFERENCE
209 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
210 #undef TARGET_ARG_PARTIAL_BYTES
211 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
212 #undef TARGET_FUNCTION_ARG
213 #define TARGET_FUNCTION_ARG m32r_function_arg
214 #undef TARGET_FUNCTION_ARG_ADVANCE
215 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
216
217 #undef TARGET_CAN_ELIMINATE
218 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
219
220 #undef TARGET_CONDITIONAL_REGISTER_USAGE
221 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
222
223 #undef TARGET_TRAMPOLINE_INIT
224 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
225
226 #undef TARGET_LEGITIMATE_CONSTANT_P
227 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
228
229 struct gcc_target targetm = TARGET_INITIALIZER;
230 \f
231 /* Called by m32r_option_override to initialize various things. */
232
233 void
234 m32r_init (void)
235 {
236 init_reg_tables ();
237
238 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
239 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
240 m32r_punct_chars['#'] = 1;
241 m32r_punct_chars['@'] = 1; /* ??? no longer used */
242
243 /* Provide default value if not specified. */
244 if (!global_options_set.x_g_switch_value)
245 g_switch_value = SDATA_DEFAULT_SIZE;
246 }
247
248 static void
249 m32r_option_override (void)
250 {
251 /* These need to be done at start up.
252 It's convenient to do them here. */
253 m32r_init ();
254 SUBTARGET_OVERRIDE_OPTIONS;
255 }
256
257 /* Vectors to keep interesting information about registers where it can easily
258 be accessed. We used to use the actual mode value as the bit number, but there
259 are (or may be) more than 32 modes now. Instead we use two tables: one
260 indexed by hard register number, and one indexed by mode. */
261
262 /* The purpose of m32r_mode_class is to shrink the range of modes so that
263 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
264 mapped into one m32r_mode_class mode. */
265
266 enum m32r_mode_class
267 {
268 C_MODE,
269 S_MODE, D_MODE, T_MODE, O_MODE,
270 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
271 };
272
273 /* Modes for condition codes. */
274 #define C_MODES (1 << (int) C_MODE)
275
276 /* Modes for single-word and smaller quantities. */
277 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
278
279 /* Modes for double-word and smaller quantities. */
280 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
281
282 /* Modes for quad-word and smaller quantities. */
283 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
284
285 /* Modes for accumulators. */
286 #define A_MODES (1 << (int) A_MODE)
287
288 /* Value is 1 if the register/mode pair is acceptable on the M32R. */
289
290 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
291 {
292 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
293 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
294 S_MODES, C_MODES, A_MODES, A_MODES
295 };
296
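/* A sketch of how the two tables are meant to be combined, assuming the
   usual HARD_REGNO_MODE_OK definition in m32r.h:

     HARD_REGNO_MODE_OK (REGNO, MODE)
       == ((m32r_hard_regno_mode_ok[REGNO] & m32r_mode_class[MODE]) != 0)

   so, for example, SImode (class S_MODE) is allowed in every general
   register, while CCmode (class C_MODE) is only allowed in the
   condition-bit register. */
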
297 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
298
299 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
300
301 static void
302 init_reg_tables (void)
303 {
304 int i;
305
306 for (i = 0; i < NUM_MACHINE_MODES; i++)
307 {
308 machine_mode m = (machine_mode) i;
309
310 switch (GET_MODE_CLASS (m))
311 {
312 case MODE_INT:
313 case MODE_PARTIAL_INT:
314 case MODE_COMPLEX_INT:
315 if (GET_MODE_SIZE (m) <= 4)
316 m32r_mode_class[i] = 1 << (int) S_MODE;
317 else if (GET_MODE_SIZE (m) == 8)
318 m32r_mode_class[i] = 1 << (int) D_MODE;
319 else if (GET_MODE_SIZE (m) == 16)
320 m32r_mode_class[i] = 1 << (int) T_MODE;
321 else if (GET_MODE_SIZE (m) == 32)
322 m32r_mode_class[i] = 1 << (int) O_MODE;
323 else
324 m32r_mode_class[i] = 0;
325 break;
326 case MODE_FLOAT:
327 case MODE_COMPLEX_FLOAT:
328 if (GET_MODE_SIZE (m) <= 4)
329 m32r_mode_class[i] = 1 << (int) SF_MODE;
330 else if (GET_MODE_SIZE (m) == 8)
331 m32r_mode_class[i] = 1 << (int) DF_MODE;
332 else if (GET_MODE_SIZE (m) == 16)
333 m32r_mode_class[i] = 1 << (int) TF_MODE;
334 else if (GET_MODE_SIZE (m) == 32)
335 m32r_mode_class[i] = 1 << (int) OF_MODE;
336 else
337 m32r_mode_class[i] = 0;
338 break;
339 case MODE_CC:
340 m32r_mode_class[i] = 1 << (int) C_MODE;
341 break;
342 default:
343 m32r_mode_class[i] = 0;
344 break;
345 }
346 }
347
348 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
349 {
350 if (GPR_P (i))
351 m32r_regno_reg_class[i] = GENERAL_REGS;
352 else if (i == ARG_POINTER_REGNUM)
353 m32r_regno_reg_class[i] = GENERAL_REGS;
354 else
355 m32r_regno_reg_class[i] = NO_REGS;
356 }
357 }
358 \f
359 /* M32R specific attribute support.
360
361 interrupt - for interrupt functions
362
363 model - select code model used to access object
364
365 small: addresses use 24 bits, use bl to make calls
366 medium: addresses use 32 bits, use bl to make calls
367 large: addresses use 32 bits, use seth/add3/jl to make calls
368
369 Grep for MODEL in m32r.h for more info. */
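
/* Usage illustration (hypothetical declarations, not part of this file):

     void handler (void) __attribute__ ((interrupt));
     extern int huge_table[] __attribute__ ((model (large)));
     static int near_counter __attribute__ ((model (small)));

   The model argument is the bare identifier small, medium or large
   (or the __small__ / __medium__ / __large__ spellings), as checked by
   m32r_handle_model_attribute below. */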
370
371 static tree small_ident1;
372 static tree small_ident2;
373 static tree medium_ident1;
374 static tree medium_ident2;
375 static tree large_ident1;
376 static tree large_ident2;
377
378 static void
379 init_idents (void)
380 {
381 if (small_ident1 == 0)
382 {
383 small_ident1 = get_identifier ("small");
384 small_ident2 = get_identifier ("__small__");
385 medium_ident1 = get_identifier ("medium");
386 medium_ident2 = get_identifier ("__medium__");
387 large_ident1 = get_identifier ("large");
388 large_ident2 = get_identifier ("__large__");
389 }
390 }
391
392 /* Handle a "model" attribute; arguments as in
393 struct attribute_spec.handler. */
394 static tree
395 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
396 tree args, int flags ATTRIBUTE_UNUSED,
397 bool *no_add_attrs)
398 {
399 tree arg;
400
401 init_idents ();
402 arg = TREE_VALUE (args);
403
404 if (arg != small_ident1
405 && arg != small_ident2
406 && arg != medium_ident1
407 && arg != medium_ident2
408 && arg != large_ident1
409 && arg != large_ident2)
410 {
411 warning (OPT_Wattributes, "invalid argument of %qs attribute",
412 IDENTIFIER_POINTER (name));
413 *no_add_attrs = true;
414 }
415
416 return NULL_TREE;
417 }
418 \f
419 /* Encode section information of DECL, which is either a VAR_DECL,
420 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
421
422 For the M32R we want to record:
423
424 - whether the object lives in .sdata/.sbss.
425 - what code model should be used to access the object
426 */
427
428 static void
429 m32r_encode_section_info (tree decl, rtx rtl, int first)
430 {
431 int extra_flags = 0;
432 tree model_attr;
433 enum m32r_model model;
434
435 default_encode_section_info (decl, rtl, first);
436
437 if (!DECL_P (decl))
438 return;
439
440 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
441 if (model_attr)
442 {
443 tree id;
444
445 init_idents ();
446
447 id = TREE_VALUE (TREE_VALUE (model_attr));
448
449 if (id == small_ident1 || id == small_ident2)
450 model = M32R_MODEL_SMALL;
451 else if (id == medium_ident1 || id == medium_ident2)
452 model = M32R_MODEL_MEDIUM;
453 else if (id == large_ident1 || id == large_ident2)
454 model = M32R_MODEL_LARGE;
455 else
456 gcc_unreachable (); /* shouldn't happen */
457 }
458 else
459 {
460 if (TARGET_MODEL_SMALL)
461 model = M32R_MODEL_SMALL;
462 else if (TARGET_MODEL_MEDIUM)
463 model = M32R_MODEL_MEDIUM;
464 else if (TARGET_MODEL_LARGE)
465 model = M32R_MODEL_LARGE;
466 else
467 gcc_unreachable (); /* shouldn't happen */
468 }
469 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
470
471 if (extra_flags)
472 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
473 }
474
475 /* Only mark the object as being small data area addressable if
476 it hasn't been explicitly marked with a code model.
477
478 The user can explicitly put an object in the small data area with the
479 section attribute. If the object is in sdata/sbss and marked with a
480 code model do both [put the object in .sdata and mark it as being
481 addressed with a specific code model - don't mark it as being addressed
482 with an SDA reloc though]. This is ok and might be useful at times. If
483 the object doesn't fit the linker will give an error. */
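
/* Illustration (hypothetical declarations): with the default options, a
   non-const definition such as

     static int counter;

   is treated as small-data addressable by the function below when its size
   is at most g_switch_value (which m32r_init defaults to
   SDATA_DEFAULT_SIZE), a definition such as

     int table[64] __attribute__ ((section (".sdata")));

   is always accepted regardless of size, and anything carrying a "model"
   attribute is never treated as small data here. */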
484
485 static bool
486 m32r_in_small_data_p (const_tree decl)
487 {
488 const char *section;
489
490 if (TREE_CODE (decl) != VAR_DECL)
491 return false;
492
493 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
494 return false;
495
496 section = DECL_SECTION_NAME (decl);
497 if (section)
498 {
499 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
500 return true;
501 }
502 else
503 {
504 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
505 {
506 int size = int_size_in_bytes (TREE_TYPE (decl));
507
508 if (size > 0 && size <= g_switch_value)
509 return true;
510 }
511 }
512
513 return false;
514 }
515
516 /* Do anything needed before RTL is emitted for each function. */
517
518 void
519 m32r_init_expanders (void)
520 {
521 /* ??? At one point there was code here. The function is left in
522 to make it easy to experiment. */
523 }
524 \f
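/* call_operand (below) accepts a MEM whose address satisfies
   call_address_operand; it is presumably referenced by the call patterns
   in m32r.md. */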
525 int
526 call_operand (rtx op, machine_mode mode)
527 {
528 if (!MEM_P (op))
529 return 0;
530 op = XEXP (op, 0);
531 return call_address_operand (op, mode);
532 }
533
534 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
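/* For illustration, the two accepted shapes are a plain small-data symbol
   and such a symbol plus a small offset, e.g.

     (symbol_ref "foo")                              with SYMBOL_REF_SMALL_P set
     (const (plus (symbol_ref "foo") (const_int 4))) with the offset satisfying
                                                     constraint 'J'

   and nothing is accepted at all unless TARGET_SDATA_USE is enabled. */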
535
536 int
537 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
538 {
539 if (! TARGET_SDATA_USE)
540 return 0;
541
542 if (GET_CODE (op) == SYMBOL_REF)
543 return SYMBOL_REF_SMALL_P (op);
544
545 if (GET_CODE (op) == CONST
546 && GET_CODE (XEXP (op, 0)) == PLUS
547 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
548 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
549 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
550
551 return 0;
552 }
553
554 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
555
556 int
557 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
558 {
559 rtx sym;
560
561 if (flag_pic)
562 return 0;
563
564 if (GET_CODE (op) == LABEL_REF)
565 return TARGET_ADDR24;
566
567 if (GET_CODE (op) == SYMBOL_REF)
568 sym = op;
569 else if (GET_CODE (op) == CONST
570 && GET_CODE (XEXP (op, 0)) == PLUS
571 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
572 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
573 sym = XEXP (XEXP (op, 0), 0);
574 else
575 return 0;
576
577 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
578 return 1;
579
580 if (TARGET_ADDR24
581 && (CONSTANT_POOL_ADDRESS_P (sym)
582 || LIT_NAME_P (XSTR (sym, 0))))
583 return 1;
584
585 return 0;
586 }
587
588 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
589
590 int
591 addr32_operand (rtx op, machine_mode mode)
592 {
593 rtx sym;
594
595 if (GET_CODE (op) == LABEL_REF)
596 return TARGET_ADDR32;
597
598 if (GET_CODE (op) == SYMBOL_REF)
599 sym = op;
600 else if (GET_CODE (op) == CONST
601 && GET_CODE (XEXP (op, 0)) == PLUS
602 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
603 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
604 && ! flag_pic)
605 sym = XEXP (XEXP (op, 0), 0);
606 else
607 return 0;
608
609 return (! addr24_operand (sym, mode)
610 && ! small_data_operand (sym, mode));
611 }
612
613 /* Return 1 if OP is a function that can be called with the `bl' insn. */
614
615 int
616 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
617 {
618 if (flag_pic)
619 return 1;
620
621 if (GET_CODE (op) == SYMBOL_REF)
622 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
623
624 return TARGET_CALL26;
625 }
626
627 /* Return 1 if OP is a DImode const we want to handle inline.
628 This must match the code in the movdi pattern.
629 It is used by the 'G' constraint. */
630
631 int
632 easy_di_const (rtx op)
633 {
634 rtx high_rtx, low_rtx;
635 HOST_WIDE_INT high, low;
636
637 split_double (op, &high_rtx, &low_rtx);
638 high = INTVAL (high_rtx);
639 low = INTVAL (low_rtx);
640 /* Pick constants whose 32-bit halves are each loadable with a short `ldi' insn (8-bit immediate). */
641 if (high >= -128 && high <= 127
642 && low >= -128 && low <= 127)
643 return 1;
644 return 0;
645 }
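
/* A worked example: the DImode constant 0x0000003000000020 splits into the
   32-bit halves 0x30 and 0x20, both within [-128, 127], so easy_di_const
   returns 1; a value such as 0x0001000000000000 has a half equal to
   0x10000, fails the range test, and returns 0. */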
646
647 /* Return 1 if OP is a DFmode const we want to handle inline.
648 This must match the code in the movdf pattern.
649 It is used by the 'H' constraint. */
650
651 int
652 easy_df_const (rtx op)
653 {
654 REAL_VALUE_TYPE r;
655 long l[2];
656
657 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
658 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
659 if (l[0] == 0 && l[1] == 0)
660 return 1;
661 if ((l[0] & 0xffff) == 0 && l[1] == 0)
662 return 1;
663 return 0;
664 }
665
666 /* Return 1 if OP is (mem (reg ...)).
667 This is used in insn length calcs. */
668
669 int
670 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
671 {
672 return MEM_P (op) && REG_P (XEXP (op, 0));
673 }
674
675 /* Return nonzero if TYPE must be passed by indirect reference. */
676
677 static bool
678 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
679 machine_mode mode, const_tree type,
680 bool named ATTRIBUTE_UNUSED)
681 {
682 int size;
683
684 if (type)
685 size = int_size_in_bytes (type);
686 else
687 size = GET_MODE_SIZE (mode);
688
689 return (size < 0 || size > 8);
690 }
691 \f
692 /* Comparisons. */
693
694 /* X and Y are two things to compare using CODE. Emit the compare insn and
695 return the rtx for the comparison [arg0 of the if_then_else].
696 If need_compare is true then the comparison insn must be generated, rather
697 than being subsumed into the following branch instruction. */
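/* A worked illustration of the mapping below: for a signed x <= y with both
   operands in registers and need_compare false, LE maps to
   compare_code = LT, branch_code = EQ and must_swap = 1, so gen_compare
   emits cmp_ltsi_insn (y, x), which sets the condition bit to (y < x), and
   returns an EQ test of that bit against zero, i.e. "branch when the bit is
   clear", which is exactly x <= y. */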
698
699 rtx
700 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
701 {
702 enum rtx_code compare_code;
703 enum rtx_code branch_code;
704 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
705 int must_swap = 0;
706
707 switch (code)
708 {
709 case EQ: compare_code = EQ; branch_code = NE; break;
710 case NE: compare_code = EQ; branch_code = EQ; break;
711 case LT: compare_code = LT; branch_code = NE; break;
712 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
713 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
714 case GE: compare_code = LT; branch_code = EQ; break;
715 case LTU: compare_code = LTU; branch_code = NE; break;
716 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
717 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
718 case GEU: compare_code = LTU; branch_code = EQ; break;
719
720 default:
721 gcc_unreachable ();
722 }
723
724 if (need_compare)
725 {
726 switch (compare_code)
727 {
728 case EQ:
729 if (satisfies_constraint_P (y) /* Reg equal to small const. */
730 && y != const0_rtx)
731 {
732 rtx tmp = gen_reg_rtx (SImode);
733
734 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
735 x = tmp;
736 y = const0_rtx;
737 }
738 else if (CONSTANT_P (y)) /* Reg equal to const. */
739 {
740 rtx tmp = force_reg (GET_MODE (x), y);
741 y = tmp;
742 }
743
744 if (register_operand (y, SImode) /* Reg equal to reg. */
745 || y == const0_rtx) /* Reg equal to zero. */
746 {
747 emit_insn (gen_cmp_eqsi_insn (x, y));
748
749 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
750 }
751 break;
752
753 case LT:
754 if (register_operand (y, SImode)
755 || satisfies_constraint_P (y))
756 {
757 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
758
759 switch (code)
760 {
761 case LT:
762 emit_insn (gen_cmp_ltsi_insn (x, y));
763 code = EQ;
764 break;
765 case LE:
766 if (y == const0_rtx)
767 tmp = const1_rtx;
768 else
769 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
770 emit_insn (gen_cmp_ltsi_insn (x, tmp));
771 code = EQ;
772 break;
773 case GT:
774 if (CONST_INT_P (y))
775 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
776 else
777 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
778 emit_insn (gen_cmp_ltsi_insn (x, tmp));
779 code = NE;
780 break;
781 case GE:
782 emit_insn (gen_cmp_ltsi_insn (x, y));
783 code = NE;
784 break;
785 default:
786 gcc_unreachable ();
787 }
788
789 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
790 }
791 break;
792
793 case LTU:
794 if (register_operand (y, SImode)
795 || satisfies_constraint_P (y))
796 {
797 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
798
799 switch (code)
800 {
801 case LTU:
802 emit_insn (gen_cmp_ltusi_insn (x, y));
803 code = EQ;
804 break;
805 case LEU:
806 if (y == const0_rtx)
807 tmp = const1_rtx;
808 else
809 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
810 emit_insn (gen_cmp_ltusi_insn (x, tmp));
811 code = EQ;
812 break;
813 case GTU:
814 if (CONST_INT_P (y))
815 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
816 else
817 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
818 emit_insn (gen_cmp_ltusi_insn (x, tmp));
819 code = NE;
820 break;
821 case GEU:
822 emit_insn (gen_cmp_ltusi_insn (x, y));
823 code = NE;
824 break;
825 default:
826 gcc_unreachable ();
827 }
828
829 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
830 }
831 break;
832
833 default:
834 gcc_unreachable ();
835 }
836 }
837 else
838 {
839 /* Reg/reg equal comparison. */
840 if (compare_code == EQ
841 && register_operand (y, SImode))
842 return gen_rtx_fmt_ee (code, CCmode, x, y);
843
844 /* Reg/zero signed comparison. */
845 if ((compare_code == EQ || compare_code == LT)
846 && y == const0_rtx)
847 return gen_rtx_fmt_ee (code, CCmode, x, y);
848
849 /* Reg/smallconst equal comparison. */
850 if (compare_code == EQ
851 && satisfies_constraint_P (y))
852 {
853 rtx tmp = gen_reg_rtx (SImode);
854
855 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
856 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
857 }
858
859 /* Reg/const equal comparison. */
860 if (compare_code == EQ
861 && CONSTANT_P (y))
862 {
863 rtx tmp = force_reg (GET_MODE (x), y);
864
865 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
866 }
867 }
868
869 if (CONSTANT_P (y))
870 {
871 if (must_swap)
872 y = force_reg (GET_MODE (x), y);
873 else
874 {
875 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
876
877 if (! ok_const)
878 y = force_reg (GET_MODE (x), y);
879 }
880 }
881
882 switch (compare_code)
883 {
884 case EQ :
885 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
886 break;
887 case LT :
888 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
889 break;
890 case LTU :
891 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
892 break;
893
894 default:
895 gcc_unreachable ();
896 }
897
898 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
899 }
900
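/* gen_cond_store (below) expands a store-condition operation, setting OP0
   to the SImode truth value of OP1 <CODE> OP2. Operands are forced into
   registers as the individual set-condition patterns require, GT/GTU are
   rewritten as LT/LTU with swapped operands, and LE/LEU use an adjusted
   constant or the sle/sleu patterns. It returns true when it emitted the
   insns; some NE cases return false (e.g. during reload), and the caller
   must then expand the comparison another way. */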
901 bool
902 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
903 {
904 machine_mode mode = GET_MODE (op0);
905
906 gcc_assert (mode == SImode);
907 switch (code)
908 {
909 case EQ:
910 if (!register_operand (op1, mode))
911 op1 = force_reg (mode, op1);
912
913 if (TARGET_M32RX || TARGET_M32R2)
914 {
915 if (!reg_or_zero_operand (op2, mode))
916 op2 = force_reg (mode, op2);
917
918 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
919 return true;
920 }
921 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
922 {
923 emit_insn (gen_seq_zero_insn (op0, op1));
924 return true;
925 }
926
927 if (!reg_or_eq_int16_operand (op2, mode))
928 op2 = force_reg (mode, op2);
929
930 emit_insn (gen_seq_insn (op0, op1, op2));
931 return true;
932
933 case NE:
934 if (!CONST_INT_P (op2)
935 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
936 {
937 rtx reg;
938
939 if (reload_completed || reload_in_progress)
940 return false;
941
942 reg = gen_reg_rtx (SImode);
943 emit_insn (gen_xorsi3 (reg, op1, op2));
944 op1 = reg;
945
946 if (!register_operand (op1, mode))
947 op1 = force_reg (mode, op1);
948
949 emit_insn (gen_sne_zero_insn (op0, op1));
950 return true;
951 }
952 return false;
953
954 case LT:
955 case GT:
956 if (code == GT)
957 {
958 rtx tmp = op2;
959 op2 = op1;
960 op1 = tmp;
961 code = LT;
962 }
963
964 if (!register_operand (op1, mode))
965 op1 = force_reg (mode, op1);
966
967 if (!reg_or_int16_operand (op2, mode))
968 op2 = force_reg (mode, op2);
969
970 emit_insn (gen_slt_insn (op0, op1, op2));
971 return true;
972
973 case LTU:
974 case GTU:
975 if (code == GTU)
976 {
977 rtx tmp = op2;
978 op2 = op1;
979 op1 = tmp;
980 code = LTU;
981 }
982
983 if (!register_operand (op1, mode))
984 op1 = force_reg (mode, op1);
985
986 if (!reg_or_int16_operand (op2, mode))
987 op2 = force_reg (mode, op2);
988
989 emit_insn (gen_sltu_insn (op0, op1, op2));
990 return true;
991
992 case GE:
993 case GEU:
994 if (!register_operand (op1, mode))
995 op1 = force_reg (mode, op1);
996
997 if (!reg_or_int16_operand (op2, mode))
998 op2 = force_reg (mode, op2);
999
1000 if (code == GE)
1001 emit_insn (gen_sge_insn (op0, op1, op2));
1002 else
1003 emit_insn (gen_sgeu_insn (op0, op1, op2));
1004 return true;
1005
1006 case LE:
1007 case LEU:
1008 if (!register_operand (op1, mode))
1009 op1 = force_reg (mode, op1);
1010
1011 if (CONST_INT_P (op2))
1012 {
1013 HOST_WIDE_INT value = INTVAL (op2);
1014 if (value >= 2147483647)
1015 {
1016 emit_move_insn (op0, const1_rtx);
1017 return true;
1018 }
1019
1020 op2 = GEN_INT (value + 1);
1021 if (value < -32768 || value >= 32767)
1022 op2 = force_reg (mode, op2);
1023
1024 if (code == LEU)
1025 emit_insn (gen_sltu_insn (op0, op1, op2));
1026 else
1027 emit_insn (gen_slt_insn (op0, op1, op2));
1028 return true;
1029 }
1030
1031 if (!register_operand (op2, mode))
1032 op2 = force_reg (mode, op2);
1033
1034 if (code == LEU)
1035 emit_insn (gen_sleu_insn (op0, op1, op2));
1036 else
1037 emit_insn (gen_sle_insn (op0, op1, op2));
1038 return true;
1039
1040 default:
1041 gcc_unreachable ();
1042 }
1043 }
1044
1045 \f
1046 /* Split a two-word move (DImode or DFmode) into its component parts. */
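/* An illustrative sketch: a register-to-register DImode move such as

     (set (reg:DI 4) (reg:DI 6))

   is split into the two SImode sets r4 = r6 and r5 = r7; the order of the
   two sets is reversed when the first destination register is the same as
   the second (still needed) source register, e.g. when moving a DImode
   value from r3/r4 into r4/r5. */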
1047
1048 rtx
1049 gen_split_move_double (rtx operands[])
1050 {
1051 machine_mode mode = GET_MODE (operands[0]);
1052 rtx dest = operands[0];
1053 rtx src = operands[1];
1054 rtx val;
1055
1056 /* We might have (SUBREG (MEM)) here, so just get rid of the
1057 subregs to make this code simpler. It is safe to call
1058 alter_subreg any time after reload. */
1059 if (GET_CODE (dest) == SUBREG)
1060 alter_subreg (&dest, true);
1061 if (GET_CODE (src) == SUBREG)
1062 alter_subreg (&src, true);
1063
1064 start_sequence ();
1065 if (REG_P (dest))
1066 {
1067 int dregno = REGNO (dest);
1068
1069 /* Reg = reg. */
1070 if (REG_P (src))
1071 {
1072 int sregno = REGNO (src);
1073
1074 int reverse = (dregno == sregno + 1);
1075
1076 /* We normally copy the low-numbered register first. However, if
1077 the first register of operand 0 is the same as the second register of
1078 operand 1, we must copy in the opposite order. */
1079 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1080 operand_subword (src, reverse, TRUE, mode)));
1081
1082 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1083 operand_subword (src, !reverse, TRUE, mode)));
1084 }
1085
1086 /* Reg = constant. */
1087 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1088 {
1089 rtx words[2];
1090 split_double (src, &words[0], &words[1]);
1091 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1092 words[0]));
1093
1094 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1095 words[1]));
1096 }
1097
1098 /* Reg = mem. */
1099 else if (MEM_P (src))
1100 {
1101 /* If the high-address word is used in the address, we must load it
1102 last. Otherwise, load it first. */
1103 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1104
1105 /* We used to optimize loads from single registers as
1106
1107 ld r1,r3+; ld r2,r3
1108
1109 if r3 were not used subsequently. However, the REG_NOTES aren't
1110 propagated correctly by the reload phase, and it can cause bad
1111 code to be generated. We could still try:
1112
1113 ld r1,r3+; ld r2,r3; addi r3,-4
1114
1115 which saves 2 bytes and doesn't force longword alignment. */
1116 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1117 adjust_address (src, SImode,
1118 reverse * UNITS_PER_WORD)));
1119
1120 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1121 adjust_address (src, SImode,
1122 !reverse * UNITS_PER_WORD)));
1123 }
1124 else
1125 gcc_unreachable ();
1126 }
1127
1128 /* Mem = reg. */
1129 /* We used to optimize stores through a single base register as
1130
1131 st r1,r3; st r2,+r3
1132
1133 if r3 were not used subsequently. However, the REG_NOTES aren't
1134 propagated correctly by the reload phase, and it can cause bad
1135 code to be generated. We could still try:
1136
1137 st r1,r3; st r2,+r3; addi r3,-4
1138
1139 which saves 2 bytes and doesn't force longword alignment. */
1140 else if (MEM_P (dest) && REG_P (src))
1141 {
1142 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1143 operand_subword (src, 0, TRUE, mode)));
1144
1145 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1146 operand_subword (src, 1, TRUE, mode)));
1147 }
1148
1149 else
1150 gcc_unreachable ();
1151
1152 val = get_insns ();
1153 end_sequence ();
1154 return val;
1155 }
1156
1157 \f
1158 static int
1159 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1160 tree type, bool named ATTRIBUTE_UNUSED)
1161 {
1162 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1163
1164 int words;
1165 unsigned int size =
1166 (((mode == BLKmode && type)
1167 ? (unsigned int) int_size_in_bytes (type)
1168 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1169 / UNITS_PER_WORD;
1170
1171 if (*cum >= M32R_MAX_PARM_REGS)
1172 words = 0;
1173 else if (*cum + size > M32R_MAX_PARM_REGS)
1174 words = (*cum + size) - M32R_MAX_PARM_REGS;
1175 else
1176 words = 0;
1177
1178 return words * UNITS_PER_WORD;
1179 }
1180
1181 /* The ROUND_ADVANCE* macros are local to this file. */
1182 /* Round SIZE up to a word boundary. */
1183 #define ROUND_ADVANCE(SIZE) \
1184 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1185
1186 /* Round arg MODE/TYPE up to the next word boundary. */
1187 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1188 ((MODE) == BLKmode \
1189 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1190 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1191
1192 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1193 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1194
1195 /* Return nonzero if an arg of type TYPE and mode MODE will be passed in
1196 a reg. This includes arguments that have to be passed by reference as the
1197 pointer to them is passed in a reg if one is available (and that is what
1198 we're given).
1199 This macro is only used in this file. */
1200 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1201 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1202
1203 /* Determine where to put an argument to a function.
1204 Value is zero to push the argument on the stack,
1205 or a hard register in which to store the argument.
1206
1207 MODE is the argument's machine mode.
1208 TYPE is the data type of the argument (as a tree).
1209 This is null for libcalls where that information may
1210 not be available.
1211 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1212 the preceding args and about the function being called.
1213 NAMED is nonzero if this argument is a named parameter
1214 (otherwise it is an extra parameter matching an ellipsis). */
1215 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1216 and the rest are pushed. */
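/* For illustration, assuming M32R_MAX_PARM_REGS is 4 (r0-r3), its usual
   m32r.h value: for

     int f (int a, long long b, int c, int d);

   a is passed in r0, b in r1/r2, c in r3, and d is pushed on the stack,
   since ROUND_ADVANCE_CUM does no extra alignment and each argument simply
   occupies the next ROUND_ADVANCE_ARG words. */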
1217
1218 static rtx
1219 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1220 const_tree type ATTRIBUTE_UNUSED,
1221 bool named ATTRIBUTE_UNUSED)
1222 {
1223 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1224
1225 return (PASS_IN_REG_P (*cum, mode, type)
1226 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1227 : NULL_RTX);
1228 }
1229
1230 /* Update the data in CUM to advance over an argument
1231 of mode MODE and data type TYPE.
1232 (TYPE is null for libcalls where that information may not be available.) */
1233
1234 static void
1235 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1236 const_tree type, bool named ATTRIBUTE_UNUSED)
1237 {
1238 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1239
1240 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1241 + ROUND_ADVANCE_ARG (mode, type));
1242 }
1243
1244 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1245
1246 static bool
1247 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1248 {
1249 cumulative_args_t dummy = pack_cumulative_args (NULL);
1250
1251 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1252 }
1253
1254 /* Worker function for TARGET_FUNCTION_VALUE. */
1255
1256 static rtx
1257 m32r_function_value (const_tree valtype,
1258 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1259 bool outgoing ATTRIBUTE_UNUSED)
1260 {
1261 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1262 }
1263
1264 /* Worker function for TARGET_LIBCALL_VALUE. */
1265
1266 static rtx
1267 m32r_libcall_value (machine_mode mode,
1268 const_rtx fun ATTRIBUTE_UNUSED)
1269 {
1270 return gen_rtx_REG (mode, 0);
1271 }
1272
1273 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1274
1275 ??? What about r1 in DI/DF values. */
1276
1277 static bool
1278 m32r_function_value_regno_p (const unsigned int regno)
1279 {
1280 return (regno == 0);
1281 }
1282
1283 /* Do any needed setup for a variadic function. For the M32R, we must
1284 create a register parameter block, and then copy any anonymous arguments
1285 in registers to memory.
1286
1287 CUM has not been updated for the last named argument which has type TYPE
1288 and mode MODE, and we rely on this fact. */
1289
1290 static void
1291 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1292 tree type, int *pretend_size, int no_rtl)
1293 {
1294 int first_anon_arg;
1295
1296 if (no_rtl)
1297 return;
1298
1299 /* All BLKmode values are passed by reference. */
1300 gcc_assert (mode != BLKmode);
1301
1302 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1303 + ROUND_ADVANCE_ARG (mode, type));
1304
1305 if (first_anon_arg < M32R_MAX_PARM_REGS)
1306 {
1307 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1308 int first_reg_offset = first_anon_arg;
1309 /* Size in words to "pretend" allocate. */
1310 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1311 rtx regblock;
1312
1313 regblock = gen_frame_mem (BLKmode,
1314 plus_constant (Pmode, arg_pointer_rtx,
1315 FIRST_PARM_OFFSET (0)));
1316 set_mem_alias_set (regblock, get_varargs_alias_set ());
1317 move_block_from_reg (first_reg_offset, regblock, size);
1318
1319 *pretend_size = (size * UNITS_PER_WORD);
1320 }
1321 }
1322
1323 \f
1324 /* Return true if INSN is a real instruction, i.e. a non-debug insn whose pattern is not a USE or CLOBBER. */
1325
1326 static int
1327 m32r_is_insn (rtx insn)
1328 {
1329 return (NONDEBUG_INSN_P (insn)
1330 && GET_CODE (PATTERN (insn)) != USE
1331 && GET_CODE (PATTERN (insn)) != CLOBBER);
1332 }
1333
1334 /* Increase the priority of long instructions so that the
1335 short instructions are scheduled ahead of the long ones. */
1336
1337 static int
1338 m32r_adjust_priority (rtx_insn *insn, int priority)
1339 {
1340 if (m32r_is_insn (insn)
1341 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1342 priority <<= 3;
1343
1344 return priority;
1345 }
1346
1347 \f
1348 /* Indicate how many instructions can be issued at the same time.
1349 This is sort of a lie. The m32r can issue only 1 long insn at
1350 once, but it can issue 2 short insns. The default therefore is
1351 set at 2, but this can be overridden by the command line option
1352 -missue-rate=1. */
1353
1354 static int
1355 m32r_issue_rate (void)
1356 {
1357 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1358 }
1359 \f
1360 /* Cost functions. */
1361 /* Memory is 3 times as expensive as registers.
1362 ??? Is that the right way to look at it? */
1363
1364 static int
1365 m32r_memory_move_cost (machine_mode mode,
1366 reg_class_t rclass ATTRIBUTE_UNUSED,
1367 bool in ATTRIBUTE_UNUSED)
1368 {
1369 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1370 return 6;
1371 else
1372 return 12;
1373 }
1374
1375 static bool
1376 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1377 int opno ATTRIBUTE_UNUSED, int *total,
1378 bool speed ATTRIBUTE_UNUSED)
1379 {
1380 switch (code)
1381 {
1382 /* Small integers are as cheap as registers. 4 byte values can be
1383 fetched as immediate constants - let's give that the cost of an
1384 extra insn. */
1385 case CONST_INT:
1386 if (INT16_P (INTVAL (x)))
1387 {
1388 *total = 0;
1389 return true;
1390 }
1391 /* FALLTHRU */
1392
1393 case CONST:
1394 case LABEL_REF:
1395 case SYMBOL_REF:
1396 *total = COSTS_N_INSNS (1);
1397 return true;
1398
1399 case CONST_DOUBLE:
1400 {
1401 rtx high, low;
1402
1403 split_double (x, &high, &low);
1404 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1405 + !INT16_P (INTVAL (low)));
1406 return true;
1407 }
1408
1409 case MULT:
1410 *total = COSTS_N_INSNS (3);
1411 return true;
1412
1413 case DIV:
1414 case UDIV:
1415 case MOD:
1416 case UMOD:
1417 *total = COSTS_N_INSNS (10);
1418 return true;
1419
1420 default:
1421 return false;
1422 }
1423 }
1424 \f
1425 /* Type of function DECL.
1426
1427 The result is cached. To reset the cache at the end of a function,
1428 call with DECL = NULL_TREE. */
1429
1430 enum m32r_function_type
1431 m32r_compute_function_type (tree decl)
1432 {
1433 /* Cached value. */
1434 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1435 /* Last function we were called for. */
1436 static tree last_fn = NULL_TREE;
1437
1438 /* Resetting the cached value? */
1439 if (decl == NULL_TREE)
1440 {
1441 fn_type = M32R_FUNCTION_UNKNOWN;
1442 last_fn = NULL_TREE;
1443 return fn_type;
1444 }
1445
1446 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1447 return fn_type;
1448
1449 /* Compute function type. */
1450 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1451 ? M32R_FUNCTION_INTERRUPT
1452 : M32R_FUNCTION_NORMAL);
1453
1454 last_fn = decl;
1455 return fn_type;
1456 }
1457 \f/* Function prologue/epilogue handlers. */
1458
1459 /* M32R stack frames look like:
1460
1461 Before call After call
1462 +-----------------------+ +-----------------------+
1463 | | | |
1464 high | local variables, | | local variables, |
1465 mem | reg save area, etc. | | reg save area, etc. |
1466 | | | |
1467 +-----------------------+ +-----------------------+
1468 | | | |
1469 | arguments on stack. | | arguments on stack. |
1470 | | | |
1471 SP+0->+-----------------------+ +-----------------------+
1472 | reg parm save area, |
1473 | only created for |
1474 | variable argument |
1475 | functions |
1476 +-----------------------+
1477 | previous frame ptr |
1478 +-----------------------+
1479 | |
1480 | register save area |
1481 | |
1482 +-----------------------+
1483 | return address |
1484 +-----------------------+
1485 | |
1486 | local variables |
1487 | |
1488 +-----------------------+
1489 | |
1490 | alloca allocations |
1491 | |
1492 +-----------------------+
1493 | |
1494 low | arguments on stack |
1495 memory | |
1496 SP+0->+-----------------------+
1497
1498 Notes:
1499 1) The "reg parm save area" does not exist for non-variadic fns.
1500 2) The "reg parm save area" can be eliminated completely if we saved regs
1501 containing anonymous args separately but that complicates things too
1502 much (so it's not done).
1503 3) The return address is saved after the register save area so as to have as
1504 many insns as possible between the restoration of `lr' and the `jmp lr'. */
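
/* A rough illustration of how m32r_compute_frame_size fills in the
   structure below: for a non-varargs function with 20 bytes of locals that
   must save fp, lr and one call-saved register, var_size =
   M32R_STACK_ALIGN (20), reg_size = 3 * UNITS_PER_WORD, gmask has one bit
   set (fp and lr are tracked by save_fp / save_lr instead), and total_size
   is the stack-aligned sum of extra_size, args_size, var_size and
   reg_size. */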
1505
1506 /* Structure to be filled in by m32r_compute_frame_size with register
1507 save masks, and offsets for the current function. */
1508 struct m32r_frame_info
1509 {
1510 unsigned int total_size; /* # bytes that the entire frame takes up. */
1511 unsigned int extra_size; /* # bytes of extra stuff. */
1512 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1513 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1514 unsigned int reg_size; /* # bytes needed to store regs. */
1515 unsigned int var_size; /* # bytes that variables take up. */
1516 unsigned int gmask; /* Mask of saved gp registers. */
1517 unsigned int save_fp; /* Nonzero if fp must be saved. */
1518 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1519 int initialized; /* Nonzero if frame size already calculated. */
1520 };
1521
1522 /* Current frame information calculated by m32r_compute_frame_size. */
1523 static struct m32r_frame_info current_frame_info;
1524
1525 /* Zero structure to initialize current_frame_info. */
1526 static struct m32r_frame_info zero_frame_info;
1527
1528 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1529 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1530
1531 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1532 The return address and frame pointer are treated separately.
1533 Don't consider them here. */
1534 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1535 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1536 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1537
1538 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1539 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1540
1541 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1542 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1543
1544 /* Return the bytes needed to compute the frame pointer from the current
1545 stack pointer.
1546
1547 SIZE is the size needed for local variables. */
1548
1549 unsigned int
1550 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1551 {
1552 unsigned int regno;
1553 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1554 unsigned int reg_size;
1555 unsigned int gmask;
1556 enum m32r_function_type fn_type;
1557 int interrupt_p;
1558 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1559 | crtl->profile);
1560
1561 var_size = M32R_STACK_ALIGN (size);
1562 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1563 pretend_size = crtl->args.pretend_args_size;
1564 extra_size = FIRST_PARM_OFFSET (0);
1565 total_size = extra_size + pretend_size + args_size + var_size;
1566 reg_size = 0;
1567 gmask = 0;
1568
1569 /* See if this is an interrupt handler. Call used registers must be saved
1570 for them too. */
1571 fn_type = m32r_compute_function_type (current_function_decl);
1572 interrupt_p = M32R_INTERRUPT_P (fn_type);
1573
1574 /* Calculate space needed for registers. */
1575 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1576 {
1577 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1578 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1579 {
1580 reg_size += UNITS_PER_WORD;
1581 gmask |= 1 << regno;
1582 }
1583 }
1584
1585 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1586 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1587
1588 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1589 * UNITS_PER_WORD);
1590 total_size += reg_size;
1591
1592 /* ??? Not sure this is necessary, and I don't think the epilogue
1593 handler will do the right thing if this changes total_size. */
1594 total_size = M32R_STACK_ALIGN (total_size);
1595
1596 /* frame_size = total_size - (pretend_size + reg_size); */
1597
1598 /* Save computed information. */
1599 current_frame_info.total_size = total_size;
1600 current_frame_info.extra_size = extra_size;
1601 current_frame_info.pretend_size = pretend_size;
1602 current_frame_info.var_size = var_size;
1603 current_frame_info.args_size = args_size;
1604 current_frame_info.reg_size = reg_size;
1605 current_frame_info.gmask = gmask;
1606 current_frame_info.initialized = reload_completed;
1607
1608 /* Ok, we're done. */
1609 return total_size;
1610 }
1611
1612 /* Worker function for TARGET_CAN_ELIMINATE. */
1613
1614 bool
1615 m32r_can_eliminate (const int from, const int to)
1616 {
1617 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1618 ? ! frame_pointer_needed
1619 : true);
1620 }
1621
1622 \f
1623 /* The table we use to reference PIC data. */
1624 static rtx global_offset_table;
1625
1626 static void
1627 m32r_reload_lr (rtx sp, int size)
1628 {
1629 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1630
1631 if (size == 0)
1632 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1633 else if (size < 32768)
1634 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1635 gen_rtx_PLUS (Pmode, sp,
1636 GEN_INT (size)))));
1637 else
1638 {
1639 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1640
1641 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1642 emit_insn (gen_addsi3 (tmp, tmp, sp));
1643 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1644 }
1645
1646 emit_use (lr);
1647 }
1648
1649 void
1650 m32r_load_pic_register (void)
1651 {
1652 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1653 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1654 GEN_INT (TARGET_MODEL_SMALL)));
1655
1656 /* Need to emit this whether or not we obey regdecls,
1657 since setjmp/longjmp can cause life info to screw up. */
1658 emit_use (pic_offset_table_rtx);
1659 }
1660
1661 /* Expand the m32r prologue as a series of insns. */
1662
1663 void
1664 m32r_expand_prologue (void)
1665 {
1666 int regno;
1667 int frame_size;
1668 unsigned int gmask;
1669 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1670 | crtl->profile);
1671
1672 if (! current_frame_info.initialized)
1673 m32r_compute_frame_size (get_frame_size ());
1674
1675 gmask = current_frame_info.gmask;
1676
1677 /* These cases shouldn't happen. Catch them now. */
1678 gcc_assert (current_frame_info.total_size || !gmask);
1679
1680 /* Allocate space for register arguments if this is a variadic function. */
1681 if (current_frame_info.pretend_size != 0)
1682 {
1683 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1684 the wrong result on a 64-bit host. */
1685 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1686 emit_insn (gen_addsi3 (stack_pointer_rtx,
1687 stack_pointer_rtx,
1688 GEN_INT (-pretend_size)));
1689 }
1690
1691 /* Save any registers we need to and set up fp. */
1692 if (current_frame_info.save_fp)
1693 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1694
1695 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1696
1697 /* Save any needed call-saved regs (and call-used if this is an
1698 interrupt handler). */
1699 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1700 {
1701 if ((gmask & (1 << regno)) != 0)
1702 emit_insn (gen_movsi_push (stack_pointer_rtx,
1703 gen_rtx_REG (Pmode, regno)));
1704 }
1705
1706 if (current_frame_info.save_lr)
1707 emit_insn (gen_movsi_push (stack_pointer_rtx,
1708 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1709
1710 /* Allocate the stack frame. */
1711 frame_size = (current_frame_info.total_size
1712 - (current_frame_info.pretend_size
1713 + current_frame_info.reg_size));
1714
1715 if (frame_size == 0)
1716 ; /* Nothing to do. */
1717 else if (frame_size <= 32768)
1718 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1719 GEN_INT (-frame_size)));
1720 else
1721 {
1722 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1723
1724 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1725 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1726 }
1727
1728 if (frame_pointer_needed)
1729 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1730
1731 if (crtl->profile)
1732 /* Push lr for mcount (form_pc, x). */
1733 emit_insn (gen_movsi_push (stack_pointer_rtx,
1734 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1735
1736 if (pic_reg_used)
1737 {
1738 m32r_load_pic_register ();
1739 m32r_reload_lr (stack_pointer_rtx,
1740 (crtl->profile ? 0 : frame_size));
1741 }
1742
1743 if (crtl->profile && !pic_reg_used)
1744 emit_insn (gen_blockage ());
1745 }
1746
1747 \f
1748 /* Set up the stack and frame pointer (if desired) for the function.
1749 Note, if this is changed, you need to mirror the changes in
1750 m32r_compute_frame_size which calculates the prolog size. */
1751
1752 static void
1753 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1754 {
1755 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1756
1757 /* If this is an interrupt handler, mark it as such. */
1758 if (M32R_INTERRUPT_P (fn_type))
1759 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1760
1761 if (! current_frame_info.initialized)
1762 m32r_compute_frame_size (size);
1763
1764 /* This is only for the human reader. */
1765 fprintf (file,
1766 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1767 ASM_COMMENT_START,
1768 current_frame_info.var_size,
1769 current_frame_info.reg_size / 4,
1770 current_frame_info.args_size,
1771 current_frame_info.extra_size);
1772 }
1773 \f
1774 /* Output RTL to pop register REGNO from the stack. */
1775
1776 static void
1777 pop (int regno)
1778 {
1779 rtx x;
1780
1781 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1782 stack_pointer_rtx));
1783 add_reg_note (x, REG_INC, stack_pointer_rtx);
1784 }
1785
1786 /* Expand the m32r epilogue as a series of insns. */
1787
1788 void
1789 m32r_expand_epilogue (void)
1790 {
1791 int regno;
1792 int noepilogue = FALSE;
1793 int total_size;
1794
1795 gcc_assert (current_frame_info.initialized);
1796 total_size = current_frame_info.total_size;
1797
1798 if (total_size == 0)
1799 {
1800 rtx insn = get_last_insn ();
1801
1802 /* If the last insn was a BARRIER, we don't have to write any code
1803 because a jump (aka return) was put there. */
1804 if (insn && NOTE_P (insn))
1805 insn = prev_nonnote_insn (insn);
1806 if (insn && BARRIER_P (insn))
1807 noepilogue = TRUE;
1808 }
1809
1810 if (!noepilogue)
1811 {
1812 unsigned int var_size = current_frame_info.var_size;
1813 unsigned int args_size = current_frame_info.args_size;
1814 unsigned int gmask = current_frame_info.gmask;
1815 int can_trust_sp_p = !cfun->calls_alloca;
1816
1817 if (flag_exceptions)
1818 emit_insn (gen_blockage ());
1819
1820 /* The first thing to do is point the sp at the bottom of the register
1821 save area. */
1822 if (can_trust_sp_p)
1823 {
1824 unsigned int reg_offset = var_size + args_size;
1825
1826 if (reg_offset == 0)
1827 ; /* Nothing to do. */
1828 else if (reg_offset < 32768)
1829 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1830 GEN_INT (reg_offset)));
1831 else
1832 {
1833 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1834
1835 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1836 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1837 tmp));
1838 }
1839 }
1840 else if (frame_pointer_needed)
1841 {
1842 unsigned int reg_offset = var_size + args_size;
1843
1844 if (reg_offset == 0)
1845 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1846 else if (reg_offset < 32768)
1847 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1848 GEN_INT (reg_offset)));
1849 else
1850 {
1851 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1852
1853 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1854 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1855 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1856 tmp));
1857 }
1858 }
1859 else
1860 gcc_unreachable ();
1861
1862 if (current_frame_info.save_lr)
1863 pop (RETURN_ADDR_REGNUM);
1864
1865 /* Restore any saved registers, in reverse order of course. */
1866 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1867 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1868 {
1869 if ((gmask & (1L << regno)) != 0)
1870 pop (regno);
1871 }
1872
1873 if (current_frame_info.save_fp)
1874 pop (FRAME_POINTER_REGNUM);
1875
1876 /* Remove varargs area if present. */
1877 if (current_frame_info.pretend_size != 0)
1878 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1879 GEN_INT (current_frame_info.pretend_size)));
1880
1881 emit_insn (gen_blockage ());
1882 }
1883 }
1884
1885 /* Do any necessary cleanup after a function to restore stack, frame,
1886 and regs. */
1887
1888 static void
1889 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1890 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1891 {
1892 /* Reset state info for each function. */
1893 current_frame_info = zero_frame_info;
1894 m32r_compute_function_type (NULL_TREE);
1895 }
1896 \f
1897 /* Return nonzero if this function is known to have a null or 1 instruction
1898 epilogue. */
1899
1900 int
1901 direct_return (void)
1902 {
1903 if (!reload_completed)
1904 return FALSE;
1905
1906 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1907 return FALSE;
1908
1909 if (! current_frame_info.initialized)
1910 m32r_compute_frame_size (get_frame_size ());
1911
1912 return current_frame_info.total_size == 0;
1913 }
1914
1915 \f
1916 /* PIC. */
1917
1918 int
1919 m32r_legitimate_pic_operand_p (rtx x)
1920 {
1921 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1922 return 0;
1923
1924 if (GET_CODE (x) == CONST
1925 && GET_CODE (XEXP (x, 0)) == PLUS
1926 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1927 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1928 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1929 return 0;
1930
1931 return 1;
1932 }
1933
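/* Legitimize PIC address ORIG, using REG as a scratch/result register when
   one is supplied.  A rough outline of the cases handled below: labels and
   symbols local to this module are reached through a GOTOFF-style offset
   added to the PIC register; other symbols are loaded through their GOT
   slot; and CONST (PLUS symbol constant) expressions are split apart and
   legitimized piecewise.  */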
1934 rtx
1935 m32r_legitimize_pic_address (rtx orig, rtx reg)
1936 {
1937 #ifdef DEBUG_PIC
1938 printf ("m32r_legitimize_pic_address()\n");
1939 #endif
1940
1941 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1942 {
1943 rtx pic_ref, address;
1944 int subregs = 0;
1945
1946 if (reg == 0)
1947 {
1948 gcc_assert (!reload_in_progress && !reload_completed);
1949 reg = gen_reg_rtx (Pmode);
1950
1951 subregs = 1;
1952 }
1953
1954 if (subregs)
1955 address = gen_reg_rtx (Pmode);
1956 else
1957 address = reg;
1958
1959 crtl->uses_pic_offset_table = 1;
1960
1961 if (GET_CODE (orig) == LABEL_REF
1962 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1963 {
1964 emit_insn (gen_gotoff_load_addr (reg, orig));
1965 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1966 return reg;
1967 }
1968
1969 emit_insn (gen_pic_load_addr (address, orig));
1970
1971 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1972 pic_ref = gen_const_mem (Pmode, address);
1973 emit_move_insn (reg, pic_ref);
1974 return reg;
1975 }
1976 else if (GET_CODE (orig) == CONST)
1977 {
1978 rtx base, offset;
1979
1980 if (GET_CODE (XEXP (orig, 0)) == PLUS
1981 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1982 return orig;
1983
1984 if (reg == 0)
1985 {
1986 gcc_assert (!reload_in_progress && !reload_completed);
1987 reg = gen_reg_rtx (Pmode);
1988 }
1989
1990 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1991 {
1992 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1993 if (base == reg)
1994 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1995 else
1996 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1997 }
1998 else
1999 return orig;
2000
2001 if (CONST_INT_P (offset))
2002 {
2003 if (INT16_P (INTVAL (offset)))
2004 return plus_constant (Pmode, base, INTVAL (offset));
2005 else
2006 {
2007 gcc_assert (! reload_in_progress && ! reload_completed);
2008 offset = force_reg (Pmode, offset);
2009 }
2010 }
2011
2012 return gen_rtx_PLUS (Pmode, base, offset);
2013 }
2014
2015 return orig;
2016 }
2017
2018 static rtx
2019 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2020 machine_mode mode ATTRIBUTE_UNUSED)
2021 {
2022 if (flag_pic)
2023 return m32r_legitimize_pic_address (x, NULL_RTX);
2024 else
2025 return x;
2026 }
2027
2028 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2029
2030 static bool
2031 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2032 {
2033 if (GET_CODE (addr) == LO_SUM)
2034 return true;
2035
2036 return false;
2037 }
2038 \f
2039 /* Nested function support. */
2040
2041 /* Emit RTL insns to initialize the variable parts of a trampoline.
2042 FNADDR is an RTX for the address of the function's pure code.
2043 CXT is an RTX for the static chain value for the function. This version is a no-op; the actual initialization is done by m32r_trampoline_init below. */
2044
2045 void
2046 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2047 rtx fnaddr ATTRIBUTE_UNUSED,
2048 rtx cxt ATTRIBUTE_UNUSED)
2049 {
2050 }
2051 \f
2052 static void
2053 m32r_file_start (void)
2054 {
2055 default_file_start ();
2056
2057 if (flag_verbose_asm)
2058 fprintf (asm_out_file,
2059 "%s M32R/D special options: -G %d\n",
2060 ASM_COMMENT_START, g_switch_value);
2061
2062 if (TARGET_LITTLE_ENDIAN)
2063 fprintf (asm_out_file, "\t.little\n");
2064 }
2065 \f
2066 /* Print operand X (an rtx) in assembler syntax to file FILE.
2067 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2068 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2069
2070 static void
2071 m32r_print_operand (FILE * file, rtx x, int code)
2072 {
2073 rtx addr;
2074
2075 switch (code)
2076 {
2077 /* The 's' and 'p' codes are used by m32r_output_block_move () to
2078 indicate pre-increment 's'tores and 'p'ost-increment loads. */
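/* For example (register name is illustrative): with r4 as the operand,
   %s prints "@+r4" (the pre-increment store form) and %p prints "@r4+"
   (the post-increment load form).  */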
2079 case 's':
2080 if (REG_P (x))
2081 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2082 else
2083 output_operand_lossage ("invalid operand to %%s code");
2084 return;
2085
2086 case 'p':
2087 if (REG_P (x))
2088 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2089 else
2090 output_operand_lossage ("invalid operand to %%p code");
2091 return;
2092
2093 case 'R' :
2094 /* Write second word of DImode or DFmode reference,
2095 register or memory. */
2096 if (REG_P (x))
2097 fputs (reg_names[REGNO (x)+1], file);
2098 else if (MEM_P (x))
2099 {
2100 fprintf (file, "@(");
2101 /* Handle possible auto-increment. Since it is pre-increment and
2102 we have already done it, we can just use an offset of four. */
2103 /* ??? This is taken from rs6000.c I think. I don't think it is
2104 currently necessary, but keep it around. */
2105 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2106 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2107 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2108 else
2109 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2110 fputc (')', file);
2111 }
2112 else
2113 output_operand_lossage ("invalid operand to %%R code");
2114 return;
2115
2116 case 'H' : /* High word. */
2117 case 'L' : /* Low word. */
2118 if (REG_P (x))
2119 {
2120 /* L = least significant word, H = most significant word. */
2121 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2122 fputs (reg_names[REGNO (x)], file);
2123 else
2124 fputs (reg_names[REGNO (x)+1], file);
2125 }
2126 else if (CONST_INT_P (x)
2127 || GET_CODE (x) == CONST_DOUBLE)
2128 {
2129 rtx first, second;
2130
2131 split_double (x, &first, &second);
2132 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2133 code == 'L' ? INTVAL (first) : INTVAL (second));
2134 }
2135 else
2136 output_operand_lossage ("invalid operand to %%H/%%L code");
2137 return;
2138
2139 case 'A' :
2140 {
2141 char str[30];
2142
2143 if (GET_CODE (x) != CONST_DOUBLE
2144 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2145 fatal_insn ("bad insn for 'A'", x);
2146
2147 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2148 fprintf (file, "%s", str);
2149 return;
2150 }
2151
2152 case 'B' : /* Bottom half. */
2153 case 'T' : /* Top half. */
2154 /* Output the argument to a `seth' insn (sets the Top half-word).
2155 For constants output arguments to a seth/or3 pair to set Top and
2156 Bottom halves. For symbols output arguments to a seth/add3 pair to
2157 set Top and Bottom halves. The difference exists because for
2158 constants seth/or3 is more readable but for symbols we need to use
2159 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
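/* Illustrative examples: for the constant 0x12345678, %T prints 0x1234 and
   %B prints 0x5678, suitable for a seth/or3 pair; for a symbol sym, %T
   prints "shigh(sym)" and %B prints "low(sym)", or "sda(sym)" when sym is
   a small-data symbol.  */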
2160 switch (GET_CODE (x))
2161 {
2162 case CONST_INT :
2163 case CONST_DOUBLE :
2164 {
2165 rtx first, second;
2166
2167 split_double (x, &first, &second);
2168 x = WORDS_BIG_ENDIAN ? second : first;
2169 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2170 (code == 'B'
2171 ? INTVAL (x) & 0xffff
2172 : (INTVAL (x) >> 16) & 0xffff));
2173 }
2174 return;
2175 case CONST :
2176 case SYMBOL_REF :
2177 if (code == 'B'
2178 && small_data_operand (x, VOIDmode))
2179 {
2180 fputs ("sda(", file);
2181 output_addr_const (file, x);
2182 fputc (')', file);
2183 return;
2184 }
2185 /* fall through */
2186 case LABEL_REF :
2187 fputs (code == 'T' ? "shigh(" : "low(", file);
2188 output_addr_const (file, x);
2189 fputc (')', file);
2190 return;
2191 default :
2192 output_operand_lossage ("invalid operand to %%T/%%B code");
2193 return;
2194 }
2195 break;
2196
2197 case 'U' :
2198 /* ??? wip */
2199 /* Output a load/store with update indicator if appropriate. */
2200 if (MEM_P (x))
2201 {
2202 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2203 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2204 fputs (".a", file);
2205 }
2206 else
2207 output_operand_lossage ("invalid operand to %%U code");
2208 return;
2209
2210 case 'N' :
2211 /* Print a constant value negated. */
2212 if (CONST_INT_P (x))
2213 output_addr_const (file, GEN_INT (- INTVAL (x)));
2214 else
2215 output_operand_lossage ("invalid operand to %%N code");
2216 return;
2217
2218 case 'X' :
2219 /* Print a const_int in hex. Used in comments. */
2220 if (CONST_INT_P (x))
2221 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2222 return;
2223
2224 case '#' :
2225 fputs (IMMEDIATE_PREFIX, file);
2226 return;
2227
2228 case 0 :
2229 /* Do nothing special. */
2230 break;
2231
2232 default :
2233 /* Unknown flag. */
2234 output_operand_lossage ("invalid operand output code");
2235 }
2236
2237 switch (GET_CODE (x))
2238 {
2239 case REG :
2240 fputs (reg_names[REGNO (x)], file);
2241 break;
2242
2243 case MEM :
2244 addr = XEXP (x, 0);
2245 if (GET_CODE (addr) == PRE_INC)
2246 {
2247 if (!REG_P (XEXP (addr, 0)))
2248 fatal_insn ("pre-increment address is not a register", x);
2249
2250 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2251 }
2252 else if (GET_CODE (addr) == PRE_DEC)
2253 {
2254 if (!REG_P (XEXP (addr, 0)))
2255 fatal_insn ("pre-decrement address is not a register", x);
2256
2257 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2258 }
2259 else if (GET_CODE (addr) == POST_INC)
2260 {
2261 if (!REG_P (XEXP (addr, 0)))
2262 fatal_insn ("post-increment address is not a register", x);
2263
2264 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2265 }
2266 else
2267 {
2268 fputs ("@(", file);
2269 output_address (XEXP (x, 0));
2270 fputc (')', file);
2271 }
2272 break;
2273
2274 case CONST_DOUBLE :
2275 /* We handle SFmode constants here as output_addr_const doesn't. */
2276 if (GET_MODE (x) == SFmode)
2277 {
2278 REAL_VALUE_TYPE d;
2279 long l;
2280
2281 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2282 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2283 fprintf (file, "0x%08lx", l);
2284 break;
2285 }
2286
2287 /* Fall through. Let output_addr_const deal with it. */
2288
2289 default :
2290 output_addr_const (file, x);
2291 break;
2292 }
2293 }
2294
2295 /* Print a memory address as an operand to reference that memory location. */
2296
2297 static void
2298 m32r_print_operand_address (FILE * file, rtx addr)
2299 {
2300 rtx base;
2301 rtx index = 0;
2302 int offset = 0;
2303
2304 switch (GET_CODE (addr))
2305 {
2306 case REG :
2307 fputs (reg_names[REGNO (addr)], file);
2308 break;
2309
2310 case PLUS :
2311 if (CONST_INT_P (XEXP (addr, 0)))
2312 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2313 else if (CONST_INT_P (XEXP (addr, 1)))
2314 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2315 else
2316 base = XEXP (addr, 0), index = XEXP (addr, 1);
2317 if (REG_P (base))
2318 {
2319 /* Print the offset first (if present) to conform to the manual. */
2320 if (index == 0)
2321 {
2322 if (offset != 0)
2323 fprintf (file, "%d,", offset);
2324 fputs (reg_names[REGNO (base)], file);
2325 }
2326 /* The chip doesn't support this, but left in for generality. */
2327 else if (REG_P (index))
2328 fprintf (file, "%s,%s",
2329 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2330 /* Not sure this can happen, but leave in for now. */
2331 else if (GET_CODE (index) == SYMBOL_REF)
2332 {
2333 output_addr_const (file, index);
2334 fputc (',', file);
2335 fputs (reg_names[REGNO (base)], file);
2336 }
2337 else
2338 fatal_insn ("bad address", addr);
2339 }
2340 else if (GET_CODE (base) == LO_SUM)
2341 {
2342 gcc_assert (!index && REG_P (XEXP (base, 0)));
2343 if (small_data_operand (XEXP (base, 1), VOIDmode))
2344 fputs ("sda(", file);
2345 else
2346 fputs ("low(", file);
2347 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2348 offset));
2349 fputs ("),", file);
2350 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2351 }
2352 else
2353 fatal_insn ("bad address", addr);
2354 break;
2355
2356 case LO_SUM :
2357 if (!REG_P (XEXP (addr, 0)))
2358 fatal_insn ("lo_sum not of register", addr);
2359 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2360 fputs ("sda(", file);
2361 else
2362 fputs ("low(", file);
2363 output_addr_const (file, XEXP (addr, 1));
2364 fputs ("),", file);
2365 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2366 break;
2367
2368 case PRE_INC : /* Assume SImode. */
2369 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2370 break;
2371
2372 case PRE_DEC : /* Assume SImode. */
2373 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2374 break;
2375
2376 case POST_INC : /* Assume SImode. */
2377 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2378 break;
2379
2380 default :
2381 output_addr_const (file, addr);
2382 break;
2383 }
2384 }
2385
2386 static bool
2387 m32r_print_operand_punct_valid_p (unsigned char code)
2388 {
2389 return m32r_punct_chars[code];
2390 }
2391
2392 /* Return true if the operands are the constants 0 and 1. */
2393
2394 int
2395 zero_and_one (rtx operand1, rtx operand2)
2396 {
2397 return
2398 CONST_INT_P (operand1)
2399 && CONST_INT_P (operand2)
2400 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2401 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2402 }
2403
2404 /* Generate the correct assembler code to handle the conditional loading of a
2405 value into a register. It is known that the operands satisfy the
2406 conditional_move_operand() function above. The destination is operand[0].
2407 The condition is operand [1]. The 'true' value is operand [2] and the
2408 'false' value is operand [3]. */
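/* A rough sketch of the output (rD stands for the destination register):
   "mvfc rD, cbr" copies the condition bit into rD, optionally followed by
   "xor3 rD, rD, #1" to invert it when the 'true' value is zero.  */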
2409
2410 char *
2411 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2412 {
2413 static char buffer [100];
2414 const char * dest = reg_names [REGNO (operands [0])];
2415
2416 buffer [0] = 0;
2417
2418 /* Destination must be a register. */
2419 gcc_assert (REG_P (operands [0]));
2420 gcc_assert (conditional_move_operand (operands [2], SImode));
2421 gcc_assert (conditional_move_operand (operands [3], SImode));
2422
2423 /* Check to see if the test is reversed. */
2424 if (GET_CODE (operands [1]) == NE)
2425 {
2426 rtx tmp = operands [2];
2427 operands [2] = operands [3];
2428 operands [3] = tmp;
2429 }
2430
2431 sprintf (buffer, "mvfc %s, cbr", dest);
2432
2433 /* If the true value was '0' then we need to invert the result of the move. */
2434 if (INTVAL (operands [2]) == 0)
2435 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2436 dest, dest);
2437
2438 return buffer;
2439 }
2440
2441 /* Returns true if the registers contained in the two
2442 rtl expressions are different. */
2443
2444 int
2445 m32r_not_same_reg (rtx a, rtx b)
2446 {
2447 int reg_a = -1;
2448 int reg_b = -2;
2449
2450 while (GET_CODE (a) == SUBREG)
2451 a = SUBREG_REG (a);
2452
2453 if (REG_P (a))
2454 reg_a = REGNO (a);
2455
2456 while (GET_CODE (b) == SUBREG)
2457 b = SUBREG_REG (b);
2458
2459 if (REG_P (b))
2460 reg_b = REGNO (b);
2461
2462 return reg_a != reg_b;
2463 }
2464
2465 \f
2466 rtx
2467 m32r_function_symbol (const char *name)
2468 {
2469 int extra_flags = 0;
2470 enum m32r_model model;
2471 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2472
2473 if (TARGET_MODEL_SMALL)
2474 model = M32R_MODEL_SMALL;
2475 else if (TARGET_MODEL_MEDIUM)
2476 model = M32R_MODEL_MEDIUM;
2477 else if (TARGET_MODEL_LARGE)
2478 model = M32R_MODEL_LARGE;
2479 else
2480 gcc_unreachable (); /* Shouldn't happen. */
2481 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2482
2483 if (extra_flags)
2484 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2485
2486 return sym;
2487 }
2488
2489 /* Use a library function to move some bytes. */
2490
2491 static void
2492 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2493 {
2494 /* We want to pass the size as Pmode, which will normally be SImode
2495 but will be DImode if we are using 64-bit longs and pointers. */
2496 if (GET_MODE (bytes_rtx) != VOIDmode
2497 && GET_MODE (bytes_rtx) != Pmode)
2498 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2499
2500 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2501 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2502 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2503 TYPE_UNSIGNED (sizetype)),
2504 TYPE_MODE (sizetype));
2505 }
2506
2507 /* Expand string/block move operations.
2508
2509 operands[0] is the pointer to the destination.
2510 operands[1] is the pointer to the source.
2511 operands[2] is the number of bytes to move.
2512 operands[3] is the alignment.
2513
2514 Returns 1 upon success, 0 otherwise. */
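/* A sketch of the strategy used below, assuming a constant, word-aligned
   size: chunks of up to MAX_MOVE_BYTES are handed to the movmemsi_internal
   pattern, with a loop wrapped around it for larger copies and one final
   movmemsi_internal for any leftover bytes; everything else falls back to
   a memcpy library call.  */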
2515
2516 int
2517 m32r_expand_block_move (rtx operands[])
2518 {
2519 rtx orig_dst = operands[0];
2520 rtx orig_src = operands[1];
2521 rtx bytes_rtx = operands[2];
2522 rtx align_rtx = operands[3];
2523 int constp = CONST_INT_P (bytes_rtx);
2524 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2525 int align = INTVAL (align_rtx);
2526 int leftover;
2527 rtx src_reg;
2528 rtx dst_reg;
2529
2530 if (constp && bytes <= 0)
2531 return 1;
2532
2533 /* Move the address into scratch registers. */
2534 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2535 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2536
2537 if (align > UNITS_PER_WORD)
2538 align = UNITS_PER_WORD;
2539
2540 /* If we prefer size over speed, always use a function call.
2541 If we do not know the size, use a function call.
2542 If the blocks are not word aligned, use a function call. */
2543 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2544 {
2545 block_move_call (dst_reg, src_reg, bytes_rtx);
2546 return 0;
2547 }
2548
2549 leftover = bytes % MAX_MOVE_BYTES;
2550 bytes -= leftover;
2551
2552 /* If necessary, generate a loop to handle the bulk of the copy. */
2553 if (bytes)
2554 {
2555 rtx_code_label *label = NULL;
2556 rtx final_src = NULL_RTX;
2557 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2558 rtx rounded_total = GEN_INT (bytes);
2559 rtx new_dst_reg = gen_reg_rtx (SImode);
2560 rtx new_src_reg = gen_reg_rtx (SImode);
2561
2562 /* If we are going to have to perform this loop more than
2563 once, then generate a label and compute the address the
2564 source register will contain upon completion of the final
2565 iteration. */
2566 if (bytes > MAX_MOVE_BYTES)
2567 {
2568 final_src = gen_reg_rtx (Pmode);
2569
2570 if (INT16_P(bytes))
2571 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2572 else
2573 {
2574 emit_insn (gen_movsi (final_src, rounded_total));
2575 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2576 }
2577
2578 label = gen_label_rtx ();
2579 emit_label (label);
2580 }
2581
2582 /* It is known that m32r_output_block_move () will update src_reg to point
2583 to the word after the end of the source block, and dst_reg to point
2584 to the last word of the destination block, provided that the block
2585 is MAX_MOVE_BYTES long. */
2586 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2587 new_dst_reg, new_src_reg));
2588 emit_move_insn (dst_reg, new_dst_reg);
2589 emit_move_insn (src_reg, new_src_reg);
2590 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2591
2592 if (bytes > MAX_MOVE_BYTES)
2593 {
2594 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2595 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2596 }
2597 }
2598
2599 if (leftover)
2600 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2601 gen_reg_rtx (SImode),
2602 gen_reg_rtx (SImode)));
2603 return 1;
2604 }
2605
2606 \f
2607 /* Emit load/stores for a small constant word aligned block_move.
2608
2609 operands[0] is the memory address of the destination.
2610 operands[1] is the memory address of the source.
2611 operands[2] is the number of bytes to move.
2612 operands[3] is a temp register.
2613 operands[4] is a temp register. */
2614
2615 void
2616 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2617 {
2618 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2619 int first_time;
2620 int got_extra = 0;
2621
2622 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2623
2624 /* We do not have a post-increment store available, so the first set of
2625 stores is done without any increment, then the remaining ones can use
2626 the pre-increment addressing mode.
2627
2628 Note: m32r_expand_block_move () also relies upon this behavior when
2629 building loops to copy large blocks. */
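/* For instance (register names are placeholders), with source pointer r1,
   destination pointer r0 and temporaries r5/r6, the first 8-byte chunk is
   roughly:
        ld   r5, @r1+
        ld   r6, @r1+
        st   r5, @r0
        st   r6, @+r0
   and subsequent chunks use the @+r0 form for both stores.  */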
2630 first_time = 1;
2631
2632 while (bytes > 0)
2633 {
2634 if (bytes >= 8)
2635 {
2636 if (first_time)
2637 {
2638 output_asm_insn ("ld\t%5, %p1", operands);
2639 output_asm_insn ("ld\t%6, %p1", operands);
2640 output_asm_insn ("st\t%5, @%0", operands);
2641 output_asm_insn ("st\t%6, %s0", operands);
2642 }
2643 else
2644 {
2645 output_asm_insn ("ld\t%5, %p1", operands);
2646 output_asm_insn ("ld\t%6, %p1", operands);
2647 output_asm_insn ("st\t%5, %s0", operands);
2648 output_asm_insn ("st\t%6, %s0", operands);
2649 }
2650
2651 bytes -= 8;
2652 }
2653 else if (bytes >= 4)
2654 {
2655 if (bytes > 4)
2656 got_extra = 1;
2657
2658 output_asm_insn ("ld\t%5, %p1", operands);
2659
2660 if (got_extra)
2661 output_asm_insn ("ld\t%6, %p1", operands);
2662
2663 if (first_time)
2664 output_asm_insn ("st\t%5, @%0", operands);
2665 else
2666 output_asm_insn ("st\t%5, %s0", operands);
2667
2668 bytes -= 4;
2669 }
2670 else
2671 {
2672 /* Get the entire next word, even though we do not want all of it.
2673 This saves us from doing several smaller loads, and we assume that
2674 we cannot cause a page fault when at least part of the word is in
2675 valid memory [since we don't get called if things aren't properly
2676 aligned]. */
2677 int dst_offset = first_time ? 0 : 4;
2678 /* The amount of increment we have to make to the
2679 destination pointer. */
2680 int dst_inc_amount = dst_offset + bytes - 4;
2681 /* The same for the source pointer. */
2682 int src_inc_amount = bytes;
2683 int last_shift;
2684 rtx my_operands[3];
2685
2686 /* If got_extra is true then we have already loaded
2687 the next word as part of loading and storing the previous word. */
2688 if (! got_extra)
2689 output_asm_insn ("ld\t%6, @%1", operands);
2690
2691 if (bytes >= 2)
2692 {
2693 bytes -= 2;
2694
2695 output_asm_insn ("sra3\t%5, %6, #16", operands);
2696 my_operands[0] = operands[5];
2697 my_operands[1] = GEN_INT (dst_offset);
2698 my_operands[2] = operands[0];
2699 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2700
2701 /* If there is a byte left to store then increment the
2702 destination address and shift the contents of the source
2703 register down by 8 bits. We could not do the address
2704 increment in the store half word instruction, because it does
2705 not have an auto increment mode. */
2706 if (bytes > 0) /* assert (bytes == 1) */
2707 {
2708 dst_offset += 2;
2709 last_shift = 8;
2710 }
2711 }
2712 else
2713 last_shift = 24;
2714
2715 if (bytes > 0)
2716 {
2717 my_operands[0] = operands[6];
2718 my_operands[1] = GEN_INT (last_shift);
2719 output_asm_insn ("srai\t%0, #%1", my_operands);
2720 my_operands[0] = operands[6];
2721 my_operands[1] = GEN_INT (dst_offset);
2722 my_operands[2] = operands[0];
2723 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2724 }
2725
2726 /* Update the destination pointer if needed. We have to do
2727 this so that the pattern matches what we output in this
2728 function. */
2729 if (dst_inc_amount
2730 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2731 {
2732 my_operands[0] = operands[0];
2733 my_operands[1] = GEN_INT (dst_inc_amount);
2734 output_asm_insn ("addi\t%0, #%1", my_operands);
2735 }
2736
2737 /* Update the source pointer if needed. We have to do this
2738 so that the pattern matches what we output in this
2739 function. */
2740 if (src_inc_amount
2741 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2742 {
2743 my_operands[0] = operands[1];
2744 my_operands[1] = GEN_INT (src_inc_amount);
2745 output_asm_insn ("addi\t%0, #%1", my_operands);
2746 }
2747
2748 bytes = 0;
2749 }
2750
2751 first_time = 0;
2752 }
2753 }
2754
2755 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2756
2757 int
2758 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2759 unsigned int new_reg)
2760 {
2761 /* Interrupt routines can't clobber any register that isn't already used. */
2762 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2763 && !df_regs_ever_live_p (new_reg))
2764 return 0;
2765
2766 return 1;
2767 }
2768
2769 rtx
2770 m32r_return_addr (int count)
2771 {
2772 if (count != 0)
2773 return const0_rtx;
2774
2775 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2776 }
2777
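/* Fill in a trampoline.  The first four words hold a fixed code template,
   stored in the appropriate endianness; word 4 receives the static chain
   value and word 5 the address of the nested function.  The instruction
   cache is then flushed, either via a trap or through the configured
   cache-flush helper function.  */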
2778 static void
2779 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2780 {
2781 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2782 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2783 0x017e8e17 : 0x178e7e01, SImode));
2784 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2785 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2786 0x0c00ae86 : 0x86ae000c, SImode));
2787 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2788 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2789 0xe627871e : 0x1e8727e6, SImode));
2790 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2791 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2792 0xc616c626 : 0x26c61fc6, SImode));
2793 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2794 chain_value);
2795 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2796 XEXP (DECL_RTL (fndecl), 0));
2797
2798 if (m32r_cache_flush_trap >= 0)
2799 emit_insn (gen_flush_icache
2800 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2801 gen_int_mode (m32r_cache_flush_trap, SImode)));
2802 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2803 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2804 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2805 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2806 GEN_INT (3), SImode);
2807 }
2808
2809 /* True if X is a reg that can be used as a base reg. */
2810
2811 static bool
2812 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2813 {
2814 if (! REG_P (x))
2815 return false;
2816
2817 if (strict)
2818 {
2819 if (GPR_P (REGNO (x)))
2820 return true;
2821 }
2822 else
2823 {
2824 if (GPR_P (REGNO (x))
2825 || REGNO (x) == ARG_POINTER_REGNUM
2826 || ! HARD_REGISTER_P (x))
2827 return true;
2828 }
2829
2830 return false;
2831 }
2832
2833 static inline bool
2834 m32r_rtx_ok_for_offset_p (const_rtx x)
2835 {
2836 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2837 }
2838
2839 static inline bool
2840 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2841 const_rtx x, bool strict)
2842 {
2843 if (GET_CODE (x) == PLUS
2844 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2845 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2846 return true;
2847
2848 return false;
2849 }
2850
2851 /* For LO_SUM addresses, do not allow them if the mode is wider than one
2852 word, since more than one instruction would be required. */
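/* For example, (lo_sum (reg) (symbol_ref "x")) is accepted for SImode and
   is printed by m32r_print_operand_address above as "low(x),rN", or
   "sda(x),rN" for small-data symbols.  */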
2853
2854 static inline bool
2855 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2856 bool strict)
2857 {
2858 if (GET_CODE (x) == LO_SUM
2859 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2860 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2861 && CONSTANT_P (XEXP (x, 1)))
2862 return true;
2863
2864 return false;
2865 }
2866
2867 /* Return true if this is a load with post-increment addressing. */
2868
2869 static inline bool
2870 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2871 {
2872 if ((mode == SImode || mode == SFmode)
2873 && GET_CODE (x) == POST_INC
2874 && REG_P (XEXP (x, 0))
2875 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2876 return true;
2877
2878 return false;
2879 }
2880
2881 /* Return true if this is a store with pre-increment or pre-decrement addressing. */
2882
2883 static inline bool
2884 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2885 {
2886 if ((mode == SImode || mode == SFmode)
2887 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2888 && REG_P (XEXP (x, 0))
2889 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2890 return true;
2891
2892 return false;
2893 }
2894
2895 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2896
2897 static bool
2898 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2899 {
2900 if (m32r_rtx_ok_for_base_p (x, strict)
2901 || m32r_legitimate_offset_addres_p (mode, x, strict)
2902 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2903 || m32r_load_postinc_p (mode, x, strict)
2904 || m32r_store_preinc_predec_p (mode, x, strict))
2905 return true;
2906
2907 return false;
2908 }
2909
2910 static void
2911 m32r_conditional_register_usage (void)
2912 {
2913 if (flag_pic)
2914 {
2915 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2916 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2917 }
2918 }
2919
2920 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2921
2922 We don't allow (plus symbol large-constant) as the relocations can't
2923 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2924 We allow all CONST_DOUBLE's as the md file patterns will force the
2925 constant to memory if they can't handle them. */
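/* For example, (const (plus (symbol_ref "foo") (const_int 0x10000))) is
   rejected, while the same form with a small positive addend such as 4 is
   accepted.  */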
2926
2927 static bool
2928 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2929 {
2930 return !(GET_CODE (x) == CONST
2931 && GET_CODE (XEXP (x, 0)) == PLUS
2932 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2933 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2934 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2935 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2936 }