1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "df.h"
28 #include "alias.h"
29 #include "stor-layout.h"
30 #include "varasm.h"
31 #include "stringpool.h"
32 #include "calls.h"
33 #include "regs.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "dbxout.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "expmed.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "emit-rtl.h"
44 #include "stmt.h"
45 #include "expr.h"
46 #include "recog.h"
47 #include "diagnostic-core.h"
48 #include "cfgrtl.h"
49 #include "cfganal.h"
50 #include "lcm.h"
51 #include "cfgbuild.h"
52 #include "cfgcleanup.h"
53 #include "tm_p.h"
54 #include "target.h"
55 #include "tm-constrs.h"
56 #include "opts.h"
57 #include "builtins.h"
58
59 /* This file should be included last. */
60 #include "target-def.h"
61
62 /* Array of valid operand punctuation characters. */
63 static char m32r_punct_chars[256];
64
65 /* Machine-specific symbol_ref flags. */
66 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
67 #define SYMBOL_REF_MODEL(X) \
68 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
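/* For example, once m32r_encode_section_info (below) has run for a decl,
   SYMBOL_REF_MODEL (XEXP (DECL_RTL (decl), 0)) recovers the code model
   (M32R_MODEL_SMALL, _MEDIUM or _LARGE) that was chosen for it.  */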
69
70 /* For string literals, etc. */
71 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
72
73 /* Forward declaration. */
74 static void m32r_option_override (void);
75 static void init_reg_tables (void);
76 static void block_move_call (rtx, rtx, rtx);
77 static int m32r_is_insn (rtx);
78 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
79 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
80 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
81 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
82 static void m32r_print_operand (FILE *, rtx, int);
83 static void m32r_print_operand_address (FILE *, rtx);
84 static bool m32r_print_operand_punct_valid_p (unsigned char code);
85 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
86 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
87
88 static void m32r_file_start (void);
89
90 static int m32r_adjust_priority (rtx_insn *, int);
91 static int m32r_issue_rate (void);
92
93 static void m32r_encode_section_info (tree, rtx, int);
94 static bool m32r_in_small_data_p (const_tree);
95 static bool m32r_return_in_memory (const_tree, const_tree);
96 static rtx m32r_function_value (const_tree, const_tree, bool);
97 static rtx m32r_libcall_value (machine_mode, const_rtx);
98 static bool m32r_function_value_regno_p (const unsigned int);
99 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
100 tree, int *, int);
101 static void init_idents (void);
102 static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
103 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
104 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
105 const_tree, bool);
106 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
107 tree, bool);
108 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
109 const_tree, bool);
110 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
111 const_tree, bool);
112 static bool m32r_can_eliminate (const int, const int);
113 static void m32r_conditional_register_usage (void);
114 static void m32r_trampoline_init (rtx, tree, rtx);
115 static bool m32r_legitimate_constant_p (machine_mode, rtx);
116 static bool m32r_attribute_identifier (const_tree);
117 \f
118 /* M32R specific attributes. */
119
120 static const struct attribute_spec m32r_attribute_table[] =
121 {
122 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
123 affects_type_identity } */
124 { "interrupt", 0, 0, true, false, false, NULL, false },
125 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
126 false },
127 { NULL, 0, 0, false, false, false, NULL, false }
128 };
129 \f
130 /* Initialize the GCC target structure. */
131 #undef TARGET_ATTRIBUTE_TABLE
132 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
133 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
134 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
135
136 #undef TARGET_LEGITIMATE_ADDRESS_P
137 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
138 #undef TARGET_LEGITIMIZE_ADDRESS
139 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
140 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
141 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
142
143 #undef TARGET_ASM_ALIGNED_HI_OP
144 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
145 #undef TARGET_ASM_ALIGNED_SI_OP
146 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
147
148 #undef TARGET_PRINT_OPERAND
149 #define TARGET_PRINT_OPERAND m32r_print_operand
150 #undef TARGET_PRINT_OPERAND_ADDRESS
151 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
152 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
153 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
154
155 #undef TARGET_ASM_FUNCTION_PROLOGUE
156 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
157 #undef TARGET_ASM_FUNCTION_EPILOGUE
158 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
159
160 #undef TARGET_ASM_FILE_START
161 #define TARGET_ASM_FILE_START m32r_file_start
162
163 #undef TARGET_SCHED_ADJUST_PRIORITY
164 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
165 #undef TARGET_SCHED_ISSUE_RATE
166 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
167
168 #undef TARGET_OPTION_OVERRIDE
169 #define TARGET_OPTION_OVERRIDE m32r_option_override
170
171 #undef TARGET_ENCODE_SECTION_INFO
172 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
173 #undef TARGET_IN_SMALL_DATA_P
174 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
175
176
177 #undef TARGET_MEMORY_MOVE_COST
178 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
179 #undef TARGET_RTX_COSTS
180 #define TARGET_RTX_COSTS m32r_rtx_costs
181 #undef TARGET_ADDRESS_COST
182 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
183
184 #undef TARGET_PROMOTE_PROTOTYPES
185 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
186 #undef TARGET_RETURN_IN_MEMORY
187 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
188
189 #undef TARGET_FUNCTION_VALUE
190 #define TARGET_FUNCTION_VALUE m32r_function_value
191 #undef TARGET_LIBCALL_VALUE
192 #define TARGET_LIBCALL_VALUE m32r_libcall_value
193 #undef TARGET_FUNCTION_VALUE_REGNO_P
194 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
195
196 #undef TARGET_SETUP_INCOMING_VARARGS
197 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
198 #undef TARGET_MUST_PASS_IN_STACK
199 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
200 #undef TARGET_PASS_BY_REFERENCE
201 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
202 #undef TARGET_ARG_PARTIAL_BYTES
203 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
204 #undef TARGET_FUNCTION_ARG
205 #define TARGET_FUNCTION_ARG m32r_function_arg
206 #undef TARGET_FUNCTION_ARG_ADVANCE
207 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
208
209 #undef TARGET_CAN_ELIMINATE
210 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
211
212 #undef TARGET_CONDITIONAL_REGISTER_USAGE
213 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
214
215 #undef TARGET_TRAMPOLINE_INIT
216 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
217
218 #undef TARGET_LEGITIMATE_CONSTANT_P
219 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
220
221 struct gcc_target targetm = TARGET_INITIALIZER;
222 \f
223 /* Called by m32r_option_override to initialize various things. */
224
225 void
226 m32r_init (void)
227 {
228 init_reg_tables ();
229
230 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
231 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
232 m32r_punct_chars['#'] = 1;
233 m32r_punct_chars['@'] = 1; /* ??? no longer used */
234
235 /* Provide default value if not specified. */
236 if (!global_options_set.x_g_switch_value)
237 g_switch_value = SDATA_DEFAULT_SIZE;
238 }
239
240 static void
241 m32r_option_override (void)
242 {
243 /* These need to be done at start up.
244 It's convenient to do them here. */
245 m32r_init ();
246 SUBTARGET_OVERRIDE_OPTIONS;
247 }
248
249 /* Vectors to keep interesting information about registers where it can easily
250    be obtained.  We used to use the actual mode value as the bit number, but there
251    are (or may be) more than 32 modes now.  Instead we use two tables: one
252 indexed by hard register number, and one indexed by mode. */
253
254 /* The purpose of m32r_mode_class is to shrink the range of modes so that
255 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
256 mapped into one m32r_mode_class mode. */
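/* For example, QImode, HImode and SImode all map to S_MODE, DImode to
   D_MODE, SFmode to SF_MODE and DFmode to DF_MODE; see init_reg_tables
   below.  */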
257
258 enum m32r_mode_class
259 {
260 C_MODE,
261 S_MODE, D_MODE, T_MODE, O_MODE,
262 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
263 };
264
265 /* Modes for condition codes. */
266 #define C_MODES (1 << (int) C_MODE)
267
268 /* Modes for single-word and smaller quantities. */
269 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
270
271 /* Modes for double-word and smaller quantities. */
272 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
273
274 /* Modes for quad-word and smaller quantities. */
275 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
276
277 /* Modes for accumulators. */
278 #define A_MODES (1 << (int) A_MODE)
279
280 /* Value is 1 if the register/mode pair is acceptable on the M32R.  */
281
282 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
283 {
284 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
285 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
286 S_MODES, C_MODES, A_MODES, A_MODES
287 };
288
289 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
290
291 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
292
293 static void
294 init_reg_tables (void)
295 {
296 int i;
297
298 for (i = 0; i < NUM_MACHINE_MODES; i++)
299 {
300 machine_mode m = (machine_mode) i;
301
302 switch (GET_MODE_CLASS (m))
303 {
304 case MODE_INT:
305 case MODE_PARTIAL_INT:
306 case MODE_COMPLEX_INT:
307 if (GET_MODE_SIZE (m) <= 4)
308 m32r_mode_class[i] = 1 << (int) S_MODE;
309 else if (GET_MODE_SIZE (m) == 8)
310 m32r_mode_class[i] = 1 << (int) D_MODE;
311 else if (GET_MODE_SIZE (m) == 16)
312 m32r_mode_class[i] = 1 << (int) T_MODE;
313 else if (GET_MODE_SIZE (m) == 32)
314 m32r_mode_class[i] = 1 << (int) O_MODE;
315 else
316 m32r_mode_class[i] = 0;
317 break;
318 case MODE_FLOAT:
319 case MODE_COMPLEX_FLOAT:
320 if (GET_MODE_SIZE (m) <= 4)
321 m32r_mode_class[i] = 1 << (int) SF_MODE;
322 else if (GET_MODE_SIZE (m) == 8)
323 m32r_mode_class[i] = 1 << (int) DF_MODE;
324 else if (GET_MODE_SIZE (m) == 16)
325 m32r_mode_class[i] = 1 << (int) TF_MODE;
326 else if (GET_MODE_SIZE (m) == 32)
327 m32r_mode_class[i] = 1 << (int) OF_MODE;
328 else
329 m32r_mode_class[i] = 0;
330 break;
331 case MODE_CC:
332 m32r_mode_class[i] = 1 << (int) C_MODE;
333 break;
334 default:
335 m32r_mode_class[i] = 0;
336 break;
337 }
338 }
339
340 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
341 {
342 if (GPR_P (i))
343 m32r_regno_reg_class[i] = GENERAL_REGS;
344 else if (i == ARG_POINTER_REGNUM)
345 m32r_regno_reg_class[i] = GENERAL_REGS;
346 else
347 m32r_regno_reg_class[i] = NO_REGS;
348 }
349 }
350 \f
351 /* M32R specific attribute support.
352
353 interrupt - for interrupt functions
354
355 model - select code model used to access object
356
357 small: addresses use 24 bits, use bl to make calls
358 medium: addresses use 32 bits, use bl to make calls
359 large: addresses use 32 bits, use seth/add3/jl to make calls
360
361 Grep for MODEL in m32r.h for more info. */
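/* For illustration only (the declarations here are hypothetical), user
   code selects these per object or function like so:

     extern int near_counter __attribute__ ((model (small)));
     extern int far_table[256] __attribute__ ((model (large)));
     void timer_tick (void) __attribute__ ((interrupt));
*/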
362
363 static tree small_ident1;
364 static tree small_ident2;
365 static tree medium_ident1;
366 static tree medium_ident2;
367 static tree large_ident1;
368 static tree large_ident2;
369
370 static void
371 init_idents (void)
372 {
373 if (small_ident1 == 0)
374 {
375 small_ident1 = get_identifier ("small");
376 small_ident2 = get_identifier ("__small__");
377 medium_ident1 = get_identifier ("medium");
378 medium_ident2 = get_identifier ("__medium__");
379 large_ident1 = get_identifier ("large");
380 large_ident2 = get_identifier ("__large__");
381 }
382 }
383
384 /* Handle an "model" attribute; arguments as in
385 struct attribute_spec.handler. */
386 static tree
387 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
388 tree args, int flags ATTRIBUTE_UNUSED,
389 bool *no_add_attrs)
390 {
391 tree arg;
392
393 init_idents ();
394 arg = TREE_VALUE (args);
395
396 if (arg != small_ident1
397 && arg != small_ident2
398 && arg != medium_ident1
399 && arg != medium_ident2
400 && arg != large_ident1
401 && arg != large_ident2)
402 {
403 warning (OPT_Wattributes, "invalid argument of %qs attribute",
404 IDENTIFIER_POINTER (name));
405 *no_add_attrs = true;
406 }
407
408 return NULL_TREE;
409 }
410
411 static bool
412 m32r_attribute_identifier (const_tree name)
413 {
414 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
415 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
416 }
417 \f
418 /* Encode section information of DECL, which is either a VAR_DECL,
419 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
420
421 For the M32R we want to record:
422
423 - whether the object lives in .sdata/.sbss.
424 - what code model should be used to access the object
425 */
426
427 static void
428 m32r_encode_section_info (tree decl, rtx rtl, int first)
429 {
430 int extra_flags = 0;
431 tree model_attr;
432 enum m32r_model model;
433
434 default_encode_section_info (decl, rtl, first);
435
436 if (!DECL_P (decl))
437 return;
438
439 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
440 if (model_attr)
441 {
442 tree id;
443
444 init_idents ();
445
446 id = TREE_VALUE (TREE_VALUE (model_attr));
447
448 if (id == small_ident1 || id == small_ident2)
449 model = M32R_MODEL_SMALL;
450 else if (id == medium_ident1 || id == medium_ident2)
451 model = M32R_MODEL_MEDIUM;
452 else if (id == large_ident1 || id == large_ident2)
453 model = M32R_MODEL_LARGE;
454 else
455 gcc_unreachable (); /* shouldn't happen */
456 }
457 else
458 {
459 if (TARGET_MODEL_SMALL)
460 model = M32R_MODEL_SMALL;
461 else if (TARGET_MODEL_MEDIUM)
462 model = M32R_MODEL_MEDIUM;
463 else if (TARGET_MODEL_LARGE)
464 model = M32R_MODEL_LARGE;
465 else
466 gcc_unreachable (); /* shouldn't happen */
467 }
468 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
469
470 if (extra_flags)
471 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
472 }
473
474 /* Only mark the object as being small data area addressable if
475 it hasn't been explicitly marked with a code model.
476
477 The user can explicitly put an object in the small data area with the
478 section attribute. If the object is in sdata/sbss and marked with a
479 code model do both [put the object in .sdata and mark it as being
480 addressed with a specific code model - don't mark it as being addressed
481 with an SDA reloc though]. This is ok and might be useful at times. If
482 the object doesn't fit the linker will give an error. */
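/* For example (illustrative declarations):

     int counter __attribute__ ((section (".sdata")));

   is always treated as small-data here, whereas a plain

     int flags;

   qualifies only when small data is enabled, the object is writable and
   its size does not exceed g_switch_value (the -G limit).  */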
483
484 static bool
485 m32r_in_small_data_p (const_tree decl)
486 {
487 const char *section;
488
489 if (TREE_CODE (decl) != VAR_DECL)
490 return false;
491
492 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
493 return false;
494
495 section = DECL_SECTION_NAME (decl);
496 if (section)
497 {
498 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
499 return true;
500 }
501 else
502 {
503 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
504 {
505 int size = int_size_in_bytes (TREE_TYPE (decl));
506
507 if (size > 0 && size <= g_switch_value)
508 return true;
509 }
510 }
511
512 return false;
513 }
514
515 /* Do anything needed before RTL is emitted for each function. */
516
517 void
518 m32r_init_expanders (void)
519 {
520 /* ??? At one point there was code here. The function is left in
521 to make it easy to experiment. */
522 }
523 \f
524 int
525 call_operand (rtx op, machine_mode mode)
526 {
527 if (!MEM_P (op))
528 return 0;
529 op = XEXP (op, 0);
530 return call_address_operand (op, mode);
531 }
532
533 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
534
535 int
536 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
537 {
538 if (! TARGET_SDATA_USE)
539 return 0;
540
541 if (GET_CODE (op) == SYMBOL_REF)
542 return SYMBOL_REF_SMALL_P (op);
543
544 if (GET_CODE (op) == CONST
545 && GET_CODE (XEXP (op, 0)) == PLUS
546 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
547 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
548 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
549
550 return 0;
551 }
552
553 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
554
555 int
556 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
557 {
558 rtx sym;
559
560 if (flag_pic)
561 return 0;
562
563 if (GET_CODE (op) == LABEL_REF)
564 return TARGET_ADDR24;
565
566 if (GET_CODE (op) == SYMBOL_REF)
567 sym = op;
568 else if (GET_CODE (op) == CONST
569 && GET_CODE (XEXP (op, 0)) == PLUS
570 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
571 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
572 sym = XEXP (XEXP (op, 0), 0);
573 else
574 return 0;
575
576 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
577 return 1;
578
579 if (TARGET_ADDR24
580 && (CONSTANT_POOL_ADDRESS_P (sym)
581 || LIT_NAME_P (XSTR (sym, 0))))
582 return 1;
583
584 return 0;
585 }
586
587 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
588
589 int
590 addr32_operand (rtx op, machine_mode mode)
591 {
592 rtx sym;
593
594 if (GET_CODE (op) == LABEL_REF)
595 return TARGET_ADDR32;
596
597 if (GET_CODE (op) == SYMBOL_REF)
598 sym = op;
599 else if (GET_CODE (op) == CONST
600 && GET_CODE (XEXP (op, 0)) == PLUS
601 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
602 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
603 && ! flag_pic)
604 sym = XEXP (XEXP (op, 0), 0);
605 else
606 return 0;
607
608 return (! addr24_operand (sym, mode)
609 && ! small_data_operand (sym, mode));
610 }
611
612 /* Return 1 if OP is a function that can be called with the `bl' insn. */
613
614 int
615 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
616 {
617 if (flag_pic)
618 return 1;
619
620 if (GET_CODE (op) == SYMBOL_REF)
621 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
622
623 return TARGET_CALL26;
624 }
625
626 /* Return 1 if OP is a DImode const we want to handle inline.
627 This must match the code in the movdi pattern.
628 It is used by the 'G' constraint. */
629
630 int
631 easy_di_const (rtx op)
632 {
633 rtx high_rtx, low_rtx;
634 HOST_WIDE_INT high, low;
635
636 split_double (op, &high_rtx, &low_rtx);
637 high = INTVAL (high_rtx);
638 low = INTVAL (low_rtx);
639 /* Pick constants loadable with 2 16-bit `ldi' insns. */
640 if (high >= -128 && high <= 127
641 && low >= -128 && low <= 127)
642 return 1;
643 return 0;
644 }
645
646 /* Return 1 if OP is a DFmode const we want to handle inline.
647 This must match the code in the movdf pattern.
648 It is used by the 'H' constraint. */
649
650 int
651 easy_df_const (rtx op)
652 {
653 long l[2];
654
655 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
656 if (l[0] == 0 && l[1] == 0)
657 return 1;
658 if ((l[0] & 0xffff) == 0 && l[1] == 0)
659 return 1;
660 return 0;
661 }
662
663 /* Return 1 if OP is (mem (reg ...)).
664 This is used in insn length calcs. */
665
666 int
667 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
668 {
669 return MEM_P (op) && REG_P (XEXP (op, 0));
670 }
671
672 /* Return nonzero if TYPE must be passed by indirect reference. */
673
674 static bool
675 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
676 machine_mode mode, const_tree type,
677 bool named ATTRIBUTE_UNUSED)
678 {
679 int size;
680
681 if (type)
682 size = int_size_in_bytes (type);
683 else
684 size = GET_MODE_SIZE (mode);
685
686 return (size < 0 || size > 8);
687 }
688 \f
689 /* Comparisons. */
690
691 /* X and Y are two things to compare using CODE. Emit the compare insn and
692 return the rtx for compare [arg0 of the if_then_else].
693 If need_compare is true then the comparison insn must be generated, rather
694 than being subsumed into the following branch instruction. */
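/* A conditional-branch expander in the machine description might use this
   roughly as follows (an illustrative sketch, not a quote from m32r.md):

     rtx cond = gen_compare (GET_CODE (operands[0]), operands[1],
                             operands[2], FALSE);
     rtx target = gen_rtx_LABEL_REF (Pmode, operands[3]);
     emit_jump_insn (gen_rtx_SET (pc_rtx,
                                  gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
                                                        target, pc_rtx)));
*/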
695
696 rtx
697 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
698 {
699 enum rtx_code compare_code;
700 enum rtx_code branch_code;
701 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
702 int must_swap = 0;
703
704 switch (code)
705 {
706 case EQ: compare_code = EQ; branch_code = NE; break;
707 case NE: compare_code = EQ; branch_code = EQ; break;
708 case LT: compare_code = LT; branch_code = NE; break;
709 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
710 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
711 case GE: compare_code = LT; branch_code = EQ; break;
712 case LTU: compare_code = LTU; branch_code = NE; break;
713 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
714 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
715 case GEU: compare_code = LTU; branch_code = EQ; break;
716
717 default:
718 gcc_unreachable ();
719 }
720
721 if (need_compare)
722 {
723 switch (compare_code)
724 {
725 case EQ:
726 if (satisfies_constraint_P (y) /* Reg equal to small const. */
727 && y != const0_rtx)
728 {
729 rtx tmp = gen_reg_rtx (SImode);
730
731 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
732 x = tmp;
733 y = const0_rtx;
734 }
735 else if (CONSTANT_P (y)) /* Reg equal to const. */
736 {
737 rtx tmp = force_reg (GET_MODE (x), y);
738 y = tmp;
739 }
740
741 if (register_operand (y, SImode) /* Reg equal to reg. */
742 || y == const0_rtx) /* Reg equal to zero. */
743 {
744 emit_insn (gen_cmp_eqsi_insn (x, y));
745
746 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
747 }
748 break;
749
750 case LT:
751 if (register_operand (y, SImode)
752 || satisfies_constraint_P (y))
753 {
754 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
755
756 switch (code)
757 {
758 case LT:
759 emit_insn (gen_cmp_ltsi_insn (x, y));
760 code = EQ;
761 break;
762 case LE:
763 if (y == const0_rtx)
764 tmp = const1_rtx;
765 else
766 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
767 emit_insn (gen_cmp_ltsi_insn (x, tmp));
768 code = EQ;
769 break;
770 case GT:
771 if (CONST_INT_P (y))
772 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
773 else
774 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
775 emit_insn (gen_cmp_ltsi_insn (x, tmp));
776 code = NE;
777 break;
778 case GE:
779 emit_insn (gen_cmp_ltsi_insn (x, y));
780 code = NE;
781 break;
782 default:
783 gcc_unreachable ();
784 }
785
786 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
787 }
788 break;
789
790 case LTU:
791 if (register_operand (y, SImode)
792 || satisfies_constraint_P (y))
793 {
794 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
795
796 switch (code)
797 {
798 case LTU:
799 emit_insn (gen_cmp_ltusi_insn (x, y));
800 code = EQ;
801 break;
802 case LEU:
803 if (y == const0_rtx)
804 tmp = const1_rtx;
805 else
806 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
807 emit_insn (gen_cmp_ltusi_insn (x, tmp));
808 code = EQ;
809 break;
810 case GTU:
811 if (CONST_INT_P (y))
812 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
813 else
814 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
815 emit_insn (gen_cmp_ltusi_insn (x, tmp));
816 code = NE;
817 break;
818 case GEU:
819 emit_insn (gen_cmp_ltusi_insn (x, y));
820 code = NE;
821 break;
822 default:
823 gcc_unreachable ();
824 }
825
826 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
827 }
828 break;
829
830 default:
831 gcc_unreachable ();
832 }
833 }
834 else
835 {
836 /* Reg/reg equal comparison. */
837 if (compare_code == EQ
838 && register_operand (y, SImode))
839 return gen_rtx_fmt_ee (code, CCmode, x, y);
840
841 /* Reg/zero signed comparison. */
842 if ((compare_code == EQ || compare_code == LT)
843 && y == const0_rtx)
844 return gen_rtx_fmt_ee (code, CCmode, x, y);
845
846 /* Reg/smallconst equal comparison. */
847 if (compare_code == EQ
848 && satisfies_constraint_P (y))
849 {
850 rtx tmp = gen_reg_rtx (SImode);
851
852 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
853 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
854 }
855
856 /* Reg/const equal comparison. */
857 if (compare_code == EQ
858 && CONSTANT_P (y))
859 {
860 rtx tmp = force_reg (GET_MODE (x), y);
861
862 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
863 }
864 }
865
866 if (CONSTANT_P (y))
867 {
868 if (must_swap)
869 y = force_reg (GET_MODE (x), y);
870 else
871 {
872 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
873
874 if (! ok_const)
875 y = force_reg (GET_MODE (x), y);
876 }
877 }
878
879 switch (compare_code)
880 {
881 case EQ :
882 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
883 break;
884 case LT :
885 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
886 break;
887 case LTU :
888 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
889 break;
890
891 default:
892 gcc_unreachable ();
893 }
894
895 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
896 }
897
898 bool
899 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
900 {
901 machine_mode mode = GET_MODE (op0);
902
903 gcc_assert (mode == SImode);
904 switch (code)
905 {
906 case EQ:
907 if (!register_operand (op1, mode))
908 op1 = force_reg (mode, op1);
909
910 if (TARGET_M32RX || TARGET_M32R2)
911 {
912 if (!reg_or_zero_operand (op2, mode))
913 op2 = force_reg (mode, op2);
914
915 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
916 return true;
917 }
918 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
919 {
920 emit_insn (gen_seq_zero_insn (op0, op1));
921 return true;
922 }
923
924 if (!reg_or_eq_int16_operand (op2, mode))
925 op2 = force_reg (mode, op2);
926
927 emit_insn (gen_seq_insn (op0, op1, op2));
928 return true;
929
930 case NE:
931 if (!CONST_INT_P (op2)
932 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
933 {
934 rtx reg;
935
936 if (reload_completed || reload_in_progress)
937 return false;
938
939 reg = gen_reg_rtx (SImode);
940 emit_insn (gen_xorsi3 (reg, op1, op2));
941 op1 = reg;
942
943 if (!register_operand (op1, mode))
944 op1 = force_reg (mode, op1);
945
946 emit_insn (gen_sne_zero_insn (op0, op1));
947 return true;
948 }
949 return false;
950
951 case LT:
952 case GT:
953 if (code == GT)
954 {
955 rtx tmp = op2;
956 op2 = op1;
957 op1 = tmp;
958 code = LT;
959 }
960
961 if (!register_operand (op1, mode))
962 op1 = force_reg (mode, op1);
963
964 if (!reg_or_int16_operand (op2, mode))
965 op2 = force_reg (mode, op2);
966
967 emit_insn (gen_slt_insn (op0, op1, op2));
968 return true;
969
970 case LTU:
971 case GTU:
972 if (code == GTU)
973 {
974 rtx tmp = op2;
975 op2 = op1;
976 op1 = tmp;
977 code = LTU;
978 }
979
980 if (!register_operand (op1, mode))
981 op1 = force_reg (mode, op1);
982
983 if (!reg_or_int16_operand (op2, mode))
984 op2 = force_reg (mode, op2);
985
986 emit_insn (gen_sltu_insn (op0, op1, op2));
987 return true;
988
989 case GE:
990 case GEU:
991 if (!register_operand (op1, mode))
992 op1 = force_reg (mode, op1);
993
994 if (!reg_or_int16_operand (op2, mode))
995 op2 = force_reg (mode, op2);
996
997 if (code == GE)
998 emit_insn (gen_sge_insn (op0, op1, op2));
999 else
1000 emit_insn (gen_sgeu_insn (op0, op1, op2));
1001 return true;
1002
1003 case LE:
1004 case LEU:
1005 if (!register_operand (op1, mode))
1006 op1 = force_reg (mode, op1);
1007
1008 if (CONST_INT_P (op2))
1009 {
1010 HOST_WIDE_INT value = INTVAL (op2);
1011 if (value >= 2147483647)
1012 {
1013 emit_move_insn (op0, const1_rtx);
1014 return true;
1015 }
1016
1017 op2 = GEN_INT (value + 1);
1018 if (value < -32768 || value >= 32767)
1019 op2 = force_reg (mode, op2);
1020
1021 if (code == LEU)
1022 emit_insn (gen_sltu_insn (op0, op1, op2));
1023 else
1024 emit_insn (gen_slt_insn (op0, op1, op2));
1025 return true;
1026 }
1027
1028 if (!register_operand (op2, mode))
1029 op2 = force_reg (mode, op2);
1030
1031 if (code == LEU)
1032 emit_insn (gen_sleu_insn (op0, op1, op2));
1033 else
1034 emit_insn (gen_sle_insn (op0, op1, op2));
1035 return true;
1036
1037 default:
1038 gcc_unreachable ();
1039 }
1040 }
1041
1042 \f
1043 /* Split a 2 word move (DI or DF) into component parts. */
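/* For example, after reload a DImode register copy such as
   (set (reg:DI 0) (reg:DI 2)) becomes two SImode sets,
   (set (reg:SI 0) (reg:SI 2)) and (set (reg:SI 1) (reg:SI 3)),
   with the copy order reversed when the first destination register is the
   same as the second source register.  */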
1044
1045 rtx
1046 gen_split_move_double (rtx operands[])
1047 {
1048 machine_mode mode = GET_MODE (operands[0]);
1049 rtx dest = operands[0];
1050 rtx src = operands[1];
1051 rtx val;
1052
1053 /* We might have (SUBREG (MEM)) here, so just get rid of the
1054 subregs to make this code simpler. It is safe to call
1055 alter_subreg any time after reload. */
1056 if (GET_CODE (dest) == SUBREG)
1057 alter_subreg (&dest, true);
1058 if (GET_CODE (src) == SUBREG)
1059 alter_subreg (&src, true);
1060
1061 start_sequence ();
1062 if (REG_P (dest))
1063 {
1064 int dregno = REGNO (dest);
1065
1066 /* Reg = reg. */
1067 if (REG_P (src))
1068 {
1069 int sregno = REGNO (src);
1070
1071 int reverse = (dregno == sregno + 1);
1072
1073 /* We normally copy the low-numbered register first. However, if
1074 	     the first register of operand 0 is the same as the second register of
1075 operand 1, we must copy in the opposite order. */
1076 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1077 operand_subword (src, reverse, TRUE, mode)));
1078
1079 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1080 operand_subword (src, !reverse, TRUE, mode)));
1081 }
1082
1083 /* Reg = constant. */
1084 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1085 {
1086 rtx words[2];
1087 split_double (src, &words[0], &words[1]);
1088 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1089 words[0]));
1090
1091 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1092 words[1]));
1093 }
1094
1095 /* Reg = mem. */
1096 else if (MEM_P (src))
1097 {
1098 /* If the high-address word is used in the address, we must load it
1099 last. Otherwise, load it first. */
1100 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1101
1102 	  /* We used to optimize loads through a single address register as
1103
1104 ld r1,r3+; ld r2,r3
1105
1106 if r3 were not used subsequently. However, the REG_NOTES aren't
1107 propagated correctly by the reload phase, and it can cause bad
1108 code to be generated. We could still try:
1109
1110 ld r1,r3+; ld r2,r3; addi r3,-4
1111
1112 which saves 2 bytes and doesn't force longword alignment. */
1113 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1114 adjust_address (src, SImode,
1115 reverse * UNITS_PER_WORD)));
1116
1117 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1118 adjust_address (src, SImode,
1119 !reverse * UNITS_PER_WORD)));
1120 }
1121 else
1122 gcc_unreachable ();
1123 }
1124
1125 /* Mem = reg. */
1126   /* We used to optimize stores through a single address register as
1127
1128 st r1,r3; st r2,+r3
1129
1130 if r3 were not used subsequently. However, the REG_NOTES aren't
1131 propagated correctly by the reload phase, and it can cause bad
1132 code to be generated. We could still try:
1133
1134 st r1,r3; st r2,+r3; addi r3,-4
1135
1136 which saves 2 bytes and doesn't force longword alignment. */
1137 else if (MEM_P (dest) && REG_P (src))
1138 {
1139 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1140 operand_subword (src, 0, TRUE, mode)));
1141
1142 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1143 operand_subword (src, 1, TRUE, mode)));
1144 }
1145
1146 else
1147 gcc_unreachable ();
1148
1149 val = get_insns ();
1150 end_sequence ();
1151 return val;
1152 }
1153
1154 \f
1155 static int
1156 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1157 tree type, bool named ATTRIBUTE_UNUSED)
1158 {
1159 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1160
1161 int words;
1162 unsigned int size =
1163 (((mode == BLKmode && type)
1164 ? (unsigned int) int_size_in_bytes (type)
1165 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1166 / UNITS_PER_WORD;
1167
1168 if (*cum >= M32R_MAX_PARM_REGS)
1169 words = 0;
1170 else if (*cum + size > M32R_MAX_PARM_REGS)
1171 words = (*cum + size) - M32R_MAX_PARM_REGS;
1172 else
1173 words = 0;
1174
1175 return words * UNITS_PER_WORD;
1176 }
1177
1178 /* The ROUND_ADVANCE* macros are local to this file. */
1179 /* Round SIZE up to a word boundary. */
1180 #define ROUND_ADVANCE(SIZE) \
1181 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
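/* E.g. with UNITS_PER_WORD == 4, ROUND_ADVANCE (1) through ROUND_ADVANCE (4)
   all yield 1 word and ROUND_ADVANCE (5) yields 2.  */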
1182
1183 /* Round arg MODE/TYPE up to the next word boundary. */
1184 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1185 ((MODE) == BLKmode \
1186 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1187 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1188
1189 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1190 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1191
1192 /* Return nonzero if an arg of type TYPE and mode MODE will be passed in
1193 a reg. This includes arguments that have to be passed by reference as the
1194 pointer to them is passed in a reg if one is available (and that is what
1195 we're given).
1196 This macro is only used in this file. */
1197 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1198 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1199
1200 /* Determine where to put an argument to a function.
1201 Value is zero to push the argument on the stack,
1202 or a hard register in which to store the argument.
1203
1204 MODE is the argument's machine mode.
1205 TYPE is the data type of the argument (as a tree).
1206 This is null for libcalls where that information may
1207 not be available.
1208 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1209 the preceding args and about the function being called.
1210 NAMED is nonzero if this argument is a named parameter
1211 (otherwise it is an extra parameter matching an ellipsis). */
1212 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1213 and the rest are pushed. */
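/* For instance (a hypothetical prototype), assuming four parameter
   registers, a call to

     int f (int a, int b, int c, int d, int e);

   passes A through D in the argument registers and pushes E on the stack.  */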
1214
1215 static rtx
1216 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1217 const_tree type ATTRIBUTE_UNUSED,
1218 bool named ATTRIBUTE_UNUSED)
1219 {
1220 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1221
1222 return (PASS_IN_REG_P (*cum, mode, type)
1223 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1224 : NULL_RTX);
1225 }
1226
1227 /* Update the data in CUM to advance over an argument
1228 of mode MODE and data type TYPE.
1229 (TYPE is null for libcalls where that information may not be available.) */
1230
1231 static void
1232 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1233 const_tree type, bool named ATTRIBUTE_UNUSED)
1234 {
1235 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1236
1237 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1238 + ROUND_ADVANCE_ARG (mode, type));
1239 }
1240
1241 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1242
1243 static bool
1244 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1245 {
1246 cumulative_args_t dummy = pack_cumulative_args (NULL);
1247
1248 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1249 }
1250
1251 /* Worker function for TARGET_FUNCTION_VALUE. */
1252
1253 static rtx
1254 m32r_function_value (const_tree valtype,
1255 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1256 bool outgoing ATTRIBUTE_UNUSED)
1257 {
1258 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1259 }
1260
1261 /* Worker function for TARGET_LIBCALL_VALUE. */
1262
1263 static rtx
1264 m32r_libcall_value (machine_mode mode,
1265 const_rtx fun ATTRIBUTE_UNUSED)
1266 {
1267 return gen_rtx_REG (mode, 0);
1268 }
1269
1270 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1271
1272 ??? What about r1 in DI/DF values. */
1273
1274 static bool
1275 m32r_function_value_regno_p (const unsigned int regno)
1276 {
1277 return (regno == 0);
1278 }
1279
1280 /* Do any needed setup for a variadic function. For the M32R, we must
1281 create a register parameter block, and then copy any anonymous arguments
1282 in registers to memory.
1283
1284 CUM has not been updated for the last named argument which has type TYPE
1285 and mode MODE, and we rely on this fact. */
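/* Sketch of the effect for a hypothetical prototype such as

     int vf (int fixed, ...);

   here FIXED consumes the first argument register, so the remaining
   parameter registers are saved into the register parameter save area
   (see the frame layout comment below); va_arg can then walk the
   anonymous register arguments contiguously with any arguments already
   on the stack.  */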
1286
1287 static void
1288 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1289 tree type, int *pretend_size, int no_rtl)
1290 {
1291 int first_anon_arg;
1292
1293 if (no_rtl)
1294 return;
1295
1296 /* All BLKmode values are passed by reference. */
1297 gcc_assert (mode != BLKmode);
1298
1299 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1300 + ROUND_ADVANCE_ARG (mode, type));
1301
1302 if (first_anon_arg < M32R_MAX_PARM_REGS)
1303 {
1304 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1305 int first_reg_offset = first_anon_arg;
1306 /* Size in words to "pretend" allocate. */
1307 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1308 rtx regblock;
1309
1310 regblock = gen_frame_mem (BLKmode,
1311 plus_constant (Pmode, arg_pointer_rtx,
1312 FIRST_PARM_OFFSET (0)));
1313 set_mem_alias_set (regblock, get_varargs_alias_set ());
1314 move_block_from_reg (first_reg_offset, regblock, size);
1315
1316 *pretend_size = (size * UNITS_PER_WORD);
1317 }
1318 }
1319
1320 \f
1321 /* Return true if INSN is a real instruction, i.e. not a USE, CLOBBER
      or debug insn.  */
1322
1323 static int
1324 m32r_is_insn (rtx insn)
1325 {
1326 return (NONDEBUG_INSN_P (insn)
1327 && GET_CODE (PATTERN (insn)) != USE
1328 && GET_CODE (PATTERN (insn)) != CLOBBER);
1329 }
1330
1331 /* Increase the priority of long instructions so that the
1332 short instructions are scheduled ahead of the long ones. */
1333
1334 static int
1335 m32r_adjust_priority (rtx_insn *insn, int priority)
1336 {
1337 if (m32r_is_insn (insn)
1338 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1339 priority <<= 3;
1340
1341 return priority;
1342 }
1343
1344 \f
1345 /* Indicate how many instructions can be issued at the same time.
1346 This is sort of a lie. The m32r can issue only 1 long insn at
1347 once, but it can issue 2 short insns. The default therefore is
1348 set at 2, but this can be overridden by the command line option
1349 -missue-rate=1. */
1350
1351 static int
1352 m32r_issue_rate (void)
1353 {
1354 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1355 }
1356 \f
1357 /* Cost functions. */
1358 /* Memory is 3 times as expensive as registers.
1359 ??? Is that the right way to look at it? */
1360
1361 static int
1362 m32r_memory_move_cost (machine_mode mode,
1363 reg_class_t rclass ATTRIBUTE_UNUSED,
1364 bool in ATTRIBUTE_UNUSED)
1365 {
1366 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1367 return 6;
1368 else
1369 return 12;
1370 }
1371
1372 static bool
1373 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1374 int outer_code ATTRIBUTE_UNUSED,
1375 int opno ATTRIBUTE_UNUSED, int *total,
1376 bool speed ATTRIBUTE_UNUSED)
1377 {
1378 int code = GET_CODE (x);
1379
1380 switch (code)
1381 {
1382 /* Small integers are as cheap as registers. 4 byte values can be
1383 fetched as immediate constants - let's give that the cost of an
1384 extra insn. */
1385 case CONST_INT:
1386 if (INT16_P (INTVAL (x)))
1387 {
1388 *total = 0;
1389 return true;
1390 }
1391 /* FALLTHRU */
1392
1393 case CONST:
1394 case LABEL_REF:
1395 case SYMBOL_REF:
1396 *total = COSTS_N_INSNS (1);
1397 return true;
1398
1399 case CONST_DOUBLE:
1400 {
1401 rtx high, low;
1402
1403 split_double (x, &high, &low);
1404 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1405 + !INT16_P (INTVAL (low)));
1406 return true;
1407 }
1408
1409 case MULT:
1410 *total = COSTS_N_INSNS (3);
1411 return true;
1412
1413 case DIV:
1414 case UDIV:
1415 case MOD:
1416 case UMOD:
1417 *total = COSTS_N_INSNS (10);
1418 return true;
1419
1420 default:
1421 return false;
1422 }
1423 }
1424 \f
1425 /* Type of function DECL.
1426
1427 The result is cached. To reset the cache at the end of a function,
1428 call with DECL = NULL_TREE. */
1429
1430 enum m32r_function_type
1431 m32r_compute_function_type (tree decl)
1432 {
1433 /* Cached value. */
1434 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1435 /* Last function we were called for. */
1436 static tree last_fn = NULL_TREE;
1437
1438 /* Resetting the cached value? */
1439 if (decl == NULL_TREE)
1440 {
1441 fn_type = M32R_FUNCTION_UNKNOWN;
1442 last_fn = NULL_TREE;
1443 return fn_type;
1444 }
1445
1446 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1447 return fn_type;
1448
1449 /* Compute function type. */
1450 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1451 ? M32R_FUNCTION_INTERRUPT
1452 : M32R_FUNCTION_NORMAL);
1453
1454 last_fn = decl;
1455 return fn_type;
1456 }
1457 \f/* Function prologue/epilogue handlers. */
1458
1459 /* M32R stack frames look like:
1460
1461 Before call After call
1462 +-----------------------+ +-----------------------+
1463 | | | |
1464 high | local variables, | | local variables, |
1465 mem | reg save area, etc. | | reg save area, etc. |
1466 | | | |
1467 +-----------------------+ +-----------------------+
1468 | | | |
1469 | arguments on stack. | | arguments on stack. |
1470 | | | |
1471 SP+0->+-----------------------+ +-----------------------+
1472 | reg parm save area, |
1473 | only created for |
1474 | variable argument |
1475 | functions |
1476 +-----------------------+
1477 | previous frame ptr |
1478 +-----------------------+
1479 | |
1480 | register save area |
1481 | |
1482 +-----------------------+
1483 | return address |
1484 +-----------------------+
1485 | |
1486 | local variables |
1487 | |
1488 +-----------------------+
1489 | |
1490 | alloca allocations |
1491 | |
1492 +-----------------------+
1493 | |
1494 low | arguments on stack |
1495 memory | |
1496 SP+0->+-----------------------+
1497
1498 Notes:
1499 1) The "reg parm save area" does not exist for non variable argument fns.
1500 2) The "reg parm save area" can be eliminated completely if we saved regs
1501 containing anonymous args separately but that complicates things too
1502 much (so it's not done).
1503 3) The return address is saved after the register save area so as to have as
1504 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1505
1506 /* Structure to be filled in by m32r_compute_frame_size with register
1507 save masks, and offsets for the current function. */
1508 struct m32r_frame_info
1509 {
1510 unsigned int total_size; /* # bytes that the entire frame takes up. */
1511 unsigned int extra_size; /* # bytes of extra stuff. */
1512 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1513 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1514 unsigned int reg_size; /* # bytes needed to store regs. */
1515 unsigned int var_size; /* # bytes that variables take up. */
1516 unsigned int gmask; /* Mask of saved gp registers. */
1517 unsigned int save_fp; /* Nonzero if fp must be saved. */
1518 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1519 int initialized; /* Nonzero if frame size already calculated. */
1520 };
1521
1522 /* Current frame information calculated by m32r_compute_frame_size. */
1523 static struct m32r_frame_info current_frame_info;
1524
1525 /* Zero structure to initialize current_frame_info. */
1526 static struct m32r_frame_info zero_frame_info;
1527
1528 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1529 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1530
1531 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1532 The return address and frame pointer are treated separately.
1533 Don't consider them here. */
1534 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1535 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1536 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1537
1538 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1539 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1540
1541 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1542 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1543
1544 /* Return the bytes needed to compute the frame pointer from the current
1545 stack pointer.
1546
1547 SIZE is the size needed for local variables. */
1548
1549 unsigned int
1550 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1551 {
1552 unsigned int regno;
1553 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1554 unsigned int reg_size;
1555 unsigned int gmask;
1556 enum m32r_function_type fn_type;
1557 int interrupt_p;
1558 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1559 | crtl->profile);
1560
1561 var_size = M32R_STACK_ALIGN (size);
1562 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1563 pretend_size = crtl->args.pretend_args_size;
1564 extra_size = FIRST_PARM_OFFSET (0);
1565 total_size = extra_size + pretend_size + args_size + var_size;
1566 reg_size = 0;
1567 gmask = 0;
1568
1569 /* See if this is an interrupt handler. Call used registers must be saved
1570 for them too. */
1571 fn_type = m32r_compute_function_type (current_function_decl);
1572 interrupt_p = M32R_INTERRUPT_P (fn_type);
1573
1574 /* Calculate space needed for registers. */
1575 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1576 {
1577 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1578 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1579 {
1580 reg_size += UNITS_PER_WORD;
1581 gmask |= 1 << regno;
1582 }
1583 }
1584
1585 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1586 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1587
1588 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1589 * UNITS_PER_WORD);
1590 total_size += reg_size;
1591
1592 /* ??? Not sure this is necessary, and I don't think the epilogue
1593 handler will do the right thing if this changes total_size. */
1594 total_size = M32R_STACK_ALIGN (total_size);
1595
1596 /* frame_size = total_size - (pretend_size + reg_size); */
1597
1598 /* Save computed information. */
1599 current_frame_info.total_size = total_size;
1600 current_frame_info.extra_size = extra_size;
1601 current_frame_info.pretend_size = pretend_size;
1602 current_frame_info.var_size = var_size;
1603 current_frame_info.args_size = args_size;
1604 current_frame_info.reg_size = reg_size;
1605 current_frame_info.gmask = gmask;
1606 current_frame_info.initialized = reload_completed;
1607
1608 /* Ok, we're done. */
1609 return total_size;
1610 }
1611
1612 /* Worker function for TARGET_CAN_ELIMINATE. */
1613
1614 bool
1615 m32r_can_eliminate (const int from, const int to)
1616 {
1617 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1618 ? ! frame_pointer_needed
1619 : true);
1620 }
1621
1622 \f
1623 /* The table we use to reference PIC data. */
1624 static rtx global_offset_table;
1625
1626 static void
1627 m32r_reload_lr (rtx sp, int size)
1628 {
1629 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1630
1631 if (size == 0)
1632 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1633 else if (size < 32768)
1634 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1635 gen_rtx_PLUS (Pmode, sp,
1636 GEN_INT (size)))));
1637 else
1638 {
1639 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1640
1641 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1642 emit_insn (gen_addsi3 (tmp, tmp, sp));
1643 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1644 }
1645
1646 emit_use (lr);
1647 }
1648
1649 void
1650 m32r_load_pic_register (void)
1651 {
1652 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1653 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1654 GEN_INT (TARGET_MODEL_SMALL)));
1655
1656 /* Need to emit this whether or not we obey regdecls,
1657 since setjmp/longjmp can cause life info to screw up. */
1658 emit_use (pic_offset_table_rtx);
1659 }
1660
1661 /* Expand the m32r prologue as a series of insns. */
1662
1663 void
1664 m32r_expand_prologue (void)
1665 {
1666 int regno;
1667 int frame_size;
1668 unsigned int gmask;
1669 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1670 | crtl->profile);
1671
1672 if (! current_frame_info.initialized)
1673 m32r_compute_frame_size (get_frame_size ());
1674
1675 if (flag_stack_usage_info)
1676 current_function_static_stack_size = current_frame_info.total_size;
1677
1678 gmask = current_frame_info.gmask;
1679
1680 /* These cases shouldn't happen. Catch them now. */
1681 gcc_assert (current_frame_info.total_size || !gmask);
1682
1683 /* Allocate space for register arguments if this is a variadic function. */
1684 if (current_frame_info.pretend_size != 0)
1685 {
1686 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1687 the wrong result on a 64-bit host. */
1688 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1689 emit_insn (gen_addsi3 (stack_pointer_rtx,
1690 stack_pointer_rtx,
1691 GEN_INT (-pretend_size)));
1692 }
1693
1694 /* Save any registers we need to and set up fp. */
1695 if (current_frame_info.save_fp)
1696 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1697
1698 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1699
1700 /* Save any needed call-saved regs (and call-used if this is an
1701 interrupt handler). */
1702 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1703 {
1704 if ((gmask & (1 << regno)) != 0)
1705 emit_insn (gen_movsi_push (stack_pointer_rtx,
1706 gen_rtx_REG (Pmode, regno)));
1707 }
1708
1709 if (current_frame_info.save_lr)
1710 emit_insn (gen_movsi_push (stack_pointer_rtx,
1711 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1712
1713 /* Allocate the stack frame. */
1714 frame_size = (current_frame_info.total_size
1715 - (current_frame_info.pretend_size
1716 + current_frame_info.reg_size));
1717
1718 if (frame_size == 0)
1719 ; /* Nothing to do. */
1720 else if (frame_size <= 32768)
1721 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1722 GEN_INT (-frame_size)));
1723 else
1724 {
1725 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1726
1727 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1728 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1729 }
1730
1731 if (frame_pointer_needed)
1732 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1733
1734 if (crtl->profile)
1735 /* Push lr for mcount (form_pc, x). */
1736 emit_insn (gen_movsi_push (stack_pointer_rtx,
1737 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1738
1739 if (pic_reg_used)
1740 {
1741 m32r_load_pic_register ();
1742 m32r_reload_lr (stack_pointer_rtx,
1743 (crtl->profile ? 0 : frame_size));
1744 }
1745
1746 if (crtl->profile && !pic_reg_used)
1747 emit_insn (gen_blockage ());
1748 }
1749
1750 \f
1751 /* Set up the stack and frame pointer (if desired) for the function.
1752 Note, if this is changed, you need to mirror the changes in
1753 m32r_compute_frame_size which calculates the prolog size. */
1754
1755 static void
1756 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1757 {
1758 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1759
1760 /* If this is an interrupt handler, mark it as such. */
1761 if (M32R_INTERRUPT_P (fn_type))
1762 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1763
1764 if (! current_frame_info.initialized)
1765 m32r_compute_frame_size (size);
1766
1767 /* This is only for the human reader. */
1768 fprintf (file,
1769 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1770 ASM_COMMENT_START,
1771 current_frame_info.var_size,
1772 current_frame_info.reg_size / 4,
1773 current_frame_info.args_size,
1774 current_frame_info.extra_size);
1775 }
1776 \f
1777 /* Output RTL to pop register REGNO from the stack. */
1778
1779 static void
1780 pop (int regno)
1781 {
1782 rtx x;
1783
1784 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1785 stack_pointer_rtx));
1786 add_reg_note (x, REG_INC, stack_pointer_rtx);
1787 }
1788
1789 /* Expand the m32r epilogue as a series of insns. */
1790
1791 void
1792 m32r_expand_epilogue (void)
1793 {
1794 int regno;
1795 int noepilogue = FALSE;
1796 int total_size;
1797
1798 gcc_assert (current_frame_info.initialized);
1799 total_size = current_frame_info.total_size;
1800
1801 if (total_size == 0)
1802 {
1803 rtx insn = get_last_insn ();
1804
1805 /* If the last insn was a BARRIER, we don't have to write any code
1806 because a jump (aka return) was put there. */
1807 if (insn && NOTE_P (insn))
1808 insn = prev_nonnote_insn (insn);
1809 if (insn && BARRIER_P (insn))
1810 noepilogue = TRUE;
1811 }
1812
1813 if (!noepilogue)
1814 {
1815 unsigned int var_size = current_frame_info.var_size;
1816 unsigned int args_size = current_frame_info.args_size;
1817 unsigned int gmask = current_frame_info.gmask;
1818 int can_trust_sp_p = !cfun->calls_alloca;
1819
1820 if (flag_exceptions)
1821 emit_insn (gen_blockage ());
1822
1823 /* The first thing to do is point the sp at the bottom of the register
1824 save area. */
1825 if (can_trust_sp_p)
1826 {
1827 unsigned int reg_offset = var_size + args_size;
1828
1829 if (reg_offset == 0)
1830 ; /* Nothing to do. */
1831 else if (reg_offset < 32768)
1832 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1833 GEN_INT (reg_offset)));
1834 else
1835 {
1836 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1837
1838 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1839 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1840 tmp));
1841 }
1842 }
1843 else if (frame_pointer_needed)
1844 {
1845 unsigned int reg_offset = var_size + args_size;
1846
1847 if (reg_offset == 0)
1848 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1849 else if (reg_offset < 32768)
1850 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1851 GEN_INT (reg_offset)));
1852 else
1853 {
1854 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1855
1856 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1857 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1858 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1859 tmp));
1860 }
1861 }
1862 else
1863 gcc_unreachable ();
1864
1865 if (current_frame_info.save_lr)
1866 pop (RETURN_ADDR_REGNUM);
1867
1868 /* Restore any saved registers, in reverse order of course. */
1869 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1870 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1871 {
1872 if ((gmask & (1L << regno)) != 0)
1873 pop (regno);
1874 }
1875
1876 if (current_frame_info.save_fp)
1877 pop (FRAME_POINTER_REGNUM);
1878
1879 /* Remove varargs area if present. */
1880 if (current_frame_info.pretend_size != 0)
1881 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1882 GEN_INT (current_frame_info.pretend_size)));
1883
1884 emit_insn (gen_blockage ());
1885 }
1886 }
1887
1888 /* Do any necessary cleanup after a function to restore stack, frame,
1889 and regs. */
1890
1891 static void
1892 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1893 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1894 {
1895 /* Reset state info for each function. */
1896 current_frame_info = zero_frame_info;
1897 m32r_compute_function_type (NULL_TREE);
1898 }
1899 \f
1900 /* Return nonzero if this function is known to have a null or 1 instruction
1901 epilogue. */
1902
1903 int
1904 direct_return (void)
1905 {
1906 if (!reload_completed)
1907 return FALSE;
1908
1909 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1910 return FALSE;
1911
1912 if (! current_frame_info.initialized)
1913 m32r_compute_frame_size (get_frame_size ());
1914
1915 return current_frame_info.total_size == 0;
1916 }
1917
1918 \f
1919 /* PIC. */
1920
1921 int
1922 m32r_legitimate_pic_operand_p (rtx x)
1923 {
1924 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1925 return 0;
1926
1927 if (GET_CODE (x) == CONST
1928 && GET_CODE (XEXP (x, 0)) == PLUS
1929 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1930 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1931 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1932 return 0;
1933
1934 return 1;
1935 }
1936
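/* Convert ORIG, a SYMBOL_REF, LABEL_REF or CONST, into a PIC-safe address,
   using REG as the scratch/result register when one is supplied.  Roughly (a
   summary of the code below, not a specification): local symbols and labels
   are reached GOT-relative (a gotoff load plus an add of the PIC register),
   other symbols go through a GOT slot (load the slot address, add the PIC
   register, then load the entry), and CONST PLUS expressions are split and
   their pieces legitimized recursively.  */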
1937 rtx
1938 m32r_legitimize_pic_address (rtx orig, rtx reg)
1939 {
1940 #ifdef DEBUG_PIC
1941 printf("m32r_legitimize_pic_address()\n");
1942 #endif
1943
1944 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1945 {
1946 rtx pic_ref, address;
1947 int subregs = 0;
1948
1949 if (reg == 0)
1950 {
1951 gcc_assert (!reload_in_progress && !reload_completed);
1952 reg = gen_reg_rtx (Pmode);
1953
1954 subregs = 1;
1955 }
1956
1957 if (subregs)
1958 address = gen_reg_rtx (Pmode);
1959 else
1960 address = reg;
1961
1962 crtl->uses_pic_offset_table = 1;
1963
1964 if (GET_CODE (orig) == LABEL_REF
1965 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1966 {
1967 emit_insn (gen_gotoff_load_addr (reg, orig));
1968 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1969 return reg;
1970 }
1971
1972 emit_insn (gen_pic_load_addr (address, orig));
1973
1974 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1975 pic_ref = gen_const_mem (Pmode, address);
1976 emit_move_insn (reg, pic_ref);
1977 return reg;
1978 }
1979 else if (GET_CODE (orig) == CONST)
1980 {
1981 rtx base, offset;
1982
1983 if (GET_CODE (XEXP (orig, 0)) == PLUS
1984 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1985 return orig;
1986
1987 if (reg == 0)
1988 {
1989 gcc_assert (!reload_in_progress && !reload_completed);
1990 reg = gen_reg_rtx (Pmode);
1991 }
1992
1993 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1994 {
1995 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1996 if (base == reg)
1997 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1998 else
1999 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2000 }
2001 else
2002 return orig;
2003
2004 if (CONST_INT_P (offset))
2005 {
2006 if (INT16_P (INTVAL (offset)))
2007 return plus_constant (Pmode, base, INTVAL (offset));
2008 else
2009 {
2010 gcc_assert (! reload_in_progress && ! reload_completed);
2011 offset = force_reg (Pmode, offset);
2012 }
2013 }
2014
2015 return gen_rtx_PLUS (Pmode, base, offset);
2016 }
2017
2018 return orig;
2019 }
2020
2021 static rtx
2022 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2023 machine_mode mode ATTRIBUTE_UNUSED)
2024 {
2025 if (flag_pic)
2026 return m32r_legitimize_pic_address (x, NULL_RTX);
2027 else
2028 return x;
2029 }
2030
2031 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2032
2033 static bool
2034 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2035 {
2036 if (GET_CODE (addr) == LO_SUM)
2037 return true;
2038
2039 return false;
2040 }
2041 \f
2042 /* Nested function support. */
2043
2044 /* Emit RTL insns to initialize the variable parts of a trampoline.
2045 FNADDR is an RTX for the address of the function's pure code.
2046 CXT is an RTX for the static chain value for the function. */
2047
2048 void
2049 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2050 rtx fnaddr ATTRIBUTE_UNUSED,
2051 rtx cxt ATTRIBUTE_UNUSED)
2052 {
2053 }
2054 \f
2055 static void
2056 m32r_file_start (void)
2057 {
2058 default_file_start ();
2059
2060 if (flag_verbose_asm)
2061 fprintf (asm_out_file,
2062 "%s M32R/D special options: -G %d\n",
2063 ASM_COMMENT_START, g_switch_value);
2064
2065 if (TARGET_LITTLE_ENDIAN)
2066 fprintf (asm_out_file, "\t.little\n");
2067 }
2068 \f
2069 /* Print operand X (an rtx) in assembler syntax to file FILE.
2070 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2071 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2072
2073 static void
2074 m32r_print_operand (FILE * file, rtx x, int code)
2075 {
2076 rtx addr;
2077
2078 switch (code)
2079 {
2080 /* The 's' and 'p' codes are used by output_block_move() to
2081 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2082 case 's':
2083 if (REG_P (x))
2084 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2085 else
2086 output_operand_lossage ("invalid operand to %%s code");
2087 return;
2088
2089 case 'p':
2090 if (REG_P (x))
2091 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2092 else
2093 output_operand_lossage ("invalid operand to %%p code");
2094 return;
2095
2096 case 'R' :
2097 /* Write second word of DImode or DFmode reference,
2098 register or memory. */
2099 if (REG_P (x))
2100 fputs (reg_names[REGNO (x)+1], file);
2101 else if (MEM_P (x))
2102 {
2103 fprintf (file, "@(");
2104 /* Handle possible auto-increment. Since it is pre-increment and
2105 we have already done it, we can just use an offset of four. */
2106 /* ??? This is taken from rs6000.c I think. I don't think it is
2107 currently necessary, but keep it around. */
2108 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2109 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2110 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2111 else
2112 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2113 fputc (')', file);
2114 }
2115 else
2116 output_operand_lossage ("invalid operand to %%R code");
2117 return;
2118
2119 case 'H' : /* High word. */
2120 case 'L' : /* Low word. */
2121 if (REG_P (x))
2122 {
2123 /* L = least significant word, H = most significant word. */
2124 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2125 fputs (reg_names[REGNO (x)], file);
2126 else
2127 fputs (reg_names[REGNO (x)+1], file);
2128 }
2129 else if (CONST_INT_P (x)
2130 || GET_CODE (x) == CONST_DOUBLE)
2131 {
2132 rtx first, second;
2133
2134 split_double (x, &first, &second);
2135 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2136 code == 'L' ? INTVAL (first) : INTVAL (second));
2137 }
2138 else
2139 output_operand_lossage ("invalid operand to %%H/%%L code");
2140 return;
2141
2142 case 'A' :
2143 {
2144 char str[30];
2145
2146 if (GET_CODE (x) != CONST_DOUBLE
2147 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2148 fatal_insn ("bad insn for 'A'", x);
2149
2150 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2151 fprintf (file, "%s", str);
2152 return;
2153 }
2154
2155 case 'B' : /* Bottom half. */
2156 case 'T' : /* Top half. */
2157 /* Output the argument to a `seth' insn (sets the Top half-word).
2158 For constants output arguments to a seth/or3 pair to set Top and
2159 Bottom halves. For symbols output arguments to a seth/add3 pair to
2160 set Top and Bottom halves. The difference exists because for
2161 constants seth/or3 is more readable but for symbols we need to use
2162 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
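      /* An illustrative sketch (the seth/or3/add3 mnemonics come from the
	 patterns in m32r.md, not from this function; register numbers are
	 arbitrary):
	     seth r4, #0x1234          ; %T of the constant 0x12345678
	     or3  r4, r4, #0x5678      ; %B of the same constant
	     seth r4, #shigh(sym)      ; %T of a symbol
	     add3 r4, r4, #low(sym)    ; %B of the same symbol  */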
2163 switch (GET_CODE (x))
2164 {
2165 case CONST_INT :
2166 case CONST_DOUBLE :
2167 {
2168 rtx first, second;
2169
2170 split_double (x, &first, &second);
2171 x = WORDS_BIG_ENDIAN ? second : first;
2172 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2173 (code == 'B'
2174 ? INTVAL (x) & 0xffff
2175 : (INTVAL (x) >> 16) & 0xffff));
2176 }
2177 return;
2178 case CONST :
2179 case SYMBOL_REF :
2180 if (code == 'B'
2181 && small_data_operand (x, VOIDmode))
2182 {
2183 fputs ("sda(", file);
2184 output_addr_const (file, x);
2185 fputc (')', file);
2186 return;
2187 }
2188 /* fall through */
2189 case LABEL_REF :
2190 fputs (code == 'T' ? "shigh(" : "low(", file);
2191 output_addr_const (file, x);
2192 fputc (')', file);
2193 return;
2194 default :
2195 output_operand_lossage ("invalid operand to %%T/%%B code");
2196 return;
2197 }
2198 break;
2199
2200 case 'U' :
2201 /* ??? wip */
2202 /* Output a load/store with update indicator if appropriate. */
2203 if (MEM_P (x))
2204 {
2205 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2206 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2207 fputs (".a", file);
2208 }
2209 else
2210 output_operand_lossage ("invalid operand to %%U code");
2211 return;
2212
2213 case 'N' :
2214 /* Print a constant value negated. */
2215 if (CONST_INT_P (x))
2216 output_addr_const (file, GEN_INT (- INTVAL (x)));
2217 else
2218 output_operand_lossage ("invalid operand to %%N code");
2219 return;
2220
2221 case 'X' :
2222 /* Print a const_int in hex. Used in comments. */
2223 if (CONST_INT_P (x))
2224 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2225 return;
2226
2227 case '#' :
2228 fputs (IMMEDIATE_PREFIX, file);
2229 return;
2230
2231 case 0 :
2232 /* Do nothing special. */
2233 break;
2234
2235 default :
2236 /* Unknown flag. */
2237 output_operand_lossage ("invalid operand output code");
2238 }
2239
2240 switch (GET_CODE (x))
2241 {
2242 case REG :
2243 fputs (reg_names[REGNO (x)], file);
2244 break;
2245
2246 case MEM :
2247 addr = XEXP (x, 0);
2248 if (GET_CODE (addr) == PRE_INC)
2249 {
2250 if (!REG_P (XEXP (addr, 0)))
2251 fatal_insn ("pre-increment address is not a register", x);
2252
2253 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2254 }
2255 else if (GET_CODE (addr) == PRE_DEC)
2256 {
2257 if (!REG_P (XEXP (addr, 0)))
2258 fatal_insn ("pre-decrement address is not a register", x);
2259
2260 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2261 }
2262 else if (GET_CODE (addr) == POST_INC)
2263 {
2264 if (!REG_P (XEXP (addr, 0)))
2265 fatal_insn ("post-increment address is not a register", x);
2266
2267 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2268 }
2269 else
2270 {
2271 fputs ("@(", file);
2272 output_address (XEXP (x, 0));
2273 fputc (')', file);
2274 }
2275 break;
2276
2277 case CONST_DOUBLE :
2278 /* We handle SFmode constants here as output_addr_const doesn't. */
2279 if (GET_MODE (x) == SFmode)
2280 {
2281 long l;
2282
2283 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2284 fprintf (file, "0x%08lx", l);
2285 break;
2286 }
2287
2288 /* Fall through. Let output_addr_const deal with it. */
2289
2290 default :
2291 output_addr_const (file, x);
2292 break;
2293 }
2294 }
2295
2296 /* Print a memory address as an operand to reference that memory location. */
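/* Typical outputs, as a rough guide (any enclosing "@( ... )" seen in the
   final assembly is added by callers such as the MEM case of
   m32r_print_operand, not here): "r4" for a bare register, "4,r5" for
   register plus offset, "low(sym),r6" or "sda(sym),r6" for LO_SUM addresses,
   and "+r7" / "-r7" / "r7+" for the pre/post increment and decrement forms.  */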
2297
2298 static void
2299 m32r_print_operand_address (FILE * file, rtx addr)
2300 {
2301 rtx base;
2302 rtx index = 0;
2303 int offset = 0;
2304
2305 switch (GET_CODE (addr))
2306 {
2307 case REG :
2308 fputs (reg_names[REGNO (addr)], file);
2309 break;
2310
2311 case PLUS :
2312 if (CONST_INT_P (XEXP (addr, 0)))
2313 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2314 else if (CONST_INT_P (XEXP (addr, 1)))
2315 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2316 else
2317 base = XEXP (addr, 0), index = XEXP (addr, 1);
2318 if (REG_P (base))
2319 {
2320 /* Print the offset first (if present) to conform to the manual. */
2321 if (index == 0)
2322 {
2323 if (offset != 0)
2324 fprintf (file, "%d,", offset);
2325 fputs (reg_names[REGNO (base)], file);
2326 }
2327 /* The chip doesn't support this, but left in for generality. */
2328 else if (REG_P (index))
2329 fprintf (file, "%s,%s",
2330 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2331 /* Not sure this can happen, but leave in for now. */
2332 else if (GET_CODE (index) == SYMBOL_REF)
2333 {
2334 output_addr_const (file, index);
2335 fputc (',', file);
2336 fputs (reg_names[REGNO (base)], file);
2337 }
2338 else
2339 fatal_insn ("bad address", addr);
2340 }
2341 else if (GET_CODE (base) == LO_SUM)
2342 {
2343 gcc_assert (!index && REG_P (XEXP (base, 0)));
2344 if (small_data_operand (XEXP (base, 1), VOIDmode))
2345 fputs ("sda(", file);
2346 else
2347 fputs ("low(", file);
2348 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2349 offset));
2350 fputs ("),", file);
2351 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2352 }
2353 else
2354 fatal_insn ("bad address", addr);
2355 break;
2356
2357 case LO_SUM :
2358 if (!REG_P (XEXP (addr, 0)))
2359 fatal_insn ("lo_sum not of register", addr);
2360 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2361 fputs ("sda(", file);
2362 else
2363 fputs ("low(", file);
2364 output_addr_const (file, XEXP (addr, 1));
2365 fputs ("),", file);
2366 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2367 break;
2368
2369 case PRE_INC : /* Assume SImode. */
2370 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2371 break;
2372
2373 case PRE_DEC : /* Assume SImode. */
2374 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2375 break;
2376
2377 case POST_INC : /* Assume SImode. */
2378 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2379 break;
2380
2381 default :
2382 output_addr_const (file, addr);
2383 break;
2384 }
2385 }
2386
2387 static bool
2388 m32r_print_operand_punct_valid_p (unsigned char code)
2389 {
2390 return m32r_punct_chars[code];
2391 }
2392
2393 /* Return true if the operands are the constants 0 and 1. */
2394
2395 int
2396 zero_and_one (rtx operand1, rtx operand2)
2397 {
2398 return
2399 CONST_INT_P (operand1)
2400 && CONST_INT_P (operand2)
2401 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2402 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2403 }
2404
2405 /* Generate the correct assembler code to handle the conditional loading of a
2406 value into a register. It is known that the operands satisfy the
2407 conditional_move_operand() function above. The destination is operands[0].
2408 The condition is operands[1]. The 'true' value is operands[2] and the
2409 'false' value is operands[3]. */
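/* For instance (a sketch only; register numbers are arbitrary): the condition
   bit is copied with "mvfc r4, cbr", and when the value that has to reach the
   destination for a true comparison works out to be 0 (after allowing for the
   reversed NE test handled below), "\n\txor3 r4, r4, #1" is appended to
   invert the copied bit.  */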
2410
2411 char *
2412 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2413 {
2414 static char buffer [100];
2415 const char * dest = reg_names [REGNO (operands [0])];
2416
2417 buffer [0] = 0;
2418
2419 /* Destination must be a register. */
2420 gcc_assert (REG_P (operands [0]));
2421 gcc_assert (conditional_move_operand (operands [2], SImode));
2422 gcc_assert (conditional_move_operand (operands [3], SImode));
2423
2424 /* Check to see if the test is reversed. */
2425 if (GET_CODE (operands [1]) == NE)
2426 {
2427 rtx tmp = operands [2];
2428 operands [2] = operands [3];
2429 operands [3] = tmp;
2430 }
2431
2432 sprintf (buffer, "mvfc %s, cbr", dest);
2433
2434 /* If the true value was '0' then we need to invert the results of the move. */
2435 if (INTVAL (operands [2]) == 0)
2436 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2437 dest, dest);
2438
2439 return buffer;
2440 }
2441
2442 /* Returns true if the registers contained in the two
2443 rtl expressions are different. */
2444
2445 int
2446 m32r_not_same_reg (rtx a, rtx b)
2447 {
2448 int reg_a = -1;
2449 int reg_b = -2;
2450
2451 while (GET_CODE (a) == SUBREG)
2452 a = SUBREG_REG (a);
2453
2454 if (REG_P (a))
2455 reg_a = REGNO (a);
2456
2457 while (GET_CODE (b) == SUBREG)
2458 b = SUBREG_REG (b);
2459
2460 if (REG_P (b))
2461 reg_b = REGNO (b);
2462
2463 return reg_a != reg_b;
2464 }
2465
2466 \f
2467 rtx
2468 m32r_function_symbol (const char *name)
2469 {
2470 int extra_flags = 0;
2471 enum m32r_model model;
2472 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2473
2474 if (TARGET_MODEL_SMALL)
2475 model = M32R_MODEL_SMALL;
2476 else if (TARGET_MODEL_MEDIUM)
2477 model = M32R_MODEL_MEDIUM;
2478 else if (TARGET_MODEL_LARGE)
2479 model = M32R_MODEL_LARGE;
2480 else
2481 gcc_unreachable (); /* Shouldn't happen. */
2482 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2483
2484 if (extra_flags)
2485 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2486
2487 return sym;
2488 }
2489
2490 /* Use a library function to move some bytes. */
2491
2492 static void
2493 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2494 {
2495 /* We want to pass the size as Pmode, which will normally be SImode
2496 but will be DImode if we are using 64-bit longs and pointers. */
2497 if (GET_MODE (bytes_rtx) != VOIDmode
2498 && GET_MODE (bytes_rtx) != Pmode)
2499 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2500
2501 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2502 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2503 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2504 TYPE_UNSIGNED (sizetype)),
2505 TYPE_MODE (sizetype));
2506 }
2507
2508 /* Expand string/block move operations.
2509
2510 operands[0] is the pointer to the destination.
2511 operands[1] is the pointer to the source.
2512 operands[2] is the number of bytes to move.
2513 operands[3] is the alignment.
2514
2515 Returns 1 upon success, 0 otherwise. */
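/* A worked example, purely for illustration (MAX_MOVE_BYTES is defined in
   m32r.h; the value 16 is assumed here, not quoted): a word-aligned copy of a
   known 38-byte block becomes a loop that moves two 16-byte chunks through
   movmemsi_internal, followed by a 6-byte leftover copy, while an unaligned
   or variable-length copy falls back to a call to memcpy.  */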
2516
2517 int
2518 m32r_expand_block_move (rtx operands[])
2519 {
2520 rtx orig_dst = operands[0];
2521 rtx orig_src = operands[1];
2522 rtx bytes_rtx = operands[2];
2523 rtx align_rtx = operands[3];
2524 int constp = CONST_INT_P (bytes_rtx);
2525 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2526 int align = INTVAL (align_rtx);
2527 int leftover;
2528 rtx src_reg;
2529 rtx dst_reg;
2530
2531 if (constp && bytes <= 0)
2532 return 1;
2533
2534 /* Move the address into scratch registers. */
2535 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2536 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2537
2538 if (align > UNITS_PER_WORD)
2539 align = UNITS_PER_WORD;
2540
2541 /* If we prefer size over speed, always use a function call.
2542 If we do not know the size, use a function call.
2543 If the blocks are not word aligned, use a function call. */
2544 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2545 {
2546 block_move_call (dst_reg, src_reg, bytes_rtx);
2547 return 0;
2548 }
2549
2550 leftover = bytes % MAX_MOVE_BYTES;
2551 bytes -= leftover;
2552
2553 /* If necessary, generate a loop to handle the bulk of the copy. */
2554 if (bytes)
2555 {
2556 rtx_code_label *label = NULL;
2557 rtx final_src = NULL_RTX;
2558 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2559 rtx rounded_total = GEN_INT (bytes);
2560 rtx new_dst_reg = gen_reg_rtx (SImode);
2561 rtx new_src_reg = gen_reg_rtx (SImode);
2562
2563 /* If we are going to have to perform this loop more than
2564 once, then generate a label and compute the address the
2565 source register will contain upon completion of the final
2566 iteration. */
2567 if (bytes > MAX_MOVE_BYTES)
2568 {
2569 final_src = gen_reg_rtx (Pmode);
2570
2571 if (INT16_P (bytes))
2572 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2573 else
2574 {
2575 emit_insn (gen_movsi (final_src, rounded_total));
2576 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2577 }
2578
2579 label = gen_label_rtx ();
2580 emit_label (label);
2581 }
2582
2583 /* It is known that output_block_move() will update src_reg to point
2584 to the word after the end of the source block, and dst_reg to point
2585 to the last word of the destination block, provided that the block
2586 is MAX_MOVE_BYTES long. */
2587 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2588 new_dst_reg, new_src_reg));
2589 emit_move_insn (dst_reg, new_dst_reg);
2590 emit_move_insn (src_reg, new_src_reg);
2591 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2592
2593 if (bytes > MAX_MOVE_BYTES)
2594 {
2595 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2596 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2597 }
2598 }
2599
2600 if (leftover)
2601 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2602 gen_reg_rtx (SImode),
2603 gen_reg_rtx (SImode)));
2604 return 1;
2605 }
2606
2607 \f
2608 /* Emit loads/stores for a small, constant-length, word-aligned block move.
2609
2610 operands[0] is the memory address of the destination.
2611 operands[1] is the memory address of the source.
2612 operands[2] is the number of bytes to move.
2613 operands[3] is a temp register.
2614 operands[4] is a temp register. */
2615
2616 void
2617 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2618 {
2619 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2620 int first_time;
2621 int got_extra = 0;
2622
2623 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2624
2625 /* We do not have a post-increment store available, so the first set of
2626 stores is done without any increment; the remaining ones can then use
2627 the pre-increment addressing mode.
2628
2629 Note: expand_block_move() also relies upon this behavior when building
2630 loops to copy large blocks. */
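  /* So, with hypothetical register assignments, the first aligned 8-byte
     chunk comes out roughly as
	 ld	r5,@r1+		; %p : post-increment load
	 ld	r6,@r1+
	 st	r5,@r3		; first store, no increment
	 st	r6,@+r3		; %s : pre-increment store
     and later chunks use the pre-increment form for every store.  */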
2631 first_time = 1;
2632
2633 while (bytes > 0)
2634 {
2635 if (bytes >= 8)
2636 {
2637 if (first_time)
2638 {
2639 output_asm_insn ("ld\t%5, %p1", operands);
2640 output_asm_insn ("ld\t%6, %p1", operands);
2641 output_asm_insn ("st\t%5, @%0", operands);
2642 output_asm_insn ("st\t%6, %s0", operands);
2643 }
2644 else
2645 {
2646 output_asm_insn ("ld\t%5, %p1", operands);
2647 output_asm_insn ("ld\t%6, %p1", operands);
2648 output_asm_insn ("st\t%5, %s0", operands);
2649 output_asm_insn ("st\t%6, %s0", operands);
2650 }
2651
2652 bytes -= 8;
2653 }
2654 else if (bytes >= 4)
2655 {
2656 if (bytes > 4)
2657 got_extra = 1;
2658
2659 output_asm_insn ("ld\t%5, %p1", operands);
2660
2661 if (got_extra)
2662 output_asm_insn ("ld\t%6, %p1", operands);
2663
2664 if (first_time)
2665 output_asm_insn ("st\t%5, @%0", operands);
2666 else
2667 output_asm_insn ("st\t%5, %s0", operands);
2668
2669 bytes -= 4;
2670 }
2671 else
2672 {
2673 /* Get the entire next word, even though we do not want all of it.
2674 This saves us from doing several smaller loads, and we assume that
2675 we cannot cause a page fault when at least part of the word is in
2676 valid memory [since we don't get called if things aren't properly
2677 aligned]. */
2678 int dst_offset = first_time ? 0 : 4;
2679 /* The amount of increment we have to make to the
2680 destination pointer. */
2681 int dst_inc_amount = dst_offset + bytes - 4;
2682 /* The same for the source pointer. */
2683 int src_inc_amount = bytes;
2684 int last_shift;
2685 rtx my_operands[3];
2686
2687 /* If got_extra is true then we have already loaded
2688 the next word as part of loading and storing the previous word. */
2689 if (! got_extra)
2690 output_asm_insn ("ld\t%6, @%1", operands);
2691
2692 if (bytes >= 2)
2693 {
2694 bytes -= 2;
2695
2696 output_asm_insn ("sra3\t%5, %6, #16", operands);
2697 my_operands[0] = operands[5];
2698 my_operands[1] = GEN_INT (dst_offset);
2699 my_operands[2] = operands[0];
2700 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2701
2702 /* If there is a byte left to store then increment the
2703 destination address and shift the contents of the source
2704 register down by 8 bits. We could not do the address
2705 increment in the store half word instruction, because it does
2706 not have an auto increment mode. */
2707 if (bytes > 0) /* assert (bytes == 1) */
2708 {
2709 dst_offset += 2;
2710 last_shift = 8;
2711 }
2712 }
2713 else
2714 last_shift = 24;
2715
2716 if (bytes > 0)
2717 {
2718 my_operands[0] = operands[6];
2719 my_operands[1] = GEN_INT (last_shift);
2720 output_asm_insn ("srai\t%0, #%1", my_operands);
2721 my_operands[0] = operands[6];
2722 my_operands[1] = GEN_INT (dst_offset);
2723 my_operands[2] = operands[0];
2724 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2725 }
2726
2727 /* Update the destination pointer if needed. We have to do
2728 this so that the patterns match what we output in this
2729 function. */
2730 if (dst_inc_amount
2731 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2732 {
2733 my_operands[0] = operands[0];
2734 my_operands[1] = GEN_INT (dst_inc_amount);
2735 output_asm_insn ("addi\t%0, #%1", my_operands);
2736 }
2737
2738 /* Update the source pointer if needed. We have to do this
2739 so that the patterns match what we output in this
2740 function. */
2741 if (src_inc_amount
2742 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2743 {
2744 my_operands[0] = operands[1];
2745 my_operands[1] = GEN_INT (src_inc_amount);
2746 output_asm_insn ("addi\t%0, #%1", my_operands);
2747 }
2748
2749 bytes = 0;
2750 }
2751
2752 first_time = 0;
2753 }
2754 }
2755
2756 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2757
2758 int
2759 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2760 unsigned int new_reg)
2761 {
2762 /* Interrupt routines can't clobber any register that isn't already used. */
2763 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2764 && !df_regs_ever_live_p (new_reg))
2765 return 0;
2766
2767 return 1;
2768 }
2769
2770 rtx
2771 m32r_return_addr (int count)
2772 {
2773 if (count != 0)
2774 return const0_rtx;
2775
2776 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2777 }
2778
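/* Fill in a trampoline.  As the code below shows, the first four words hold
   a fixed, endian-selected instruction sequence, the word at offset 16
   receives the static chain value and the word at offset 20 the address of
   the nested function; the instruction cache is then flushed, either through
   the flush_icache pattern with the configured trap number
   (m32r_cache_flush_trap) or by calling the function named by
   m32r_cache_flush_func.  */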
2779 static void
2780 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2781 {
2782 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2783 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2784 0x017e8e17 : 0x178e7e01, SImode));
2785 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2786 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2787 0x0c00ae86 : 0x86ae000c, SImode));
2788 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2789 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2790 0xe627871e : 0x1e8727e6, SImode));
2791 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2792 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2793 0xc616c626 : 0x26c61fc6, SImode));
2794 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2795 chain_value);
2796 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2797 XEXP (DECL_RTL (fndecl), 0));
2798
2799 if (m32r_cache_flush_trap >= 0)
2800 emit_insn (gen_flush_icache
2801 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2802 gen_int_mode (m32r_cache_flush_trap, SImode)));
2803 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2804 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2805 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2806 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2807 GEN_INT (3), SImode);
2808 }
2809
2810 /* True if X is a reg that can be used as a base reg. */
2811
2812 static bool
2813 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2814 {
2815 if (! REG_P (x))
2816 return false;
2817
2818 if (strict)
2819 {
2820 if (GPR_P (REGNO (x)))
2821 return true;
2822 }
2823 else
2824 {
2825 if (GPR_P (REGNO (x))
2826 || REGNO (x) == ARG_POINTER_REGNUM
2827 || ! HARD_REGISTER_P (x))
2828 return true;
2829 }
2830
2831 return false;
2832 }
2833
2834 static inline bool
2835 m32r_rtx_ok_for_offset_p (const_rtx x)
2836 {
2837 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2838 }
2839
2840 static inline bool
2841 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2842 const_rtx x, bool strict)
2843 {
2844 if (GET_CODE (x) == PLUS
2845 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2846 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2847 return true;
2848
2849 return false;
2850 }
2851
2852 /* Do not allow LO_SUM addresses if the MODE is wider than one word,
2853 since more than one instruction would be required. */
2854
2855 static inline bool
2856 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2857 bool strict)
2858 {
2859 if (GET_CODE (x) == LO_SUM
2860 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2861 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2862 && CONSTANT_P (XEXP (x, 1)))
2863 return true;
2864
2865 return false;
2866 }
2867
2868 /* Is this a load-and-increment operation? */
2869
2870 static inline bool
2871 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2872 {
2873 if ((mode == SImode || mode == SFmode)
2874 && GET_CODE (x) == POST_INC
2875 && REG_P (XEXP (x, 0))
2876 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2877 return true;
2878
2879 return false;
2880 }
2881
2882 /* Is this an increment/decrement and store operation? */
2883
2884 static inline bool
2885 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2886 {
2887 if ((mode == SImode || mode == SFmode)
2888 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2889 && REG_P (XEXP (x, 0))
2890 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2891 return true;
2892
2893 return false;
2894 }
2895
2896 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
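/* Summarizing the helper predicates above: a valid address is a base
   register; a base register plus a signed 16-bit constant; a LO_SUM of a
   base register and a constant for modes of at most one word; a POST_INC
   register for SImode/SFmode loads; or a PRE_INC/PRE_DEC register for
   SImode/SFmode stores.  */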
2897
2898 static bool
2899 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2900 {
2901 if (m32r_rtx_ok_for_base_p (x, strict)
2902 || m32r_legitimate_offset_addres_p (mode, x, strict)
2903 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2904 || m32r_load_postinc_p (mode, x, strict)
2905 || m32r_store_preinc_predec_p (mode, x, strict))
2906 return true;
2907
2908 return false;
2909 }
2910
2911 static void
2912 m32r_conditional_register_usage (void)
2913 {
2914 if (flag_pic)
2915 {
2916 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2917 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2918 }
2919 }
2920
2921 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2922
2923 We don't allow (plus symbol large-constant) as the relocations can't
2924 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2925 We allow all CONST_DOUBLE's as the md file patterns will force the
2926 constant to memory if they can't handle them. */
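/* For example, (const (plus (symbol_ref "x") (const_int 4))) is accepted,
   while (const (plus (symbol_ref "x") (const_int 0x12345))) is rejected,
   since no relocation can describe such a large addend.  */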
2927
2928 static bool
2929 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2930 {
2931 return !(GET_CODE (x) == CONST
2932 && GET_CODE (XEXP (x, 0)) == PLUS
2933 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2934 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2935 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2936 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2937 }