gcc/config/m32r/m32r.c
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "df.h"
27 #include "alias.h"
28 #include "stor-layout.h"
29 #include "varasm.h"
30 #include "stringpool.h"
31 #include "calls.h"
32 #include "regs.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "dbxout.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "expmed.h"
40 #include "dojump.h"
41 #include "explow.h"
42 #include "emit-rtl.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "recog.h"
46 #include "diagnostic-core.h"
47 #include "cfgrtl.h"
48 #include "cfganal.h"
49 #include "lcm.h"
50 #include "cfgbuild.h"
51 #include "cfgcleanup.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "tm-constrs.h"
55 #include "opts.h"
56 #include "builtins.h"
57
58 /* This file should be included last. */
59 #include "target-def.h"
60
61 /* Array of valid operand punctuation characters. */
62 static char m32r_punct_chars[256];
63
64 /* Machine-specific symbol_ref flags. */
65 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
66 #define SYMBOL_REF_MODEL(X) \
67 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
68
69 /* For string literals, etc. */
70 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
71
72 /* Forward declaration. */
73 static void m32r_option_override (void);
74 static void init_reg_tables (void);
75 static void block_move_call (rtx, rtx, rtx);
76 static int m32r_is_insn (rtx);
77 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
78 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
79 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
80 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
81 static void m32r_print_operand (FILE *, rtx, int);
82 static void m32r_print_operand_address (FILE *, rtx);
83 static bool m32r_print_operand_punct_valid_p (unsigned char code);
84 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
85 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
86
87 static void m32r_file_start (void);
88
89 static int m32r_adjust_priority (rtx_insn *, int);
90 static int m32r_issue_rate (void);
91
92 static void m32r_encode_section_info (tree, rtx, int);
93 static bool m32r_in_small_data_p (const_tree);
94 static bool m32r_return_in_memory (const_tree, const_tree);
95 static rtx m32r_function_value (const_tree, const_tree, bool);
96 static rtx m32r_libcall_value (machine_mode, const_rtx);
97 static bool m32r_function_value_regno_p (const unsigned int);
98 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
99 tree, int *, int);
100 static void init_idents (void);
101 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
102 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
103 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
104 const_tree, bool);
105 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
106 tree, bool);
107 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
108 const_tree, bool);
109 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
110 const_tree, bool);
111 static bool m32r_can_eliminate (const int, const int);
112 static void m32r_conditional_register_usage (void);
113 static void m32r_trampoline_init (rtx, tree, rtx);
114 static bool m32r_legitimate_constant_p (machine_mode, rtx);
115 \f
116 /* M32R specific attributes. */
117
118 static const struct attribute_spec m32r_attribute_table[] =
119 {
120 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
121 affects_type_identity } */
122 { "interrupt", 0, 0, true, false, false, NULL, false },
123 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
124 false },
125 { NULL, 0, 0, false, false, false, NULL, false }
126 };
127 \f
128 /* Initialize the GCC target structure. */
129 #undef TARGET_ATTRIBUTE_TABLE
130 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
131
132 #undef TARGET_LEGITIMATE_ADDRESS_P
133 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
134 #undef TARGET_LEGITIMIZE_ADDRESS
135 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
136 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
137 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
138
139 #undef TARGET_ASM_ALIGNED_HI_OP
140 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
141 #undef TARGET_ASM_ALIGNED_SI_OP
142 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
143
144 #undef TARGET_PRINT_OPERAND
145 #define TARGET_PRINT_OPERAND m32r_print_operand
146 #undef TARGET_PRINT_OPERAND_ADDRESS
147 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
148 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
149 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
150
151 #undef TARGET_ASM_FUNCTION_PROLOGUE
152 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
153 #undef TARGET_ASM_FUNCTION_EPILOGUE
154 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
155
156 #undef TARGET_ASM_FILE_START
157 #define TARGET_ASM_FILE_START m32r_file_start
158
159 #undef TARGET_SCHED_ADJUST_PRIORITY
160 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
161 #undef TARGET_SCHED_ISSUE_RATE
162 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
163
164 #undef TARGET_OPTION_OVERRIDE
165 #define TARGET_OPTION_OVERRIDE m32r_option_override
166
167 #undef TARGET_ENCODE_SECTION_INFO
168 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
169 #undef TARGET_IN_SMALL_DATA_P
170 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
171
172
173 #undef TARGET_MEMORY_MOVE_COST
174 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
175 #undef TARGET_RTX_COSTS
176 #define TARGET_RTX_COSTS m32r_rtx_costs
177 #undef TARGET_ADDRESS_COST
178 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
179
180 #undef TARGET_PROMOTE_PROTOTYPES
181 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
182 #undef TARGET_RETURN_IN_MEMORY
183 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
184
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE m32r_function_value
187 #undef TARGET_LIBCALL_VALUE
188 #define TARGET_LIBCALL_VALUE m32r_libcall_value
189 #undef TARGET_FUNCTION_VALUE_REGNO_P
190 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
191
192 #undef TARGET_SETUP_INCOMING_VARARGS
193 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
194 #undef TARGET_MUST_PASS_IN_STACK
195 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
196 #undef TARGET_PASS_BY_REFERENCE
197 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
198 #undef TARGET_ARG_PARTIAL_BYTES
199 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
200 #undef TARGET_FUNCTION_ARG
201 #define TARGET_FUNCTION_ARG m32r_function_arg
202 #undef TARGET_FUNCTION_ARG_ADVANCE
203 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
204
205 #undef TARGET_CAN_ELIMINATE
206 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
207
208 #undef TARGET_CONDITIONAL_REGISTER_USAGE
209 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
210
211 #undef TARGET_TRAMPOLINE_INIT
212 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
213
214 #undef TARGET_LEGITIMATE_CONSTANT_P
215 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
216
217 struct gcc_target targetm = TARGET_INITIALIZER;
218 \f
219 /* Called by m32r_option_override to initialize various things. */
220
221 void
222 m32r_init (void)
223 {
224 init_reg_tables ();
225
226 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
227 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
228 m32r_punct_chars['#'] = 1;
229 m32r_punct_chars['@'] = 1; /* ??? no longer used */
230
231 /* Provide default value if not specified. */
232 if (!global_options_set.x_g_switch_value)
233 g_switch_value = SDATA_DEFAULT_SIZE;
234 }
235
236 static void
237 m32r_option_override (void)
238 {
239 /* These need to be done at start up.
240 It's convenient to do them here. */
241 m32r_init ();
242 SUBTARGET_OVERRIDE_OPTIONS;
243 }
244
245 /* Vectors to keep interesting information about registers where it can easily
246 be accessed. We used to use the actual mode value as the bit number, but
247 there are (or may be) more than 32 modes now. Instead we use two tables:
248 one indexed by hard register number, and one indexed by mode. */
249
250 /* The purpose of m32r_mode_class is to shrink the range of modes so that
251 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
252 mapped into one m32r_mode_class mode. */
253
254 enum m32r_mode_class
255 {
256 C_MODE,
257 S_MODE, D_MODE, T_MODE, O_MODE,
258 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
259 };
260
261 /* Modes for condition codes. */
262 #define C_MODES (1 << (int) C_MODE)
263
264 /* Modes for single-word and smaller quantities. */
265 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
266
267 /* Modes for double-word and smaller quantities. */
268 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
269
270 /* Modes for quad-word and smaller quantities. */
271 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
272
273 /* Modes for accumulators. */
274 #define A_MODES (1 << (int) A_MODE)
275
276 /* Value is 1 if register/mode pair is acceptable on the M32R. */
277
278 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
279 {
280 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
281 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
282 S_MODES, C_MODES, A_MODES, A_MODES
283 };
284
285 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
286
287 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
288
289 static void
290 init_reg_tables (void)
291 {
292 int i;
293
294 for (i = 0; i < NUM_MACHINE_MODES; i++)
295 {
296 machine_mode m = (machine_mode) i;
297
298 switch (GET_MODE_CLASS (m))
299 {
300 case MODE_INT:
301 case MODE_PARTIAL_INT:
302 case MODE_COMPLEX_INT:
303 if (GET_MODE_SIZE (m) <= 4)
304 m32r_mode_class[i] = 1 << (int) S_MODE;
305 else if (GET_MODE_SIZE (m) == 8)
306 m32r_mode_class[i] = 1 << (int) D_MODE;
307 else if (GET_MODE_SIZE (m) == 16)
308 m32r_mode_class[i] = 1 << (int) T_MODE;
309 else if (GET_MODE_SIZE (m) == 32)
310 m32r_mode_class[i] = 1 << (int) O_MODE;
311 else
312 m32r_mode_class[i] = 0;
313 break;
314 case MODE_FLOAT:
315 case MODE_COMPLEX_FLOAT:
316 if (GET_MODE_SIZE (m) <= 4)
317 m32r_mode_class[i] = 1 << (int) SF_MODE;
318 else if (GET_MODE_SIZE (m) == 8)
319 m32r_mode_class[i] = 1 << (int) DF_MODE;
320 else if (GET_MODE_SIZE (m) == 16)
321 m32r_mode_class[i] = 1 << (int) TF_MODE;
322 else if (GET_MODE_SIZE (m) == 32)
323 m32r_mode_class[i] = 1 << (int) OF_MODE;
324 else
325 m32r_mode_class[i] = 0;
326 break;
327 case MODE_CC:
328 m32r_mode_class[i] = 1 << (int) C_MODE;
329 break;
330 default:
331 m32r_mode_class[i] = 0;
332 break;
333 }
334 }
335
336 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
337 {
338 if (GPR_P (i))
339 m32r_regno_reg_class[i] = GENERAL_REGS;
340 else if (i == ARG_POINTER_REGNUM)
341 m32r_regno_reg_class[i] = GENERAL_REGS;
342 else
343 m32r_regno_reg_class[i] = NO_REGS;
344 }
345 }
346 \f
347 /* M32R specific attribute support.
348
349 interrupt - for interrupt functions
350
351 model - select code model used to access object
352
353 small: addresses use 24 bits, use bl to make calls
354 medium: addresses use 32 bits, use bl to make calls
355 large: addresses use 32 bits, use seth/add3/jl to make calls
356
357 Grep for MODEL in m32r.h for more info. */
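
/* Illustrative usage (added sketch, not part of the original sources): a
   declaration can select its code model explicitly, roughly as the GCC
   manual describes for the M32R `model' attribute:

     int foo __attribute__ ((model (small)));
     void far_func (void) __attribute__ ((model (large)));

   The handler below accepts small/__small__, medium/__medium__ or
   large/__large__ as the single argument; m32r_encode_section_info then
   records the chosen model in the SYMBOL_REF flags.  */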
358
359 static tree small_ident1;
360 static tree small_ident2;
361 static tree medium_ident1;
362 static tree medium_ident2;
363 static tree large_ident1;
364 static tree large_ident2;
365
366 static void
367 init_idents (void)
368 {
369 if (small_ident1 == 0)
370 {
371 small_ident1 = get_identifier ("small");
372 small_ident2 = get_identifier ("__small__");
373 medium_ident1 = get_identifier ("medium");
374 medium_ident2 = get_identifier ("__medium__");
375 large_ident1 = get_identifier ("large");
376 large_ident2 = get_identifier ("__large__");
377 }
378 }
379
380 /* Handle a "model" attribute; arguments as in
381 struct attribute_spec.handler. */
382 static tree
383 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
384 tree args, int flags ATTRIBUTE_UNUSED,
385 bool *no_add_attrs)
386 {
387 tree arg;
388
389 init_idents ();
390 arg = TREE_VALUE (args);
391
392 if (arg != small_ident1
393 && arg != small_ident2
394 && arg != medium_ident1
395 && arg != medium_ident2
396 && arg != large_ident1
397 && arg != large_ident2)
398 {
399 warning (OPT_Wattributes, "invalid argument of %qs attribute",
400 IDENTIFIER_POINTER (name));
401 *no_add_attrs = true;
402 }
403
404 return NULL_TREE;
405 }
406 \f
407 /* Encode section information of DECL, which is either a VAR_DECL,
408 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
409
410 For the M32R we want to record:
411
412 - whether the object lives in .sdata/.sbss.
413 - what code model should be used to access the object
414 */
415
416 static void
417 m32r_encode_section_info (tree decl, rtx rtl, int first)
418 {
419 int extra_flags = 0;
420 tree model_attr;
421 enum m32r_model model;
422
423 default_encode_section_info (decl, rtl, first);
424
425 if (!DECL_P (decl))
426 return;
427
428 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
429 if (model_attr)
430 {
431 tree id;
432
433 init_idents ();
434
435 id = TREE_VALUE (TREE_VALUE (model_attr));
436
437 if (id == small_ident1 || id == small_ident2)
438 model = M32R_MODEL_SMALL;
439 else if (id == medium_ident1 || id == medium_ident2)
440 model = M32R_MODEL_MEDIUM;
441 else if (id == large_ident1 || id == large_ident2)
442 model = M32R_MODEL_LARGE;
443 else
444 gcc_unreachable (); /* shouldn't happen */
445 }
446 else
447 {
448 if (TARGET_MODEL_SMALL)
449 model = M32R_MODEL_SMALL;
450 else if (TARGET_MODEL_MEDIUM)
451 model = M32R_MODEL_MEDIUM;
452 else if (TARGET_MODEL_LARGE)
453 model = M32R_MODEL_LARGE;
454 else
455 gcc_unreachable (); /* shouldn't happen */
456 }
457 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
458
459 if (extra_flags)
460 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
461 }
462
463 /* Only mark the object as being small data area addressable if
464 it hasn't been explicitly marked with a code model.
465
466 The user can explicitly put an object in the small data area with the
467 section attribute. If the object is in sdata/sbss and marked with a
468 code model do both [put the object in .sdata and mark it as being
469 addressed with a specific code model - don't mark it as being addressed
470 with an SDA reloc though]. This is ok and might be useful at times. If
471 the object doesn't fit the linker will give an error. */
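
/* Illustrative examples (added, not from the original sources): an object
   can be placed in the small data area explicitly with the generic section
   attribute, e.g.

     int counter __attribute__ ((section (".sdata"))) = 0;

   while a small writable object such as `static int flag;' lands there
   implicitly when -msdata=none is not in effect and its size does not
   exceed g_switch_value (the -G value, SDATA_DEFAULT_SIZE by default).  */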
472
473 static bool
474 m32r_in_small_data_p (const_tree decl)
475 {
476 const char *section;
477
478 if (TREE_CODE (decl) != VAR_DECL)
479 return false;
480
481 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
482 return false;
483
484 section = DECL_SECTION_NAME (decl);
485 if (section)
486 {
487 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
488 return true;
489 }
490 else
491 {
492 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
493 {
494 int size = int_size_in_bytes (TREE_TYPE (decl));
495
496 if (size > 0 && size <= g_switch_value)
497 return true;
498 }
499 }
500
501 return false;
502 }
503
504 /* Do anything needed before RTL is emitted for each function. */
505
506 void
507 m32r_init_expanders (void)
508 {
509 /* ??? At one point there was code here. The function is left in
510 to make it easy to experiment. */
511 }
512 \f
513 int
514 call_operand (rtx op, machine_mode mode)
515 {
516 if (!MEM_P (op))
517 return 0;
518 op = XEXP (op, 0);
519 return call_address_operand (op, mode);
520 }
521
522 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
523
524 int
525 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
526 {
527 if (! TARGET_SDATA_USE)
528 return 0;
529
530 if (GET_CODE (op) == SYMBOL_REF)
531 return SYMBOL_REF_SMALL_P (op);
532
533 if (GET_CODE (op) == CONST
534 && GET_CODE (XEXP (op, 0)) == PLUS
535 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
536 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
537 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
538
539 return 0;
540 }
541
542 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
543
544 int
545 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
546 {
547 rtx sym;
548
549 if (flag_pic)
550 return 0;
551
552 if (GET_CODE (op) == LABEL_REF)
553 return TARGET_ADDR24;
554
555 if (GET_CODE (op) == SYMBOL_REF)
556 sym = op;
557 else if (GET_CODE (op) == CONST
558 && GET_CODE (XEXP (op, 0)) == PLUS
559 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
560 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
561 sym = XEXP (XEXP (op, 0), 0);
562 else
563 return 0;
564
565 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
566 return 1;
567
568 if (TARGET_ADDR24
569 && (CONSTANT_POOL_ADDRESS_P (sym)
570 || LIT_NAME_P (XSTR (sym, 0))))
571 return 1;
572
573 return 0;
574 }
575
576 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
577
578 int
579 addr32_operand (rtx op, machine_mode mode)
580 {
581 rtx sym;
582
583 if (GET_CODE (op) == LABEL_REF)
584 return TARGET_ADDR32;
585
586 if (GET_CODE (op) == SYMBOL_REF)
587 sym = op;
588 else if (GET_CODE (op) == CONST
589 && GET_CODE (XEXP (op, 0)) == PLUS
590 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
591 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
592 && ! flag_pic)
593 sym = XEXP (XEXP (op, 0), 0);
594 else
595 return 0;
596
597 return (! addr24_operand (sym, mode)
598 && ! small_data_operand (sym, mode));
599 }
600
601 /* Return 1 if OP is a function that can be called with the `bl' insn. */
602
603 int
604 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
605 {
606 if (flag_pic)
607 return 1;
608
609 if (GET_CODE (op) == SYMBOL_REF)
610 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
611
612 return TARGET_CALL26;
613 }
614
615 /* Return 1 if OP is a DImode const we want to handle inline.
616 This must match the code in the movdi pattern.
617 It is used by the 'G' constraint. */
618
619 int
620 easy_di_const (rtx op)
621 {
622 rtx high_rtx, low_rtx;
623 HOST_WIDE_INT high, low;
624
625 split_double (op, &high_rtx, &low_rtx);
626 high = INTVAL (high_rtx);
627 low = INTVAL (low_rtx);
628 /* Pick constants loadable with 2 16-bit `ldi' insns. */
629 if (high >= -128 && high <= 127
630 && low >= -128 && low <= 127)
631 return 1;
632 return 0;
633 }
634
635 /* Return 1 if OP is a DFmode const we want to handle inline.
636 This must match the code in the movdf pattern.
637 It is used by the 'H' constraint. */
638
639 int
640 easy_df_const (rtx op)
641 {
642 REAL_VALUE_TYPE r;
643 long l[2];
644
645 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
646 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
647 if (l[0] == 0 && l[1] == 0)
648 return 1;
649 if ((l[0] & 0xffff) == 0 && l[1] == 0)
650 return 1;
651 return 0;
652 }
653
654 /* Return 1 if OP is (mem (reg ...)).
655 This is used in insn length calcs. */
656
657 int
658 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
659 {
660 return MEM_P (op) && REG_P (XEXP (op, 0));
661 }
662
663 /* Return nonzero if TYPE must be passed by indirect reference. */
664
665 static bool
666 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
667 machine_mode mode, const_tree type,
668 bool named ATTRIBUTE_UNUSED)
669 {
670 int size;
671
672 if (type)
673 size = int_size_in_bytes (type);
674 else
675 size = GET_MODE_SIZE (mode);
676
677 return (size < 0 || size > 8);
678 }
679 \f
680 /* Comparisons. */
681
682 /* X and Y are two things to compare using CODE. Emit the compare insn and
683 return the rtx for compare [arg0 of the if_then_else].
684 If need_compare is true then the comparison insn must be generated, rather
685 than being subsumed into the following branch instruction. */
686
687 rtx
688 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
689 {
690 enum rtx_code compare_code;
691 enum rtx_code branch_code;
692 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
693 int must_swap = 0;
694
695 switch (code)
696 {
697 case EQ: compare_code = EQ; branch_code = NE; break;
698 case NE: compare_code = EQ; branch_code = EQ; break;
699 case LT: compare_code = LT; branch_code = NE; break;
700 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
701 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
702 case GE: compare_code = LT; branch_code = EQ; break;
703 case LTU: compare_code = LTU; branch_code = NE; break;
704 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
705 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
706 case GEU: compare_code = LTU; branch_code = EQ; break;
707
708 default:
709 gcc_unreachable ();
710 }
711
712 if (need_compare)
713 {
714 switch (compare_code)
715 {
716 case EQ:
717 if (satisfies_constraint_P (y) /* Reg equal to small const. */
718 && y != const0_rtx)
719 {
720 rtx tmp = gen_reg_rtx (SImode);
721
722 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
723 x = tmp;
724 y = const0_rtx;
725 }
726 else if (CONSTANT_P (y)) /* Reg equal to const. */
727 {
728 rtx tmp = force_reg (GET_MODE (x), y);
729 y = tmp;
730 }
731
732 if (register_operand (y, SImode) /* Reg equal to reg. */
733 || y == const0_rtx) /* Reg equal to zero. */
734 {
735 emit_insn (gen_cmp_eqsi_insn (x, y));
736
737 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
738 }
739 break;
740
741 case LT:
742 if (register_operand (y, SImode)
743 || satisfies_constraint_P (y))
744 {
745 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
746
747 switch (code)
748 {
749 case LT:
750 emit_insn (gen_cmp_ltsi_insn (x, y));
751 code = EQ;
752 break;
753 case LE:
754 if (y == const0_rtx)
755 tmp = const1_rtx;
756 else
757 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
758 emit_insn (gen_cmp_ltsi_insn (x, tmp));
759 code = EQ;
760 break;
761 case GT:
762 if (CONST_INT_P (y))
763 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
764 else
765 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
766 emit_insn (gen_cmp_ltsi_insn (x, tmp));
767 code = NE;
768 break;
769 case GE:
770 emit_insn (gen_cmp_ltsi_insn (x, y));
771 code = NE;
772 break;
773 default:
774 gcc_unreachable ();
775 }
776
777 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
778 }
779 break;
780
781 case LTU:
782 if (register_operand (y, SImode)
783 || satisfies_constraint_P (y))
784 {
785 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
786
787 switch (code)
788 {
789 case LTU:
790 emit_insn (gen_cmp_ltusi_insn (x, y));
791 code = EQ;
792 break;
793 case LEU:
794 if (y == const0_rtx)
795 tmp = const1_rtx;
796 else
797 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
798 emit_insn (gen_cmp_ltusi_insn (x, tmp));
799 code = EQ;
800 break;
801 case GTU:
802 if (CONST_INT_P (y))
803 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
804 else
805 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
806 emit_insn (gen_cmp_ltusi_insn (x, tmp));
807 code = NE;
808 break;
809 case GEU:
810 emit_insn (gen_cmp_ltusi_insn (x, y));
811 code = NE;
812 break;
813 default:
814 gcc_unreachable ();
815 }
816
817 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
818 }
819 break;
820
821 default:
822 gcc_unreachable ();
823 }
824 }
825 else
826 {
827 /* Reg/reg equal comparison. */
828 if (compare_code == EQ
829 && register_operand (y, SImode))
830 return gen_rtx_fmt_ee (code, CCmode, x, y);
831
832 /* Reg/zero signed comparison. */
833 if ((compare_code == EQ || compare_code == LT)
834 && y == const0_rtx)
835 return gen_rtx_fmt_ee (code, CCmode, x, y);
836
837 /* Reg/smallconst equal comparison. */
838 if (compare_code == EQ
839 && satisfies_constraint_P (y))
840 {
841 rtx tmp = gen_reg_rtx (SImode);
842
843 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
844 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
845 }
846
847 /* Reg/const equal comparison. */
848 if (compare_code == EQ
849 && CONSTANT_P (y))
850 {
851 rtx tmp = force_reg (GET_MODE (x), y);
852
853 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
854 }
855 }
856
857 if (CONSTANT_P (y))
858 {
859 if (must_swap)
860 y = force_reg (GET_MODE (x), y);
861 else
862 {
863 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
864
865 if (! ok_const)
866 y = force_reg (GET_MODE (x), y);
867 }
868 }
869
870 switch (compare_code)
871 {
872 case EQ :
873 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
874 break;
875 case LT :
876 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
877 break;
878 case LTU :
879 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
880 break;
881
882 default:
883 gcc_unreachable ();
884 }
885
886 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
887 }
888
889 bool
890 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
891 {
892 machine_mode mode = GET_MODE (op0);
893
894 gcc_assert (mode == SImode);
895 switch (code)
896 {
897 case EQ:
898 if (!register_operand (op1, mode))
899 op1 = force_reg (mode, op1);
900
901 if (TARGET_M32RX || TARGET_M32R2)
902 {
903 if (!reg_or_zero_operand (op2, mode))
904 op2 = force_reg (mode, op2);
905
906 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
907 return true;
908 }
909 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
910 {
911 emit_insn (gen_seq_zero_insn (op0, op1));
912 return true;
913 }
914
915 if (!reg_or_eq_int16_operand (op2, mode))
916 op2 = force_reg (mode, op2);
917
918 emit_insn (gen_seq_insn (op0, op1, op2));
919 return true;
920
921 case NE:
922 if (!CONST_INT_P (op2)
923 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
924 {
925 rtx reg;
926
927 if (reload_completed || reload_in_progress)
928 return false;
929
930 reg = gen_reg_rtx (SImode);
931 emit_insn (gen_xorsi3 (reg, op1, op2));
932 op1 = reg;
933
934 if (!register_operand (op1, mode))
935 op1 = force_reg (mode, op1);
936
937 emit_insn (gen_sne_zero_insn (op0, op1));
938 return true;
939 }
940 return false;
941
942 case LT:
943 case GT:
944 if (code == GT)
945 {
946 rtx tmp = op2;
947 op2 = op1;
948 op1 = tmp;
949 code = LT;
950 }
951
952 if (!register_operand (op1, mode))
953 op1 = force_reg (mode, op1);
954
955 if (!reg_or_int16_operand (op2, mode))
956 op2 = force_reg (mode, op2);
957
958 emit_insn (gen_slt_insn (op0, op1, op2));
959 return true;
960
961 case LTU:
962 case GTU:
963 if (code == GTU)
964 {
965 rtx tmp = op2;
966 op2 = op1;
967 op1 = tmp;
968 code = LTU;
969 }
970
971 if (!register_operand (op1, mode))
972 op1 = force_reg (mode, op1);
973
974 if (!reg_or_int16_operand (op2, mode))
975 op2 = force_reg (mode, op2);
976
977 emit_insn (gen_sltu_insn (op0, op1, op2));
978 return true;
979
980 case GE:
981 case GEU:
982 if (!register_operand (op1, mode))
983 op1 = force_reg (mode, op1);
984
985 if (!reg_or_int16_operand (op2, mode))
986 op2 = force_reg (mode, op2);
987
988 if (code == GE)
989 emit_insn (gen_sge_insn (op0, op1, op2));
990 else
991 emit_insn (gen_sgeu_insn (op0, op1, op2));
992 return true;
993
994 case LE:
995 case LEU:
996 if (!register_operand (op1, mode))
997 op1 = force_reg (mode, op1);
998
999 if (CONST_INT_P (op2))
1000 {
1001 HOST_WIDE_INT value = INTVAL (op2);
1002 if (value >= 2147483647)
1003 {
1004 emit_move_insn (op0, const1_rtx);
1005 return true;
1006 }
1007
1008 op2 = GEN_INT (value + 1);
1009 if (value < -32768 || value >= 32767)
1010 op2 = force_reg (mode, op2);
1011
1012 if (code == LEU)
1013 emit_insn (gen_sltu_insn (op0, op1, op2));
1014 else
1015 emit_insn (gen_slt_insn (op0, op1, op2));
1016 return true;
1017 }
1018
1019 if (!register_operand (op2, mode))
1020 op2 = force_reg (mode, op2);
1021
1022 if (code == LEU)
1023 emit_insn (gen_sleu_insn (op0, op1, op2));
1024 else
1025 emit_insn (gen_sle_insn (op0, op1, op2));
1026 return true;
1027
1028 default:
1029 gcc_unreachable ();
1030 }
1031 }
1032
1033 \f
1034 /* Split a 2 word move (DI or DF) into component parts. */
1035
1036 rtx
1037 gen_split_move_double (rtx operands[])
1038 {
1039 machine_mode mode = GET_MODE (operands[0]);
1040 rtx dest = operands[0];
1041 rtx src = operands[1];
1042 rtx val;
1043
1044 /* We might have (SUBREG (MEM)) here, so just get rid of the
1045 subregs to make this code simpler. It is safe to call
1046 alter_subreg any time after reload. */
1047 if (GET_CODE (dest) == SUBREG)
1048 alter_subreg (&dest, true);
1049 if (GET_CODE (src) == SUBREG)
1050 alter_subreg (&src, true);
1051
1052 start_sequence ();
1053 if (REG_P (dest))
1054 {
1055 int dregno = REGNO (dest);
1056
1057 /* Reg = reg. */
1058 if (REG_P (src))
1059 {
1060 int sregno = REGNO (src);
1061
1062 int reverse = (dregno == sregno + 1);
1063
1064 /* We normally copy the low-numbered register first. However, if
1065 the first register of operand 0 is the same as the second register
1066 of operand 1, we must copy in the opposite order. */
1067 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1068 operand_subword (src, reverse, TRUE, mode)));
1069
1070 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1071 operand_subword (src, !reverse, TRUE, mode)));
1072 }
1073
1074 /* Reg = constant. */
1075 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1076 {
1077 rtx words[2];
1078 split_double (src, &words[0], &words[1]);
1079 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1080 words[0]));
1081
1082 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1083 words[1]));
1084 }
1085
1086 /* Reg = mem. */
1087 else if (MEM_P (src))
1088 {
1089 /* If the high-address word is used in the address, we must load it
1090 last. Otherwise, load it first. */
1091 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1092
1093 /* We used to optimize loads from single registers as
1094
1095 ld r1,r3+; ld r2,r3
1096
1097 if r3 were not used subsequently. However, the REG_NOTES aren't
1098 propagated correctly by the reload phase, and it can cause bad
1099 code to be generated. We could still try:
1100
1101 ld r1,r3+; ld r2,r3; addi r3,-4
1102
1103 which saves 2 bytes and doesn't force longword alignment. */
1104 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1105 adjust_address (src, SImode,
1106 reverse * UNITS_PER_WORD)));
1107
1108 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1109 adjust_address (src, SImode,
1110 !reverse * UNITS_PER_WORD)));
1111 }
1112 else
1113 gcc_unreachable ();
1114 }
1115
1116 /* Mem = reg. */
1117 /* We used to optimize stores through a single register as
1118
1119 st r1,r3; st r2,+r3
1120
1121 if r3 were not used subsequently. However, the REG_NOTES aren't
1122 propagated correctly by the reload phase, and it can cause bad
1123 code to be generated. We could still try:
1124
1125 st r1,r3; st r2,+r3; addi r3,-4
1126
1127 which saves 2 bytes and doesn't force longword alignment. */
1128 else if (MEM_P (dest) && REG_P (src))
1129 {
1130 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1131 operand_subword (src, 0, TRUE, mode)));
1132
1133 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1134 operand_subword (src, 1, TRUE, mode)));
1135 }
1136
1137 else
1138 gcc_unreachable ();
1139
1140 val = get_insns ();
1141 end_sequence ();
1142 return val;
1143 }
1144
1145 \f
1146 static int
1147 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1148 tree type, bool named ATTRIBUTE_UNUSED)
1149 {
1150 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1151
1152 int words;
1153 unsigned int size =
1154 (((mode == BLKmode && type)
1155 ? (unsigned int) int_size_in_bytes (type)
1156 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1157 / UNITS_PER_WORD;
1158
1159 if (*cum >= M32R_MAX_PARM_REGS)
1160 words = 0;
1161 else if (*cum + size > M32R_MAX_PARM_REGS)
1162 words = (*cum + size) - M32R_MAX_PARM_REGS;
1163 else
1164 words = 0;
1165
1166 return words * UNITS_PER_WORD;
1167 }
1168
1169 /* The ROUND_ADVANCE* macros are local to this file. */
1170 /* Round SIZE up to a word boundary. */
1171 #define ROUND_ADVANCE(SIZE) \
1172 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
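
/* A worked example (assuming the usual 4-byte UNITS_PER_WORD):
   ROUND_ADVANCE (1) and ROUND_ADVANCE (4) are both 1, while
   ROUND_ADVANCE (5) is 2.  */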
1173
1174 /* Round arg MODE/TYPE up to the next word boundary. */
1175 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1176 ((MODE) == BLKmode \
1177 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1178 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1179
1180 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1181 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1182
1183 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1184 a reg. This includes arguments that have to be passed by reference as the
1185 pointer to them is passed in a reg if one is available (and that is what
1186 we're given).
1187 This macro is only used in this file. */
1188 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1189 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1190
1191 /* Determine where to put an argument to a function.
1192 Value is zero to push the argument on the stack,
1193 or a hard register in which to store the argument.
1194
1195 MODE is the argument's machine mode.
1196 TYPE is the data type of the argument (as a tree).
1197 This is null for libcalls where that information may
1198 not be available.
1199 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1200 the preceding args and about the function being called.
1201 NAMED is nonzero if this argument is a named parameter
1202 (otherwise it is an extra parameter matching an ellipsis). */
1203 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1204 and the rest are pushed. */
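
/* Illustration (added; assumes M32R_MAX_PARM_REGS is 4, i.e. r0-r3): for

     int f (int a, int b, int c, int d, int e);

   a..d are passed in r0..r3 and e goes on the stack.  A doubleword
   argument that begins in r3 is split: its first word travels in r3 and
   the remaining word on the stack (m32r_arg_partial_bytes reports the
   4 spilled bytes).  */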
1205
1206 static rtx
1207 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1208 const_tree type ATTRIBUTE_UNUSED,
1209 bool named ATTRIBUTE_UNUSED)
1210 {
1211 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1212
1213 return (PASS_IN_REG_P (*cum, mode, type)
1214 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1215 : NULL_RTX);
1216 }
1217
1218 /* Update the data in CUM to advance over an argument
1219 of mode MODE and data type TYPE.
1220 (TYPE is null for libcalls where that information may not be available.) */
1221
1222 static void
1223 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1224 const_tree type, bool named ATTRIBUTE_UNUSED)
1225 {
1226 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1227
1228 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1229 + ROUND_ADVANCE_ARG (mode, type));
1230 }
1231
1232 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1233
1234 static bool
1235 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1236 {
1237 cumulative_args_t dummy = pack_cumulative_args (NULL);
1238
1239 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1240 }
1241
1242 /* Worker function for TARGET_FUNCTION_VALUE. */
1243
1244 static rtx
1245 m32r_function_value (const_tree valtype,
1246 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1247 bool outgoing ATTRIBUTE_UNUSED)
1248 {
1249 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1250 }
1251
1252 /* Worker function for TARGET_LIBCALL_VALUE. */
1253
1254 static rtx
1255 m32r_libcall_value (machine_mode mode,
1256 const_rtx fun ATTRIBUTE_UNUSED)
1257 {
1258 return gen_rtx_REG (mode, 0);
1259 }
1260
1261 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1262
1263 ??? What about r1 in DI/DF values. */
1264
1265 static bool
1266 m32r_function_value_regno_p (const unsigned int regno)
1267 {
1268 return (regno == 0);
1269 }
1270
1271 /* Do any needed setup for a variadic function. For the M32R, we must
1272 create a register parameter block, and then copy any anonymous arguments
1273 in registers to memory.
1274
1275 CUM has not been updated for the last named argument which has type TYPE
1276 and mode MODE, and we rely on this fact. */
1277
1278 static void
1279 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1280 tree type, int *pretend_size, int no_rtl)
1281 {
1282 int first_anon_arg;
1283
1284 if (no_rtl)
1285 return;
1286
1287 /* All BLKmode values are passed by reference. */
1288 gcc_assert (mode != BLKmode);
1289
1290 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1291 + ROUND_ADVANCE_ARG (mode, type));
1292
1293 if (first_anon_arg < M32R_MAX_PARM_REGS)
1294 {
1295 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1296 int first_reg_offset = first_anon_arg;
1297 /* Size in words to "pretend" allocate. */
1298 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1299 rtx regblock;
1300
1301 regblock = gen_frame_mem (BLKmode,
1302 plus_constant (Pmode, arg_pointer_rtx,
1303 FIRST_PARM_OFFSET (0)));
1304 set_mem_alias_set (regblock, get_varargs_alias_set ());
1305 move_block_from_reg (first_reg_offset, regblock, size);
1306
1307 *pretend_size = (size * UNITS_PER_WORD);
1308 }
1309 }
1310
1311 \f
1312 /* Return true if INSN is a real instruction (not a note, debug insn, USE, or CLOBBER). */
1313
1314 static int
1315 m32r_is_insn (rtx insn)
1316 {
1317 return (NONDEBUG_INSN_P (insn)
1318 && GET_CODE (PATTERN (insn)) != USE
1319 && GET_CODE (PATTERN (insn)) != CLOBBER);
1320 }
1321
1322 /* Increase the priority of long instructions so that the
1323 short instructions are scheduled ahead of the long ones. */
1324
1325 static int
1326 m32r_adjust_priority (rtx_insn *insn, int priority)
1327 {
1328 if (m32r_is_insn (insn)
1329 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1330 priority <<= 3;
1331
1332 return priority;
1333 }
1334
1335 \f
1336 /* Indicate how many instructions can be issued at the same time.
1337 This is sort of a lie. The m32r can issue only 1 long insn at
1338 once, but it can issue 2 short insns. The default therefore is
1339 set at 2, but this can be overridden by the command line option
1340 -missue-rate=1. */
1341
1342 static int
1343 m32r_issue_rate (void)
1344 {
1345 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1346 }
1347 \f
1348 /* Cost functions. */
1349 /* Memory is 3 times as expensive as registers.
1350 ??? Is that the right way to look at it? */
1351
1352 static int
1353 m32r_memory_move_cost (machine_mode mode,
1354 reg_class_t rclass ATTRIBUTE_UNUSED,
1355 bool in ATTRIBUTE_UNUSED)
1356 {
1357 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1358 return 6;
1359 else
1360 return 12;
1361 }
1362
1363 static bool
1364 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1365 int opno ATTRIBUTE_UNUSED, int *total,
1366 bool speed ATTRIBUTE_UNUSED)
1367 {
1368 switch (code)
1369 {
1370 /* Small integers are as cheap as registers. 4 byte values can be
1371 fetched as immediate constants - let's give that the cost of an
1372 extra insn. */
1373 case CONST_INT:
1374 if (INT16_P (INTVAL (x)))
1375 {
1376 *total = 0;
1377 return true;
1378 }
1379 /* FALLTHRU */
1380
1381 case CONST:
1382 case LABEL_REF:
1383 case SYMBOL_REF:
1384 *total = COSTS_N_INSNS (1);
1385 return true;
1386
1387 case CONST_DOUBLE:
1388 {
1389 rtx high, low;
1390
1391 split_double (x, &high, &low);
1392 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1393 + !INT16_P (INTVAL (low)));
1394 return true;
1395 }
1396
1397 case MULT:
1398 *total = COSTS_N_INSNS (3);
1399 return true;
1400
1401 case DIV:
1402 case UDIV:
1403 case MOD:
1404 case UMOD:
1405 *total = COSTS_N_INSNS (10);
1406 return true;
1407
1408 default:
1409 return false;
1410 }
1411 }
1412 \f
1413 /* Type of function DECL.
1414
1415 The result is cached. To reset the cache at the end of a function,
1416 call with DECL = NULL_TREE. */
1417
1418 enum m32r_function_type
1419 m32r_compute_function_type (tree decl)
1420 {
1421 /* Cached value. */
1422 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1423 /* Last function we were called for. */
1424 static tree last_fn = NULL_TREE;
1425
1426 /* Resetting the cached value? */
1427 if (decl == NULL_TREE)
1428 {
1429 fn_type = M32R_FUNCTION_UNKNOWN;
1430 last_fn = NULL_TREE;
1431 return fn_type;
1432 }
1433
1434 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1435 return fn_type;
1436
1437 /* Compute function type. */
1438 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1439 ? M32R_FUNCTION_INTERRUPT
1440 : M32R_FUNCTION_NORMAL);
1441
1442 last_fn = decl;
1443 return fn_type;
1444 }
1445 \f/* Function prologue/epilogue handlers. */
1446
1447 /* M32R stack frames look like:
1448
1449 Before call After call
1450 +-----------------------+ +-----------------------+
1451 | | | |
1452 high | local variables, | | local variables, |
1453 mem | reg save area, etc. | | reg save area, etc. |
1454 | | | |
1455 +-----------------------+ +-----------------------+
1456 | | | |
1457 | arguments on stack. | | arguments on stack. |
1458 | | | |
1459 SP+0->+-----------------------+ +-----------------------+
1460 | reg parm save area, |
1461 | only created for |
1462 | variable argument |
1463 | functions |
1464 +-----------------------+
1465 | previous frame ptr |
1466 +-----------------------+
1467 | |
1468 | register save area |
1469 | |
1470 +-----------------------+
1471 | return address |
1472 +-----------------------+
1473 | |
1474 | local variables |
1475 | |
1476 +-----------------------+
1477 | |
1478 | alloca allocations |
1479 | |
1480 +-----------------------+
1481 | |
1482 low | arguments on stack |
1483 memory | |
1484 SP+0->+-----------------------+
1485
1486 Notes:
1487 1) The "reg parm save area" does not exist for non variable argument fns.
1488 2) The "reg parm save area" can be eliminated completely if we saved regs
1489 containing anonymous args separately but that complicates things too
1490 much (so it's not done).
1491 3) The return address is saved after the register save area so as to have as
1492 many insns as possible between the restoration of `lr' and the `jmp lr'. */
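
/* A minimal sketch (illustrative, not actual compiler output) of the code
   generated around the prologue/epilogue below for a non-varargs function
   with 8 bytes of locals, no frame pointer and only `lr' to save:

       st lr,@-sp     ; register save area
       addi sp,#-8    ; allocate local variables
       ...
       addi sp,#8     ; point sp back at the register save area
       ld lr,@sp+     ; restore the return address
       jmp lr                                                              */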
1493
1494 /* Structure to be filled in by m32r_compute_frame_size with register
1495 save masks, and offsets for the current function. */
1496 struct m32r_frame_info
1497 {
1498 unsigned int total_size; /* # bytes that the entire frame takes up. */
1499 unsigned int extra_size; /* # bytes of extra stuff. */
1500 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1501 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1502 unsigned int reg_size; /* # bytes needed to store regs. */
1503 unsigned int var_size; /* # bytes that variables take up. */
1504 unsigned int gmask; /* Mask of saved gp registers. */
1505 unsigned int save_fp; /* Nonzero if fp must be saved. */
1506 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1507 int initialized; /* Nonzero if frame size already calculated. */
1508 };
1509
1510 /* Current frame information calculated by m32r_compute_frame_size. */
1511 static struct m32r_frame_info current_frame_info;
1512
1513 /* Zero structure to initialize current_frame_info. */
1514 static struct m32r_frame_info zero_frame_info;
1515
1516 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1517 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1518
1519 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1520 The return address and frame pointer are treated separately.
1521 Don't consider them here. */
1522 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1523 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1524 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1525
1526 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1527 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1528
1529 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1530 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1531
1532 /* Return the bytes needed to compute the frame pointer from the current
1533 stack pointer.
1534
1535 SIZE is the size needed for local variables. */
1536
1537 unsigned int
1538 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1539 {
1540 unsigned int regno;
1541 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1542 unsigned int reg_size;
1543 unsigned int gmask;
1544 enum m32r_function_type fn_type;
1545 int interrupt_p;
1546 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1547 | crtl->profile);
1548
1549 var_size = M32R_STACK_ALIGN (size);
1550 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1551 pretend_size = crtl->args.pretend_args_size;
1552 extra_size = FIRST_PARM_OFFSET (0);
1553 total_size = extra_size + pretend_size + args_size + var_size;
1554 reg_size = 0;
1555 gmask = 0;
1556
1557 /* See if this is an interrupt handler. Call used registers must be saved
1558 for them too. */
1559 fn_type = m32r_compute_function_type (current_function_decl);
1560 interrupt_p = M32R_INTERRUPT_P (fn_type);
1561
1562 /* Calculate space needed for registers. */
1563 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1564 {
1565 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1566 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1567 {
1568 reg_size += UNITS_PER_WORD;
1569 gmask |= 1 << regno;
1570 }
1571 }
1572
1573 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1574 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1575
1576 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1577 * UNITS_PER_WORD);
1578 total_size += reg_size;
1579
1580 /* ??? Not sure this is necessary, and I don't think the epilogue
1581 handler will do the right thing if this changes total_size. */
1582 total_size = M32R_STACK_ALIGN (total_size);
1583
1584 /* frame_size = total_size - (pretend_size + reg_size); */
1585
1586 /* Save computed information. */
1587 current_frame_info.total_size = total_size;
1588 current_frame_info.extra_size = extra_size;
1589 current_frame_info.pretend_size = pretend_size;
1590 current_frame_info.var_size = var_size;
1591 current_frame_info.args_size = args_size;
1592 current_frame_info.reg_size = reg_size;
1593 current_frame_info.gmask = gmask;
1594 current_frame_info.initialized = reload_completed;
1595
1596 /* Ok, we're done. */
1597 return total_size;
1598 }
1599
1600 /* Worker function for TARGET_CAN_ELIMINATE. */
1601
1602 bool
1603 m32r_can_eliminate (const int from, const int to)
1604 {
1605 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1606 ? ! frame_pointer_needed
1607 : true);
1608 }
1609
1610 \f
1611 /* The table we use to reference PIC data. */
1612 static rtx global_offset_table;
1613
1614 static void
1615 m32r_reload_lr (rtx sp, int size)
1616 {
1617 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1618
1619 if (size == 0)
1620 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1621 else if (size < 32768)
1622 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1623 gen_rtx_PLUS (Pmode, sp,
1624 GEN_INT (size)))));
1625 else
1626 {
1627 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1628
1629 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1630 emit_insn (gen_addsi3 (tmp, tmp, sp));
1631 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1632 }
1633
1634 emit_use (lr);
1635 }
1636
1637 void
1638 m32r_load_pic_register (void)
1639 {
1640 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1641 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1642 GEN_INT (TARGET_MODEL_SMALL)));
1643
1644 /* Need to emit this whether or not we obey regdecls,
1645 since setjmp/longjmp can cause life info to screw up. */
1646 emit_use (pic_offset_table_rtx);
1647 }
1648
1649 /* Expand the m32r prologue as a series of insns. */
1650
1651 void
1652 m32r_expand_prologue (void)
1653 {
1654 int regno;
1655 int frame_size;
1656 unsigned int gmask;
1657 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1658 | crtl->profile);
1659
1660 if (! current_frame_info.initialized)
1661 m32r_compute_frame_size (get_frame_size ());
1662
1663 if (flag_stack_usage_info)
1664 current_function_static_stack_size = current_frame_info.total_size;
1665
1666 gmask = current_frame_info.gmask;
1667
1668 /* These cases shouldn't happen. Catch them now. */
1669 gcc_assert (current_frame_info.total_size || !gmask);
1670
1671 /* Allocate space for register arguments if this is a variadic function. */
1672 if (current_frame_info.pretend_size != 0)
1673 {
1674 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1675 the wrong result on a 64-bit host. */
1676 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1677 emit_insn (gen_addsi3 (stack_pointer_rtx,
1678 stack_pointer_rtx,
1679 GEN_INT (-pretend_size)));
1680 }
1681
1682 /* Save any registers we need to and set up fp. */
1683 if (current_frame_info.save_fp)
1684 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1685
1686 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1687
1688 /* Save any needed call-saved regs (and call-used if this is an
1689 interrupt handler). */
1690 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1691 {
1692 if ((gmask & (1 << regno)) != 0)
1693 emit_insn (gen_movsi_push (stack_pointer_rtx,
1694 gen_rtx_REG (Pmode, regno)));
1695 }
1696
1697 if (current_frame_info.save_lr)
1698 emit_insn (gen_movsi_push (stack_pointer_rtx,
1699 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1700
1701 /* Allocate the stack frame. */
1702 frame_size = (current_frame_info.total_size
1703 - (current_frame_info.pretend_size
1704 + current_frame_info.reg_size));
1705
1706 if (frame_size == 0)
1707 ; /* Nothing to do. */
1708 else if (frame_size <= 32768)
1709 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1710 GEN_INT (-frame_size)));
1711 else
1712 {
1713 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1714
1715 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1716 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1717 }
1718
1719 if (frame_pointer_needed)
1720 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1721
1722 if (crtl->profile)
1723 /* Push lr for mcount (from_pc, x). */
1724 emit_insn (gen_movsi_push (stack_pointer_rtx,
1725 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1726
1727 if (pic_reg_used)
1728 {
1729 m32r_load_pic_register ();
1730 m32r_reload_lr (stack_pointer_rtx,
1731 (crtl->profile ? 0 : frame_size));
1732 }
1733
1734 if (crtl->profile && !pic_reg_used)
1735 emit_insn (gen_blockage ());
1736 }
1737
1738 \f
1739 /* Set up the stack and frame pointer (if desired) for the function.
1740 Note, if this is changed, you need to mirror the changes in
1741 m32r_compute_frame_size which calculates the prolog size. */
1742
1743 static void
1744 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1745 {
1746 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1747
1748 /* If this is an interrupt handler, mark it as such. */
1749 if (M32R_INTERRUPT_P (fn_type))
1750 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1751
1752 if (! current_frame_info.initialized)
1753 m32r_compute_frame_size (size);
1754
1755 /* This is only for the human reader. */
1756 fprintf (file,
1757 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1758 ASM_COMMENT_START,
1759 current_frame_info.var_size,
1760 current_frame_info.reg_size / 4,
1761 current_frame_info.args_size,
1762 current_frame_info.extra_size);
1763 }
1764 \f
1765 /* Output RTL to pop register REGNO from the stack. */
1766
1767 static void
1768 pop (int regno)
1769 {
1770 rtx x;
1771
1772 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1773 stack_pointer_rtx));
1774 add_reg_note (x, REG_INC, stack_pointer_rtx);
1775 }
1776
1777 /* Expand the m32r epilogue as a series of insns. */
1778
1779 void
1780 m32r_expand_epilogue (void)
1781 {
1782 int regno;
1783 int noepilogue = FALSE;
1784 int total_size;
1785
1786 gcc_assert (current_frame_info.initialized);
1787 total_size = current_frame_info.total_size;
1788
1789 if (total_size == 0)
1790 {
1791 rtx insn = get_last_insn ();
1792
1793 /* If the last insn was a BARRIER, we don't have to write any code
1794 because a jump (aka return) was put there. */
1795 if (insn && NOTE_P (insn))
1796 insn = prev_nonnote_insn (insn);
1797 if (insn && BARRIER_P (insn))
1798 noepilogue = TRUE;
1799 }
1800
1801 if (!noepilogue)
1802 {
1803 unsigned int var_size = current_frame_info.var_size;
1804 unsigned int args_size = current_frame_info.args_size;
1805 unsigned int gmask = current_frame_info.gmask;
1806 int can_trust_sp_p = !cfun->calls_alloca;
1807
1808 if (flag_exceptions)
1809 emit_insn (gen_blockage ());
1810
1811 /* The first thing to do is point the sp at the bottom of the register
1812 save area. */
1813 if (can_trust_sp_p)
1814 {
1815 unsigned int reg_offset = var_size + args_size;
1816
1817 if (reg_offset == 0)
1818 ; /* Nothing to do. */
1819 else if (reg_offset < 32768)
1820 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1821 GEN_INT (reg_offset)));
1822 else
1823 {
1824 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1825
1826 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1827 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1828 tmp));
1829 }
1830 }
1831 else if (frame_pointer_needed)
1832 {
1833 unsigned int reg_offset = var_size + args_size;
1834
1835 if (reg_offset == 0)
1836 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1837 else if (reg_offset < 32768)
1838 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1839 GEN_INT (reg_offset)));
1840 else
1841 {
1842 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1843
1844 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1845 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1846 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1847 tmp));
1848 }
1849 }
1850 else
1851 gcc_unreachable ();
1852
1853 if (current_frame_info.save_lr)
1854 pop (RETURN_ADDR_REGNUM);
1855
1856 /* Restore any saved registers, in reverse order of course. */
1857 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1858 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1859 {
1860 if ((gmask & (1L << regno)) != 0)
1861 pop (regno);
1862 }
1863
1864 if (current_frame_info.save_fp)
1865 pop (FRAME_POINTER_REGNUM);
1866
1867 /* Remove varargs area if present. */
1868 if (current_frame_info.pretend_size != 0)
1869 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1870 GEN_INT (current_frame_info.pretend_size)));
1871
1872 emit_insn (gen_blockage ());
1873 }
1874 }
1875
1876 /* Do any necessary cleanup after a function to restore stack, frame,
1877 and regs. */
1878
1879 static void
1880 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1881 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1882 {
1883 /* Reset state info for each function. */
1884 current_frame_info = zero_frame_info;
1885 m32r_compute_function_type (NULL_TREE);
1886 }
1887 \f
1888 /* Return nonzero if this function is known to have a null or 1 instruction
1889 epilogue. */
1890
1891 int
1892 direct_return (void)
1893 {
1894 if (!reload_completed)
1895 return FALSE;
1896
1897 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1898 return FALSE;
1899
1900 if (! current_frame_info.initialized)
1901 m32r_compute_frame_size (get_frame_size ());
1902
1903 return current_frame_info.total_size == 0;
1904 }
1905
1906 \f
1907 /* PIC. */
1908
1909 int
1910 m32r_legitimate_pic_operand_p (rtx x)
1911 {
1912 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1913 return 0;
1914
1915 if (GET_CODE (x) == CONST
1916 && GET_CODE (XEXP (x, 0)) == PLUS
1917 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1918 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1919 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1920 return 0;
1921
1922 return 1;
1923 }
1924
1925 rtx
1926 m32r_legitimize_pic_address (rtx orig, rtx reg)
1927 {
1928 #ifdef DEBUG_PIC
1929 printf("m32r_legitimize_pic_address()\n");
1930 #endif
1931
1932 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1933 {
1934 rtx pic_ref, address;
1935 int subregs = 0;
1936
1937 if (reg == 0)
1938 {
1939 gcc_assert (!reload_in_progress && !reload_completed);
1940 reg = gen_reg_rtx (Pmode);
1941
1942 subregs = 1;
1943 }
1944
1945 if (subregs)
1946 address = gen_reg_rtx (Pmode);
1947 else
1948 address = reg;
1949
1950 crtl->uses_pic_offset_table = 1;
1951
1952 if (GET_CODE (orig) == LABEL_REF
1953 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1954 {
1955 emit_insn (gen_gotoff_load_addr (reg, orig));
1956 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1957 return reg;
1958 }
1959
1960 emit_insn (gen_pic_load_addr (address, orig));
1961
1962 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1963 pic_ref = gen_const_mem (Pmode, address);
1964 emit_move_insn (reg, pic_ref);
1965 return reg;
1966 }
1967 else if (GET_CODE (orig) == CONST)
1968 {
1969 rtx base, offset;
1970
1971 if (GET_CODE (XEXP (orig, 0)) == PLUS
1972 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1973 return orig;
1974
1975 if (reg == 0)
1976 {
1977 gcc_assert (!reload_in_progress && !reload_completed);
1978 reg = gen_reg_rtx (Pmode);
1979 }
1980
1981 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1982 {
1983 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1984 if (base == reg)
1985 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1986 else
1987 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1988 }
1989 else
1990 return orig;
1991
1992 if (CONST_INT_P (offset))
1993 {
1994 if (INT16_P (INTVAL (offset)))
1995 return plus_constant (Pmode, base, INTVAL (offset));
1996 else
1997 {
1998 gcc_assert (! reload_in_progress && ! reload_completed);
1999 offset = force_reg (Pmode, offset);
2000 }
2001 }
2002
2003 return gen_rtx_PLUS (Pmode, base, offset);
2004 }
2005
2006 return orig;
2007 }
2008
2009 static rtx
2010 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2011 machine_mode mode ATTRIBUTE_UNUSED)
2012 {
2013 if (flag_pic)
2014 return m32r_legitimize_pic_address (x, NULL_RTX);
2015 else
2016 return x;
2017 }
2018
2019 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2020
2021 static bool
2022 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2023 {
2024 if (GET_CODE (addr) == LO_SUM)
2025 return true;
2026
2027 return false;
2028 }
2029 \f
2030 /* Nested function support. */
2031
2032 /* Emit RTL insns to initialize the variable parts of a trampoline.
2033 FNADDR is an RTX for the address of the function's pure code.
2034 CXT is an RTX for the static chain value for the function. */
2035
2036 void
2037 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2038 rtx fnaddr ATTRIBUTE_UNUSED,
2039 rtx cxt ATTRIBUTE_UNUSED)
2040 {
2041 }
2042 \f
2043 static void
2044 m32r_file_start (void)
2045 {
2046 default_file_start ();
2047
2048 if (flag_verbose_asm)
2049 fprintf (asm_out_file,
2050 "%s M32R/D special options: -G %d\n",
2051 ASM_COMMENT_START, g_switch_value);
2052
2053 if (TARGET_LITTLE_ENDIAN)
2054 fprintf (asm_out_file, "\t.little\n");
2055 }
2056 \f
2057 /* Print operand X (an rtx) in assembler syntax to file FILE.
2058 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2059 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2060
2061 static void
2062 m32r_print_operand (FILE * file, rtx x, int code)
2063 {
2064 rtx addr;
2065
2066 switch (code)
2067 {
2068 /* The 's' and 'p' codes are used by output_block_move() to
2069 indicate pre-increment stores ('s') and post-increment loads ('p'). */
2070 case 's':
2071 if (REG_P (x))
2072 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2073 else
2074 output_operand_lossage ("invalid operand to %%s code");
2075 return;
2076
2077 case 'p':
2078 if (REG_P (x))
2079 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2080 else
2081 output_operand_lossage ("invalid operand to %%p code");
2082 return;
2083
2084 case 'R' :
2085 /* Write second word of DImode or DFmode reference,
2086 register or memory. */
2087 if (REG_P (x))
2088 fputs (reg_names[REGNO (x)+1], file);
2089 else if (MEM_P (x))
2090 {
2091 fprintf (file, "@(");
2092 /* Handle possible auto-increment. Since it is pre-increment and
2093 we have already done it, we can just use an offset of four. */
2094 /* ??? This is taken from rs6000.c I think. I don't think it is
2095 currently necessary, but keep it around. */
2096 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2097 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2098 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2099 else
2100 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2101 fputc (')', file);
2102 }
2103 else
2104 output_operand_lossage ("invalid operand to %%R code");
2105 return;
2106
2107 case 'H' : /* High word. */
2108 case 'L' : /* Low word. */
2109 if (REG_P (x))
2110 {
2111 /* L = least significant word, H = most significant word. */
2112 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2113 fputs (reg_names[REGNO (x)], file);
2114 else
2115 fputs (reg_names[REGNO (x)+1], file);
2116 }
2117 else if (CONST_INT_P (x)
2118 || GET_CODE (x) == CONST_DOUBLE)
2119 {
2120 rtx first, second;
2121
2122 split_double (x, &first, &second);
2123 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2124 code == 'L' ? INTVAL (first) : INTVAL (second));
2125 }
2126 else
2127 output_operand_lossage ("invalid operand to %%H/%%L code");
2128 return;
2129
2130 case 'A' :
2131 {
2132 char str[30];
2133
2134 if (GET_CODE (x) != CONST_DOUBLE
2135 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2136 fatal_insn ("bad insn for 'A'", x);
2137
2138 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2139 fprintf (file, "%s", str);
2140 return;
2141 }
2142
2143 case 'B' : /* Bottom half. */
2144 case 'T' : /* Top half. */
2145 /* Output the argument to a `seth' insn (sets the Top half-word).
2146 For constants output arguments to a seth/or3 pair to set Top and
2147 Bottom halves. For symbols output arguments to a seth/add3 pair to
2148 set Top and Bottom halves. The difference exists because for
2149 constants seth/or3 is more readable but for symbols we need to use
2150 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
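/* Illustration only (register choice arbitrary): a 32-bit constant such
   as 0x12345678 could be built with
       seth r4, #0x1234        ; %T, top half
       or3  r4, r4, #0x5678    ; %B, bottom half
   while a symbol address uses the signed-addend pairing
       seth r4, #shigh(sym)
       add3 r4, r4, #low(sym)  */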
2151 switch (GET_CODE (x))
2152 {
2153 case CONST_INT :
2154 case CONST_DOUBLE :
2155 {
2156 rtx first, second;
2157
2158 split_double (x, &first, &second);
2159 x = WORDS_BIG_ENDIAN ? second : first;
2160 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2161 (code == 'B'
2162 ? INTVAL (x) & 0xffff
2163 : (INTVAL (x) >> 16) & 0xffff));
2164 }
2165 return;
2166 case CONST :
2167 case SYMBOL_REF :
2168 if (code == 'B'
2169 && small_data_operand (x, VOIDmode))
2170 {
2171 fputs ("sda(", file);
2172 output_addr_const (file, x);
2173 fputc (')', file);
2174 return;
2175 }
2176 /* fall through */
2177 case LABEL_REF :
2178 fputs (code == 'T' ? "shigh(" : "low(", file);
2179 output_addr_const (file, x);
2180 fputc (')', file);
2181 return;
2182 default :
2183 output_operand_lossage ("invalid operand to %%T/%%B code");
2184 return;
2185 }
2186 break;
2187
2188 case 'U' :
2189 /* ??? wip */
2190 /* Output a load/store with update indicator if appropriate. */
2191 if (MEM_P (x))
2192 {
2193 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2194 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2195 fputs (".a", file);
2196 }
2197 else
2198 output_operand_lossage ("invalid operand to %%U code");
2199 return;
2200
2201 case 'N' :
2202 /* Print a constant value negated. */
2203 if (CONST_INT_P (x))
2204 output_addr_const (file, GEN_INT (- INTVAL (x)));
2205 else
2206 output_operand_lossage ("invalid operand to %%N code");
2207 return;
2208
2209 case 'X' :
2210 /* Print a const_int in hex. Used in comments. */
2211 if (CONST_INT_P (x))
2212 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2213 return;
2214
2215 case '#' :
2216 fputs (IMMEDIATE_PREFIX, file);
2217 return;
2218
2219 case 0 :
2220 /* Do nothing special. */
2221 break;
2222
2223 default :
2224 /* Unknown flag. */
2225 output_operand_lossage ("invalid operand output code");
2226 }
2227
2228 switch (GET_CODE (x))
2229 {
2230 case REG :
2231 fputs (reg_names[REGNO (x)], file);
2232 break;
2233
2234 case MEM :
2235 addr = XEXP (x, 0);
2236 if (GET_CODE (addr) == PRE_INC)
2237 {
2238 if (!REG_P (XEXP (addr, 0)))
2239 fatal_insn ("pre-increment address is not a register", x);
2240
2241 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2242 }
2243 else if (GET_CODE (addr) == PRE_DEC)
2244 {
2245 if (!REG_P (XEXP (addr, 0)))
2246 fatal_insn ("pre-decrement address is not a register", x);
2247
2248 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2249 }
2250 else if (GET_CODE (addr) == POST_INC)
2251 {
2252 if (!REG_P (XEXP (addr, 0)))
2253 fatal_insn ("post-increment address is not a register", x);
2254
2255 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2256 }
2257 else
2258 {
2259 fputs ("@(", file);
2260 output_address (XEXP (x, 0));
2261 fputc (')', file);
2262 }
2263 break;
2264
2265 case CONST_DOUBLE :
2266 /* We handle SFmode constants here as output_addr_const doesn't. */
2267 if (GET_MODE (x) == SFmode)
2268 {
2269 REAL_VALUE_TYPE d;
2270 long l;
2271
2272 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2273 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2274 fprintf (file, "0x%08lx", l);
2275 break;
2276 }
2277
2278 /* Fall through. Let output_addr_const deal with it. */
2279
2280 default :
2281 output_addr_const (file, x);
2282 break;
2283 }
2284 }
2285
2286 /* Print a memory address as an operand to reference that memory location. */
2287
2288 static void
2289 m32r_print_operand_address (FILE * file, rtx addr)
2290 {
2291 rtx base;
2292 rtx index = 0;
2293 int offset = 0;
2294
2295 switch (GET_CODE (addr))
2296 {
2297 case REG :
2298 fputs (reg_names[REGNO (addr)], file);
2299 break;
2300
2301 case PLUS :
2302 if (CONST_INT_P (XEXP (addr, 0)))
2303 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2304 else if (CONST_INT_P (XEXP (addr, 1)))
2305 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2306 else
2307 base = XEXP (addr, 0), index = XEXP (addr, 1);
2308 if (REG_P (base))
2309 {
2310 /* Print the offset first (if present) to conform to the manual. */
2311 if (index == 0)
2312 {
2313 if (offset != 0)
2314 fprintf (file, "%d,", offset);
2315 fputs (reg_names[REGNO (base)], file);
2316 }
2317 /* The chip doesn't support this, but left in for generality. */
2318 else if (REG_P (index))
2319 fprintf (file, "%s,%s",
2320 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2321 /* Not sure this can happen, but leave in for now. */
2322 else if (GET_CODE (index) == SYMBOL_REF)
2323 {
2324 output_addr_const (file, index);
2325 fputc (',', file);
2326 fputs (reg_names[REGNO (base)], file);
2327 }
2328 else
2329 fatal_insn ("bad address", addr);
2330 }
2331 else if (GET_CODE (base) == LO_SUM)
2332 {
2333 gcc_assert (!index && REG_P (XEXP (base, 0)));
2334 if (small_data_operand (XEXP (base, 1), VOIDmode))
2335 fputs ("sda(", file);
2336 else
2337 fputs ("low(", file);
2338 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2339 offset));
2340 fputs ("),", file);
2341 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2342 }
2343 else
2344 fatal_insn ("bad address", addr);
2345 break;
2346
2347 case LO_SUM :
2348 if (!REG_P (XEXP (addr, 0)))
2349 fatal_insn ("lo_sum not of register", addr);
2350 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2351 fputs ("sda(", file);
2352 else
2353 fputs ("low(", file);
2354 output_addr_const (file, XEXP (addr, 1));
2355 fputs ("),", file);
2356 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2357 break;
2358
2359 case PRE_INC : /* Assume SImode. */
2360 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2361 break;
2362
2363 case PRE_DEC : /* Assume SImode. */
2364 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2365 break;
2366
2367 case POST_INC : /* Assume SImode. */
2368 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2369 break;
2370
2371 default :
2372 output_addr_const (file, addr);
2373 break;
2374 }
2375 }
2376
2377 static bool
2378 m32r_print_operand_punct_valid_p (unsigned char code)
2379 {
2380 return m32r_punct_chars[code];
2381 }
2382
2383 /* Return true if the operands are the constants 0 and 1. */
2384
2385 int
2386 zero_and_one (rtx operand1, rtx operand2)
2387 {
2388 return
2389 CONST_INT_P (operand1)
2390 && CONST_INT_P (operand2)
2391 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2392 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2393 }
2394
2395 /* Generate the correct assembler code to handle the conditional loading of a
2396 value into a register. It is known that the operands satisfy the
2397 conditional_move_operand() function above. The destination is operand[0].
2398 The condition is operand [1]. The 'true' value is operand [2] and the
2399 'false' value is operand [3]. */
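/* Illustrative output, with a hypothetical destination r4: the condition
   bit is copied with "mvfc r4, cbr", and when the selected 'true' value
   is 0 an extra "xor3 r4, r4, #1" is appended to invert it.  */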
2400
2401 char *
2402 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2403 {
2404 static char buffer [100];
2405 const char * dest = reg_names [REGNO (operands [0])];
2406
2407 buffer [0] = 0;
2408
2409 /* Destination must be a register. */
2410 gcc_assert (REG_P (operands [0]));
2411 gcc_assert (conditional_move_operand (operands [2], SImode));
2412 gcc_assert (conditional_move_operand (operands [3], SImode));
2413
2414 /* Check to see if the test is reversed. */
2415 if (GET_CODE (operands [1]) == NE)
2416 {
2417 rtx tmp = operands [2];
2418 operands [2] = operands [3];
2419 operands [3] = tmp;
2420 }
2421
2422 sprintf (buffer, "mvfc %s, cbr", dest);
2423
2424 /* If the true value was '0' then we need to invert the results of the move. */
2425 if (INTVAL (operands [2]) == 0)
2426 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2427 dest, dest);
2428
2429 return buffer;
2430 }
2431
2432 /* Returns true if the registers contained in the two
2433 rtl expressions are different. */
2434
2435 int
2436 m32r_not_same_reg (rtx a, rtx b)
2437 {
2438 int reg_a = -1;
2439 int reg_b = -2;
2440
2441 while (GET_CODE (a) == SUBREG)
2442 a = SUBREG_REG (a);
2443
2444 if (REG_P (a))
2445 reg_a = REGNO (a);
2446
2447 while (GET_CODE (b) == SUBREG)
2448 b = SUBREG_REG (b);
2449
2450 if (REG_P (b))
2451 reg_b = REGNO (b);
2452
2453 return reg_a != reg_b;
2454 }
2455
2456 \f
2457 rtx
2458 m32r_function_symbol (const char *name)
2459 {
2460 int extra_flags = 0;
2461 enum m32r_model model;
2462 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2463
2464 if (TARGET_MODEL_SMALL)
2465 model = M32R_MODEL_SMALL;
2466 else if (TARGET_MODEL_MEDIUM)
2467 model = M32R_MODEL_MEDIUM;
2468 else if (TARGET_MODEL_LARGE)
2469 model = M32R_MODEL_LARGE;
2470 else
2471 gcc_unreachable (); /* Shouldn't happen. */
2472 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2473
2474 if (extra_flags)
2475 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2476
2477 return sym;
2478 }
2479
2480 /* Use a library function to move some bytes. */
2481
2482 static void
2483 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2484 {
2485 /* We want to pass the size as Pmode, which will normally be SImode
2486 but will be DImode if we are using 64-bit longs and pointers. */
2487 if (GET_MODE (bytes_rtx) != VOIDmode
2488 && GET_MODE (bytes_rtx) != Pmode)
2489 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2490
2491 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2492 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2493 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2494 TYPE_UNSIGNED (sizetype)),
2495 TYPE_MODE (sizetype));
2496 }
2497
2498 /* Expand string/block move operations.
2499
2500 operands[0] is the pointer to the destination.
2501 operands[1] is the pointer to the source.
2502 operands[2] is the number of bytes to move.
2503 operands[3] is the alignment.
2504
2505 Returns 1 upon success, 0 otherwise. */
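/* Sketch of the strategy, using an assumed MAX_MOVE_BYTES of 16 purely
   for the arithmetic (the real value comes from m32r.h): a constant,
   word-aligned 100-byte copy becomes a 6-iteration loop of 16-byte
   movmemsi_internal copies plus a 4-byte leftover copy, while an
   unaligned or variable-sized request falls back to a memcpy call.  */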
2506
2507 int
2508 m32r_expand_block_move (rtx operands[])
2509 {
2510 rtx orig_dst = operands[0];
2511 rtx orig_src = operands[1];
2512 rtx bytes_rtx = operands[2];
2513 rtx align_rtx = operands[3];
2514 int constp = CONST_INT_P (bytes_rtx);
2515 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2516 int align = INTVAL (align_rtx);
2517 int leftover;
2518 rtx src_reg;
2519 rtx dst_reg;
2520
2521 if (constp && bytes <= 0)
2522 return 1;
2523
2524 /* Move the address into scratch registers. */
2525 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2526 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2527
2528 if (align > UNITS_PER_WORD)
2529 align = UNITS_PER_WORD;
2530
2531 /* If we prefer size over speed, always use a function call.
2532 If we do not know the size, use a function call.
2533 If the blocks are not word aligned, use a function call. */
2534 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2535 {
2536 block_move_call (dst_reg, src_reg, bytes_rtx);
2537 return 0;
2538 }
2539
2540 leftover = bytes % MAX_MOVE_BYTES;
2541 bytes -= leftover;
2542
2543 /* If necessary, generate a loop to handle the bulk of the copy. */
2544 if (bytes)
2545 {
2546 rtx_code_label *label = NULL;
2547 rtx final_src = NULL_RTX;
2548 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2549 rtx rounded_total = GEN_INT (bytes);
2550 rtx new_dst_reg = gen_reg_rtx (SImode);
2551 rtx new_src_reg = gen_reg_rtx (SImode);
2552
2553 /* If we are going to have to perform this loop more than
2554 once, then generate a label and compute the address the
2555 source register will contain upon completion of the final
2556 iteration. */
2557 if (bytes > MAX_MOVE_BYTES)
2558 {
2559 final_src = gen_reg_rtx (Pmode);
2560
2561 if (INT16_P(bytes))
2562 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2563 else
2564 {
2565 emit_insn (gen_movsi (final_src, rounded_total));
2566 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2567 }
2568
2569 label = gen_label_rtx ();
2570 emit_label (label);
2571 }
2572
2573 /* It is known that output_block_move() will update src_reg to point
2574 to the word after the end of the source block, and dst_reg to point
2575 to the last word of the destination block, provided that the block
2576 is MAX_MOVE_BYTES long. */
2577 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2578 new_dst_reg, new_src_reg));
2579 emit_move_insn (dst_reg, new_dst_reg);
2580 emit_move_insn (src_reg, new_src_reg);
2581 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2582
2583 if (bytes > MAX_MOVE_BYTES)
2584 {
2585 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2586 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2587 }
2588 }
2589
2590 if (leftover)
2591 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2592 gen_reg_rtx (SImode),
2593 gen_reg_rtx (SImode)));
2594 return 1;
2595 }
2596
2597 \f
2598 /* Emit load/stores for a small constant word aligned block_move.
2599
2600 operands[0] is the memory address of the destination.
2601 operands[1] is the memory address of the source.
2602 operands[2] is the number of bytes to move.
2603 operands[3] is a temp register.
2604 operands[4] is a temp register. */
2605
2606 void
2607 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2608 {
2609 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2610 int first_time;
2611 int got_extra = 0;
2612
2613 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2614
2615 /* We do not have a post-increment store available, so the first store
2616 is emitted without any increment, and the remaining ones can then use
2617 the pre-increment addressing mode.
2618
2619 Note: expand_block_move() also relies upon this behavior when building
2620 loops to copy large blocks. */
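/* Illustrative expansion of one 8-byte, word-aligned chunk the first time
   through (register numbers arbitrary):

       ld  r5, @r1+        ; %p1: post-increment load
       ld  r6, @r1+
       st  r5, @r0         ; very first store: no increment
       st  r6, @+r0        ; %s0: pre-increment store  */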
2621 first_time = 1;
2622
2623 while (bytes > 0)
2624 {
2625 if (bytes >= 8)
2626 {
2627 if (first_time)
2628 {
2629 output_asm_insn ("ld\t%5, %p1", operands);
2630 output_asm_insn ("ld\t%6, %p1", operands);
2631 output_asm_insn ("st\t%5, @%0", operands);
2632 output_asm_insn ("st\t%6, %s0", operands);
2633 }
2634 else
2635 {
2636 output_asm_insn ("ld\t%5, %p1", operands);
2637 output_asm_insn ("ld\t%6, %p1", operands);
2638 output_asm_insn ("st\t%5, %s0", operands);
2639 output_asm_insn ("st\t%6, %s0", operands);
2640 }
2641
2642 bytes -= 8;
2643 }
2644 else if (bytes >= 4)
2645 {
2646 if (bytes > 4)
2647 got_extra = 1;
2648
2649 output_asm_insn ("ld\t%5, %p1", operands);
2650
2651 if (got_extra)
2652 output_asm_insn ("ld\t%6, %p1", operands);
2653
2654 if (first_time)
2655 output_asm_insn ("st\t%5, @%0", operands);
2656 else
2657 output_asm_insn ("st\t%5, %s0", operands);
2658
2659 bytes -= 4;
2660 }
2661 else
2662 {
2663 /* Get the entire next word, even though we do not want all of it.
2664 This saves us from doing several smaller loads, and we assume that
2665 we cannot cause a page fault when at least part of the word is in
2666 valid memory [since we don't get called if things aren't properly
2667 aligned]. */
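/* Sketch of the tail handling below: a 2- or 3-byte tail extracts the
   top half-word with "sra3 ..., #16" and stores it with sth; any
   remaining single byte is then shifted down (by 8, or by 24 when no
   half-word was stored) and written with stb.  */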
2668 int dst_offset = first_time ? 0 : 4;
2669 /* The amount of increment we have to make to the
2670 destination pointer. */
2671 int dst_inc_amount = dst_offset + bytes - 4;
2672 /* The same for the source pointer. */
2673 int src_inc_amount = bytes;
2674 int last_shift;
2675 rtx my_operands[3];
2676
2677 /* If got_extra is true then we have already loaded
2678 the next word as part of loading and storing the previous word. */
2679 if (! got_extra)
2680 output_asm_insn ("ld\t%6, @%1", operands);
2681
2682 if (bytes >= 2)
2683 {
2684 bytes -= 2;
2685
2686 output_asm_insn ("sra3\t%5, %6, #16", operands);
2687 my_operands[0] = operands[5];
2688 my_operands[1] = GEN_INT (dst_offset);
2689 my_operands[2] = operands[0];
2690 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2691
2692 /* If there is a byte left to store then increment the
2693 destination address and shift the contents of the source
2694 register down by 8 bits. We could not do the address
2695 increment in the store half word instruction, because it does
2696 not have an auto increment mode. */
2697 if (bytes > 0) /* assert (bytes == 1) */
2698 {
2699 dst_offset += 2;
2700 last_shift = 8;
2701 }
2702 }
2703 else
2704 last_shift = 24;
2705
2706 if (bytes > 0)
2707 {
2708 my_operands[0] = operands[6];
2709 my_operands[1] = GEN_INT (last_shift);
2710 output_asm_insn ("srai\t%0, #%1", my_operands);
2711 my_operands[0] = operands[6];
2712 my_operands[1] = GEN_INT (dst_offset);
2713 my_operands[2] = operands[0];
2714 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2715 }
2716
2717 /* Update the destination pointer if needed. We have to do
2718 this so that the pattern matches what we output in this
2719 function. */
2720 if (dst_inc_amount
2721 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2722 {
2723 my_operands[0] = operands[0];
2724 my_operands[1] = GEN_INT (dst_inc_amount);
2725 output_asm_insn ("addi\t%0, #%1", my_operands);
2726 }
2727
2728 /* Update the source pointer if needed. We have to do this
2729 so that the pattern matches what we output in this
2730 function. */
2731 if (src_inc_amount
2732 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2733 {
2734 my_operands[0] = operands[1];
2735 my_operands[1] = GEN_INT (src_inc_amount);
2736 output_asm_insn ("addi\t%0, #%1", my_operands);
2737 }
2738
2739 bytes = 0;
2740 }
2741
2742 first_time = 0;
2743 }
2744 }
2745
2746 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2747
2748 int
2749 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2750 unsigned int new_reg)
2751 {
2752 /* Interrupt routines can't clobber any register that isn't already used. */
2753 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2754 && !df_regs_ever_live_p (new_reg))
2755 return 0;
2756
2757 return 1;
2758 }
2759
2760 rtx
2761 m32r_return_addr (int count)
2762 {
2763 if (count != 0)
2764 return const0_rtx;
2765
2766 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2767 }
2768
2769 static void
2770 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2771 {
2772 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2773 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2774 0x017e8e17 : 0x178e7e01, SImode));
2775 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2776 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2777 0x0c00ae86 : 0x86ae000c, SImode));
2778 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2779 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2780 0xe627871e : 0x1e8727e6, SImode));
2781 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2782 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2783 0xc616c626 : 0x26c616c6, SImode));
2784 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2785 chain_value);
2786 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2787 XEXP (DECL_RTL (fndecl), 0));
2788
2789 if (m32r_cache_flush_trap >= 0)
2790 emit_insn (gen_flush_icache
2791 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2792 gen_int_mode (m32r_cache_flush_trap, SImode)));
2793 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2794 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2795 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2796 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2797 GEN_INT (3), SImode);
2798 }
2799
2800 /* True if X is a reg that can be used as a base reg. */
2801
2802 static bool
2803 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2804 {
2805 if (! REG_P (x))
2806 return false;
2807
2808 if (strict)
2809 {
2810 if (GPR_P (REGNO (x)))
2811 return true;
2812 }
2813 else
2814 {
2815 if (GPR_P (REGNO (x))
2816 || REGNO (x) == ARG_POINTER_REGNUM
2817 || ! HARD_REGISTER_P (x))
2818 return true;
2819 }
2820
2821 return false;
2822 }
2823
2824 static inline bool
2825 m32r_rtx_ok_for_offset_p (const_rtx x)
2826 {
2827 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2828 }
2829
2830 static inline bool
2831 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2832 const_rtx x, bool strict)
2833 {
2834 if (GET_CODE (x) == PLUS
2835 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2836 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2837 return true;
2838
2839 return false;
2840 }
2841
2842 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2843 since more than one instruction will be required. */
2844
2845 static inline bool
2846 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2847 bool strict)
2848 {
2849 if (GET_CODE (x) == LO_SUM
2850 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2851 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2852 && CONSTANT_P (XEXP (x, 1)))
2853 return true;
2854
2855 return false;
2856 }
2857
2858 /* Is this a load and increment operation?  */
2859
2860 static inline bool
2861 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2862 {
2863 if ((mode == SImode || mode == SFmode)
2864 && GET_CODE (x) == POST_INC
2865 && REG_P (XEXP (x, 0))
2866 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2867 return true;
2868
2869 return false;
2870 }
2871
2872 /* Is this an increment/decrement and store operation?  */
2873
2874 static inline bool
2875 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2876 {
2877 if ((mode == SImode || mode == SFmode)
2878 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2879 && REG_P (XEXP (x, 0))
2880 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2881 return true;
2882
2883 return false;
2884 }
2885
2886 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
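/* Sketch of the address forms accepted by the helpers above, in
   assembler terms (register and symbol names illustrative): @r4,
   @(100,r4), @(low(sym),r4) for single-word modes, @r4+ for
   SImode/SFmode loads, and @+r4 / @-r4 for SImode/SFmode stores.  */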
2887
2888 static bool
2889 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2890 {
2891 if (m32r_rtx_ok_for_base_p (x, strict)
2892 || m32r_legitimate_offset_addres_p (mode, x, strict)
2893 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2894 || m32r_load_postinc_p (mode, x, strict)
2895 || m32r_store_preinc_predec_p (mode, x, strict))
2896 return true;
2897
2898 return false;
2899 }
2900
2901 static void
2902 m32r_conditional_register_usage (void)
2903 {
2904 if (flag_pic)
2905 {
2906 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2907 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2908 }
2909 }
2910
2911 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2912
2913 We don't allow (plus symbol large-constant) as the relocations can't
2914 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2915 We allow all CONST_DOUBLE's as the md file patterns will force the
2916 constant to memory if they can't handle them. */
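/* For instance (hypothetical symbol), (const (plus (symbol_ref "x")
   (const_int 4))) remains a legitimate constant here, while an addend of
   0x10000 exceeds 32767 and is rejected.  */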
2917
2918 static bool
2919 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2920 {
2921 return !(GET_CODE (x) == CONST
2922 && GET_CODE (XEXP (x, 0)) == PLUS
2923 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2924 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2925 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2926 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2927 }