1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "stringpool.h"
28 #include "calls.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "dbxout.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "ggc.h"
43 #include "df.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm-constrs.h"
48 #include "opts.h"
49
50 /* Array of valid operand punctuation characters. */
51 static char m32r_punct_chars[256];
52
53 /* Machine-specific symbol_ref flags. */
54 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
55 #define SYMBOL_REF_MODEL(X) \
56 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
57
58 /* For string literals, etc. */
59 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
60
61 /* Forward declarations.  */
62 static void m32r_option_override (void);
63 static void init_reg_tables (void);
64 static void block_move_call (rtx, rtx, rtx);
65 static int m32r_is_insn (rtx);
66 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
67 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
68 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
69 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
70 static void m32r_print_operand (FILE *, rtx, int);
71 static void m32r_print_operand_address (FILE *, rtx);
72 static bool m32r_print_operand_punct_valid_p (unsigned char code);
73 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
74 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
75
76 static void m32r_file_start (void);
77
78 static int m32r_adjust_priority (rtx, int);
79 static int m32r_issue_rate (void);
80
81 static void m32r_encode_section_info (tree, rtx, int);
82 static bool m32r_in_small_data_p (const_tree);
83 static bool m32r_return_in_memory (const_tree, const_tree);
84 static rtx m32r_function_value (const_tree, const_tree, bool);
85 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
86 static bool m32r_function_value_regno_p (const unsigned int);
87 static void m32r_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
88 tree, int *, int);
89 static void init_idents (void);
90 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
91 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static bool m32r_pass_by_reference (cumulative_args_t, enum machine_mode,
93 const_tree, bool);
94 static int m32r_arg_partial_bytes (cumulative_args_t, enum machine_mode,
95 tree, bool);
96 static rtx m32r_function_arg (cumulative_args_t, enum machine_mode,
97 const_tree, bool);
98 static void m32r_function_arg_advance (cumulative_args_t, enum machine_mode,
99 const_tree, bool);
100 static bool m32r_can_eliminate (const int, const int);
101 static void m32r_conditional_register_usage (void);
102 static void m32r_trampoline_init (rtx, tree, rtx);
103 static bool m32r_legitimate_constant_p (enum machine_mode, rtx);
104 \f
105 /* M32R specific attributes. */
106
107 static const struct attribute_spec m32r_attribute_table[] =
108 {
109 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
110 affects_type_identity } */
111 { "interrupt", 0, 0, true, false, false, NULL, false },
112 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
113 false },
114 { NULL, 0, 0, false, false, false, NULL, false }
115 };
116 \f
117 /* Initialize the GCC target structure. */
118 #undef TARGET_ATTRIBUTE_TABLE
119 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
120
121 #undef TARGET_LEGITIMATE_ADDRESS_P
122 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
123 #undef TARGET_LEGITIMIZE_ADDRESS
124 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
125 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
126 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
127
128 #undef TARGET_ASM_ALIGNED_HI_OP
129 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
130 #undef TARGET_ASM_ALIGNED_SI_OP
131 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
132
133 #undef TARGET_PRINT_OPERAND
134 #define TARGET_PRINT_OPERAND m32r_print_operand
135 #undef TARGET_PRINT_OPERAND_ADDRESS
136 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
137 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
138 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
139
140 #undef TARGET_ASM_FUNCTION_PROLOGUE
141 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
142 #undef TARGET_ASM_FUNCTION_EPILOGUE
143 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
144
145 #undef TARGET_ASM_FILE_START
146 #define TARGET_ASM_FILE_START m32r_file_start
147
148 #undef TARGET_SCHED_ADJUST_PRIORITY
149 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
150 #undef TARGET_SCHED_ISSUE_RATE
151 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
152
153 #undef TARGET_OPTION_OVERRIDE
154 #define TARGET_OPTION_OVERRIDE m32r_option_override
155
156 #undef TARGET_ENCODE_SECTION_INFO
157 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
158 #undef TARGET_IN_SMALL_DATA_P
159 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
160
161
162 #undef TARGET_MEMORY_MOVE_COST
163 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
164 #undef TARGET_RTX_COSTS
165 #define TARGET_RTX_COSTS m32r_rtx_costs
166 #undef TARGET_ADDRESS_COST
167 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
168
169 #undef TARGET_PROMOTE_PROTOTYPES
170 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
171 #undef TARGET_RETURN_IN_MEMORY
172 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
173
174 #undef TARGET_FUNCTION_VALUE
175 #define TARGET_FUNCTION_VALUE m32r_function_value
176 #undef TARGET_LIBCALL_VALUE
177 #define TARGET_LIBCALL_VALUE m32r_libcall_value
178 #undef TARGET_FUNCTION_VALUE_REGNO_P
179 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
180
181 #undef TARGET_SETUP_INCOMING_VARARGS
182 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
183 #undef TARGET_MUST_PASS_IN_STACK
184 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
185 #undef TARGET_PASS_BY_REFERENCE
186 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
187 #undef TARGET_ARG_PARTIAL_BYTES
188 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
189 #undef TARGET_FUNCTION_ARG
190 #define TARGET_FUNCTION_ARG m32r_function_arg
191 #undef TARGET_FUNCTION_ARG_ADVANCE
192 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
193
194 #undef TARGET_CAN_ELIMINATE
195 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
196
197 #undef TARGET_CONDITIONAL_REGISTER_USAGE
198 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
199
200 #undef TARGET_TRAMPOLINE_INIT
201 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
202
203 #undef TARGET_LEGITIMATE_CONSTANT_P
204 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
205
206 struct gcc_target targetm = TARGET_INITIALIZER;
207 \f
208 /* Called by m32r_option_override to initialize various things. */
209
210 void
211 m32r_init (void)
212 {
213 init_reg_tables ();
214
215 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
216 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
217 m32r_punct_chars['#'] = 1;
218 m32r_punct_chars['@'] = 1; /* ??? no longer used */
219
220 /* Provide default value if not specified. */
221 if (!global_options_set.x_g_switch_value)
222 g_switch_value = SDATA_DEFAULT_SIZE;
223 }
224
225 static void
226 m32r_option_override (void)
227 {
228 /* These need to be done at start up.
229 It's convenient to do them here. */
230 m32r_init ();
231 SUBTARGET_OVERRIDE_OPTIONS;
232 }
233
234 /* Vectors to keep interesting information about registers where it can easily
235    be obtained.  We used to use the actual mode value as the bit number, but there
236 is (or may be) more than 32 modes now. Instead we use two tables: one
237 indexed by hard register number, and one indexed by mode. */
238
239 /* The purpose of m32r_mode_class is to shrink the range of modes so that
240 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
241 mapped into one m32r_mode_class mode. */
242
243 enum m32r_mode_class
244 {
245 C_MODE,
246 S_MODE, D_MODE, T_MODE, O_MODE,
247 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
248 };
249
250 /* Modes for condition codes. */
251 #define C_MODES (1 << (int) C_MODE)
252
253 /* Modes for single-word and smaller quantities. */
254 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
255
256 /* Modes for double-word and smaller quantities. */
257 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
258
259 /* Modes for quad-word and smaller quantities. */
260 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
261
262 /* Modes for accumulators. */
263 #define A_MODES (1 << (int) A_MODE)
264
265 /* Value is 1 if register/mode pair is acceptable on the M32R.  */
266
267 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
268 {
269 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
270 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
271 S_MODES, C_MODES, A_MODES, A_MODES
272 };
273
274 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
275
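/* A sketch of how the two tables are meant to be combined (the real
   HARD_REGNO_MODE_OK test lives in m32r.h; this expression is only
   illustrative):

     (m32r_hard_regno_mode_ok[REGNO] & m32r_mode_class[MODE]) != 0

   e.g. the condition-code register (the C_MODES entry above) only accepts
   modes that init_reg_tables maps to C_MODE.  */
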
276 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
277
278 static void
279 init_reg_tables (void)
280 {
281 int i;
282
283 for (i = 0; i < NUM_MACHINE_MODES; i++)
284 {
285 switch (GET_MODE_CLASS (i))
286 {
287 case MODE_INT:
288 case MODE_PARTIAL_INT:
289 case MODE_COMPLEX_INT:
290 if (GET_MODE_SIZE (i) <= 4)
291 m32r_mode_class[i] = 1 << (int) S_MODE;
292 else if (GET_MODE_SIZE (i) == 8)
293 m32r_mode_class[i] = 1 << (int) D_MODE;
294 else if (GET_MODE_SIZE (i) == 16)
295 m32r_mode_class[i] = 1 << (int) T_MODE;
296 else if (GET_MODE_SIZE (i) == 32)
297 m32r_mode_class[i] = 1 << (int) O_MODE;
298 else
299 m32r_mode_class[i] = 0;
300 break;
301 case MODE_FLOAT:
302 case MODE_COMPLEX_FLOAT:
303 if (GET_MODE_SIZE (i) <= 4)
304 m32r_mode_class[i] = 1 << (int) SF_MODE;
305 else if (GET_MODE_SIZE (i) == 8)
306 m32r_mode_class[i] = 1 << (int) DF_MODE;
307 else if (GET_MODE_SIZE (i) == 16)
308 m32r_mode_class[i] = 1 << (int) TF_MODE;
309 else if (GET_MODE_SIZE (i) == 32)
310 m32r_mode_class[i] = 1 << (int) OF_MODE;
311 else
312 m32r_mode_class[i] = 0;
313 break;
314 case MODE_CC:
315 m32r_mode_class[i] = 1 << (int) C_MODE;
316 break;
317 default:
318 m32r_mode_class[i] = 0;
319 break;
320 }
321 }
322
323 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
324 {
325 if (GPR_P (i))
326 m32r_regno_reg_class[i] = GENERAL_REGS;
327 else if (i == ARG_POINTER_REGNUM)
328 m32r_regno_reg_class[i] = GENERAL_REGS;
329 else
330 m32r_regno_reg_class[i] = NO_REGS;
331 }
332 }
333 \f
334 /* M32R specific attribute support.
335
336 interrupt - for interrupt functions
337
338 model - select code model used to access object
339
340 small: addresses use 24 bits, use bl to make calls
341 medium: addresses use 32 bits, use bl to make calls
342 large: addresses use 32 bits, use seth/add3/jl to make calls
343
344 Grep for MODEL in m32r.h for more info. */
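
/* A minimal usage sketch (illustrative only, not taken from this file):

     void handler (void) __attribute__ ((interrupt));
     extern int big_table[] __attribute__ ((model (large)));

   The model argument is matched against the identifiers set up in
   init_idents () below (small/medium/large and their __...__ forms).  */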
345
346 static tree small_ident1;
347 static tree small_ident2;
348 static tree medium_ident1;
349 static tree medium_ident2;
350 static tree large_ident1;
351 static tree large_ident2;
352
353 static void
354 init_idents (void)
355 {
356 if (small_ident1 == 0)
357 {
358 small_ident1 = get_identifier ("small");
359 small_ident2 = get_identifier ("__small__");
360 medium_ident1 = get_identifier ("medium");
361 medium_ident2 = get_identifier ("__medium__");
362 large_ident1 = get_identifier ("large");
363 large_ident2 = get_identifier ("__large__");
364 }
365 }
366
367 /* Handle a "model" attribute; arguments as in
368 struct attribute_spec.handler. */
369 static tree
370 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
371 tree args, int flags ATTRIBUTE_UNUSED,
372 bool *no_add_attrs)
373 {
374 tree arg;
375
376 init_idents ();
377 arg = TREE_VALUE (args);
378
379 if (arg != small_ident1
380 && arg != small_ident2
381 && arg != medium_ident1
382 && arg != medium_ident2
383 && arg != large_ident1
384 && arg != large_ident2)
385 {
386 warning (OPT_Wattributes, "invalid argument of %qs attribute",
387 IDENTIFIER_POINTER (name));
388 *no_add_attrs = true;
389 }
390
391 return NULL_TREE;
392 }
393 \f
394 /* Encode section information of DECL, which is either a VAR_DECL,
395 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
396
397 For the M32R we want to record:
398
399 - whether the object lives in .sdata/.sbss.
400 - what code model should be used to access the object
401 */
402
403 static void
404 m32r_encode_section_info (tree decl, rtx rtl, int first)
405 {
406 int extra_flags = 0;
407 tree model_attr;
408 enum m32r_model model;
409
410 default_encode_section_info (decl, rtl, first);
411
412 if (!DECL_P (decl))
413 return;
414
415 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
416 if (model_attr)
417 {
418 tree id;
419
420 init_idents ();
421
422 id = TREE_VALUE (TREE_VALUE (model_attr));
423
424 if (id == small_ident1 || id == small_ident2)
425 model = M32R_MODEL_SMALL;
426 else if (id == medium_ident1 || id == medium_ident2)
427 model = M32R_MODEL_MEDIUM;
428 else if (id == large_ident1 || id == large_ident2)
429 model = M32R_MODEL_LARGE;
430 else
431 gcc_unreachable (); /* shouldn't happen */
432 }
433 else
434 {
435 if (TARGET_MODEL_SMALL)
436 model = M32R_MODEL_SMALL;
437 else if (TARGET_MODEL_MEDIUM)
438 model = M32R_MODEL_MEDIUM;
439 else if (TARGET_MODEL_LARGE)
440 model = M32R_MODEL_LARGE;
441 else
442 gcc_unreachable (); /* shouldn't happen */
443 }
444 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
445
446 if (extra_flags)
447 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
448 }
449
450 /* Only mark the object as being small data area addressable if
451 it hasn't been explicitly marked with a code model.
452
453 The user can explicitly put an object in the small data area with the
454 section attribute. If the object is in sdata/sbss and marked with a
455 code model do both [put the object in .sdata and mark it as being
456 addressed with a specific code model - don't mark it as being addressed
457 with an SDA reloc though]. This is ok and might be useful at times. If
458 the object doesn't fit the linker will give an error. */
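
/* For instance (an illustrative sketch, not part of this port's sources):

     int counter;
     int big[256] __attribute__ ((section (".sdata")));

   With the default -G value, `counter' is treated as small-data addressable
   because its size fits; `big' is small-data addressable only because of the
   explicit section attribute.  */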
459
460 static bool
461 m32r_in_small_data_p (const_tree decl)
462 {
463 const_tree section;
464
465 if (TREE_CODE (decl) != VAR_DECL)
466 return false;
467
468 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
469 return false;
470
471 section = DECL_SECTION_NAME (decl);
472 if (section)
473 {
474 const char *const name = TREE_STRING_POINTER (section);
475 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
476 return true;
477 }
478 else
479 {
480 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
481 {
482 int size = int_size_in_bytes (TREE_TYPE (decl));
483
484 if (size > 0 && size <= g_switch_value)
485 return true;
486 }
487 }
488
489 return false;
490 }
491
492 /* Do anything needed before RTL is emitted for each function. */
493
494 void
495 m32r_init_expanders (void)
496 {
497 /* ??? At one point there was code here. The function is left in
498 to make it easy to experiment. */
499 }
500 \f
501 int
502 call_operand (rtx op, enum machine_mode mode)
503 {
504 if (!MEM_P (op))
505 return 0;
506 op = XEXP (op, 0);
507 return call_address_operand (op, mode);
508 }
509
510 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
511
512 int
513 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
514 {
515 if (! TARGET_SDATA_USE)
516 return 0;
517
518 if (GET_CODE (op) == SYMBOL_REF)
519 return SYMBOL_REF_SMALL_P (op);
520
521 if (GET_CODE (op) == CONST
522 && GET_CODE (XEXP (op, 0)) == PLUS
523 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
524 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
525 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
526
527 return 0;
528 }
529
530 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
531
532 int
533 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
534 {
535 rtx sym;
536
537 if (flag_pic)
538 return 0;
539
540 if (GET_CODE (op) == LABEL_REF)
541 return TARGET_ADDR24;
542
543 if (GET_CODE (op) == SYMBOL_REF)
544 sym = op;
545 else if (GET_CODE (op) == CONST
546 && GET_CODE (XEXP (op, 0)) == PLUS
547 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
548 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
549 sym = XEXP (XEXP (op, 0), 0);
550 else
551 return 0;
552
553 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
554 return 1;
555
556 if (TARGET_ADDR24
557 && (CONSTANT_POOL_ADDRESS_P (sym)
558 || LIT_NAME_P (XSTR (sym, 0))))
559 return 1;
560
561 return 0;
562 }
563
564 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
565
566 int
567 addr32_operand (rtx op, enum machine_mode mode)
568 {
569 rtx sym;
570
571 if (GET_CODE (op) == LABEL_REF)
572 return TARGET_ADDR32;
573
574 if (GET_CODE (op) == SYMBOL_REF)
575 sym = op;
576 else if (GET_CODE (op) == CONST
577 && GET_CODE (XEXP (op, 0)) == PLUS
578 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
579 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
580 && ! flag_pic)
581 sym = XEXP (XEXP (op, 0), 0);
582 else
583 return 0;
584
585 return (! addr24_operand (sym, mode)
586 && ! small_data_operand (sym, mode));
587 }
588
589 /* Return 1 if OP is a function that can be called with the `bl' insn. */
590
591 int
592 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
593 {
594 if (flag_pic)
595 return 1;
596
597 if (GET_CODE (op) == SYMBOL_REF)
598 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
599
600 return TARGET_CALL26;
601 }
602
603 /* Return 1 if OP is a DImode const we want to handle inline.
604 This must match the code in the movdi pattern.
605 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
606
607 int
608 easy_di_const (rtx op)
609 {
610 rtx high_rtx, low_rtx;
611 HOST_WIDE_INT high, low;
612
613 split_double (op, &high_rtx, &low_rtx);
614 high = INTVAL (high_rtx);
615 low = INTVAL (low_rtx);
616 /* Pick constants loadable with 2 16-bit `ldi' insns. */
617 if (high >= -128 && high <= 127
618 && low >= -128 && low <= 127)
619 return 1;
620 return 0;
621 }
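
/* Worked example (illustrative): on the default big-endian target the DImode
   constant 0x0000000500000003 splits into high = 5 and low = 3, both within
   [-128, 127], so it is built with two short `ldi' insns; a constant such as
   0x0000008000000000 (high = 128) is not.  */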
622
623 /* Return 1 if OP is a DFmode const we want to handle inline.
624 This must match the code in the movdf pattern.
625 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
626
627 int
628 easy_df_const (rtx op)
629 {
630 REAL_VALUE_TYPE r;
631 long l[2];
632
633 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
634 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
635 if (l[0] == 0 && l[1] == 0)
636 return 1;
637 if ((l[0] & 0xffff) == 0 && l[1] == 0)
638 return 1;
639 return 0;
640 }
641
642 /* Return 1 if OP is (mem (reg ...)).
643 This is used in insn length calcs. */
644
645 int
646 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
647 {
648 return MEM_P (op) && REG_P (XEXP (op, 0));
649 }
650
651 /* Return nonzero if TYPE must be passed by indirect reference. */
652
653 static bool
654 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
655 enum machine_mode mode, const_tree type,
656 bool named ATTRIBUTE_UNUSED)
657 {
658 int size;
659
660 if (type)
661 size = int_size_in_bytes (type);
662 else
663 size = GET_MODE_SIZE (mode);
664
665 return (size < 0 || size > 8);
666 }
667 \f
668 /* Comparisons. */
669
670 /* X and Y are two things to compare using CODE. Emit the compare insn and
671 return the rtx for compare [arg0 of the if_then_else].
672 If need_compare is true then the comparison insn must be generated, rather
673 than being subsumed into the following branch instruction. */
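
/* For example (a sketch of the mapping below): CODE == LE sets
   compare_code = LT with must_swap, so the insn emitted compares y < x and
   the branch sense is EQ, i.e. branch when the condition bit is clear,
   which is !(y < x) == (x <= y).  */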
674
675 rtx
676 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
677 {
678 enum rtx_code compare_code;
679 enum rtx_code branch_code;
680 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
681 int must_swap = 0;
682
683 switch (code)
684 {
685 case EQ: compare_code = EQ; branch_code = NE; break;
686 case NE: compare_code = EQ; branch_code = EQ; break;
687 case LT: compare_code = LT; branch_code = NE; break;
688 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
689 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
690 case GE: compare_code = LT; branch_code = EQ; break;
691 case LTU: compare_code = LTU; branch_code = NE; break;
692 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
693 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
694 case GEU: compare_code = LTU; branch_code = EQ; break;
695
696 default:
697 gcc_unreachable ();
698 }
699
700 if (need_compare)
701 {
702 switch (compare_code)
703 {
704 case EQ:
705 if (satisfies_constraint_P (y) /* Reg equal to small const. */
706 && y != const0_rtx)
707 {
708 rtx tmp = gen_reg_rtx (SImode);
709
710 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
711 x = tmp;
712 y = const0_rtx;
713 }
714 else if (CONSTANT_P (y)) /* Reg equal to const. */
715 {
716 rtx tmp = force_reg (GET_MODE (x), y);
717 y = tmp;
718 }
719
720 if (register_operand (y, SImode) /* Reg equal to reg. */
721 || y == const0_rtx) /* Reg equal to zero. */
722 {
723 emit_insn (gen_cmp_eqsi_insn (x, y));
724
725 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
726 }
727 break;
728
729 case LT:
730 if (register_operand (y, SImode)
731 || satisfies_constraint_P (y))
732 {
733 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
734
735 switch (code)
736 {
737 case LT:
738 emit_insn (gen_cmp_ltsi_insn (x, y));
739 code = EQ;
740 break;
741 case LE:
742 if (y == const0_rtx)
743 tmp = const1_rtx;
744 else
745 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
746 emit_insn (gen_cmp_ltsi_insn (x, tmp));
747 code = EQ;
748 break;
749 case GT:
750 if (CONST_INT_P (y))
751 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
752 else
753 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
754 emit_insn (gen_cmp_ltsi_insn (x, tmp));
755 code = NE;
756 break;
757 case GE:
758 emit_insn (gen_cmp_ltsi_insn (x, y));
759 code = NE;
760 break;
761 default:
762 gcc_unreachable ();
763 }
764
765 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
766 }
767 break;
768
769 case LTU:
770 if (register_operand (y, SImode)
771 || satisfies_constraint_P (y))
772 {
773 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
774
775 switch (code)
776 {
777 case LTU:
778 emit_insn (gen_cmp_ltusi_insn (x, y));
779 code = EQ;
780 break;
781 case LEU:
782 if (y == const0_rtx)
783 tmp = const1_rtx;
784 else
785 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
786 emit_insn (gen_cmp_ltusi_insn (x, tmp));
787 code = EQ;
788 break;
789 case GTU:
790 if (CONST_INT_P (y))
791 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
792 else
793 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
794 emit_insn (gen_cmp_ltusi_insn (x, tmp));
795 code = NE;
796 break;
797 case GEU:
798 emit_insn (gen_cmp_ltusi_insn (x, y));
799 code = NE;
800 break;
801 default:
802 gcc_unreachable ();
803 }
804
805 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
806 }
807 break;
808
809 default:
810 gcc_unreachable ();
811 }
812 }
813 else
814 {
815 /* Reg/reg equal comparison. */
816 if (compare_code == EQ
817 && register_operand (y, SImode))
818 return gen_rtx_fmt_ee (code, CCmode, x, y);
819
820 /* Reg/zero signed comparison. */
821 if ((compare_code == EQ || compare_code == LT)
822 && y == const0_rtx)
823 return gen_rtx_fmt_ee (code, CCmode, x, y);
824
825 /* Reg/smallconst equal comparison. */
826 if (compare_code == EQ
827 && satisfies_constraint_P (y))
828 {
829 rtx tmp = gen_reg_rtx (SImode);
830
831 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
832 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
833 }
834
835 /* Reg/const equal comparison. */
836 if (compare_code == EQ
837 && CONSTANT_P (y))
838 {
839 rtx tmp = force_reg (GET_MODE (x), y);
840
841 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
842 }
843 }
844
845 if (CONSTANT_P (y))
846 {
847 if (must_swap)
848 y = force_reg (GET_MODE (x), y);
849 else
850 {
851 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
852
853 if (! ok_const)
854 y = force_reg (GET_MODE (x), y);
855 }
856 }
857
858 switch (compare_code)
859 {
860 case EQ :
861 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
862 break;
863 case LT :
864 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
865 break;
866 case LTU :
867 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
868 break;
869
870 default:
871 gcc_unreachable ();
872 }
873
874 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
875 }
876
877 bool
878 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
879 {
880 enum machine_mode mode = GET_MODE (op0);
881
882 gcc_assert (mode == SImode);
883 switch (code)
884 {
885 case EQ:
886 if (!register_operand (op1, mode))
887 op1 = force_reg (mode, op1);
888
889 if (TARGET_M32RX || TARGET_M32R2)
890 {
891 if (!reg_or_zero_operand (op2, mode))
892 op2 = force_reg (mode, op2);
893
894 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
895 return true;
896 }
897 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
898 {
899 emit_insn (gen_seq_zero_insn (op0, op1));
900 return true;
901 }
902
903 if (!reg_or_eq_int16_operand (op2, mode))
904 op2 = force_reg (mode, op2);
905
906 emit_insn (gen_seq_insn (op0, op1, op2));
907 return true;
908
909 case NE:
910 if (!CONST_INT_P (op2)
911 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
912 {
913 rtx reg;
914
915 if (reload_completed || reload_in_progress)
916 return false;
917
918 reg = gen_reg_rtx (SImode);
919 emit_insn (gen_xorsi3 (reg, op1, op2));
920 op1 = reg;
921
922 if (!register_operand (op1, mode))
923 op1 = force_reg (mode, op1);
924
925 emit_insn (gen_sne_zero_insn (op0, op1));
926 return true;
927 }
928 return false;
929
930 case LT:
931 case GT:
932 if (code == GT)
933 {
934 rtx tmp = op2;
935 op2 = op1;
936 op1 = tmp;
937 code = LT;
938 }
939
940 if (!register_operand (op1, mode))
941 op1 = force_reg (mode, op1);
942
943 if (!reg_or_int16_operand (op2, mode))
944 op2 = force_reg (mode, op2);
945
946 emit_insn (gen_slt_insn (op0, op1, op2));
947 return true;
948
949 case LTU:
950 case GTU:
951 if (code == GTU)
952 {
953 rtx tmp = op2;
954 op2 = op1;
955 op1 = tmp;
956 code = LTU;
957 }
958
959 if (!register_operand (op1, mode))
960 op1 = force_reg (mode, op1);
961
962 if (!reg_or_int16_operand (op2, mode))
963 op2 = force_reg (mode, op2);
964
965 emit_insn (gen_sltu_insn (op0, op1, op2));
966 return true;
967
968 case GE:
969 case GEU:
970 if (!register_operand (op1, mode))
971 op1 = force_reg (mode, op1);
972
973 if (!reg_or_int16_operand (op2, mode))
974 op2 = force_reg (mode, op2);
975
976 if (code == GE)
977 emit_insn (gen_sge_insn (op0, op1, op2));
978 else
979 emit_insn (gen_sgeu_insn (op0, op1, op2));
980 return true;
981
982 case LE:
983 case LEU:
984 if (!register_operand (op1, mode))
985 op1 = force_reg (mode, op1);
986
987 if (CONST_INT_P (op2))
988 {
989 HOST_WIDE_INT value = INTVAL (op2);
990 if (value >= 2147483647)
991 {
992 emit_move_insn (op0, const1_rtx);
993 return true;
994 }
995
996 op2 = GEN_INT (value + 1);
997 if (value < -32768 || value >= 32767)
998 op2 = force_reg (mode, op2);
999
1000 if (code == LEU)
1001 emit_insn (gen_sltu_insn (op0, op1, op2));
1002 else
1003 emit_insn (gen_slt_insn (op0, op1, op2));
1004 return true;
1005 }
1006
1007 if (!register_operand (op2, mode))
1008 op2 = force_reg (mode, op2);
1009
1010 if (code == LEU)
1011 emit_insn (gen_sleu_insn (op0, op1, op2));
1012 else
1013 emit_insn (gen_sle_insn (op0, op1, op2));
1014 return true;
1015
1016 default:
1017 gcc_unreachable ();
1018 }
1019 }
1020
1021 \f
1022 /* Split a 2 word move (DI or DF) into component parts. */
1023
1024 rtx
1025 gen_split_move_double (rtx operands[])
1026 {
1027 enum machine_mode mode = GET_MODE (operands[0]);
1028 rtx dest = operands[0];
1029 rtx src = operands[1];
1030 rtx val;
1031
1032 /* We might have (SUBREG (MEM)) here, so just get rid of the
1033 subregs to make this code simpler. It is safe to call
1034 alter_subreg any time after reload. */
1035 if (GET_CODE (dest) == SUBREG)
1036 alter_subreg (&dest, true);
1037 if (GET_CODE (src) == SUBREG)
1038 alter_subreg (&src, true);
1039
1040 start_sequence ();
1041 if (REG_P (dest))
1042 {
1043 int dregno = REGNO (dest);
1044
1045 /* Reg = reg. */
1046 if (REG_P (src))
1047 {
1048 int sregno = REGNO (src);
1049
1050 int reverse = (dregno == sregno + 1);
1051
1052 /* We normally copy the low-numbered register first. However, if
1053 the first register of operand 0 is the same as the second register of
1054 operand 1, we must copy in the opposite order. */
1055 emit_insn (gen_rtx_SET (VOIDmode,
1056 operand_subword (dest, reverse, TRUE, mode),
1057 operand_subword (src, reverse, TRUE, mode)));
1058
1059 emit_insn (gen_rtx_SET (VOIDmode,
1060 operand_subword (dest, !reverse, TRUE, mode),
1061 operand_subword (src, !reverse, TRUE, mode)));
1062 }
1063
1064 /* Reg = constant. */
1065 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1066 {
1067 rtx words[2];
1068 split_double (src, &words[0], &words[1]);
1069 emit_insn (gen_rtx_SET (VOIDmode,
1070 operand_subword (dest, 0, TRUE, mode),
1071 words[0]));
1072
1073 emit_insn (gen_rtx_SET (VOIDmode,
1074 operand_subword (dest, 1, TRUE, mode),
1075 words[1]));
1076 }
1077
1078 /* Reg = mem. */
1079 else if (MEM_P (src))
1080 {
1081 /* If the high-address word is used in the address, we must load it
1082 last. Otherwise, load it first. */
1083 int reverse
1084 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1085
1086 /* We used to optimize loads from single registers as
1087
1088 ld r1,r3+; ld r2,r3
1089
1090 if r3 were not used subsequently. However, the REG_NOTES aren't
1091 propagated correctly by the reload phase, and it can cause bad
1092 code to be generated. We could still try:
1093
1094 ld r1,r3+; ld r2,r3; addi r3,-4
1095
1096 which saves 2 bytes and doesn't force longword alignment. */
1097 emit_insn (gen_rtx_SET (VOIDmode,
1098 operand_subword (dest, reverse, TRUE, mode),
1099 adjust_address (src, SImode,
1100 reverse * UNITS_PER_WORD)));
1101
1102 emit_insn (gen_rtx_SET (VOIDmode,
1103 operand_subword (dest, !reverse, TRUE, mode),
1104 adjust_address (src, SImode,
1105 !reverse * UNITS_PER_WORD)));
1106 }
1107 else
1108 gcc_unreachable ();
1109 }
1110
1111 /* Mem = reg. */
1112 /* We used to optimize stores to single registers as
1113
1114 st r1,r3; st r2,+r3
1115
1116 if r3 were not used subsequently. However, the REG_NOTES aren't
1117 propagated correctly by the reload phase, and it can cause bad
1118 code to be generated. We could still try:
1119
1120 st r1,r3; st r2,+r3; addi r3,-4
1121
1122 which saves 2 bytes and doesn't force longword alignment. */
1123 else if (MEM_P (dest) && REG_P (src))
1124 {
1125 emit_insn (gen_rtx_SET (VOIDmode,
1126 adjust_address (dest, SImode, 0),
1127 operand_subword (src, 0, TRUE, mode)));
1128
1129 emit_insn (gen_rtx_SET (VOIDmode,
1130 adjust_address (dest, SImode, UNITS_PER_WORD),
1131 operand_subword (src, 1, TRUE, mode)));
1132 }
1133
1134 else
1135 gcc_unreachable ();
1136
1137 val = get_insns ();
1138 end_sequence ();
1139 return val;
1140 }
1141
1142 \f
1143 static int
1144 m32r_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
1145 tree type, bool named ATTRIBUTE_UNUSED)
1146 {
1147 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1148
1149 int words;
1150 unsigned int size =
1151 (((mode == BLKmode && type)
1152 ? (unsigned int) int_size_in_bytes (type)
1153 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1154 / UNITS_PER_WORD;
1155
1156 if (*cum >= M32R_MAX_PARM_REGS)
1157 words = 0;
1158 else if (*cum + size > M32R_MAX_PARM_REGS)
1159 words = (*cum + size) - M32R_MAX_PARM_REGS;
1160 else
1161 words = 0;
1162
1163 return words * UNITS_PER_WORD;
1164 }
1165
1166 /* The ROUND_ADVANCE* macros are local to this file. */
1167 /* Round SIZE up to a word boundary. */
1168 #define ROUND_ADVANCE(SIZE) \
1169 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1170
1171 /* Round arg MODE/TYPE up to the next word boundary. */
1172 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1173 ((MODE) == BLKmode \
1174 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1175 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1176
1177 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1178 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1179
1180 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1181 a reg. This includes arguments that have to be passed by reference as the
1182 pointer to them is passed in a reg if one is available (and that is what
1183 we're given).
1184 This macro is only used in this file. */
1185 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1186 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1187
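/* Worked example of the macros above (illustrative, assuming UNITS_PER_WORD
   is 4 and M32R_MAX_PARM_REGS is 4): a 6-byte BLKmode struct rounds to
   ROUND_ADVANCE_ARG == 2 words; with CUM == 2 on entry it still passes in
   registers (2 < 4) and m32r_function_arg_advance moves CUM to 4, so the
   next argument goes on the stack.  */
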
1188 /* Determine where to put an argument to a function.
1189 Value is zero to push the argument on the stack,
1190 or a hard register in which to store the argument.
1191
1192 MODE is the argument's machine mode.
1193 TYPE is the data type of the argument (as a tree).
1194 This is null for libcalls where that information may
1195 not be available.
1196 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1197 the preceding args and about the function being called.
1198 NAMED is nonzero if this argument is a named parameter
1199 (otherwise it is an extra parameter matching an ellipsis). */
1200 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1201 and the rest are pushed. */
1202
1203 static rtx
1204 m32r_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1205 const_tree type ATTRIBUTE_UNUSED,
1206 bool named ATTRIBUTE_UNUSED)
1207 {
1208 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1209
1210 return (PASS_IN_REG_P (*cum, mode, type)
1211 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1212 : NULL_RTX);
1213 }
1214
1215 /* Update the data in CUM to advance over an argument
1216 of mode MODE and data type TYPE.
1217 (TYPE is null for libcalls where that information may not be available.) */
1218
1219 static void
1220 m32r_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1221 const_tree type, bool named ATTRIBUTE_UNUSED)
1222 {
1223 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1224
1225 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1226 + ROUND_ADVANCE_ARG (mode, type));
1227 }
1228
1229 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1230
1231 static bool
1232 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1233 {
1234 cumulative_args_t dummy = pack_cumulative_args (NULL);
1235
1236 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1237 }
1238
1239 /* Worker function for TARGET_FUNCTION_VALUE. */
1240
1241 static rtx
1242 m32r_function_value (const_tree valtype,
1243 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1244 bool outgoing ATTRIBUTE_UNUSED)
1245 {
1246 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1247 }
1248
1249 /* Worker function for TARGET_LIBCALL_VALUE. */
1250
1251 static rtx
1252 m32r_libcall_value (enum machine_mode mode,
1253 const_rtx fun ATTRIBUTE_UNUSED)
1254 {
1255 return gen_rtx_REG (mode, 0);
1256 }
1257
1258 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1259
1260 ??? What about r1 in DI/DF values. */
1261
1262 static bool
1263 m32r_function_value_regno_p (const unsigned int regno)
1264 {
1265 return (regno == 0);
1266 }
1267
1268 /* Do any needed setup for a variadic function. For the M32R, we must
1269 create a register parameter block, and then copy any anonymous arguments
1270 in registers to memory.
1271
1272 CUM has not been updated for the last named argument which has type TYPE
1273 and mode MODE, and we rely on this fact. */
1274
1275 static void
1276 m32r_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
1277 tree type, int *pretend_size, int no_rtl)
1278 {
1279 int first_anon_arg;
1280
1281 if (no_rtl)
1282 return;
1283
1284 /* All BLKmode values are passed by reference. */
1285 gcc_assert (mode != BLKmode);
1286
1287 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1288 + ROUND_ADVANCE_ARG (mode, type));
1289
1290 if (first_anon_arg < M32R_MAX_PARM_REGS)
1291 {
1292 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1293 int first_reg_offset = first_anon_arg;
1294 /* Size in words to "pretend" allocate. */
1295 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1296 rtx regblock;
1297
1298 regblock = gen_frame_mem (BLKmode,
1299 plus_constant (Pmode, arg_pointer_rtx,
1300 FIRST_PARM_OFFSET (0)));
1301 set_mem_alias_set (regblock, get_varargs_alias_set ());
1302 move_block_from_reg (first_reg_offset, regblock, size);
1303
1304 *pretend_size = (size * UNITS_PER_WORD);
1305 }
1306 }
1307
1308 \f
1309 /* Return true if INSN is a real instruction (not a USE or CLOBBER).  */
1310
1311 static int
1312 m32r_is_insn (rtx insn)
1313 {
1314 return (NONDEBUG_INSN_P (insn)
1315 && GET_CODE (PATTERN (insn)) != USE
1316 && GET_CODE (PATTERN (insn)) != CLOBBER);
1317 }
1318
1319 /* Increase the priority of long instructions so that the
1320 short instructions are scheduled ahead of the long ones. */
1321
1322 static int
1323 m32r_adjust_priority (rtx insn, int priority)
1324 {
1325 if (m32r_is_insn (insn)
1326 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1327 priority <<= 3;
1328
1329 return priority;
1330 }
1331
1332 \f
1333 /* Indicate how many instructions can be issued at the same time.
1334 This is sort of a lie. The m32r can issue only 1 long insn at
1335 once, but it can issue 2 short insns. The default therefore is
1336 set at 2, but this can be overridden by the command line option
1337 -missue-rate=1. */
1338
1339 static int
1340 m32r_issue_rate (void)
1341 {
1342 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1343 }
1344 \f
1345 /* Cost functions. */
1346 /* Memory is 3 times as expensive as registers.
1347 ??? Is that the right way to look at it? */
1348
1349 static int
1350 m32r_memory_move_cost (enum machine_mode mode,
1351 reg_class_t rclass ATTRIBUTE_UNUSED,
1352 bool in ATTRIBUTE_UNUSED)
1353 {
1354 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1355 return 6;
1356 else
1357 return 12;
1358 }
1359
1360 static bool
1361 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1362 int opno ATTRIBUTE_UNUSED, int *total,
1363 bool speed ATTRIBUTE_UNUSED)
1364 {
1365 switch (code)
1366 {
1367 /* Small integers are as cheap as registers. 4 byte values can be
1368 fetched as immediate constants - let's give that the cost of an
1369 extra insn. */
1370 case CONST_INT:
1371 if (INT16_P (INTVAL (x)))
1372 {
1373 *total = 0;
1374 return true;
1375 }
1376 /* FALLTHRU */
1377
1378 case CONST:
1379 case LABEL_REF:
1380 case SYMBOL_REF:
1381 *total = COSTS_N_INSNS (1);
1382 return true;
1383
1384 case CONST_DOUBLE:
1385 {
1386 rtx high, low;
1387
1388 split_double (x, &high, &low);
1389 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1390 + !INT16_P (INTVAL (low)));
1391 return true;
1392 }
1393
1394 case MULT:
1395 *total = COSTS_N_INSNS (3);
1396 return true;
1397
1398 case DIV:
1399 case UDIV:
1400 case MOD:
1401 case UMOD:
1402 *total = COSTS_N_INSNS (10);
1403 return true;
1404
1405 default:
1406 return false;
1407 }
1408 }
1409 \f
1410 /* Type of function DECL.
1411
1412 The result is cached. To reset the cache at the end of a function,
1413 call with DECL = NULL_TREE. */
1414
1415 enum m32r_function_type
1416 m32r_compute_function_type (tree decl)
1417 {
1418 /* Cached value. */
1419 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1420 /* Last function we were called for. */
1421 static tree last_fn = NULL_TREE;
1422
1423 /* Resetting the cached value? */
1424 if (decl == NULL_TREE)
1425 {
1426 fn_type = M32R_FUNCTION_UNKNOWN;
1427 last_fn = NULL_TREE;
1428 return fn_type;
1429 }
1430
1431 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1432 return fn_type;
1433
1434 /* Compute function type. */
1435 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1436 ? M32R_FUNCTION_INTERRUPT
1437 : M32R_FUNCTION_NORMAL);
1438
1439 last_fn = decl;
1440 return fn_type;
1441 }
1442 \f/* Function prologue/epilogue handlers. */
1443
1444 /* M32R stack frames look like:
1445
1446 Before call After call
1447 +-----------------------+ +-----------------------+
1448 | | | |
1449 high | local variables, | | local variables, |
1450 mem | reg save area, etc. | | reg save area, etc. |
1451 | | | |
1452 +-----------------------+ +-----------------------+
1453 | | | |
1454 | arguments on stack. | | arguments on stack. |
1455 | | | |
1456 SP+0->+-----------------------+ +-----------------------+
1457 | reg parm save area, |
1458 | only created for |
1459 | variable argument |
1460 | functions |
1461 +-----------------------+
1462 | previous frame ptr |
1463 +-----------------------+
1464 | |
1465 | register save area |
1466 | |
1467 +-----------------------+
1468 | return address |
1469 +-----------------------+
1470 | |
1471 | local variables |
1472 | |
1473 +-----------------------+
1474 | |
1475 | alloca allocations |
1476 | |
1477 +-----------------------+
1478 | |
1479 low | arguments on stack |
1480 memory | |
1481 SP+0->+-----------------------+
1482
1483 Notes:
1484 1) The "reg parm save area" does not exist for non variable argument fns.
1485 2) The "reg parm save area" can be eliminated completely if we saved regs
1486 containing anonymous args separately but that complicates things too
1487 much (so it's not done).
1488 3) The return address is saved after the register save area so as to have as
1489 many insns as possible between the restoration of `lr' and the `jmp lr'. */
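
/* Putting rough numbers on the picture (a hypothetical frame, not taken from
   a real compile): a variadic function with 8 bytes of pretend args, 4 bytes
   of outgoing args, 24 bytes of locals and fp/lr saved (reg_size == 8) gets

     total_size = extra_size + pretend_size + args_size + var_size + reg_size

   rounded up by M32R_STACK_ALIGN; m32r_compute_frame_size below records
   exactly these components in struct m32r_frame_info.  */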
1490
1491 /* Structure to be filled in by m32r_compute_frame_size with register
1492 save masks, and offsets for the current function. */
1493 struct m32r_frame_info
1494 {
1495 unsigned int total_size; /* # bytes that the entire frame takes up. */
1496 unsigned int extra_size; /* # bytes of extra stuff. */
1497 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1498 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1499 unsigned int reg_size; /* # bytes needed to store regs. */
1500 unsigned int var_size; /* # bytes that variables take up. */
1501 unsigned int gmask; /* Mask of saved gp registers. */
1502 unsigned int save_fp; /* Nonzero if fp must be saved. */
1503 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1504 int initialized; /* Nonzero if frame size already calculated. */
1505 };
1506
1507 /* Current frame information calculated by m32r_compute_frame_size. */
1508 static struct m32r_frame_info current_frame_info;
1509
1510 /* Zero structure to initialize current_frame_info. */
1511 static struct m32r_frame_info zero_frame_info;
1512
1513 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1514 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1515
1516 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1517 The return address and frame pointer are treated separately.
1518 Don't consider them here. */
1519 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1520 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1521 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1522
1523 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1524 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1525
1526 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1527 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1528
1529 /* Return the bytes needed to compute the frame pointer from the current
1530 stack pointer.
1531
1532 SIZE is the size needed for local variables. */
1533
1534 unsigned int
1535 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1536 {
1537 unsigned int regno;
1538 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1539 unsigned int reg_size;
1540 unsigned int gmask;
1541 enum m32r_function_type fn_type;
1542 int interrupt_p;
1543 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1544 | crtl->profile);
1545
1546 var_size = M32R_STACK_ALIGN (size);
1547 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1548 pretend_size = crtl->args.pretend_args_size;
1549 extra_size = FIRST_PARM_OFFSET (0);
1550 total_size = extra_size + pretend_size + args_size + var_size;
1551 reg_size = 0;
1552 gmask = 0;
1553
1554 /* See if this is an interrupt handler. Call used registers must be saved
1555 for them too. */
1556 fn_type = m32r_compute_function_type (current_function_decl);
1557 interrupt_p = M32R_INTERRUPT_P (fn_type);
1558
1559 /* Calculate space needed for registers. */
1560 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1561 {
1562 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1563 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1564 {
1565 reg_size += UNITS_PER_WORD;
1566 gmask |= 1 << regno;
1567 }
1568 }
1569
1570 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1571 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1572
1573 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1574 * UNITS_PER_WORD);
1575 total_size += reg_size;
1576
1577 /* ??? Not sure this is necessary, and I don't think the epilogue
1578 handler will do the right thing if this changes total_size. */
1579 total_size = M32R_STACK_ALIGN (total_size);
1580
1581 /* frame_size = total_size - (pretend_size + reg_size); */
1582
1583 /* Save computed information. */
1584 current_frame_info.total_size = total_size;
1585 current_frame_info.extra_size = extra_size;
1586 current_frame_info.pretend_size = pretend_size;
1587 current_frame_info.var_size = var_size;
1588 current_frame_info.args_size = args_size;
1589 current_frame_info.reg_size = reg_size;
1590 current_frame_info.gmask = gmask;
1591 current_frame_info.initialized = reload_completed;
1592
1593 /* Ok, we're done. */
1594 return total_size;
1595 }
1596
1597 /* Worker function for TARGET_CAN_ELIMINATE. */
1598
1599 bool
1600 m32r_can_eliminate (const int from, const int to)
1601 {
1602 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1603 ? ! frame_pointer_needed
1604 : true);
1605 }
1606
1607 \f
1608 /* The table we use to reference PIC data. */
1609 static rtx global_offset_table;
1610
1611 static void
1612 m32r_reload_lr (rtx sp, int size)
1613 {
1614 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1615
1616 if (size == 0)
1617 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1618 else if (size < 32768)
1619 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1620 gen_rtx_PLUS (Pmode, sp,
1621 GEN_INT (size)))));
1622 else
1623 {
1624 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1625
1626 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1627 emit_insn (gen_addsi3 (tmp, tmp, sp));
1628 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1629 }
1630
1631 emit_use (lr);
1632 }
1633
1634 void
1635 m32r_load_pic_register (void)
1636 {
1637 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1638 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1639 GEN_INT (TARGET_MODEL_SMALL)));
1640
1641 /* Need to emit this whether or not we obey regdecls,
1642 since setjmp/longjmp can cause life info to screw up. */
1643 emit_use (pic_offset_table_rtx);
1644 }
1645
1646 /* Expand the m32r prologue as a series of insns. */
1647
1648 void
1649 m32r_expand_prologue (void)
1650 {
1651 int regno;
1652 int frame_size;
1653 unsigned int gmask;
1654 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1655 | crtl->profile);
1656
1657 if (! current_frame_info.initialized)
1658 m32r_compute_frame_size (get_frame_size ());
1659
1660 gmask = current_frame_info.gmask;
1661
1662 /* These cases shouldn't happen. Catch them now. */
1663 gcc_assert (current_frame_info.total_size || !gmask);
1664
1665 /* Allocate space for register arguments if this is a variadic function. */
1666 if (current_frame_info.pretend_size != 0)
1667 {
1668 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1669 the wrong result on a 64-bit host. */
1670 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1671 emit_insn (gen_addsi3 (stack_pointer_rtx,
1672 stack_pointer_rtx,
1673 GEN_INT (-pretend_size)));
1674 }
1675
1676 /* Save any registers we need to and set up fp. */
1677 if (current_frame_info.save_fp)
1678 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1679
1680 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1681
1682 /* Save any needed call-saved regs (and call-used if this is an
1683 interrupt handler). */
1684 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1685 {
1686 if ((gmask & (1 << regno)) != 0)
1687 emit_insn (gen_movsi_push (stack_pointer_rtx,
1688 gen_rtx_REG (Pmode, regno)));
1689 }
1690
1691 if (current_frame_info.save_lr)
1692 emit_insn (gen_movsi_push (stack_pointer_rtx,
1693 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1694
1695 /* Allocate the stack frame. */
1696 frame_size = (current_frame_info.total_size
1697 - (current_frame_info.pretend_size
1698 + current_frame_info.reg_size));
1699
1700 if (frame_size == 0)
1701 ; /* Nothing to do. */
1702 else if (frame_size <= 32768)
1703 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1704 GEN_INT (-frame_size)));
1705 else
1706 {
1707 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1708
1709 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1710 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1711 }
1712
1713 if (frame_pointer_needed)
1714 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1715
1716 if (crtl->profile)
1717 /* Push lr for mcount (form_pc, x). */
1718 emit_insn (gen_movsi_push (stack_pointer_rtx,
1719 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1720
1721 if (pic_reg_used)
1722 {
1723 m32r_load_pic_register ();
1724 m32r_reload_lr (stack_pointer_rtx,
1725 (crtl->profile ? 0 : frame_size));
1726 }
1727
1728 if (crtl->profile && !pic_reg_used)
1729 emit_insn (gen_blockage ());
1730 }
1731
1732 \f
1733 /* Set up the stack and frame pointer (if desired) for the function.
1734 Note, if this is changed, you need to mirror the changes in
1735 m32r_compute_frame_size which calculates the prolog size. */
1736
1737 static void
1738 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1739 {
1740 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1741
1742 /* If this is an interrupt handler, mark it as such. */
1743 if (M32R_INTERRUPT_P (fn_type))
1744 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1745
1746 if (! current_frame_info.initialized)
1747 m32r_compute_frame_size (size);
1748
1749 /* This is only for the human reader. */
1750 fprintf (file,
1751 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1752 ASM_COMMENT_START,
1753 current_frame_info.var_size,
1754 current_frame_info.reg_size / 4,
1755 current_frame_info.args_size,
1756 current_frame_info.extra_size);
1757 }
1758 \f
1759 /* Output RTL to pop register REGNO from the stack. */
1760
1761 static void
1762 pop (int regno)
1763 {
1764 rtx x;
1765
1766 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1767 stack_pointer_rtx));
1768 add_reg_note (x, REG_INC, stack_pointer_rtx);
1769 }
1770
1771 /* Expand the m32r epilogue as a series of insns. */
1772
1773 void
1774 m32r_expand_epilogue (void)
1775 {
1776 int regno;
1777 int noepilogue = FALSE;
1778 int total_size;
1779
1780 gcc_assert (current_frame_info.initialized);
1781 total_size = current_frame_info.total_size;
1782
1783 if (total_size == 0)
1784 {
1785 rtx insn = get_last_insn ();
1786
1787 /* If the last insn was a BARRIER, we don't have to write any code
1788 because a jump (aka return) was put there. */
1789 if (insn && NOTE_P (insn))
1790 insn = prev_nonnote_insn (insn);
1791 if (insn && BARRIER_P (insn))
1792 noepilogue = TRUE;
1793 }
1794
1795 if (!noepilogue)
1796 {
1797 unsigned int var_size = current_frame_info.var_size;
1798 unsigned int args_size = current_frame_info.args_size;
1799 unsigned int gmask = current_frame_info.gmask;
1800 int can_trust_sp_p = !cfun->calls_alloca;
1801
1802 if (flag_exceptions)
1803 emit_insn (gen_blockage ());
1804
1805 /* The first thing to do is point the sp at the bottom of the register
1806 save area. */
1807 if (can_trust_sp_p)
1808 {
1809 unsigned int reg_offset = var_size + args_size;
1810
1811 if (reg_offset == 0)
1812 ; /* Nothing to do. */
1813 else if (reg_offset < 32768)
1814 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1815 GEN_INT (reg_offset)));
1816 else
1817 {
1818 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1819
1820 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1821 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1822 tmp));
1823 }
1824 }
1825 else if (frame_pointer_needed)
1826 {
1827 unsigned int reg_offset = var_size + args_size;
1828
1829 if (reg_offset == 0)
1830 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1831 else if (reg_offset < 32768)
1832 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1833 GEN_INT (reg_offset)));
1834 else
1835 {
1836 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1837
1838 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1839 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1840 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1841 tmp));
1842 }
1843 }
1844 else
1845 gcc_unreachable ();
1846
1847 if (current_frame_info.save_lr)
1848 pop (RETURN_ADDR_REGNUM);
1849
1850 /* Restore any saved registers, in reverse order of course. */
1851 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1852 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1853 {
1854 if ((gmask & (1L << regno)) != 0)
1855 pop (regno);
1856 }
1857
1858 if (current_frame_info.save_fp)
1859 pop (FRAME_POINTER_REGNUM);
1860
1861 /* Remove varargs area if present. */
1862 if (current_frame_info.pretend_size != 0)
1863 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1864 GEN_INT (current_frame_info.pretend_size)));
1865
1866 emit_insn (gen_blockage ());
1867 }
1868 }
1869
1870 /* Do any necessary cleanup after a function to restore stack, frame,
1871 and regs. */
1872
1873 static void
1874 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1875 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1876 {
1877 /* Reset state info for each function. */
1878 current_frame_info = zero_frame_info;
1879 m32r_compute_function_type (NULL_TREE);
1880 }
1881 \f
1882 /* Return nonzero if this function is known to have a null or 1 instruction
1883 epilogue. */
1884
1885 int
1886 direct_return (void)
1887 {
1888 if (!reload_completed)
1889 return FALSE;
1890
1891 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1892 return FALSE;
1893
1894 if (! current_frame_info.initialized)
1895 m32r_compute_frame_size (get_frame_size ());
1896
1897 return current_frame_info.total_size == 0;
1898 }
1899
1900 \f
1901 /* PIC. */
1902
1903 int
1904 m32r_legitimate_pic_operand_p (rtx x)
1905 {
1906 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1907 return 0;
1908
1909 if (GET_CODE (x) == CONST
1910 && GET_CODE (XEXP (x, 0)) == PLUS
1911 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1912 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1913 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1914 return 0;
1915
1916 return 1;
1917 }
1918
1919 rtx
1920 m32r_legitimize_pic_address (rtx orig, rtx reg)
1921 {
1922 #ifdef DEBUG_PIC
1923 printf ("m32r_legitimize_pic_address()\n");
1924 #endif
1925
1926 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1927 {
1928 rtx pic_ref, address;
1929 int subregs = 0;
1930
1931 if (reg == 0)
1932 {
1933 gcc_assert (!reload_in_progress && !reload_completed);
1934 reg = gen_reg_rtx (Pmode);
1935
1936 subregs = 1;
1937 }
1938
1939 if (subregs)
1940 address = gen_reg_rtx (Pmode);
1941 else
1942 address = reg;
1943
1944 crtl->uses_pic_offset_table = 1;
1945
1946 if (GET_CODE (orig) == LABEL_REF
1947 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1948 {
1949 emit_insn (gen_gotoff_load_addr (reg, orig));
1950 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1951 return reg;
1952 }
1953
1954 emit_insn (gen_pic_load_addr (address, orig));
1955
1956 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1957 pic_ref = gen_const_mem (Pmode, address);
1958 emit_move_insn (reg, pic_ref);
1959 return reg;
1960 }
1961 else if (GET_CODE (orig) == CONST)
1962 {
1963 rtx base, offset;
1964
1965 if (GET_CODE (XEXP (orig, 0)) == PLUS
1966 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1967 return orig;
1968
1969 if (reg == 0)
1970 {
1971 gcc_assert (!reload_in_progress && !reload_completed);
1972 reg = gen_reg_rtx (Pmode);
1973 }
1974
1975 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1976 {
1977 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1978 if (base == reg)
1979 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1980 else
1981 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1982 }
1983 else
1984 return orig;
1985
1986 if (CONST_INT_P (offset))
1987 {
1988 if (INT16_P (INTVAL (offset)))
1989 return plus_constant (Pmode, base, INTVAL (offset));
1990 else
1991 {
1992 gcc_assert (! reload_in_progress && ! reload_completed);
1993 offset = force_reg (Pmode, offset);
1994 }
1995 }
1996
1997 return gen_rtx_PLUS (Pmode, base, offset);
1998 }
1999
2000 return orig;
2001 }
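/* Illustrative sketch (not part of the original source): for a global
   symbol `foo' the path above loads the address of foo's GOT slot
   (gen_pic_load_addr followed by an add of the PIC register) and then
   dereferences it:

       address = &GOT[foo]        ; gen_pic_load_addr + addsi3
       reg     = *address         ; gen_const_mem / emit_move_insn

   A local symbol or label takes the cheaper GOTOFF route: the
   PIC-register-relative offset is loaded and added directly, with no
   memory load.  See the corresponding expanders in m32r.md.  */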
2002
2003 static rtx
2004 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2005 enum machine_mode mode ATTRIBUTE_UNUSED)
2006 {
2007 if (flag_pic)
2008 return m32r_legitimize_pic_address (x, NULL_RTX);
2009 else
2010 return x;
2011 }
2012
2013 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2014
2015 static bool
2016 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2017 {
2018 if (GET_CODE (addr) == LO_SUM)
2019 return true;
2020
2021 return false;
2022 }
2023 \f
2024 /* Nested function support. */
2025
2026 /* Emit RTL insns to initialize the variable parts of a trampoline.
2027 FNADDR is an RTX for the address of the function's pure code.
2028 CXT is an RTX for the static chain value for the function. */
2029
2030 void
2031 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2032 rtx fnaddr ATTRIBUTE_UNUSED,
2033 rtx cxt ATTRIBUTE_UNUSED)
2034 {
2035 }
2036 \f
2037 static void
2038 m32r_file_start (void)
2039 {
2040 default_file_start ();
2041
2042 if (flag_verbose_asm)
2043 fprintf (asm_out_file,
2044 "%s M32R/D special options: -G %d\n",
2045 ASM_COMMENT_START, g_switch_value);
2046
2047 if (TARGET_LITTLE_ENDIAN)
2048 fprintf (asm_out_file, "\t.little\n");
2049 }
2050 \f
2051 /* Print operand X (an rtx) in assembler syntax to file FILE.
2052 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2053 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2054
2055 static void
2056 m32r_print_operand (FILE * file, rtx x, int code)
2057 {
2058 rtx addr;
2059
2060 switch (code)
2061 {
2062 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2063 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2064 case 's':
2065 if (REG_P (x))
2066 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2067 else
2068 output_operand_lossage ("invalid operand to %%s code");
2069 return;
2070
2071 case 'p':
2072 if (REG_P (x))
2073 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2074 else
2075 output_operand_lossage ("invalid operand to %%p code");
2076 return;
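/* Example (illustrative; register choice is arbitrary): with operand 1
   in r5, "%p1" prints "@r5+" (a post-increment load address) and "%s1"
   prints "@+r5" (a pre-increment store address), as used by the
   templates in m32r_output_block_move() below.  */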
2077
2078 case 'R' :
2079 /* Write second word of DImode or DFmode reference,
2080 register or memory. */
2081 if (REG_P (x))
2082 fputs (reg_names[REGNO (x)+1], file);
2083 else if (MEM_P (x))
2084 {
2085 fprintf (file, "@(");
2086 /* Handle possible auto-increment. Since it is pre-increment and
2087 we have already done it, we can just use an offset of four. */
2088 /* ??? This is taken from rs6000.c I think. I don't think it is
2089 currently necessary, but keep it around. */
2090 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2091 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2092 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2093 else
2094 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2095 fputc (')', file);
2096 }
2097 else
2098 output_operand_lossage ("invalid operand to %%R code");
2099 return;
2100
2101 case 'H' : /* High word. */
2102 case 'L' : /* Low word. */
2103 if (REG_P (x))
2104 {
2105 /* L = least significant word, H = most significant word. */
2106 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2107 fputs (reg_names[REGNO (x)], file);
2108 else
2109 fputs (reg_names[REGNO (x)+1], file);
2110 }
2111 else if (CONST_INT_P (x)
2112 || GET_CODE (x) == CONST_DOUBLE)
2113 {
2114 rtx first, second;
2115
2116 split_double (x, &first, &second);
2117 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2118 code == 'L' ? INTVAL (first) : INTVAL (second));
2119 }
2120 else
2121 output_operand_lossage ("invalid operand to %%H/%%L code");
2122 return;
2123
2124 case 'A' :
2125 {
2126 char str[30];
2127
2128 if (GET_CODE (x) != CONST_DOUBLE
2129 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2130 fatal_insn ("bad insn for 'A'", x);
2131
2132 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2133 fprintf (file, "%s", str);
2134 return;
2135 }
2136
2137 case 'B' : /* Bottom half. */
2138 case 'T' : /* Top half. */
2139 /* Output the argument to a `seth' insn (sets the Top half-word).
2140 For constants output arguments to a seth/or3 pair to set Top and
2141 Bottom halves. For symbols output arguments to a seth/add3 pair to
2142 set Top and Bottom halves. The difference exists because for
2143 constants seth/or3 is more readable but for symbols we need to use
2144 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
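/* Example (illustrative; register and constant values made up): loading
   the constant 0x12345678 into r4 would be emitted as

       seth r4, #0x1234          ; %T -> 0x1234
       or3  r4, r4, #0x5678      ; %B -> 0x5678

   while for a symbol the pair becomes

       seth r4, #shigh(sym)      ; %T -> shigh(sym)
       add3 r4, r4, #low(sym)    ; %B -> low(sym)  */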
2145 switch (GET_CODE (x))
2146 {
2147 case CONST_INT :
2148 case CONST_DOUBLE :
2149 {
2150 rtx first, second;
2151
2152 split_double (x, &first, &second);
2153 x = WORDS_BIG_ENDIAN ? second : first;
2154 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2155 (code == 'B'
2156 ? INTVAL (x) & 0xffff
2157 : (INTVAL (x) >> 16) & 0xffff));
2158 }
2159 return;
2160 case CONST :
2161 case SYMBOL_REF :
2162 if (code == 'B'
2163 && small_data_operand (x, VOIDmode))
2164 {
2165 fputs ("sda(", file);
2166 output_addr_const (file, x);
2167 fputc (')', file);
2168 return;
2169 }
2170 /* fall through */
2171 case LABEL_REF :
2172 fputs (code == 'T' ? "shigh(" : "low(", file);
2173 output_addr_const (file, x);
2174 fputc (')', file);
2175 return;
2176 default :
2177 output_operand_lossage ("invalid operand to %%T/%%B code");
2178 return;
2179 }
2180 break;
2181
2182 case 'U' :
2183 /* ??? wip */
2184 /* Output a load/store with update indicator if appropriate. */
2185 if (MEM_P (x))
2186 {
2187 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2188 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2189 fputs (".a", file);
2190 }
2191 else
2192 output_operand_lossage ("invalid operand to %%U code");
2193 return;
2194
2195 case 'N' :
2196 /* Print a constant value negated. */
2197 if (CONST_INT_P (x))
2198 output_addr_const (file, GEN_INT (- INTVAL (x)));
2199 else
2200 output_operand_lossage ("invalid operand to %%N code");
2201 return;
2202
2203 case 'X' :
2204 /* Print a const_int in hex. Used in comments. */
2205 if (CONST_INT_P (x))
2206 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2207 return;
2208
2209 case '#' :
2210 fputs (IMMEDIATE_PREFIX, file);
2211 return;
2212
2213 case 0 :
2214 /* Do nothing special. */
2215 break;
2216
2217 default :
2218 /* Unknown flag. */
2219 output_operand_lossage ("invalid operand output code");
2220 }
2221
2222 switch (GET_CODE (x))
2223 {
2224 case REG :
2225 fputs (reg_names[REGNO (x)], file);
2226 break;
2227
2228 case MEM :
2229 addr = XEXP (x, 0);
2230 if (GET_CODE (addr) == PRE_INC)
2231 {
2232 if (!REG_P (XEXP (addr, 0)))
2233 fatal_insn ("pre-increment address is not a register", x);
2234
2235 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2236 }
2237 else if (GET_CODE (addr) == PRE_DEC)
2238 {
2239 if (!REG_P (XEXP (addr, 0)))
2240 fatal_insn ("pre-decrement address is not a register", x);
2241
2242 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2243 }
2244 else if (GET_CODE (addr) == POST_INC)
2245 {
2246 if (!REG_P (XEXP (addr, 0)))
2247 fatal_insn ("post-increment address is not a register", x);
2248
2249 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2250 }
2251 else
2252 {
2253 fputs ("@(", file);
2254 output_address (XEXP (x, 0));
2255 fputc (')', file);
2256 }
2257 break;
2258
2259 case CONST_DOUBLE :
2260 /* We handle SFmode constants here as output_addr_const doesn't. */
2261 if (GET_MODE (x) == SFmode)
2262 {
2263 REAL_VALUE_TYPE d;
2264 long l;
2265
2266 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2267 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2268 fprintf (file, "0x%08lx", l);
2269 break;
2270 }
2271
2272 /* Fall through. Let output_addr_const deal with it. */
2273
2274 default :
2275 output_addr_const (file, x);
2276 break;
2277 }
2278 }
2279
2280 /* Print a memory address as an operand to reference that memory location. */
2281
2282 static void
2283 m32r_print_operand_address (FILE * file, rtx addr)
2284 {
2285 rtx base;
2286 rtx index = 0;
2287 int offset = 0;
2288
2289 switch (GET_CODE (addr))
2290 {
2291 case REG :
2292 fputs (reg_names[REGNO (addr)], file);
2293 break;
2294
2295 case PLUS :
2296 if (CONST_INT_P (XEXP (addr, 0)))
2297 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2298 else if (CONST_INT_P (XEXP (addr, 1)))
2299 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2300 else
2301 base = XEXP (addr, 0), index = XEXP (addr, 1);
2302 if (REG_P (base))
2303 {
2304 /* Print the offset first (if present) to conform to the manual. */
2305 if (index == 0)
2306 {
2307 if (offset != 0)
2308 fprintf (file, "%d,", offset);
2309 fputs (reg_names[REGNO (base)], file);
2310 }
2311 /* The chip doesn't support this, but left in for generality. */
2312 else if (REG_P (index))
2313 fprintf (file, "%s,%s",
2314 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2315 /* Not sure this can happen, but leave in for now. */
2316 else if (GET_CODE (index) == SYMBOL_REF)
2317 {
2318 output_addr_const (file, index);
2319 fputc (',', file);
2320 fputs (reg_names[REGNO (base)], file);
2321 }
2322 else
2323 fatal_insn ("bad address", addr);
2324 }
2325 else if (GET_CODE (base) == LO_SUM)
2326 {
2327 gcc_assert (!index && REG_P (XEXP (base, 0)));
2328 if (small_data_operand (XEXP (base, 1), VOIDmode))
2329 fputs ("sda(", file);
2330 else
2331 fputs ("low(", file);
2332 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2333 offset));
2334 fputs ("),", file);
2335 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2336 }
2337 else
2338 fatal_insn ("bad address", addr);
2339 break;
2340
2341 case LO_SUM :
2342 if (!REG_P (XEXP (addr, 0)))
2343 fatal_insn ("lo_sum not of register", addr);
2344 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2345 fputs ("sda(", file);
2346 else
2347 fputs ("low(", file);
2348 output_addr_const (file, XEXP (addr, 1));
2349 fputs ("),", file);
2350 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2351 break;
2352
2353 case PRE_INC : /* Assume SImode. */
2354 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2355 break;
2356
2357 case PRE_DEC : /* Assume SImode. */
2358 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2359 break;
2360
2361 case POST_INC : /* Assume SImode. */
2362 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2363 break;
2364
2365 default :
2366 output_addr_const (file, addr);
2367 break;
2368 }
2369 }
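/* Illustrative examples (not from the original source) of what the
   routine above prints for common address forms, assuming r4 as the
   base register:

       (reg r4)                          ->  r4
       (plus (reg r4) (const_int 8))     ->  8,r4
       (lo_sum (reg r4) (symbol_ref s))  ->  low(s),r4   (or sda(s),r4)
       (post_inc (reg r4))               ->  r4+

   The enclosing "@(...)" wrapper, when needed, is added by
   m32r_print_operand().  */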
2370
2371 static bool
2372 m32r_print_operand_punct_valid_p (unsigned char code)
2373 {
2374 return m32r_punct_chars[code];
2375 }
2376
2377 /* Return true if the operands are the constants 0 and 1. */
2378
2379 int
2380 zero_and_one (rtx operand1, rtx operand2)
2381 {
2382 return
2383 CONST_INT_P (operand1)
2384 && CONST_INT_P (operand2)
2385 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2386 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2387 }
2388
2389 /* Generate the correct assembler code to handle the conditional loading of a
2390 value into a register. It is known that the operands satisfy the
2391 conditional_move_operand() function above. The destination is operand[0].
2392 The condition is operand [1]. The 'true' value is operand [2] and the
2393 'false' value is operand [3]. */
2394
2395 char *
2396 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2397 {
2398 static char buffer [100];
2399 const char * dest = reg_names [REGNO (operands [0])];
2400
2401 buffer [0] = 0;
2402
2403 /* Destination must be a register. */
2404 gcc_assert (REG_P (operands [0]));
2405 gcc_assert (conditional_move_operand (operands [2], SImode));
2406 gcc_assert (conditional_move_operand (operands [3], SImode));
2407
2408 /* Check to see if the test is reversed. */
2409 if (GET_CODE (operands [1]) == NE)
2410 {
2411 rtx tmp = operands [2];
2412 operands [2] = operands [3];
2413 operands [3] = tmp;
2414 }
2415
2416 sprintf (buffer, "mvfc %s, cbr", dest);
2417
2418 /* If the true value was '0' then we need to invert the results of the move. */
2419 if (INTVAL (operands [2]) == 0)
2420 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2421 dest, dest);
2422
2423 return buffer;
2424 }
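/* Example (illustrative only): for `r4 = (a == b) ? 1 : 0', once the
   comparison has set the condition bit, the routine above returns

       mvfc r4, cbr

   and for the inverted form `r4 = (a == b) ? 0 : 1' it appends

       xor3 r4, r4, #1

   Register r4 is just an example destination.  */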
2425
2426 /* Returns true if the registers contained in the two
2427 rtl expressions are different. */
2428
2429 int
2430 m32r_not_same_reg (rtx a, rtx b)
2431 {
2432 int reg_a = -1;
2433 int reg_b = -2;
2434
2435 while (GET_CODE (a) == SUBREG)
2436 a = SUBREG_REG (a);
2437
2438 if (REG_P (a))
2439 reg_a = REGNO (a);
2440
2441 while (GET_CODE (b) == SUBREG)
2442 b = SUBREG_REG (b);
2443
2444 if (REG_P (b))
2445 reg_b = REGNO (b);
2446
2447 return reg_a != reg_b;
2448 }
2449
2450 \f
2451 rtx
2452 m32r_function_symbol (const char *name)
2453 {
2454 int extra_flags = 0;
2455 enum m32r_model model;
2456 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2457
2458 if (TARGET_MODEL_SMALL)
2459 model = M32R_MODEL_SMALL;
2460 else if (TARGET_MODEL_MEDIUM)
2461 model = M32R_MODEL_MEDIUM;
2462 else if (TARGET_MODEL_LARGE)
2463 model = M32R_MODEL_LARGE;
2464 else
2465 gcc_unreachable (); /* Shouldn't happen. */
2466 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2467
2468 if (extra_flags)
2469 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2470
2471 return sym;
2472 }
2473
2474 /* Use a library function to move some bytes. */
2475
2476 static void
2477 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2478 {
2479 /* We want to pass the size as Pmode, which will normally be SImode
2480 but will be DImode if we are using 64-bit longs and pointers. */
2481 if (GET_MODE (bytes_rtx) != VOIDmode
2482 && GET_MODE (bytes_rtx) != Pmode)
2483 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2484
2485 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2486 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2487 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2488 TYPE_UNSIGNED (sizetype)),
2489 TYPE_MODE (sizetype));
2490 }
2491
2492 /* Expand string/block move operations.
2493
2494 operands[0] is the pointer to the destination.
2495 operands[1] is the pointer to the source.
2496 operands[2] is the number of bytes to move.
2497 operands[3] is the alignment.
2498
2499 Returns 1 upon success, 0 otherwise. */
2500
2501 int
2502 m32r_expand_block_move (rtx operands[])
2503 {
2504 rtx orig_dst = operands[0];
2505 rtx orig_src = operands[1];
2506 rtx bytes_rtx = operands[2];
2507 rtx align_rtx = operands[3];
2508 int constp = CONST_INT_P (bytes_rtx);
2509 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2510 int align = INTVAL (align_rtx);
2511 int leftover;
2512 rtx src_reg;
2513 rtx dst_reg;
2514
2515 if (constp && bytes <= 0)
2516 return 1;
2517
2518 /* Move the address into scratch registers. */
2519 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2520 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2521
2522 if (align > UNITS_PER_WORD)
2523 align = UNITS_PER_WORD;
2524
2525 /* If we prefer size over speed, always use a function call.
2526 If we do not know the size, use a function call.
2527 If the blocks are not word aligned, use a function call. */
2528 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2529 {
2530 block_move_call (dst_reg, src_reg, bytes_rtx);
2531 return 0;
2532 }
2533
2534 leftover = bytes % MAX_MOVE_BYTES;
2535 bytes -= leftover;
2536
2537 /* If necessary, generate a loop to handle the bulk of the copy. */
2538 if (bytes)
2539 {
2540 rtx label = NULL_RTX;
2541 rtx final_src = NULL_RTX;
2542 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2543 rtx rounded_total = GEN_INT (bytes);
2544 rtx new_dst_reg = gen_reg_rtx (SImode);
2545 rtx new_src_reg = gen_reg_rtx (SImode);
2546
2547 /* If we are going to have to perform this loop more than
2548 once, then generate a label and compute the address the
2549 source register will contain upon completion of the final
2550 iteration. */
2551 if (bytes > MAX_MOVE_BYTES)
2552 {
2553 final_src = gen_reg_rtx (Pmode);
2554
2555 if (INT16_P(bytes))
2556 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2557 else
2558 {
2559 emit_insn (gen_movsi (final_src, rounded_total));
2560 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2561 }
2562
2563 label = gen_label_rtx ();
2564 emit_label (label);
2565 }
2566
2567 /* It is known that output_block_move() will update src_reg to point
2568 to the word after the end of the source block, and dst_reg to point
2569 to the last word of the destination block, provided that the block
2570 is MAX_MOVE_BYTES long. */
2571 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2572 new_dst_reg, new_src_reg));
2573 emit_move_insn (dst_reg, new_dst_reg);
2574 emit_move_insn (src_reg, new_src_reg);
2575 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2576
2577 if (bytes > MAX_MOVE_BYTES)
2578 {
2579 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2580 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2581 }
2582 }
2583
2584 if (leftover)
2585 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2586 gen_reg_rtx (SImode),
2587 gen_reg_rtx (SImode)));
2588 return 1;
2589 }
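/* Worked example (illustrative, not from the original source): with
   word-aligned operands and a known size of MAX_MOVE_BYTES * 2 + 3
   bytes, the expansion above emits one loop that runs twice, copying
   MAX_MOVE_BYTES per iteration via movmemsi_internal, followed by a
   final movmemsi_internal for the 3 leftover bytes.  An unknown size,
   unaligned operands, or optimizing for size all fall back to the
   memcpy library call made by block_move_call().  */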
2590
2591 \f
2592 /* Emit load/stores for a small constant word aligned block_move.
2593
2594 operands[0] is the memory address of the destination.
2595 operands[1] is the memory address of the source.
2596 operands[2] is the number of bytes to move.
2597 operands[3] is a temp register.
2598 operands[4] is a temp register. */
2599
2600 void
2601 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2602 {
2603 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2604 int first_time;
2605 int got_extra = 0;
2606
2607 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2608
2609 /* We do not have a post-increment store available, so the first set of
2610 stores is done without any increment; the remaining ones can then use
2611 the pre-increment addressing mode.
2612
2613 Note: m32r_expand_block_move() also relies upon this behavior when
2614 building loops to copy large blocks. */
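/* For example (illustrative; register numbers are arbitrary): the first
   word is stored with "st %5,@%0" (no increment) and every later word
   with a "%s0" store such as "st r6,@+r4", so the destination register
   always ends up pointing at the last word written.  */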
2615 first_time = 1;
2616
2617 while (bytes > 0)
2618 {
2619 if (bytes >= 8)
2620 {
2621 if (first_time)
2622 {
2623 output_asm_insn ("ld\t%5, %p1", operands);
2624 output_asm_insn ("ld\t%6, %p1", operands);
2625 output_asm_insn ("st\t%5, @%0", operands);
2626 output_asm_insn ("st\t%6, %s0", operands);
2627 }
2628 else
2629 {
2630 output_asm_insn ("ld\t%5, %p1", operands);
2631 output_asm_insn ("ld\t%6, %p1", operands);
2632 output_asm_insn ("st\t%5, %s0", operands);
2633 output_asm_insn ("st\t%6, %s0", operands);
2634 }
2635
2636 bytes -= 8;
2637 }
2638 else if (bytes >= 4)
2639 {
2640 if (bytes > 4)
2641 got_extra = 1;
2642
2643 output_asm_insn ("ld\t%5, %p1", operands);
2644
2645 if (got_extra)
2646 output_asm_insn ("ld\t%6, %p1", operands);
2647
2648 if (first_time)
2649 output_asm_insn ("st\t%5, @%0", operands);
2650 else
2651 output_asm_insn ("st\t%5, %s0", operands);
2652
2653 bytes -= 4;
2654 }
2655 else
2656 {
2657 /* Get the entire next word, even though we do not want all of it.
2658 This saves us from doing several smaller loads, and we assume that
2659 we cannot cause a page fault when at least part of the word is in
2660 valid memory [since we don't get called if things aren't properly
2661 aligned]. */
2662 int dst_offset = first_time ? 0 : 4;
2663 /* The amount of increment we have to make to the
2664 destination pointer. */
2665 int dst_inc_amount = dst_offset + bytes - 4;
2666 /* The same for the source pointer. */
2667 int src_inc_amount = bytes;
2668 int last_shift;
2669 rtx my_operands[3];
2670
2671 /* If got_extra is true then we have already loaded
2672 the next word as part of loading and storing the previous word. */
2673 if (! got_extra)
2674 output_asm_insn ("ld\t%6, @%1", operands);
2675
2676 if (bytes >= 2)
2677 {
2678 bytes -= 2;
2679
2680 output_asm_insn ("sra3\t%5, %6, #16", operands);
2681 my_operands[0] = operands[5];
2682 my_operands[1] = GEN_INT (dst_offset);
2683 my_operands[2] = operands[0];
2684 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2685
2686 /* If there is a byte left to store then increment the
2687 destination address and shift the contents of the source
2688 register down by 8 bits. We cannot do the address
2689 increment in the store-halfword instruction, because it does
2690 not have an auto-increment mode. */
2691 if (bytes > 0) /* assert (bytes == 1) */
2692 {
2693 dst_offset += 2;
2694 last_shift = 8;
2695 }
2696 }
2697 else
2698 last_shift = 24;
2699
2700 if (bytes > 0)
2701 {
2702 my_operands[0] = operands[6];
2703 my_operands[1] = GEN_INT (last_shift);
2704 output_asm_insn ("srai\t%0, #%1", my_operands);
2705 my_operands[0] = operands[6];
2706 my_operands[1] = GEN_INT (dst_offset);
2707 my_operands[2] = operands[0];
2708 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2709 }
2710
2711 /* Update the destination pointer if needed. We have to do
2712 this so that the pattern matches what we output in this
2713 function. */
2714 if (dst_inc_amount
2715 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2716 {
2717 my_operands[0] = operands[0];
2718 my_operands[1] = GEN_INT (dst_inc_amount);
2719 output_asm_insn ("addi\t%0, #%1", my_operands);
2720 }
2721
2722 /* Update the source pointer if needed. We have to do this
2723 so that the pattern matches what we output in this
2724 function. */
2725 if (src_inc_amount
2726 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2727 {
2728 my_operands[0] = operands[1];
2729 my_operands[1] = GEN_INT (src_inc_amount);
2730 output_asm_insn ("addi\t%0, #%1", my_operands);
2731 }
2732
2733 bytes = 0;
2734 }
2735
2736 first_time = 0;
2737 }
2738 }
2739
2740 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2741
2742 int
2743 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2744 unsigned int new_reg)
2745 {
2746 /* Interrupt routines can't clobber any register that isn't already used. */
2747 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2748 && !df_regs_ever_live_p (new_reg))
2749 return 0;
2750
2751 return 1;
2752 }
2753
2754 rtx
2755 m32r_return_addr (int count)
2756 {
2757 if (count != 0)
2758 return const0_rtx;
2759
2760 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2761 }
2762
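/* Illustrative decoding of the trampoline emitted below (added for
   documentation only; big-endian halfword encodings shown):

       mv   r7, lr   ;  bl .+4        ; 178e 7e01   lr <- trampoline+4
       add3 r6, lr, #12               ; 86ae 000c   r6 <- &data (offset 16)
       mv   lr, r7   ;  ld r7, @r6+   ; 1e87 27e6   restore lr, r7 <- chain
       ld   r6, @r6  ;  jmp r6        ; 26c6 1fc6   load target, jump
       .word <static chain>           ; offset 16
       .word <function address>       ; offset 20

   so the static chain ends up in r7 and control transfers to the nested
   function through r6.  */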
2763 static void
2764 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2765 {
2766 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2767 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2768 0x017e8e17 : 0x178e7e01, SImode));
2769 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2770 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2771 0x0c00ae86 : 0x86ae000c, SImode));
2772 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2773 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2774 0xe627871e : 0x1e8727e6, SImode));
2775 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2776 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2777 0xc616c626 : 0x26c61fc6, SImode));
2778 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2779 chain_value);
2780 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2781 XEXP (DECL_RTL (fndecl), 0));
2782
2783 if (m32r_cache_flush_trap >= 0)
2784 emit_insn (gen_flush_icache
2785 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2786 gen_int_mode (m32r_cache_flush_trap, SImode)));
2787 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2788 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2789 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2790 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2791 GEN_INT (3), SImode);
2792 }
2793
2794 /* True if X is a reg that can be used as a base reg. */
2795
2796 static bool
2797 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2798 {
2799 if (! REG_P (x))
2800 return false;
2801
2802 if (strict)
2803 {
2804 if (GPR_P (REGNO (x)))
2805 return true;
2806 }
2807 else
2808 {
2809 if (GPR_P (REGNO (x))
2810 || REGNO (x) == ARG_POINTER_REGNUM
2811 || ! HARD_REGISTER_P (x))
2812 return true;
2813 }
2814
2815 return false;
2816 }
2817
2818 static inline bool
2819 m32r_rtx_ok_for_offset_p (const_rtx x)
2820 {
2821 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2822 }
2823
2824 static inline bool
2825 m32r_legitimate_offset_addres_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2826 const_rtx x, bool strict)
2827 {
2828 if (GET_CODE (x) == PLUS
2829 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2830 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2831 return true;
2832
2833 return false;
2834 }
2835
2836 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2837 since more than one instruction will be required. */
2838
2839 static inline bool
2840 m32r_legitimate_lo_sum_addres_p (enum machine_mode mode, const_rtx x,
2841 bool strict)
2842 {
2843 if (GET_CODE (x) == LO_SUM
2844 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2845 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2846 && CONSTANT_P (XEXP (x, 1)))
2847 return true;
2848
2849 return false;
2850 }
2851
2852 /* Is this a load-and-increment operation? */
2853
2854 static inline bool
2855 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2856 {
2857 if ((mode == SImode || mode == SFmode)
2858 && GET_CODE (x) == POST_INC
2859 && REG_P (XEXP (x, 0))
2860 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2861 return true;
2862
2863 return false;
2864 }
2865
2866 /* Is this an increment/decrement-and-store operation? */
2867
2868 static inline bool
2869 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2870 {
2871 if ((mode == SImode || mode == SFmode)
2872 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2873 && REG_P (XEXP (x, 0))
2874 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2875 return true;
2876
2877 return false;
2878 }
2879
2880 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2881
2882 static bool
2883 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2884 {
2885 if (m32r_rtx_ok_for_base_p (x, strict)
2886 || m32r_legitimate_offset_addres_p (mode, x, strict)
2887 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2888 || m32r_load_postinc_p (mode, x, strict)
2889 || m32r_store_preinc_predec_p (mode, x, strict))
2890 return true;
2891
2892 return false;
2893 }
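/* Summary of the address forms accepted above (illustrative, with r4 as
   an example base register):

       (reg r4)                                  register indirect
       (plus (reg r4) (const_int d))             d a signed 16-bit offset
       (lo_sum (reg r4) (symbol_ref s))          modes of at most one word
       (post_inc (reg r4))                       SImode/SFmode (post-inc loads)
       (pre_inc (reg r4)), (pre_dec (reg r4))    SImode/SFmode (pre-inc/dec stores)  */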
2894
2895 static void
2896 m32r_conditional_register_usage (void)
2897 {
2898 if (flag_pic)
2899 {
2900 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2901 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2902 }
2903 }
2904
2905 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2906
2907 We don't allow (plus symbol large-constant) as the relocations can't
2908 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2909 We allow all CONST_DOUBLEs, as the md file patterns will force the
2910 constant to memory if they cannot handle them. */
2911
2912 static bool
2913 m32r_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2914 {
2915 return !(GET_CODE (x) == CONST
2916 && GET_CODE (XEXP (x, 0)) == PLUS
2917 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2918 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2919 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2920 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2921 }
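/* For instance (illustrative only), (const (plus (symbol_ref "x")
   (const_int 40000))) is rejected by the test above because 40000 does
   not fit the 16-bit/24-bit relocation addends, while plain symbols,
   labels, and any CONST_DOUBLE remain legitimate.  */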