1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "ggc.h"
39 #include "integrate.h"
40 #include "df.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "tm-constrs.h"
45 #include "opts.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Machine-specific symbol_ref flags. */
51 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
52 #define SYMBOL_REF_MODEL(X) \
53 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
54
55 /* For string literals, etc. */
56 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
57
58 /* Forward declaration. */
59 static void m32r_option_override (void);
60 static void init_reg_tables (void);
61 static void block_move_call (rtx, rtx, rtx);
62 static int m32r_is_insn (rtx);
63 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
64 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
65 static bool m32r_mode_dependent_address_p (const_rtx);
66 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
67 static void m32r_print_operand (FILE *, rtx, int);
68 static void m32r_print_operand_address (FILE *, rtx);
69 static bool m32r_print_operand_punct_valid_p (unsigned char code);
70 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
71 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
72
73 static void m32r_file_start (void);
74
75 static int m32r_adjust_priority (rtx, int);
76 static int m32r_issue_rate (void);
77
78 static void m32r_encode_section_info (tree, rtx, int);
79 static bool m32r_in_small_data_p (const_tree);
80 static bool m32r_return_in_memory (const_tree, const_tree);
81 static rtx m32r_function_value (const_tree, const_tree, bool);
82 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
83 static bool m32r_function_value_regno_p (const unsigned int);
84 static void m32r_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
85 tree, int *, int);
86 static void init_idents (void);
87 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
88 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
89 static bool m32r_pass_by_reference (cumulative_args_t, enum machine_mode,
90 const_tree, bool);
91 static int m32r_arg_partial_bytes (cumulative_args_t, enum machine_mode,
92 tree, bool);
93 static rtx m32r_function_arg (cumulative_args_t, enum machine_mode,
94 const_tree, bool);
95 static void m32r_function_arg_advance (cumulative_args_t, enum machine_mode,
96 const_tree, bool);
97 static bool m32r_can_eliminate (const int, const int);
98 static void m32r_conditional_register_usage (void);
99 static void m32r_trampoline_init (rtx, tree, rtx);
100 static bool m32r_legitimate_constant_p (enum machine_mode, rtx);
101 \f
102 /* M32R specific attributes. */
103
104 static const struct attribute_spec m32r_attribute_table[] =
105 {
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "interrupt", 0, 0, true, false, false, NULL, false },
109 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
110 false },
111 { NULL, 0, 0, false, false, false, NULL, false }
112 };
113 \f
114 /* Initialize the GCC target structure. */
115 #undef TARGET_ATTRIBUTE_TABLE
116 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
117
118 #undef TARGET_LEGITIMATE_ADDRESS_P
119 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
120 #undef TARGET_LEGITIMIZE_ADDRESS
121 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
122 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
123 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
124
125 #undef TARGET_ASM_ALIGNED_HI_OP
126 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
127 #undef TARGET_ASM_ALIGNED_SI_OP
128 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
129
130 #undef TARGET_PRINT_OPERAND
131 #define TARGET_PRINT_OPERAND m32r_print_operand
132 #undef TARGET_PRINT_OPERAND_ADDRESS
133 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
134 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
135 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
136
137 #undef TARGET_ASM_FUNCTION_PROLOGUE
138 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
139 #undef TARGET_ASM_FUNCTION_EPILOGUE
140 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
141
142 #undef TARGET_ASM_FILE_START
143 #define TARGET_ASM_FILE_START m32r_file_start
144
145 #undef TARGET_SCHED_ADJUST_PRIORITY
146 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
147 #undef TARGET_SCHED_ISSUE_RATE
148 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
149
150 #undef TARGET_OPTION_OVERRIDE
151 #define TARGET_OPTION_OVERRIDE m32r_option_override
152
153 #undef TARGET_ENCODE_SECTION_INFO
154 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
155 #undef TARGET_IN_SMALL_DATA_P
156 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
157
158
159 #undef TARGET_MEMORY_MOVE_COST
160 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
161 #undef TARGET_RTX_COSTS
162 #define TARGET_RTX_COSTS m32r_rtx_costs
163 #undef TARGET_ADDRESS_COST
164 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
165
166 #undef TARGET_PROMOTE_PROTOTYPES
167 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
168 #undef TARGET_RETURN_IN_MEMORY
169 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
170
171 #undef TARGET_FUNCTION_VALUE
172 #define TARGET_FUNCTION_VALUE m32r_function_value
173 #undef TARGET_LIBCALL_VALUE
174 #define TARGET_LIBCALL_VALUE m32r_libcall_value
175 #undef TARGET_FUNCTION_VALUE_REGNO_P
176 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
177
178 #undef TARGET_SETUP_INCOMING_VARARGS
179 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
180 #undef TARGET_MUST_PASS_IN_STACK
181 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
182 #undef TARGET_PASS_BY_REFERENCE
183 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
184 #undef TARGET_ARG_PARTIAL_BYTES
185 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
186 #undef TARGET_FUNCTION_ARG
187 #define TARGET_FUNCTION_ARG m32r_function_arg
188 #undef TARGET_FUNCTION_ARG_ADVANCE
189 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
190
191 #undef TARGET_CAN_ELIMINATE
192 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
193
194 #undef TARGET_CONDITIONAL_REGISTER_USAGE
195 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
196
197 #undef TARGET_TRAMPOLINE_INIT
198 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
199
200 #undef TARGET_LEGITIMATE_CONSTANT_P
201 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
202
203 struct gcc_target targetm = TARGET_INITIALIZER;
204 \f
205 /* Called by m32r_option_override to initialize various things. */
206
207 void
208 m32r_init (void)
209 {
210 init_reg_tables ();
211
212 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
213 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
214 m32r_punct_chars['#'] = 1;
215 m32r_punct_chars['@'] = 1; /* ??? no longer used */
216
217 /* Provide default value if not specified. */
218 if (!global_options_set.x_g_switch_value)
219 g_switch_value = SDATA_DEFAULT_SIZE;
220 }
221
222 static void
223 m32r_option_override (void)
224 {
225 /* These need to be done at start up.
226 It's convenient to do them here. */
227 m32r_init ();
228 SUBTARGET_OVERRIDE_OPTIONS;
229 }
230
231 /* Vectors to keep interesting information about registers where it can easily
232    be found.  We used to use the actual mode value as the bit number, but there
233    are (or may be) more than 32 modes now.  Instead we use two tables: one
234 indexed by hard register number, and one indexed by mode. */
235
236 /* The purpose of m32r_mode_class is to shrink the range of modes so that
237 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
238 mapped into one m32r_mode_class mode. */
239
240 enum m32r_mode_class
241 {
242 C_MODE,
243 S_MODE, D_MODE, T_MODE, O_MODE,
244 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
245 };
246
247 /* Modes for condition codes. */
248 #define C_MODES (1 << (int) C_MODE)
249
250 /* Modes for single-word and smaller quantities. */
251 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
252
253 /* Modes for double-word and smaller quantities. */
254 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
255
256 /* Modes for quad-word and smaller quantities. */
257 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
258
259 /* Modes for accumulators. */
260 #define A_MODES (1 << (int) A_MODE)
261
262 /* Value is 1 if register/mode pair is acceptable on the M32R.  */
263
264 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
265 {
266 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
267 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
268 S_MODES, C_MODES, A_MODES, A_MODES
269 };
270
271 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
272
273 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
274
275 static void
276 init_reg_tables (void)
277 {
278 int i;
279
280 for (i = 0; i < NUM_MACHINE_MODES; i++)
281 {
282 switch (GET_MODE_CLASS (i))
283 {
284 case MODE_INT:
285 case MODE_PARTIAL_INT:
286 case MODE_COMPLEX_INT:
287 if (GET_MODE_SIZE (i) <= 4)
288 m32r_mode_class[i] = 1 << (int) S_MODE;
289 else if (GET_MODE_SIZE (i) == 8)
290 m32r_mode_class[i] = 1 << (int) D_MODE;
291 else if (GET_MODE_SIZE (i) == 16)
292 m32r_mode_class[i] = 1 << (int) T_MODE;
293 else if (GET_MODE_SIZE (i) == 32)
294 m32r_mode_class[i] = 1 << (int) O_MODE;
295 else
296 m32r_mode_class[i] = 0;
297 break;
298 case MODE_FLOAT:
299 case MODE_COMPLEX_FLOAT:
300 if (GET_MODE_SIZE (i) <= 4)
301 m32r_mode_class[i] = 1 << (int) SF_MODE;
302 else if (GET_MODE_SIZE (i) == 8)
303 m32r_mode_class[i] = 1 << (int) DF_MODE;
304 else if (GET_MODE_SIZE (i) == 16)
305 m32r_mode_class[i] = 1 << (int) TF_MODE;
306 else if (GET_MODE_SIZE (i) == 32)
307 m32r_mode_class[i] = 1 << (int) OF_MODE;
308 else
309 m32r_mode_class[i] = 0;
310 break;
311 case MODE_CC:
312 m32r_mode_class[i] = 1 << (int) C_MODE;
313 break;
314 default:
315 m32r_mode_class[i] = 0;
316 break;
317 }
318 }
319
320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
321 {
322 if (GPR_P (i))
323 m32r_regno_reg_class[i] = GENERAL_REGS;
324 else if (i == ARG_POINTER_REGNUM)
325 m32r_regno_reg_class[i] = GENERAL_REGS;
326 else
327 m32r_regno_reg_class[i] = NO_REGS;
328 }
329 }
330 \f
331 /* M32R specific attribute support.
332
333 interrupt - for interrupt functions
334
335 model - select code model used to access object
336
337 small: addresses use 24 bits, use bl to make calls
338 medium: addresses use 32 bits, use bl to make calls
339 large: addresses use 32 bits, use seth/add3/jl to make calls
340
341 Grep for MODEL in m32r.h for more info. */
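/* Example usage of these attributes (illustrative):

     void handler (void) __attribute__ ((interrupt));
     int counter __attribute__ ((model (small)));
     extern int big_table[] __attribute__ ((model (large)));

   The model argument must be one of the identifiers accepted by
   m32r_handle_model_attribute below: small, medium or large, optionally
   written with surrounding double underscores (e.g. __small__).  */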
342
343 static tree small_ident1;
344 static tree small_ident2;
345 static tree medium_ident1;
346 static tree medium_ident2;
347 static tree large_ident1;
348 static tree large_ident2;
349
350 static void
351 init_idents (void)
352 {
353 if (small_ident1 == 0)
354 {
355 small_ident1 = get_identifier ("small");
356 small_ident2 = get_identifier ("__small__");
357 medium_ident1 = get_identifier ("medium");
358 medium_ident2 = get_identifier ("__medium__");
359 large_ident1 = get_identifier ("large");
360 large_ident2 = get_identifier ("__large__");
361 }
362 }
363
364 /* Handle a "model" attribute; arguments as in
365 struct attribute_spec.handler. */
366 static tree
367 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
368 tree args, int flags ATTRIBUTE_UNUSED,
369 bool *no_add_attrs)
370 {
371 tree arg;
372
373 init_idents ();
374 arg = TREE_VALUE (args);
375
376 if (arg != small_ident1
377 && arg != small_ident2
378 && arg != medium_ident1
379 && arg != medium_ident2
380 && arg != large_ident1
381 && arg != large_ident2)
382 {
383 warning (OPT_Wattributes, "invalid argument of %qs attribute",
384 IDENTIFIER_POINTER (name));
385 *no_add_attrs = true;
386 }
387
388 return NULL_TREE;
389 }
390 \f
391 /* Encode section information of DECL, which is either a VAR_DECL,
392 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
393
394 For the M32R we want to record:
395
396 - whether the object lives in .sdata/.sbss.
397 - what code model should be used to access the object
398 */
399
400 static void
401 m32r_encode_section_info (tree decl, rtx rtl, int first)
402 {
403 int extra_flags = 0;
404 tree model_attr;
405 enum m32r_model model;
406
407 default_encode_section_info (decl, rtl, first);
408
409 if (!DECL_P (decl))
410 return;
411
412 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
413 if (model_attr)
414 {
415 tree id;
416
417 init_idents ();
418
419 id = TREE_VALUE (TREE_VALUE (model_attr));
420
421 if (id == small_ident1 || id == small_ident2)
422 model = M32R_MODEL_SMALL;
423 else if (id == medium_ident1 || id == medium_ident2)
424 model = M32R_MODEL_MEDIUM;
425 else if (id == large_ident1 || id == large_ident2)
426 model = M32R_MODEL_LARGE;
427 else
428 gcc_unreachable (); /* shouldn't happen */
429 }
430 else
431 {
432 if (TARGET_MODEL_SMALL)
433 model = M32R_MODEL_SMALL;
434 else if (TARGET_MODEL_MEDIUM)
435 model = M32R_MODEL_MEDIUM;
436 else if (TARGET_MODEL_LARGE)
437 model = M32R_MODEL_LARGE;
438 else
439 gcc_unreachable (); /* shouldn't happen */
440 }
441 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
442
443 if (extra_flags)
444 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
445 }
446
447 /* Only mark the object as being small data area addressable if
448 it hasn't been explicitly marked with a code model.
449
450 The user can explicitly put an object in the small data area with the
451 section attribute. If the object is in sdata/sbss and marked with a
452 code model do both [put the object in .sdata and mark it as being
453 addressed with a specific code model - don't mark it as being addressed
454 with an SDA reloc though]. This is ok and might be useful at times. If
455 the object doesn't fit the linker will give an error. */
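/* For example, assuming the default -msdata and -G settings:

     int counter;                                       size <= g_switch_value
     char buf[4096];                                     larger than -G
     int forced __attribute__ ((section (".sdata")));    explicit placement

   Here the first and third objects are treated as small data; the second
   is not.  */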
456
457 static bool
458 m32r_in_small_data_p (const_tree decl)
459 {
460 const_tree section;
461
462 if (TREE_CODE (decl) != VAR_DECL)
463 return false;
464
465 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
466 return false;
467
468 section = DECL_SECTION_NAME (decl);
469 if (section)
470 {
471 const char *const name = TREE_STRING_POINTER (section);
472 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
473 return true;
474 }
475 else
476 {
477 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
478 {
479 int size = int_size_in_bytes (TREE_TYPE (decl));
480
481 if (size > 0 && size <= g_switch_value)
482 return true;
483 }
484 }
485
486 return false;
487 }
488
489 /* Do anything needed before RTL is emitted for each function. */
490
491 void
492 m32r_init_expanders (void)
493 {
494 /* ??? At one point there was code here. The function is left in
495 to make it easy to experiment. */
496 }
497 \f
498 int
499 call_operand (rtx op, enum machine_mode mode)
500 {
501 if (!MEM_P (op))
502 return 0;
503 op = XEXP (op, 0);
504 return call_address_operand (op, mode);
505 }
506
507 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
508
509 int
510 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
511 {
512 if (! TARGET_SDATA_USE)
513 return 0;
514
515 if (GET_CODE (op) == SYMBOL_REF)
516 return SYMBOL_REF_SMALL_P (op);
517
518 if (GET_CODE (op) == CONST
519 && GET_CODE (XEXP (op, 0)) == PLUS
520 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
521 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
522 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
523
524 return 0;
525 }
526
527 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
528
529 int
530 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
531 {
532 rtx sym;
533
534 if (flag_pic)
535 return 0;
536
537 if (GET_CODE (op) == LABEL_REF)
538 return TARGET_ADDR24;
539
540 if (GET_CODE (op) == SYMBOL_REF)
541 sym = op;
542 else if (GET_CODE (op) == CONST
543 && GET_CODE (XEXP (op, 0)) == PLUS
544 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
545 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
546 sym = XEXP (XEXP (op, 0), 0);
547 else
548 return 0;
549
550 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
551 return 1;
552
553 if (TARGET_ADDR24
554 && (CONSTANT_POOL_ADDRESS_P (sym)
555 || LIT_NAME_P (XSTR (sym, 0))))
556 return 1;
557
558 return 0;
559 }
560
561 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
562
563 int
564 addr32_operand (rtx op, enum machine_mode mode)
565 {
566 rtx sym;
567
568 if (GET_CODE (op) == LABEL_REF)
569 return TARGET_ADDR32;
570
571 if (GET_CODE (op) == SYMBOL_REF)
572 sym = op;
573 else if (GET_CODE (op) == CONST
574 && GET_CODE (XEXP (op, 0)) == PLUS
575 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
576 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
577 && ! flag_pic)
578 sym = XEXP (XEXP (op, 0), 0);
579 else
580 return 0;
581
582 return (! addr24_operand (sym, mode)
583 && ! small_data_operand (sym, mode));
584 }
585
586 /* Return 1 if OP is a function that can be called with the `bl' insn. */
587
588 int
589 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
590 {
591 if (flag_pic)
592 return 1;
593
594 if (GET_CODE (op) == SYMBOL_REF)
595 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
596
597 return TARGET_CALL26;
598 }
599
600 /* Return 1 if OP is a DImode const we want to handle inline.
601 This must match the code in the movdi pattern.
602 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
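/* For example, the DImode constant 0x0000000100000002 splits into the
   words 1 and 2, both within [-128, 127], and so is handled inline;
   0x0001000000000000 splits into 65536 and 0 and is not.  */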
603
604 int
605 easy_di_const (rtx op)
606 {
607 rtx high_rtx, low_rtx;
608 HOST_WIDE_INT high, low;
609
610 split_double (op, &high_rtx, &low_rtx);
611 high = INTVAL (high_rtx);
612 low = INTVAL (low_rtx);
613 /* Pick constants loadable with 2 16-bit `ldi' insns. */
614 if (high >= -128 && high <= 127
615 && low >= -128 && low <= 127)
616 return 1;
617 return 0;
618 }
619
620 /* Return 1 if OP is a DFmode const we want to handle inline.
621 This must match the code in the movdf pattern.
622 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
623
624 int
625 easy_df_const (rtx op)
626 {
627 REAL_VALUE_TYPE r;
628 long l[2];
629
630 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
631 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
632 if (l[0] == 0 && l[1] == 0)
633 return 1;
634 if ((l[0] & 0xffff) == 0 && l[1] == 0)
635 return 1;
636 return 0;
637 }
638
639 /* Return 1 if OP is (mem (reg ...)).
640 This is used in insn length calcs. */
641
642 int
643 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
644 {
645 return MEM_P (op) && REG_P (XEXP (op, 0));
646 }
647
648 /* Return nonzero if TYPE must be passed by indirect reference. */
649
650 static bool
651 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
652 enum machine_mode mode, const_tree type,
653 bool named ATTRIBUTE_UNUSED)
654 {
655 int size;
656
657 if (type)
658 size = int_size_in_bytes (type);
659 else
660 size = GET_MODE_SIZE (mode);
661
662 return (size < 0 || size > 8);
663 }
664 \f
665 /* Comparisons. */
666
667 /* X and Y are two things to compare using CODE. Emit the compare insn and
668 return the rtx for compare [arg0 of the if_then_else].
669 If need_compare is true then the comparison insn must be generated, rather
670 than being subsumed into the following branch instruction. */
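/* A conditional branch expander in m32r.md uses this roughly as follows
   (illustrative sketch only):

     cond = gen_compare (GET_CODE (operands[0]), operands[1], operands[2],
                         FALSE);
     emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                                  gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
                                                        label_ref, pc_rtx)));

   where the rtx returned here becomes the condition of the if_then_else.  */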
671
672 rtx
673 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
674 {
675 enum rtx_code compare_code;
676 enum rtx_code branch_code;
677 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
678 int must_swap = 0;
679
680 switch (code)
681 {
682 case EQ: compare_code = EQ; branch_code = NE; break;
683 case NE: compare_code = EQ; branch_code = EQ; break;
684 case LT: compare_code = LT; branch_code = NE; break;
685 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
686 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
687 case GE: compare_code = LT; branch_code = EQ; break;
688 case LTU: compare_code = LTU; branch_code = NE; break;
689 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
690 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
691 case GEU: compare_code = LTU; branch_code = EQ; break;
692
693 default:
694 gcc_unreachable ();
695 }
696
697 if (need_compare)
698 {
699 switch (compare_code)
700 {
701 case EQ:
702 if (satisfies_constraint_P (y) /* Reg equal to small const. */
703 && y != const0_rtx)
704 {
705 rtx tmp = gen_reg_rtx (SImode);
706
707 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
708 x = tmp;
709 y = const0_rtx;
710 }
711 else if (CONSTANT_P (y)) /* Reg equal to const. */
712 {
713 rtx tmp = force_reg (GET_MODE (x), y);
714 y = tmp;
715 }
716
717 if (register_operand (y, SImode) /* Reg equal to reg. */
718 || y == const0_rtx) /* Reg equal to zero. */
719 {
720 emit_insn (gen_cmp_eqsi_insn (x, y));
721
722 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
723 }
724 break;
725
726 case LT:
727 if (register_operand (y, SImode)
728 || satisfies_constraint_P (y))
729 {
730 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
731
732 switch (code)
733 {
734 case LT:
735 emit_insn (gen_cmp_ltsi_insn (x, y));
736 code = EQ;
737 break;
738 case LE:
739 if (y == const0_rtx)
740 tmp = const1_rtx;
741 else
742 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
743 emit_insn (gen_cmp_ltsi_insn (x, tmp));
744 code = EQ;
745 break;
746 case GT:
747 if (CONST_INT_P (y))
748 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
749 else
750 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
751 emit_insn (gen_cmp_ltsi_insn (x, tmp));
752 code = NE;
753 break;
754 case GE:
755 emit_insn (gen_cmp_ltsi_insn (x, y));
756 code = NE;
757 break;
758 default:
759 gcc_unreachable ();
760 }
761
762 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
763 }
764 break;
765
766 case LTU:
767 if (register_operand (y, SImode)
768 || satisfies_constraint_P (y))
769 {
770 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
771
772 switch (code)
773 {
774 case LTU:
775 emit_insn (gen_cmp_ltusi_insn (x, y));
776 code = EQ;
777 break;
778 case LEU:
779 if (y == const0_rtx)
780 tmp = const1_rtx;
781 else
782 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
783 emit_insn (gen_cmp_ltusi_insn (x, tmp));
784 code = EQ;
785 break;
786 case GTU:
787 if (CONST_INT_P (y))
788 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
789 else
790 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
791 emit_insn (gen_cmp_ltusi_insn (x, tmp));
792 code = NE;
793 break;
794 case GEU:
795 emit_insn (gen_cmp_ltusi_insn (x, y));
796 code = NE;
797 break;
798 default:
799 gcc_unreachable ();
800 }
801
802 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
803 }
804 break;
805
806 default:
807 gcc_unreachable ();
808 }
809 }
810 else
811 {
812 /* Reg/reg equal comparison. */
813 if (compare_code == EQ
814 && register_operand (y, SImode))
815 return gen_rtx_fmt_ee (code, CCmode, x, y);
816
817 /* Reg/zero signed comparison. */
818 if ((compare_code == EQ || compare_code == LT)
819 && y == const0_rtx)
820 return gen_rtx_fmt_ee (code, CCmode, x, y);
821
822 /* Reg/smallconst equal comparison. */
823 if (compare_code == EQ
824 && satisfies_constraint_P (y))
825 {
826 rtx tmp = gen_reg_rtx (SImode);
827
828 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
829 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
830 }
831
832 /* Reg/const equal comparison. */
833 if (compare_code == EQ
834 && CONSTANT_P (y))
835 {
836 rtx tmp = force_reg (GET_MODE (x), y);
837
838 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
839 }
840 }
841
842 if (CONSTANT_P (y))
843 {
844 if (must_swap)
845 y = force_reg (GET_MODE (x), y);
846 else
847 {
848 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
849
850 if (! ok_const)
851 y = force_reg (GET_MODE (x), y);
852 }
853 }
854
855 switch (compare_code)
856 {
857 case EQ :
858 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
859 break;
860 case LT :
861 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
862 break;
863 case LTU :
864 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
865 break;
866
867 default:
868 gcc_unreachable ();
869 }
870
871 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
872 }
873
874 bool
875 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
876 {
877 enum machine_mode mode = GET_MODE (op0);
878
879 gcc_assert (mode == SImode);
880 switch (code)
881 {
882 case EQ:
883 if (!register_operand (op1, mode))
884 op1 = force_reg (mode, op1);
885
886 if (TARGET_M32RX || TARGET_M32R2)
887 {
888 if (!reg_or_zero_operand (op2, mode))
889 op2 = force_reg (mode, op2);
890
891 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
892 return true;
893 }
894 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
895 {
896 emit_insn (gen_seq_zero_insn (op0, op1));
897 return true;
898 }
899
900 if (!reg_or_eq_int16_operand (op2, mode))
901 op2 = force_reg (mode, op2);
902
903 emit_insn (gen_seq_insn (op0, op1, op2));
904 return true;
905
906 case NE:
907 if (!CONST_INT_P (op2)
908 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
909 {
910 rtx reg;
911
912 if (reload_completed || reload_in_progress)
913 return false;
914
915 reg = gen_reg_rtx (SImode);
916 emit_insn (gen_xorsi3 (reg, op1, op2));
917 op1 = reg;
918
919 if (!register_operand (op1, mode))
920 op1 = force_reg (mode, op1);
921
922 emit_insn (gen_sne_zero_insn (op0, op1));
923 return true;
924 }
925 return false;
926
927 case LT:
928 case GT:
929 if (code == GT)
930 {
931 rtx tmp = op2;
932 op2 = op1;
933 op1 = tmp;
934 code = LT;
935 }
936
937 if (!register_operand (op1, mode))
938 op1 = force_reg (mode, op1);
939
940 if (!reg_or_int16_operand (op2, mode))
941 op2 = force_reg (mode, op2);
942
943 emit_insn (gen_slt_insn (op0, op1, op2));
944 return true;
945
946 case LTU:
947 case GTU:
948 if (code == GTU)
949 {
950 rtx tmp = op2;
951 op2 = op1;
952 op1 = tmp;
953 code = LTU;
954 }
955
956 if (!register_operand (op1, mode))
957 op1 = force_reg (mode, op1);
958
959 if (!reg_or_int16_operand (op2, mode))
960 op2 = force_reg (mode, op2);
961
962 emit_insn (gen_sltu_insn (op0, op1, op2));
963 return true;
964
965 case GE:
966 case GEU:
967 if (!register_operand (op1, mode))
968 op1 = force_reg (mode, op1);
969
970 if (!reg_or_int16_operand (op2, mode))
971 op2 = force_reg (mode, op2);
972
973 if (code == GE)
974 emit_insn (gen_sge_insn (op0, op1, op2));
975 else
976 emit_insn (gen_sgeu_insn (op0, op1, op2));
977 return true;
978
979 case LE:
980 case LEU:
981 if (!register_operand (op1, mode))
982 op1 = force_reg (mode, op1);
983
984 if (CONST_INT_P (op2))
985 {
986 HOST_WIDE_INT value = INTVAL (op2);
987 if (value >= 2147483647)
988 {
989 emit_move_insn (op0, const1_rtx);
990 return true;
991 }
992
993 op2 = GEN_INT (value + 1);
994 if (value < -32768 || value >= 32767)
995 op2 = force_reg (mode, op2);
996
997 if (code == LEU)
998 emit_insn (gen_sltu_insn (op0, op1, op2));
999 else
1000 emit_insn (gen_slt_insn (op0, op1, op2));
1001 return true;
1002 }
1003
1004 if (!register_operand (op2, mode))
1005 op2 = force_reg (mode, op2);
1006
1007 if (code == LEU)
1008 emit_insn (gen_sleu_insn (op0, op1, op2));
1009 else
1010 emit_insn (gen_sle_insn (op0, op1, op2));
1011 return true;
1012
1013 default:
1014 gcc_unreachable ();
1015 }
1016 }
1017
1018 \f
1019 /* Split a 2 word move (DI or DF) into component parts. */
1020
1021 rtx
1022 gen_split_move_double (rtx operands[])
1023 {
1024 enum machine_mode mode = GET_MODE (operands[0]);
1025 rtx dest = operands[0];
1026 rtx src = operands[1];
1027 rtx val;
1028
1029 /* We might have (SUBREG (MEM)) here, so just get rid of the
1030 subregs to make this code simpler. It is safe to call
1031 alter_subreg any time after reload. */
1032 if (GET_CODE (dest) == SUBREG)
1033 alter_subreg (&dest);
1034 if (GET_CODE (src) == SUBREG)
1035 alter_subreg (&src);
1036
1037 start_sequence ();
1038 if (REG_P (dest))
1039 {
1040 int dregno = REGNO (dest);
1041
1042 /* Reg = reg. */
1043 if (REG_P (src))
1044 {
1045 int sregno = REGNO (src);
1046
1047 int reverse = (dregno == sregno + 1);
1048
1049 /* We normally copy the low-numbered register first. However, if
1050              the first register of operand 0 is the same as the second register of
1051 operand 1, we must copy in the opposite order. */
1052 emit_insn (gen_rtx_SET (VOIDmode,
1053 operand_subword (dest, reverse, TRUE, mode),
1054 operand_subword (src, reverse, TRUE, mode)));
1055
1056 emit_insn (gen_rtx_SET (VOIDmode,
1057 operand_subword (dest, !reverse, TRUE, mode),
1058 operand_subword (src, !reverse, TRUE, mode)));
1059 }
1060
1061 /* Reg = constant. */
1062 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1063 {
1064 rtx words[2];
1065 split_double (src, &words[0], &words[1]);
1066 emit_insn (gen_rtx_SET (VOIDmode,
1067 operand_subword (dest, 0, TRUE, mode),
1068 words[0]));
1069
1070 emit_insn (gen_rtx_SET (VOIDmode,
1071 operand_subword (dest, 1, TRUE, mode),
1072 words[1]));
1073 }
1074
1075 /* Reg = mem. */
1076 else if (MEM_P (src))
1077 {
1078 /* If the high-address word is used in the address, we must load it
1079 last. Otherwise, load it first. */
1080 int reverse
1081 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1082
1083 /* We used to optimize loads from single registers as
1084
1085 ld r1,r3+; ld r2,r3
1086
1087 if r3 were not used subsequently. However, the REG_NOTES aren't
1088 propagated correctly by the reload phase, and it can cause bad
1089 code to be generated. We could still try:
1090
1091 ld r1,r3+; ld r2,r3; addi r3,-4
1092
1093 which saves 2 bytes and doesn't force longword alignment. */
1094 emit_insn (gen_rtx_SET (VOIDmode,
1095 operand_subword (dest, reverse, TRUE, mode),
1096 adjust_address (src, SImode,
1097 reverse * UNITS_PER_WORD)));
1098
1099 emit_insn (gen_rtx_SET (VOIDmode,
1100 operand_subword (dest, !reverse, TRUE, mode),
1101 adjust_address (src, SImode,
1102 !reverse * UNITS_PER_WORD)));
1103 }
1104 else
1105 gcc_unreachable ();
1106 }
1107
1108 /* Mem = reg. */
1109   /* We used to optimize stores to single registers as
1110
1111 st r1,r3; st r2,+r3
1112
1113 if r3 were not used subsequently. However, the REG_NOTES aren't
1114 propagated correctly by the reload phase, and it can cause bad
1115 code to be generated. We could still try:
1116
1117 st r1,r3; st r2,+r3; addi r3,-4
1118
1119 which saves 2 bytes and doesn't force longword alignment. */
1120 else if (MEM_P (dest) && REG_P (src))
1121 {
1122 emit_insn (gen_rtx_SET (VOIDmode,
1123 adjust_address (dest, SImode, 0),
1124 operand_subword (src, 0, TRUE, mode)));
1125
1126 emit_insn (gen_rtx_SET (VOIDmode,
1127 adjust_address (dest, SImode, UNITS_PER_WORD),
1128 operand_subword (src, 1, TRUE, mode)));
1129 }
1130
1131 else
1132 gcc_unreachable ();
1133
1134 val = get_insns ();
1135 end_sequence ();
1136 return val;
1137 }
1138
1139 \f
1140 static int
1141 m32r_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
1142 tree type, bool named ATTRIBUTE_UNUSED)
1143 {
1144 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1145
1146 int words;
1147 unsigned int size =
1148 (((mode == BLKmode && type)
1149 ? (unsigned int) int_size_in_bytes (type)
1150 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1151 / UNITS_PER_WORD;
1152
1153 if (*cum >= M32R_MAX_PARM_REGS)
1154 words = 0;
1155 else if (*cum + size > M32R_MAX_PARM_REGS)
1156 words = (*cum + size) - M32R_MAX_PARM_REGS;
1157 else
1158 words = 0;
1159
1160 return words * UNITS_PER_WORD;
1161 }
1162
1163 /* The ROUND_ADVANCE* macros are local to this file. */
1164 /* Round SIZE up to a word boundary. */
1165 #define ROUND_ADVANCE(SIZE) \
1166 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
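/* With UNITS_PER_WORD == 4, ROUND_ADVANCE maps byte sizes 1-4 to 1 word
   and 5-8 to 2 words.  */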
1167
1168 /* Round arg MODE/TYPE up to the next word boundary. */
1169 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1170 ((MODE) == BLKmode \
1171 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1172 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1173
1174 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1175 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1176
1177 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1178 a reg. This includes arguments that have to be passed by reference as the
1179 pointer to them is passed in a reg if one is available (and that is what
1180 we're given).
1181 This macro is only used in this file. */
1182 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1183 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1184
1185 /* Determine where to put an argument to a function.
1186 Value is zero to push the argument on the stack,
1187 or a hard register in which to store the argument.
1188
1189 MODE is the argument's machine mode.
1190 TYPE is the data type of the argument (as a tree).
1191 This is null for libcalls where that information may
1192 not be available.
1193 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1194 the preceding args and about the function being called.
1195 NAMED is nonzero if this argument is a named parameter
1196 (otherwise it is an extra parameter matching an ellipsis). */
1197 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1198 and the rest are pushed. */
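/* For example, with word-sized arguments, a call

     foo (a, b, c, d, e);

   passes the first M32R_MAX_PARM_REGS arguments in registers starting at
   r0 and pushes the remainder; if M32R_MAX_PARM_REGS is 4, `e' goes on
   the stack.  */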
1199
1200 static rtx
1201 m32r_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1202 const_tree type ATTRIBUTE_UNUSED,
1203 bool named ATTRIBUTE_UNUSED)
1204 {
1205 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1206
1207 return (PASS_IN_REG_P (*cum, mode, type)
1208 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1209 : NULL_RTX);
1210 }
1211
1212 /* Update the data in CUM to advance over an argument
1213 of mode MODE and data type TYPE.
1214 (TYPE is null for libcalls where that information may not be available.) */
1215
1216 static void
1217 m32r_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1218 const_tree type, bool named ATTRIBUTE_UNUSED)
1219 {
1220 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1221
1222 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1223 + ROUND_ADVANCE_ARG (mode, type));
1224 }
1225
1226 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1227
1228 static bool
1229 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1230 {
1231 cumulative_args_t dummy = pack_cumulative_args (NULL);
1232
1233 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1234 }
1235
1236 /* Worker function for TARGET_FUNCTION_VALUE. */
1237
1238 static rtx
1239 m32r_function_value (const_tree valtype,
1240 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1241 bool outgoing ATTRIBUTE_UNUSED)
1242 {
1243 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1244 }
1245
1246 /* Worker function for TARGET_LIBCALL_VALUE. */
1247
1248 static rtx
1249 m32r_libcall_value (enum machine_mode mode,
1250 const_rtx fun ATTRIBUTE_UNUSED)
1251 {
1252 return gen_rtx_REG (mode, 0);
1253 }
1254
1255 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1256
1257 ??? What about r1 in DI/DF values. */
1258
1259 static bool
1260 m32r_function_value_regno_p (const unsigned int regno)
1261 {
1262 return (regno == 0);
1263 }
1264
1265 /* Do any needed setup for a variadic function. For the M32R, we must
1266 create a register parameter block, and then copy any anonymous arguments
1267 in registers to memory.
1268
1269 CUM has not been updated for the last named argument which has type TYPE
1270 and mode MODE, and we rely on this fact. */
1271
1272 static void
1273 m32r_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
1274 tree type, int *pretend_size, int no_rtl)
1275 {
1276 int first_anon_arg;
1277
1278 if (no_rtl)
1279 return;
1280
1281 /* All BLKmode values are passed by reference. */
1282 gcc_assert (mode != BLKmode);
1283
1284 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1285 + ROUND_ADVANCE_ARG (mode, type));
1286
1287 if (first_anon_arg < M32R_MAX_PARM_REGS)
1288 {
1289 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1290 int first_reg_offset = first_anon_arg;
1291 /* Size in words to "pretend" allocate. */
1292 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1293 rtx regblock;
1294
1295 regblock = gen_frame_mem (BLKmode,
1296 plus_constant (arg_pointer_rtx,
1297 FIRST_PARM_OFFSET (0)));
1298 set_mem_alias_set (regblock, get_varargs_alias_set ());
1299 move_block_from_reg (first_reg_offset, regblock, size);
1300
1301 *pretend_size = (size * UNITS_PER_WORD);
1302 }
1303 }
1304
1305 \f
1306 /* Return true if INSN is a real instruction, rather than a USE, CLOBBER
     or ADDR_VEC.  */
1307
1308 static int
1309 m32r_is_insn (rtx insn)
1310 {
1311 return (NONDEBUG_INSN_P (insn)
1312 && GET_CODE (PATTERN (insn)) != USE
1313 && GET_CODE (PATTERN (insn)) != CLOBBER
1314 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1315 }
1316
1317 /* Increase the priority of long instructions so that the
1318 short instructions are scheduled ahead of the long ones. */
1319
1320 static int
1321 m32r_adjust_priority (rtx insn, int priority)
1322 {
1323 if (m32r_is_insn (insn)
1324 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1325 priority <<= 3;
1326
1327 return priority;
1328 }
1329
1330 \f
1331 /* Indicate how many instructions can be issued at the same time.
1332 This is sort of a lie. The m32r can issue only 1 long insn at
1333 once, but it can issue 2 short insns. The default therefore is
1334 set at 2, but this can be overridden by the command line option
1335 -missue-rate=1. */
1336
1337 static int
1338 m32r_issue_rate (void)
1339 {
1340 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1341 }
1342 \f
1343 /* Cost functions. */
1344 /* Memory is 3 times as expensive as registers.
1345 ??? Is that the right way to look at it? */
1346
1347 static int
1348 m32r_memory_move_cost (enum machine_mode mode,
1349 reg_class_t rclass ATTRIBUTE_UNUSED,
1350 bool in ATTRIBUTE_UNUSED)
1351 {
1352 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1353 return 6;
1354 else
1355 return 12;
1356 }
1357
1358 static bool
1359 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1360 bool speed ATTRIBUTE_UNUSED)
1361 {
1362 switch (code)
1363 {
1364 /* Small integers are as cheap as registers. 4 byte values can be
1365 fetched as immediate constants - let's give that the cost of an
1366 extra insn. */
1367 case CONST_INT:
1368 if (INT16_P (INTVAL (x)))
1369 {
1370 *total = 0;
1371 return true;
1372 }
1373 /* FALLTHRU */
1374
1375 case CONST:
1376 case LABEL_REF:
1377 case SYMBOL_REF:
1378 *total = COSTS_N_INSNS (1);
1379 return true;
1380
1381 case CONST_DOUBLE:
1382 {
1383 rtx high, low;
1384
1385 split_double (x, &high, &low);
1386 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1387 + !INT16_P (INTVAL (low)));
1388 return true;
1389 }
1390
1391 case MULT:
1392 *total = COSTS_N_INSNS (3);
1393 return true;
1394
1395 case DIV:
1396 case UDIV:
1397 case MOD:
1398 case UMOD:
1399 *total = COSTS_N_INSNS (10);
1400 return true;
1401
1402 default:
1403 return false;
1404 }
1405 }
1406 \f
1407 /* Type of function DECL.
1408
1409 The result is cached. To reset the cache at the end of a function,
1410 call with DECL = NULL_TREE. */
1411
1412 enum m32r_function_type
1413 m32r_compute_function_type (tree decl)
1414 {
1415 /* Cached value. */
1416 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1417 /* Last function we were called for. */
1418 static tree last_fn = NULL_TREE;
1419
1420 /* Resetting the cached value? */
1421 if (decl == NULL_TREE)
1422 {
1423 fn_type = M32R_FUNCTION_UNKNOWN;
1424 last_fn = NULL_TREE;
1425 return fn_type;
1426 }
1427
1428 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1429 return fn_type;
1430
1431 /* Compute function type. */
1432 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1433 ? M32R_FUNCTION_INTERRUPT
1434 : M32R_FUNCTION_NORMAL);
1435
1436 last_fn = decl;
1437 return fn_type;
1438 }
1439 \f/* Function prologue/epilogue handlers. */
1440
1441 /* M32R stack frames look like:
1442
1443 Before call After call
1444 +-----------------------+ +-----------------------+
1445 | | | |
1446 high | local variables, | | local variables, |
1447 mem | reg save area, etc. | | reg save area, etc. |
1448 | | | |
1449 +-----------------------+ +-----------------------+
1450 | | | |
1451 | arguments on stack. | | arguments on stack. |
1452 | | | |
1453 SP+0->+-----------------------+ +-----------------------+
1454 | reg parm save area, |
1455 | only created for |
1456 | variable argument |
1457 | functions |
1458 +-----------------------+
1459 | previous frame ptr |
1460 +-----------------------+
1461 | |
1462 | register save area |
1463 | |
1464 +-----------------------+
1465 | return address |
1466 +-----------------------+
1467 | |
1468 | local variables |
1469 | |
1470 +-----------------------+
1471 | |
1472 | alloca allocations |
1473 | |
1474 +-----------------------+
1475 | |
1476 low | arguments on stack |
1477 memory | |
1478 SP+0->+-----------------------+
1479
1480 Notes:
1481   1) The "reg parm save area" does not exist for non-variable-argument fns.
1482 2) The "reg parm save area" can be eliminated completely if we saved regs
1483 containing anonymous args separately but that complicates things too
1484 much (so it's not done).
1485 3) The return address is saved after the register save area so as to have as
1486 many insns as possible between the restoration of `lr' and the `jmp lr'. */
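/* The frame size computed by m32r_compute_frame_size below is therefore

     total_size = extra_size + pretend_size + args_size + var_size + reg_size

   rounded up to M32R_STACK_ALIGN.  */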
1487
1488 /* Structure to be filled in by m32r_compute_frame_size with register
1489 save masks, and offsets for the current function. */
1490 struct m32r_frame_info
1491 {
1492 unsigned int total_size; /* # bytes that the entire frame takes up. */
1493 unsigned int extra_size; /* # bytes of extra stuff. */
1494 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1495 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1496 unsigned int reg_size; /* # bytes needed to store regs. */
1497 unsigned int var_size; /* # bytes that variables take up. */
1498 unsigned int gmask; /* Mask of saved gp registers. */
1499 unsigned int save_fp; /* Nonzero if fp must be saved. */
1500 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1501 int initialized; /* Nonzero if frame size already calculated. */
1502 };
1503
1504 /* Current frame information calculated by m32r_compute_frame_size. */
1505 static struct m32r_frame_info current_frame_info;
1506
1507 /* Zero structure to initialize current_frame_info. */
1508 static struct m32r_frame_info zero_frame_info;
1509
1510 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1511 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1512
1513 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1514 The return address and frame pointer are treated separately.
1515 Don't consider them here. */
1516 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1517 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1518 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1519
1520 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1521 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1522
1523 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1524 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1525
1526 /* Return the bytes needed to compute the frame pointer from the current
1527 stack pointer.
1528
1529 SIZE is the size needed for local variables. */
1530
1531 unsigned int
1532 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1533 {
1534 unsigned int regno;
1535 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1536 unsigned int reg_size;
1537 unsigned int gmask;
1538 enum m32r_function_type fn_type;
1539 int interrupt_p;
1540 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1541 | crtl->profile);
1542
1543 var_size = M32R_STACK_ALIGN (size);
1544 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1545 pretend_size = crtl->args.pretend_args_size;
1546 extra_size = FIRST_PARM_OFFSET (0);
1547 total_size = extra_size + pretend_size + args_size + var_size;
1548 reg_size = 0;
1549 gmask = 0;
1550
1551 /* See if this is an interrupt handler. Call used registers must be saved
1552 for them too. */
1553 fn_type = m32r_compute_function_type (current_function_decl);
1554 interrupt_p = M32R_INTERRUPT_P (fn_type);
1555
1556 /* Calculate space needed for registers. */
1557 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1558 {
1559 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1560 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1561 {
1562 reg_size += UNITS_PER_WORD;
1563 gmask |= 1 << regno;
1564 }
1565 }
1566
1567 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1568 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1569
1570 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1571 * UNITS_PER_WORD);
1572 total_size += reg_size;
1573
1574 /* ??? Not sure this is necessary, and I don't think the epilogue
1575 handler will do the right thing if this changes total_size. */
1576 total_size = M32R_STACK_ALIGN (total_size);
1577
1578 /* frame_size = total_size - (pretend_size + reg_size); */
1579
1580 /* Save computed information. */
1581 current_frame_info.total_size = total_size;
1582 current_frame_info.extra_size = extra_size;
1583 current_frame_info.pretend_size = pretend_size;
1584 current_frame_info.var_size = var_size;
1585 current_frame_info.args_size = args_size;
1586 current_frame_info.reg_size = reg_size;
1587 current_frame_info.gmask = gmask;
1588 current_frame_info.initialized = reload_completed;
1589
1590 /* Ok, we're done. */
1591 return total_size;
1592 }
1593
1594 /* Worker function for TARGET_CAN_ELIMINATE. */
1595
1596 bool
1597 m32r_can_eliminate (const int from, const int to)
1598 {
1599 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1600 ? ! frame_pointer_needed
1601 : true);
1602 }
1603
1604 \f
1605 /* The table we use to reference PIC data. */
1606 static rtx global_offset_table;
1607
1608 static void
1609 m32r_reload_lr (rtx sp, int size)
1610 {
1611 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1612
1613 if (size == 0)
1614 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1615 else if (size < 32768)
1616 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1617 gen_rtx_PLUS (Pmode, sp,
1618 GEN_INT (size)))));
1619 else
1620 {
1621 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1622
1623 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1624 emit_insn (gen_addsi3 (tmp, tmp, sp));
1625 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1626 }
1627
1628 emit_use (lr);
1629 }
1630
1631 void
1632 m32r_load_pic_register (void)
1633 {
1634 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1635 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1636 GEN_INT (TARGET_MODEL_SMALL)));
1637
1638 /* Need to emit this whether or not we obey regdecls,
1639 since setjmp/longjmp can cause life info to screw up. */
1640 emit_use (pic_offset_table_rtx);
1641 }
1642
1643 /* Expand the m32r prologue as a series of insns. */
1644
1645 void
1646 m32r_expand_prologue (void)
1647 {
1648 int regno;
1649 int frame_size;
1650 unsigned int gmask;
1651 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1652 | crtl->profile);
1653
1654 if (! current_frame_info.initialized)
1655 m32r_compute_frame_size (get_frame_size ());
1656
1657 gmask = current_frame_info.gmask;
1658
1659 /* These cases shouldn't happen. Catch them now. */
1660 gcc_assert (current_frame_info.total_size || !gmask);
1661
1662 /* Allocate space for register arguments if this is a variadic function. */
1663 if (current_frame_info.pretend_size != 0)
1664 {
1665 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1666 the wrong result on a 64-bit host. */
1667 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1668 emit_insn (gen_addsi3 (stack_pointer_rtx,
1669 stack_pointer_rtx,
1670 GEN_INT (-pretend_size)));
1671 }
1672
1673 /* Save any registers we need to and set up fp. */
1674 if (current_frame_info.save_fp)
1675 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1676
1677 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1678
1679 /* Save any needed call-saved regs (and call-used if this is an
1680 interrupt handler). */
1681 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1682 {
1683 if ((gmask & (1 << regno)) != 0)
1684 emit_insn (gen_movsi_push (stack_pointer_rtx,
1685 gen_rtx_REG (Pmode, regno)));
1686 }
1687
1688 if (current_frame_info.save_lr)
1689 emit_insn (gen_movsi_push (stack_pointer_rtx,
1690 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1691
1692 /* Allocate the stack frame. */
1693 frame_size = (current_frame_info.total_size
1694 - (current_frame_info.pretend_size
1695 + current_frame_info.reg_size));
1696
1697 if (frame_size == 0)
1698 ; /* Nothing to do. */
1699 else if (frame_size <= 32768)
1700 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1701 GEN_INT (-frame_size)));
1702 else
1703 {
1704 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1705
1706 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1707 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1708 }
1709
1710 if (frame_pointer_needed)
1711 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1712
1713 if (crtl->profile)
1714 /* Push lr for mcount (form_pc, x). */
1715 emit_insn (gen_movsi_push (stack_pointer_rtx,
1716 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1717
1718 if (pic_reg_used)
1719 {
1720 m32r_load_pic_register ();
1721 m32r_reload_lr (stack_pointer_rtx,
1722 (crtl->profile ? 0 : frame_size));
1723 }
1724
1725 if (crtl->profile && !pic_reg_used)
1726 emit_insn (gen_blockage ());
1727 }
1728
1729 \f
1730 /* Set up the stack and frame pointer (if desired) for the function.
1731 Note, if this is changed, you need to mirror the changes in
1732 m32r_compute_frame_size which calculates the prolog size. */
1733
1734 static void
1735 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1736 {
1737 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1738
1739 /* If this is an interrupt handler, mark it as such. */
1740 if (M32R_INTERRUPT_P (fn_type))
1741 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1742
1743 if (! current_frame_info.initialized)
1744 m32r_compute_frame_size (size);
1745
1746 /* This is only for the human reader. */
1747 fprintf (file,
1748 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1749 ASM_COMMENT_START,
1750 current_frame_info.var_size,
1751 current_frame_info.reg_size / 4,
1752 current_frame_info.args_size,
1753 current_frame_info.extra_size);
1754 }
1755 \f
1756 /* Output RTL to pop register REGNO from the stack. */
1757
1758 static void
1759 pop (int regno)
1760 {
1761 rtx x;
1762
1763 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1764 stack_pointer_rtx));
1765 add_reg_note (x, REG_INC, stack_pointer_rtx);
1766 }
1767
1768 /* Expand the m32r epilogue as a series of insns. */
1769
1770 void
1771 m32r_expand_epilogue (void)
1772 {
1773 int regno;
1774 int noepilogue = FALSE;
1775 int total_size;
1776
1777 gcc_assert (current_frame_info.initialized);
1778 total_size = current_frame_info.total_size;
1779
1780 if (total_size == 0)
1781 {
1782 rtx insn = get_last_insn ();
1783
1784 /* If the last insn was a BARRIER, we don't have to write any code
1785 because a jump (aka return) was put there. */
1786 if (insn && NOTE_P (insn))
1787 insn = prev_nonnote_insn (insn);
1788 if (insn && BARRIER_P (insn))
1789 noepilogue = TRUE;
1790 }
1791
1792 if (!noepilogue)
1793 {
1794 unsigned int var_size = current_frame_info.var_size;
1795 unsigned int args_size = current_frame_info.args_size;
1796 unsigned int gmask = current_frame_info.gmask;
1797 int can_trust_sp_p = !cfun->calls_alloca;
1798
1799 if (flag_exceptions)
1800 emit_insn (gen_blockage ());
1801
1802 /* The first thing to do is point the sp at the bottom of the register
1803 save area. */
1804 if (can_trust_sp_p)
1805 {
1806 unsigned int reg_offset = var_size + args_size;
1807
1808 if (reg_offset == 0)
1809 ; /* Nothing to do. */
1810 else if (reg_offset < 32768)
1811 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1812 GEN_INT (reg_offset)));
1813 else
1814 {
1815 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1816
1817 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1818 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1819 tmp));
1820 }
1821 }
1822 else if (frame_pointer_needed)
1823 {
1824 unsigned int reg_offset = var_size + args_size;
1825
1826 if (reg_offset == 0)
1827 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1828 else if (reg_offset < 32768)
1829 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1830 GEN_INT (reg_offset)));
1831 else
1832 {
1833 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1834
1835 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1836 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1837 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1838 tmp));
1839 }
1840 }
1841 else
1842 gcc_unreachable ();
1843
1844 if (current_frame_info.save_lr)
1845 pop (RETURN_ADDR_REGNUM);
1846
1847 /* Restore any saved registers, in reverse order of course. */
1848 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1849 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1850 {
1851 if ((gmask & (1L << regno)) != 0)
1852 pop (regno);
1853 }
1854
1855 if (current_frame_info.save_fp)
1856 pop (FRAME_POINTER_REGNUM);
1857
1858 /* Remove varargs area if present. */
1859 if (current_frame_info.pretend_size != 0)
1860 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1861 GEN_INT (current_frame_info.pretend_size)));
1862
1863 emit_insn (gen_blockage ());
1864 }
1865 }
1866
1867 /* Do any necessary cleanup after a function to restore stack, frame,
1868 and regs. */
1869
1870 static void
1871 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1872 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1873 {
1874 /* Reset state info for each function. */
1875 current_frame_info = zero_frame_info;
1876 m32r_compute_function_type (NULL_TREE);
1877 }
1878 \f
1879 /* Return nonzero if this function is known to have a null or 1 instruction
1880 epilogue. */
1881
1882 int
1883 direct_return (void)
1884 {
1885 if (!reload_completed)
1886 return FALSE;
1887
1888 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1889 return FALSE;
1890
1891 if (! current_frame_info.initialized)
1892 m32r_compute_frame_size (get_frame_size ());
1893
1894 return current_frame_info.total_size == 0;
1895 }
1896
1897 \f
1898 /* PIC. */
1899
1900 int
1901 m32r_legitimate_pic_operand_p (rtx x)
1902 {
1903 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1904 return 0;
1905
1906 if (GET_CODE (x) == CONST
1907 && GET_CODE (XEXP (x, 0)) == PLUS
1908 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1909 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1910 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1911 return 0;
1912
1913 return 1;
1914 }
1915
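/* Legitimize address ORIG for PIC. A sketch of what the code below does:
   label_refs and symbols known to be local are loaded as a GOTOFF
   displacement which is then added to the PIC register, while other
   symbols go through the GOT: the address of the GOT slot is computed
   from the PIC register and the entry is loaded through a const mem.
   CONST PLUS expressions are legitimized recursively, and small integer
   offsets are folded back in with plus_constant(). REG, if nonzero, is
   the register to use for the result. */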
1916 rtx
1917 m32r_legitimize_pic_address (rtx orig, rtx reg)
1918 {
1919 #ifdef DEBUG_PIC
1920 printf("m32r_legitimize_pic_address()\n");
1921 #endif
1922
1923 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1924 {
1925 rtx pic_ref, address;
1926 int subregs = 0;
1927
1928 if (reg == 0)
1929 {
1930 gcc_assert (!reload_in_progress && !reload_completed);
1931 reg = gen_reg_rtx (Pmode);
1932
1933 subregs = 1;
1934 }
1935
1936 if (subregs)
1937 address = gen_reg_rtx (Pmode);
1938 else
1939 address = reg;
1940
1941 crtl->uses_pic_offset_table = 1;
1942
1943 if (GET_CODE (orig) == LABEL_REF
1944 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1945 {
1946 emit_insn (gen_gotoff_load_addr (reg, orig));
1947 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1948 return reg;
1949 }
1950
1951 emit_insn (gen_pic_load_addr (address, orig));
1952
1953 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1954 pic_ref = gen_const_mem (Pmode, address);
1955 emit_move_insn (reg, pic_ref);
1956 return reg;
1957 }
1958 else if (GET_CODE (orig) == CONST)
1959 {
1960 rtx base, offset;
1961
1962 if (GET_CODE (XEXP (orig, 0)) == PLUS
1963 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1964 return orig;
1965
1966 if (reg == 0)
1967 {
1968 gcc_assert (!reload_in_progress && !reload_completed);
1969 reg = gen_reg_rtx (Pmode);
1970 }
1971
1972 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1973 {
1974 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1975 if (base == reg)
1976 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1977 else
1978 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1979 }
1980 else
1981 return orig;
1982
1983 if (CONST_INT_P (offset))
1984 {
1985 if (INT16_P (INTVAL (offset)))
1986 return plus_constant (base, INTVAL (offset));
1987 else
1988 {
1989 gcc_assert (! reload_in_progress && ! reload_completed);
1990 offset = force_reg (Pmode, offset);
1991 }
1992 }
1993
1994 return gen_rtx_PLUS (Pmode, base, offset);
1995 }
1996
1997 return orig;
1998 }
1999
2000 static rtx
2001 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2002 enum machine_mode mode ATTRIBUTE_UNUSED)
2003 {
2004 if (flag_pic)
2005 return m32r_legitimize_pic_address (x, NULL_RTX);
2006 else
2007 return x;
2008 }
2009
2010 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2011
2012 static bool
2013 m32r_mode_dependent_address_p (const_rtx addr)
2014 {
2015 if (GET_CODE (addr) == LO_SUM)
2016 return true;
2017
2018 return false;
2019 }
2020 \f
2021 /* Nested function support. */
2022
2023 /* Emit RTL insns to initialize the variable parts of a trampoline.
2024 FNADDR is an RTX for the address of the function's pure code.
2025 CXT is an RTX for the static chain value for the function. */
2026
2027 void
2028 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2029 rtx fnaddr ATTRIBUTE_UNUSED,
2030 rtx cxt ATTRIBUTE_UNUSED)
2031 {
2032 }
2033 \f
2034 static void
2035 m32r_file_start (void)
2036 {
2037 default_file_start ();
2038
2039 if (flag_verbose_asm)
2040 fprintf (asm_out_file,
2041 "%s M32R/D special options: -G %d\n",
2042 ASM_COMMENT_START, g_switch_value);
2043
2044 if (TARGET_LITTLE_ENDIAN)
2045 fprintf (asm_out_file, "\t.little\n");
2046 }
2047 \f
2048 /* Print operand X (an rtx) in assembler syntax to file FILE.
2049 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2050 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2051
2052 static void
2053 m32r_print_operand (FILE * file, rtx x, int code)
2054 {
2055 rtx addr;
2056
2057 switch (code)
2058 {
2059 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2060 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2061 case 's':
2062 if (REG_P (x))
2063 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2064 else
2065 output_operand_lossage ("invalid operand to %%s code");
2066 return;
2067
2068 case 'p':
2069 if (REG_P (x))
2070 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2071 else
2072 output_operand_lossage ("invalid operand to %%p code");
2073 return;
2074
2075 case 'R' :
2076 /* Write second word of DImode or DFmode reference,
2077 register or memory. */
2078 if (REG_P (x))
2079 fputs (reg_names[REGNO (x)+1], file);
2080 else if (MEM_P (x))
2081 {
2082 fprintf (file, "@(");
2083 /* Handle possible auto-increment. Since it is pre-increment and
2084 we have already done it, we can just use an offset of four. */
2085 /* ??? This is taken from rs6000.c I think. I don't think it is
2086 currently necessary, but keep it around. */
2087 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2088 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2089 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2090 else
2091 output_address (plus_constant (XEXP (x, 0), 4));
2092 fputc (')', file);
2093 }
2094 else
2095 output_operand_lossage ("invalid operand to %%R code");
2096 return;
2097
2098 case 'H' : /* High word. */
2099 case 'L' : /* Low word. */
2100 if (REG_P (x))
2101 {
2102 /* L = least significant word, H = most significant word. */
2103 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2104 fputs (reg_names[REGNO (x)], file);
2105 else
2106 fputs (reg_names[REGNO (x)+1], file);
2107 }
2108 else if (CONST_INT_P (x)
2109 || GET_CODE (x) == CONST_DOUBLE)
2110 {
2111 rtx first, second;
2112
2113 split_double (x, &first, &second);
2114 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2115 code == 'L' ? INTVAL (first) : INTVAL (second));
2116 }
2117 else
2118 output_operand_lossage ("invalid operand to %%H/%%L code");
2119 return;
2120
2121 case 'A' :
2122 {
2123 char str[30];
2124
2125 if (GET_CODE (x) != CONST_DOUBLE
2126 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2127 fatal_insn ("bad insn for 'A'", x);
2128
2129 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2130 fprintf (file, "%s", str);
2131 return;
2132 }
2133
2134 case 'B' : /* Bottom half. */
2135 case 'T' : /* Top half. */
2136 /* Output the argument to a `seth' insn (sets the top half-word).
2137 For constants, output the arguments of a seth/or3 pair that sets
2138 the top and bottom halves. For symbols, output the arguments of a
2139 seth/add3 pair instead. The difference exists because seth/or3 is
2140 more readable for constants, but symbols must use the same scheme
2141 as the `ld' and `st' insns (their 16-bit addend is signed). */
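/* Illustrative example (r4 is just a placeholder register): for the
   constant 0x12345678, `%T' prints 0x1234 and `%B' prints 0x5678, so a

       seth r4, #0x1234
       or3  r4, r4, #0x5678

   pair builds the full word. For a symbol `foo', `%T' prints shigh(foo)
   and `%B' prints low(foo) (or sda(foo) for small-data symbols), for use
   in the seth/add3 scheme described above, where shigh() compensates for
   the sign-extension of the 16-bit add3 immediate. */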
2142 switch (GET_CODE (x))
2143 {
2144 case CONST_INT :
2145 case CONST_DOUBLE :
2146 {
2147 rtx first, second;
2148
2149 split_double (x, &first, &second);
2150 x = WORDS_BIG_ENDIAN ? second : first;
2151 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2152 (code == 'B'
2153 ? INTVAL (x) & 0xffff
2154 : (INTVAL (x) >> 16) & 0xffff));
2155 }
2156 return;
2157 case CONST :
2158 case SYMBOL_REF :
2159 if (code == 'B'
2160 && small_data_operand (x, VOIDmode))
2161 {
2162 fputs ("sda(", file);
2163 output_addr_const (file, x);
2164 fputc (')', file);
2165 return;
2166 }
2167 /* fall through */
2168 case LABEL_REF :
2169 fputs (code == 'T' ? "shigh(" : "low(", file);
2170 output_addr_const (file, x);
2171 fputc (')', file);
2172 return;
2173 default :
2174 output_operand_lossage ("invalid operand to %%T/%%B code");
2175 return;
2176 }
2177 break;
2178
2179 case 'U' :
2180 /* ??? wip */
2181 /* Output a load/store with update indicator if appropriate. */
2182 if (MEM_P (x))
2183 {
2184 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2185 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2186 fputs (".a", file);
2187 }
2188 else
2189 output_operand_lossage ("invalid operand to %%U code");
2190 return;
2191
2192 case 'N' :
2193 /* Print a constant value negated. */
2194 if (CONST_INT_P (x))
2195 output_addr_const (file, GEN_INT (- INTVAL (x)));
2196 else
2197 output_operand_lossage ("invalid operand to %%N code");
2198 return;
2199
2200 case 'X' :
2201 /* Print a const_int in hex. Used in comments. */
2202 if (CONST_INT_P (x))
2203 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2204 return;
2205
2206 case '#' :
2207 fputs (IMMEDIATE_PREFIX, file);
2208 return;
2209
2210 case 0 :
2211 /* Do nothing special. */
2212 break;
2213
2214 default :
2215 /* Unknown flag. */
2216 output_operand_lossage ("invalid operand output code");
2217 }
2218
2219 switch (GET_CODE (x))
2220 {
2221 case REG :
2222 fputs (reg_names[REGNO (x)], file);
2223 break;
2224
2225 case MEM :
2226 addr = XEXP (x, 0);
2227 if (GET_CODE (addr) == PRE_INC)
2228 {
2229 if (!REG_P (XEXP (addr, 0)))
2230 fatal_insn ("pre-increment address is not a register", x);
2231
2232 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2233 }
2234 else if (GET_CODE (addr) == PRE_DEC)
2235 {
2236 if (!REG_P (XEXP (addr, 0)))
2237 fatal_insn ("pre-decrement address is not a register", x);
2238
2239 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2240 }
2241 else if (GET_CODE (addr) == POST_INC)
2242 {
2243 if (!REG_P (XEXP (addr, 0)))
2244 fatal_insn ("post-increment address is not a register", x);
2245
2246 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2247 }
2248 else
2249 {
2250 fputs ("@(", file);
2251 output_address (XEXP (x, 0));
2252 fputc (')', file);
2253 }
2254 break;
2255
2256 case CONST_DOUBLE :
2257 /* We handle SFmode constants here as output_addr_const doesn't. */
2258 if (GET_MODE (x) == SFmode)
2259 {
2260 REAL_VALUE_TYPE d;
2261 long l;
2262
2263 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2264 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2265 fprintf (file, "0x%08lx", l);
2266 break;
2267 }
2268
2269 /* Fall through. Let output_addr_const deal with it. */
2270
2271 default :
2272 output_addr_const (file, x);
2273 break;
2274 }
2275 }
2276
2277 /* Print a memory address as an operand to reference that memory location. */
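/* For example, an address of the form (plus (reg) (const_int 8)) is
   printed as "8,r4" (the offset comes first, as in the manual), and a
   LO_SUM of a register and a symbol as "low(foo),r4", or "sda(foo),r4"
   for small-data symbols; the surrounding "@(...)" is added by
   m32r_print_operand() when it prints the containing MEM. The register
   and symbol names here are placeholders. */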
2278
2279 static void
2280 m32r_print_operand_address (FILE * file, rtx addr)
2281 {
2282 rtx base;
2283 rtx index = 0;
2284 int offset = 0;
2285
2286 switch (GET_CODE (addr))
2287 {
2288 case REG :
2289 fputs (reg_names[REGNO (addr)], file);
2290 break;
2291
2292 case PLUS :
2293 if (CONST_INT_P (XEXP (addr, 0)))
2294 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2295 else if (CONST_INT_P (XEXP (addr, 1)))
2296 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2297 else
2298 base = XEXP (addr, 0), index = XEXP (addr, 1);
2299 if (REG_P (base))
2300 {
2301 /* Print the offset first (if present) to conform to the manual. */
2302 if (index == 0)
2303 {
2304 if (offset != 0)
2305 fprintf (file, "%d,", offset);
2306 fputs (reg_names[REGNO (base)], file);
2307 }
2308 /* The chip doesn't support this, but it is left in for generality. */
2309 else if (REG_P (index))
2310 fprintf (file, "%s,%s",
2311 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2312 /* Not sure this can happen, but leave in for now. */
2313 else if (GET_CODE (index) == SYMBOL_REF)
2314 {
2315 output_addr_const (file, index);
2316 fputc (',', file);
2317 fputs (reg_names[REGNO (base)], file);
2318 }
2319 else
2320 fatal_insn ("bad address", addr);
2321 }
2322 else if (GET_CODE (base) == LO_SUM)
2323 {
2324 gcc_assert (!index && REG_P (XEXP (base, 0)));
2325 if (small_data_operand (XEXP (base, 1), VOIDmode))
2326 fputs ("sda(", file);
2327 else
2328 fputs ("low(", file);
2329 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2330 fputs ("),", file);
2331 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2332 }
2333 else
2334 fatal_insn ("bad address", addr);
2335 break;
2336
2337 case LO_SUM :
2338 if (!REG_P (XEXP (addr, 0)))
2339 fatal_insn ("lo_sum not of register", addr);
2340 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2341 fputs ("sda(", file);
2342 else
2343 fputs ("low(", file);
2344 output_addr_const (file, XEXP (addr, 1));
2345 fputs ("),", file);
2346 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2347 break;
2348
2349 case PRE_INC : /* Assume SImode. */
2350 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2351 break;
2352
2353 case PRE_DEC : /* Assume SImode. */
2354 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2355 break;
2356
2357 case POST_INC : /* Assume SImode. */
2358 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2359 break;
2360
2361 default :
2362 output_addr_const (file, addr);
2363 break;
2364 }
2365 }
2366
2367 static bool
2368 m32r_print_operand_punct_valid_p (unsigned char code)
2369 {
2370 return m32r_punct_chars[code];
2371 }
2372
2373 /* Return true if the two operands are the constants 0 and 1, in either order. */
2374
2375 int
2376 zero_and_one (rtx operand1, rtx operand2)
2377 {
2378 return
2379 CONST_INT_P (operand1)
2380 && CONST_INT_P (operand2)
2381 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2382 || ((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2383 }
2384
2385 /* Generate the correct assembler code to handle the conditional loading of a
2386 value into a register. It is known that the operands satisfy the
2387 conditional_move_operand() function above. The destination is operand[0].
2388 The condition is operand [1]. The 'true' value is operand [2] and the
2389 'false' value is operand [3]. */
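/* A minimal example (r4 is a placeholder for the destination register):
   for (set r4 (if_then_else (eq ...) (const_int 1) (const_int 0))) this
   returns just "mvfc r4, cbr", copying the condition bit register into
   r4. When the comparison code is NE the 'true' and 'false' values are
   swapped first, and when the resulting 'true' value is 0 an
   "xor3 r4, r4, #1" is appended to invert the copied bit. */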
2390
2391 char *
2392 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2393 {
2394 static char buffer [100];
2395 const char * dest = reg_names [REGNO (operands [0])];
2396
2397 buffer [0] = 0;
2398
2399 /* Destination must be a register. */
2400 gcc_assert (REG_P (operands [0]));
2401 gcc_assert (conditional_move_operand (operands [2], SImode));
2402 gcc_assert (conditional_move_operand (operands [3], SImode));
2403
2404 /* Check to see if the test is reversed. */
2405 if (GET_CODE (operands [1]) == NE)
2406 {
2407 rtx tmp = operands [2];
2408 operands [2] = operands [3];
2409 operands [3] = tmp;
2410 }
2411
2412 sprintf (buffer, "mvfc %s, cbr", dest);
2413
2414 /* If the true value was '0' then we need to invert the result of the move. */
2415 if (INTVAL (operands [2]) == 0)
2416 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2417 dest, dest);
2418
2419 return buffer;
2420 }
2421
2422 /* Returns true if the registers contained in the two
2423 rtl expressions are different. */
2424
2425 int
2426 m32r_not_same_reg (rtx a, rtx b)
2427 {
2428 int reg_a = -1;
2429 int reg_b = -2;
2430
2431 while (GET_CODE (a) == SUBREG)
2432 a = SUBREG_REG (a);
2433
2434 if (REG_P (a))
2435 reg_a = REGNO (a);
2436
2437 while (GET_CODE (b) == SUBREG)
2438 b = SUBREG_REG (b);
2439
2440 if (REG_P (b))
2441 reg_b = REGNO (b);
2442
2443 return reg_a != reg_b;
2444 }
2445
2446 \f
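/* Build a SYMBOL_REF for the function NAME and record the code model
   selected by TARGET_MODEL_SMALL/MEDIUM/LARGE in its SYMBOL_REF_FLAGS
   (via SYMBOL_FLAG_MODEL_SHIFT), presumably so that the call patterns
   can later pick an addressing sequence that matches the model. */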
2447 rtx
2448 m32r_function_symbol (const char *name)
2449 {
2450 int extra_flags = 0;
2451 enum m32r_model model;
2452 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2453
2454 if (TARGET_MODEL_SMALL)
2455 model = M32R_MODEL_SMALL;
2456 else if (TARGET_MODEL_MEDIUM)
2457 model = M32R_MODEL_MEDIUM;
2458 else if (TARGET_MODEL_LARGE)
2459 model = M32R_MODEL_LARGE;
2460 else
2461 gcc_unreachable (); /* Shouldn't happen. */
2462 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2463
2464 if (extra_flags)
2465 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2466
2467 return sym;
2468 }
2469
2470 /* Use a library function to move some bytes. */
2471
2472 static void
2473 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2474 {
2475 /* We want to pass the size as Pmode, which will normally be SImode
2476 but will be DImode if we are using 64-bit longs and pointers. */
2477 if (GET_MODE (bytes_rtx) != VOIDmode
2478 && GET_MODE (bytes_rtx) != Pmode)
2479 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2480
2481 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2482 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2483 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2484 TYPE_UNSIGNED (sizetype)),
2485 TYPE_MODE (sizetype));
2486 }
2487
2488 /* Expand string/block move operations.
2489
2490 operands[0] is the pointer to the destination.
2491 operands[1] is the pointer to the source.
2492 operands[2] is the number of bytes to move.
2493 operands[3] is the alignment.
2494
2495 Returns 1 upon success, 0 otherwise. */
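/* In outline: when the size is a compile-time constant, the block is
   word aligned and we are not optimizing for size, the copy is expanded
   inline as movmemsi_internal insns that each move at most
   MAX_MOVE_BYTES, wrapped in a loop when more than one full chunk is
   needed, followed by one shorter movmemsi_internal for any leftover
   bytes. All other cases are handled by a memcpy library call via
   block_move_call(). */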
2496
2497 int
2498 m32r_expand_block_move (rtx operands[])
2499 {
2500 rtx orig_dst = operands[0];
2501 rtx orig_src = operands[1];
2502 rtx bytes_rtx = operands[2];
2503 rtx align_rtx = operands[3];
2504 int constp = CONST_INT_P (bytes_rtx);
2505 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2506 int align = INTVAL (align_rtx);
2507 int leftover;
2508 rtx src_reg;
2509 rtx dst_reg;
2510
2511 if (constp && bytes <= 0)
2512 return 1;
2513
2514 /* Move the address into scratch registers. */
2515 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2516 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2517
2518 if (align > UNITS_PER_WORD)
2519 align = UNITS_PER_WORD;
2520
2521 /* If we prefer size over speed, always use a function call.
2522 If we do not know the size, use a function call.
2523 If the blocks are not word aligned, use a function call. */
2524 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2525 {
2526 block_move_call (dst_reg, src_reg, bytes_rtx);
2527 return 0;
2528 }
2529
2530 leftover = bytes % MAX_MOVE_BYTES;
2531 bytes -= leftover;
2532
2533 /* If necessary, generate a loop to handle the bulk of the copy. */
2534 if (bytes)
2535 {
2536 rtx label = NULL_RTX;
2537 rtx final_src = NULL_RTX;
2538 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2539 rtx rounded_total = GEN_INT (bytes);
2540 rtx new_dst_reg = gen_reg_rtx (SImode);
2541 rtx new_src_reg = gen_reg_rtx (SImode);
2542
2543 /* If we are going to have to perform this loop more than
2544 once, then generate a label and compute the address the
2545 source register will contain upon completion of the final
2546 iteration. */
2547 if (bytes > MAX_MOVE_BYTES)
2548 {
2549 final_src = gen_reg_rtx (Pmode);
2550
2551 if (INT16_P (bytes))
2552 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2553 else
2554 {
2555 emit_insn (gen_movsi (final_src, rounded_total));
2556 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2557 }
2558
2559 label = gen_label_rtx ();
2560 emit_label (label);
2561 }
2562
2563 /* It is known that m32r_output_block_move() will update src_reg to point
2564 to the word after the end of the source block, and dst_reg to point
2565 to the last word of the destination block, provided that the block
2566 is MAX_MOVE_BYTES long. */
2567 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2568 new_dst_reg, new_src_reg));
2569 emit_move_insn (dst_reg, new_dst_reg);
2570 emit_move_insn (src_reg, new_src_reg);
2571 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2572
2573 if (bytes > MAX_MOVE_BYTES)
2574 {
2575 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2576 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2577 }
2578 }
2579
2580 if (leftover)
2581 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2582 gen_reg_rtx (SImode),
2583 gen_reg_rtx (SImode)));
2584 return 1;
2585 }
2586
2587 \f
2588 /* Emit load/stores for a small constant word aligned block_move.
2589
2590 operands[0] is the memory address of the destination.
2591 operands[1] is the memory address of the source.
2592 operands[2] is the number of bytes to move.
2593 operands[3] and operands[4] describe the updated destination and source pointers.
2594 operands[5] and operands[6] are the temp registers used for the loads and stores. */
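/* As an illustration (register numbers are placeholders): with the
   destination address in r4, the source address in r3 and r5/r6 as the
   temp registers, the first 8 bytes of a copy are emitted as

       ld r5,@r3+
       ld r6,@r3+
       st r5,@r4
       st r6,@+r4

   and later 8-byte groups use "st ...,@+r4" for both stores, since only
   the very first store lacks the pre-increment. */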
2595
2596 void
2597 m32r_output_block_move (rtx insn, rtx operands[])
2598 {
2599 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2600 int first_time;
2601 int got_extra = 0;
2602
2603 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2604
2605 /* We do not have a post-increment store available, so the first set of
2606 stores is done without any increment, then the remaining ones can use
2607 the pre-increment addressing mode.
2608
2609 Note: m32r_expand_block_move() also relies upon this behavior when
2610 building loops to copy large blocks. */
2611 first_time = 1;
2612
2613 while (bytes > 0)
2614 {
2615 if (bytes >= 8)
2616 {
2617 if (first_time)
2618 {
2619 output_asm_insn ("ld\t%5, %p1", operands);
2620 output_asm_insn ("ld\t%6, %p1", operands);
2621 output_asm_insn ("st\t%5, @%0", operands);
2622 output_asm_insn ("st\t%6, %s0", operands);
2623 }
2624 else
2625 {
2626 output_asm_insn ("ld\t%5, %p1", operands);
2627 output_asm_insn ("ld\t%6, %p1", operands);
2628 output_asm_insn ("st\t%5, %s0", operands);
2629 output_asm_insn ("st\t%6, %s0", operands);
2630 }
2631
2632 bytes -= 8;
2633 }
2634 else if (bytes >= 4)
2635 {
2636 if (bytes > 4)
2637 got_extra = 1;
2638
2639 output_asm_insn ("ld\t%5, %p1", operands);
2640
2641 if (got_extra)
2642 output_asm_insn ("ld\t%6, %p1", operands);
2643
2644 if (first_time)
2645 output_asm_insn ("st\t%5, @%0", operands);
2646 else
2647 output_asm_insn ("st\t%5, %s0", operands);
2648
2649 bytes -= 4;
2650 }
2651 else
2652 {
2653 /* Get the entire next word, even though we do not want all of it.
2654 This saves us from doing several smaller loads, and we assume that
2655 we cannot cause a page fault when at least part of the word is in
2656 valid memory [since we don't get called if things aren't properly
2657 aligned]. */
2658 int dst_offset = first_time ? 0 : 4;
2659 /* The amount of increment we have to make to the
2660 destination pointer. */
2661 int dst_inc_amount = dst_offset + bytes - 4;
2662 /* The same for the source pointer. */
2663 int src_inc_amount = bytes;
2664 int last_shift;
2665 rtx my_operands[3];
2666
2667 /* If got_extra is true then we have already loaded
2668 the next word as part of loading and storing the previous word. */
2669 if (! got_extra)
2670 output_asm_insn ("ld\t%6, @%1", operands);
2671
2672 if (bytes >= 2)
2673 {
2674 bytes -= 2;
2675
2676 output_asm_insn ("sra3\t%5, %6, #16", operands);
2677 my_operands[0] = operands[5];
2678 my_operands[1] = GEN_INT (dst_offset);
2679 my_operands[2] = operands[0];
2680 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2681
2682 /* If there is a byte left to store then increment the
2683 destination address and shift the contents of the source
2684 register down by 8 bits. We could not do the address
2685 increment in the store half word instruction, because it does
2686 not have an auto increment mode. */
2687 if (bytes > 0) /* assert (bytes == 1) */
2688 {
2689 dst_offset += 2;
2690 last_shift = 8;
2691 }
2692 }
2693 else
2694 last_shift = 24;
2695
2696 if (bytes > 0)
2697 {
2698 my_operands[0] = operands[6];
2699 my_operands[1] = GEN_INT (last_shift);
2700 output_asm_insn ("srai\t%0, #%1", my_operands);
2701 my_operands[0] = operands[6];
2702 my_operands[1] = GEN_INT (dst_offset);
2703 my_operands[2] = operands[0];
2704 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2705 }
2706
2707 /* Update the destination pointer if needed. We have to do
2708 this so that the pattern matches what we output in this
2709 function. */
2710 if (dst_inc_amount
2711 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2712 {
2713 my_operands[0] = operands[0];
2714 my_operands[1] = GEN_INT (dst_inc_amount);
2715 output_asm_insn ("addi\t%0, #%1", my_operands);
2716 }
2717
2718 /* Update the source pointer if needed. We have to do this
2719 so that the pattern matches what we output in this
2720 function. */
2721 if (src_inc_amount
2722 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2723 {
2724 my_operands[0] = operands[1];
2725 my_operands[1] = GEN_INT (src_inc_amount);
2726 output_asm_insn ("addi\t%0, #%1", my_operands);
2727 }
2728
2729 bytes = 0;
2730 }
2731
2732 first_time = 0;
2733 }
2734 }
2735
2736 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2737
2738 int
2739 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2740 unsigned int new_reg)
2741 {
2742 /* Interrupt routines can't clobber any register that isn't already used. */
2743 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2744 && !df_regs_ever_live_p (new_reg))
2745 return 0;
2746
2747 return 1;
2748 }
2749
2750 rtx
2751 m32r_return_addr (int count)
2752 {
2753 if (count != 0)
2754 return const0_rtx;
2755
2756 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2757 }
2758
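/* Write the trampoline for a nested function into M_TRAMP. The first
   four SImode words are the fixed code part of the trampoline (stored
   byte-swapped on little-endian targets); the words at offsets 16 and 20
   are filled in with the static chain value and the address of FNDECL,
   which the stub presumably loads at run time. Finally the instruction
   cache covering the trampoline is flushed, either with the trap number
   in m32r_cache_flush_trap or by calling the function named by
   m32r_cache_flush_func. */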
2759 static void
2760 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2761 {
2762 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2763 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2764 0x017e8e17 : 0x178e7e01, SImode));
2765 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2766 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2767 0x0c00ae86 : 0x86ae000c, SImode));
2768 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2769 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2770 0xe627871e : 0x1e8727e6, SImode));
2771 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2772 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2773 0xc616c626 : 0x26c61fc6, SImode));
2774 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2775 chain_value);
2776 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2777 XEXP (DECL_RTL (fndecl), 0));
2778
2779 if (m32r_cache_flush_trap >= 0)
2780 emit_insn (gen_flush_icache
2781 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2782 gen_int_mode (m32r_cache_flush_trap, SImode)));
2783 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2784 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2785 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2786 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2787 GEN_INT (3), SImode);
2788 }
2789
2790 /* True if X is a reg that can be used as a base reg. */
2791
2792 static bool
2793 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2794 {
2795 if (! REG_P (x))
2796 return false;
2797
2798 if (strict)
2799 {
2800 if (GPR_P (REGNO (x)))
2801 return true;
2802 }
2803 else
2804 {
2805 if (GPR_P (REGNO (x))
2806 || REGNO (x) == ARG_POINTER_REGNUM
2807 || ! HARD_REGISTER_P (x))
2808 return true;
2809 }
2810
2811 return false;
2812 }
2813
2814 static inline bool
2815 m32r_rtx_ok_for_offset_p (const_rtx x)
2816 {
2817 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2818 }
2819
2820 static inline bool
2821 m32r_legitimate_offset_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2822 const_rtx x, bool strict)
2823 {
2824 if (GET_CODE (x) == PLUS
2825 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2826 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2827 return true;
2828
2829 return false;
2830 }
2831
2832 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2833 since more than one instruction will be required. */
2834
2835 static inline bool
2836 m32r_legitimate_lo_sum_address_p (enum machine_mode mode, const_rtx x,
2837 bool strict)
2838 {
2839 if (GET_CODE (x) == LO_SUM
2840 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2841 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2842 && CONSTANT_P (XEXP (x, 1)))
2843 return true;
2844
2845 return false;
2846 }
2847
2848 /* Is this a post-increment load operation? */
2849
2850 static inline bool
2851 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2852 {
2853 if ((mode == SImode || mode == SFmode)
2854 && GET_CODE (x) == POST_INC
2855 && REG_P (XEXP (x, 0))
2856 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2857 return true;
2858
2859 return false;
2860 }
2861
2862 /* Is this a pre-increment or pre-decrement store operation? */
2863
2864 static inline bool
2865 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2866 {
2867 if ((mode == SImode || mode == SFmode)
2868 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2869 && REG_P (XEXP (x, 0))
2870 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2871 return true;
2872
2873 return false;
2874 }
2875
2876 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
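/* Taken together with the helpers above, the accepted address forms are:
   a bare base register; a base register plus a signed 16-bit constant
   offset; a LO_SUM of a base register and a constant, for accesses of at
   most one word; a POST_INC base register (used for loads); and a
   PRE_INC or PRE_DEC base register (used for stores), the last two for
   SImode and SFmode only. */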
2877
2878 static bool
2879 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2880 {
2881 if (m32r_rtx_ok_for_base_p (x, strict)
2882 || m32r_legitimate_offset_address_p (mode, x, strict)
2883 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2884 || m32r_load_postinc_p (mode, x, strict)
2885 || m32r_store_preinc_predec_p (mode, x, strict))
2886 return true;
2887
2888 return false;
2889 }
2890
2891 static void
2892 m32r_conditional_register_usage (void)
2893 {
2894 if (flag_pic)
2895 {
2896 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2897 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2898 }
2899 }
2900
2901 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2902
2903 We don't allow (plus symbol large-constant) as the relocations can't
2904 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2905 We allow all CONST_DOUBLE's as the md file patterns will force the
2906 constant to memory if they can't handle them. */
2907
2908 static bool
2909 m32r_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2910 {
2911 return !(GET_CODE (x) == CONST
2912 && GET_CODE (XEXP (x, 0)) == PLUS
2913 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2914 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2915 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2916 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2917 }