1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "varasm.h"
29 #include "stringpool.h"
30 #include "calls.h"
31 #include "rtl.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "dbxout.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "function.h"
41 #include "expmed.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "emit-rtl.h"
45 #include "stmt.h"
46 #include "expr.h"
47 #include "recog.h"
48 #include "diagnostic-core.h"
49 #include "dominance.h"
50 #include "cfg.h"
51 #include "cfgrtl.h"
52 #include "cfganal.h"
53 #include "lcm.h"
54 #include "cfgbuild.h"
55 #include "cfgcleanup.h"
56 #include "predict.h"
57 #include "basic-block.h"
58 #include "df.h"
59 #include "tm_p.h"
60 #include "target.h"
61 #include "target-def.h"
62 #include "tm-constrs.h"
63 #include "opts.h"
64 #include "builtins.h"
65
66 /* Array of valid operand punctuation characters. */
67 static char m32r_punct_chars[256];
68
69 /* Machine-specific symbol_ref flags. */
70 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
71 #define SYMBOL_REF_MODEL(X) \
72 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
73
74 /* For string literals, etc. */
75 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
76
 77 /* Forward declarations. */
78 static void m32r_option_override (void);
79 static void init_reg_tables (void);
80 static void block_move_call (rtx, rtx, rtx);
81 static int m32r_is_insn (rtx);
82 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
83 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
84 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
85 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
86 static void m32r_print_operand (FILE *, rtx, int);
87 static void m32r_print_operand_address (FILE *, rtx);
88 static bool m32r_print_operand_punct_valid_p (unsigned char code);
89 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
90 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
91
92 static void m32r_file_start (void);
93
94 static int m32r_adjust_priority (rtx_insn *, int);
95 static int m32r_issue_rate (void);
96
97 static void m32r_encode_section_info (tree, rtx, int);
98 static bool m32r_in_small_data_p (const_tree);
99 static bool m32r_return_in_memory (const_tree, const_tree);
100 static rtx m32r_function_value (const_tree, const_tree, bool);
101 static rtx m32r_libcall_value (machine_mode, const_rtx);
102 static bool m32r_function_value_regno_p (const unsigned int);
103 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
104 tree, int *, int);
105 static void init_idents (void);
106 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
107 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
108 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
109 const_tree, bool);
110 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
111 tree, bool);
112 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
113 const_tree, bool);
114 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
115 const_tree, bool);
116 static bool m32r_can_eliminate (const int, const int);
117 static void m32r_conditional_register_usage (void);
118 static void m32r_trampoline_init (rtx, tree, rtx);
119 static bool m32r_legitimate_constant_p (machine_mode, rtx);
120 \f
121 /* M32R specific attributes. */
122
123 static const struct attribute_spec m32r_attribute_table[] =
124 {
125 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
126 affects_type_identity } */
127 { "interrupt", 0, 0, true, false, false, NULL, false },
128 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
129 false },
130 { NULL, 0, 0, false, false, false, NULL, false }
131 };
132 \f
133 /* Initialize the GCC target structure. */
134 #undef TARGET_ATTRIBUTE_TABLE
135 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
136
137 #undef TARGET_LEGITIMATE_ADDRESS_P
138 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
139 #undef TARGET_LEGITIMIZE_ADDRESS
140 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
141 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
142 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
143
144 #undef TARGET_ASM_ALIGNED_HI_OP
145 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
146 #undef TARGET_ASM_ALIGNED_SI_OP
147 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
148
149 #undef TARGET_PRINT_OPERAND
150 #define TARGET_PRINT_OPERAND m32r_print_operand
151 #undef TARGET_PRINT_OPERAND_ADDRESS
152 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
153 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
154 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
155
156 #undef TARGET_ASM_FUNCTION_PROLOGUE
157 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
158 #undef TARGET_ASM_FUNCTION_EPILOGUE
159 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
160
161 #undef TARGET_ASM_FILE_START
162 #define TARGET_ASM_FILE_START m32r_file_start
163
164 #undef TARGET_SCHED_ADJUST_PRIORITY
165 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
166 #undef TARGET_SCHED_ISSUE_RATE
167 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
168
169 #undef TARGET_OPTION_OVERRIDE
170 #define TARGET_OPTION_OVERRIDE m32r_option_override
171
172 #undef TARGET_ENCODE_SECTION_INFO
173 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
174 #undef TARGET_IN_SMALL_DATA_P
175 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
176
177
178 #undef TARGET_MEMORY_MOVE_COST
179 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
180 #undef TARGET_RTX_COSTS
181 #define TARGET_RTX_COSTS m32r_rtx_costs
182 #undef TARGET_ADDRESS_COST
183 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
184
185 #undef TARGET_PROMOTE_PROTOTYPES
186 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
187 #undef TARGET_RETURN_IN_MEMORY
188 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
189
190 #undef TARGET_FUNCTION_VALUE
191 #define TARGET_FUNCTION_VALUE m32r_function_value
192 #undef TARGET_LIBCALL_VALUE
193 #define TARGET_LIBCALL_VALUE m32r_libcall_value
194 #undef TARGET_FUNCTION_VALUE_REGNO_P
195 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
196
197 #undef TARGET_SETUP_INCOMING_VARARGS
198 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
199 #undef TARGET_MUST_PASS_IN_STACK
200 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
201 #undef TARGET_PASS_BY_REFERENCE
202 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
203 #undef TARGET_ARG_PARTIAL_BYTES
204 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
205 #undef TARGET_FUNCTION_ARG
206 #define TARGET_FUNCTION_ARG m32r_function_arg
207 #undef TARGET_FUNCTION_ARG_ADVANCE
208 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
209
210 #undef TARGET_CAN_ELIMINATE
211 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
212
213 #undef TARGET_CONDITIONAL_REGISTER_USAGE
214 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
215
216 #undef TARGET_TRAMPOLINE_INIT
217 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
218
219 #undef TARGET_LEGITIMATE_CONSTANT_P
220 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
221
222 struct gcc_target targetm = TARGET_INITIALIZER;
223 \f
224 /* Called by m32r_option_override to initialize various things. */
225
226 void
227 m32r_init (void)
228 {
229 init_reg_tables ();
230
231 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
232 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
233 m32r_punct_chars['#'] = 1;
234 m32r_punct_chars['@'] = 1; /* ??? no longer used */
235
236 /* Provide default value if not specified. */
237 if (!global_options_set.x_g_switch_value)
238 g_switch_value = SDATA_DEFAULT_SIZE;
239 }
240
241 static void
242 m32r_option_override (void)
243 {
244 /* These need to be done at start up.
245 It's convenient to do them here. */
246 m32r_init ();
247 SUBTARGET_OVERRIDE_OPTIONS;
248 }
249
 250 /* Vectors to keep interesting information about registers where it can easily
 251 be accessed. We used to use the actual mode value as the bit number, but
 252 there are (or may be) more than 32 modes now. Instead we use two tables: one
 253 indexed by hard register number, and one indexed by mode. */
254
255 /* The purpose of m32r_mode_class is to shrink the range of modes so that
256 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
257 mapped into one m32r_mode_class mode. */
258
259 enum m32r_mode_class
260 {
261 C_MODE,
262 S_MODE, D_MODE, T_MODE, O_MODE,
263 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
264 };
265
266 /* Modes for condition codes. */
267 #define C_MODES (1 << (int) C_MODE)
268
269 /* Modes for single-word and smaller quantities. */
270 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
271
272 /* Modes for double-word and smaller quantities. */
273 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
274
275 /* Modes for quad-word and smaller quantities. */
276 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
277
278 /* Modes for accumulators. */
279 #define A_MODES (1 << (int) A_MODE)
280
 281 /* Value is 1 if register/mode pair is acceptable on the M32R. */
282
283 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
284 {
285 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
286 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
287 S_MODES, C_MODES, A_MODES, A_MODES
288 };
289
290 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
291
292 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
293
294 static void
295 init_reg_tables (void)
296 {
297 int i;
298
299 for (i = 0; i < NUM_MACHINE_MODES; i++)
300 {
301 machine_mode m = (machine_mode) i;
302
303 switch (GET_MODE_CLASS (m))
304 {
305 case MODE_INT:
306 case MODE_PARTIAL_INT:
307 case MODE_COMPLEX_INT:
308 if (GET_MODE_SIZE (m) <= 4)
309 m32r_mode_class[i] = 1 << (int) S_MODE;
310 else if (GET_MODE_SIZE (m) == 8)
311 m32r_mode_class[i] = 1 << (int) D_MODE;
312 else if (GET_MODE_SIZE (m) == 16)
313 m32r_mode_class[i] = 1 << (int) T_MODE;
314 else if (GET_MODE_SIZE (m) == 32)
315 m32r_mode_class[i] = 1 << (int) O_MODE;
316 else
317 m32r_mode_class[i] = 0;
318 break;
319 case MODE_FLOAT:
320 case MODE_COMPLEX_FLOAT:
321 if (GET_MODE_SIZE (m) <= 4)
322 m32r_mode_class[i] = 1 << (int) SF_MODE;
323 else if (GET_MODE_SIZE (m) == 8)
324 m32r_mode_class[i] = 1 << (int) DF_MODE;
325 else if (GET_MODE_SIZE (m) == 16)
326 m32r_mode_class[i] = 1 << (int) TF_MODE;
327 else if (GET_MODE_SIZE (m) == 32)
328 m32r_mode_class[i] = 1 << (int) OF_MODE;
329 else
330 m32r_mode_class[i] = 0;
331 break;
332 case MODE_CC:
333 m32r_mode_class[i] = 1 << (int) C_MODE;
334 break;
335 default:
336 m32r_mode_class[i] = 0;
337 break;
338 }
339 }
340
341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
342 {
343 if (GPR_P (i))
344 m32r_regno_reg_class[i] = GENERAL_REGS;
345 else if (i == ARG_POINTER_REGNUM)
346 m32r_regno_reg_class[i] = GENERAL_REGS;
347 else
348 m32r_regno_reg_class[i] = NO_REGS;
349 }
350 }
351 \f
352 /* M32R specific attribute support.
353
354 interrupt - for interrupt functions
355
356 model - select code model used to access object
357
358 small: addresses use 24 bits, use bl to make calls
359 medium: addresses use 32 bits, use bl to make calls
360 large: addresses use 32 bits, use seth/add3/jl to make calls
361
362 Grep for MODEL in m32r.h for more info. */
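
/* Editorial illustration, not part of the original sources; the declaration
   names are invented.  Given the attribute table and handlers in this file,
   the two attributes are applied to declarations roughly like this, with the
   model name written as a bare identifier so that it matches the identifiers
   built by init_idents below:

     void timer_isr (void) __attribute__ ((interrupt));
     extern int big_table[4096] __attribute__ ((model (large)));
     static int counter __attribute__ ((model (__small__)));  */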
363
364 static tree small_ident1;
365 static tree small_ident2;
366 static tree medium_ident1;
367 static tree medium_ident2;
368 static tree large_ident1;
369 static tree large_ident2;
370
371 static void
372 init_idents (void)
373 {
374 if (small_ident1 == 0)
375 {
376 small_ident1 = get_identifier ("small");
377 small_ident2 = get_identifier ("__small__");
378 medium_ident1 = get_identifier ("medium");
379 medium_ident2 = get_identifier ("__medium__");
380 large_ident1 = get_identifier ("large");
381 large_ident2 = get_identifier ("__large__");
382 }
383 }
384
 385 /* Handle a "model" attribute; arguments as in
386 struct attribute_spec.handler. */
387 static tree
388 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
389 tree args, int flags ATTRIBUTE_UNUSED,
390 bool *no_add_attrs)
391 {
392 tree arg;
393
394 init_idents ();
395 arg = TREE_VALUE (args);
396
397 if (arg != small_ident1
398 && arg != small_ident2
399 && arg != medium_ident1
400 && arg != medium_ident2
401 && arg != large_ident1
402 && arg != large_ident2)
403 {
404 warning (OPT_Wattributes, "invalid argument of %qs attribute",
405 IDENTIFIER_POINTER (name));
406 *no_add_attrs = true;
407 }
408
409 return NULL_TREE;
410 }
411 \f
412 /* Encode section information of DECL, which is either a VAR_DECL,
413 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
414
415 For the M32R we want to record:
416
417 - whether the object lives in .sdata/.sbss.
418 - what code model should be used to access the object
419 */
420
421 static void
422 m32r_encode_section_info (tree decl, rtx rtl, int first)
423 {
424 int extra_flags = 0;
425 tree model_attr;
426 enum m32r_model model;
427
428 default_encode_section_info (decl, rtl, first);
429
430 if (!DECL_P (decl))
431 return;
432
433 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
434 if (model_attr)
435 {
436 tree id;
437
438 init_idents ();
439
440 id = TREE_VALUE (TREE_VALUE (model_attr));
441
442 if (id == small_ident1 || id == small_ident2)
443 model = M32R_MODEL_SMALL;
444 else if (id == medium_ident1 || id == medium_ident2)
445 model = M32R_MODEL_MEDIUM;
446 else if (id == large_ident1 || id == large_ident2)
447 model = M32R_MODEL_LARGE;
448 else
449 gcc_unreachable (); /* shouldn't happen */
450 }
451 else
452 {
453 if (TARGET_MODEL_SMALL)
454 model = M32R_MODEL_SMALL;
455 else if (TARGET_MODEL_MEDIUM)
456 model = M32R_MODEL_MEDIUM;
457 else if (TARGET_MODEL_LARGE)
458 model = M32R_MODEL_LARGE;
459 else
460 gcc_unreachable (); /* shouldn't happen */
461 }
462 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
463
464 if (extra_flags)
465 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
466 }
467
468 /* Only mark the object as being small data area addressable if
469 it hasn't been explicitly marked with a code model.
470
471 The user can explicitly put an object in the small data area with the
472 section attribute. If the object is in sdata/sbss and marked with a
473 code model do both [put the object in .sdata and mark it as being
474 addressed with a specific code model - don't mark it as being addressed
475 with an SDA reloc though]. This is ok and might be useful at times. If
476 the object doesn't fit the linker will give an error. */
477
478 static bool
479 m32r_in_small_data_p (const_tree decl)
480 {
481 const char *section;
482
483 if (TREE_CODE (decl) != VAR_DECL)
484 return false;
485
486 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
487 return false;
488
489 section = DECL_SECTION_NAME (decl);
490 if (section)
491 {
492 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
493 return true;
494 }
495 else
496 {
497 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
498 {
499 int size = int_size_in_bytes (TREE_TYPE (decl));
500
501 if (size > 0 && size <= g_switch_value)
502 return true;
503 }
504 }
505
506 return false;
507 }
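
/* Editorial illustration, not part of the original sources; the variable
   names are invented.  Under the rules in m32r_in_small_data_p above,

     int hits __attribute__ ((section (".sdata")));

   is treated as small-data addressable (it carries no model attribute),
   while an ordinary writable variable such as

     char small_buf[8];

   qualifies only when sdata is not disabled and its size is positive and no
   larger than g_switch_value (the -G limit, defaulting to SDATA_DEFAULT_SIZE
   in m32r_init).  */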
508
509 /* Do anything needed before RTL is emitted for each function. */
510
511 void
512 m32r_init_expanders (void)
513 {
514 /* ??? At one point there was code here. The function is left in
515 to make it easy to experiment. */
516 }
517 \f
518 int
519 call_operand (rtx op, machine_mode mode)
520 {
521 if (!MEM_P (op))
522 return 0;
523 op = XEXP (op, 0);
524 return call_address_operand (op, mode);
525 }
526
527 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
528
529 int
530 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
531 {
532 if (! TARGET_SDATA_USE)
533 return 0;
534
535 if (GET_CODE (op) == SYMBOL_REF)
536 return SYMBOL_REF_SMALL_P (op);
537
538 if (GET_CODE (op) == CONST
539 && GET_CODE (XEXP (op, 0)) == PLUS
540 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
541 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
542 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
543
544 return 0;
545 }
546
547 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
548
549 int
550 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
551 {
552 rtx sym;
553
554 if (flag_pic)
555 return 0;
556
557 if (GET_CODE (op) == LABEL_REF)
558 return TARGET_ADDR24;
559
560 if (GET_CODE (op) == SYMBOL_REF)
561 sym = op;
562 else if (GET_CODE (op) == CONST
563 && GET_CODE (XEXP (op, 0)) == PLUS
564 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
565 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
566 sym = XEXP (XEXP (op, 0), 0);
567 else
568 return 0;
569
570 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
571 return 1;
572
573 if (TARGET_ADDR24
574 && (CONSTANT_POOL_ADDRESS_P (sym)
575 || LIT_NAME_P (XSTR (sym, 0))))
576 return 1;
577
578 return 0;
579 }
580
581 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
582
583 int
584 addr32_operand (rtx op, machine_mode mode)
585 {
586 rtx sym;
587
588 if (GET_CODE (op) == LABEL_REF)
589 return TARGET_ADDR32;
590
591 if (GET_CODE (op) == SYMBOL_REF)
592 sym = op;
593 else if (GET_CODE (op) == CONST
594 && GET_CODE (XEXP (op, 0)) == PLUS
595 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
596 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
597 && ! flag_pic)
598 sym = XEXP (XEXP (op, 0), 0);
599 else
600 return 0;
601
602 return (! addr24_operand (sym, mode)
603 && ! small_data_operand (sym, mode));
604 }
605
606 /* Return 1 if OP is a function that can be called with the `bl' insn. */
607
608 int
609 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
610 {
611 if (flag_pic)
612 return 1;
613
614 if (GET_CODE (op) == SYMBOL_REF)
615 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
616
617 return TARGET_CALL26;
618 }
619
620 /* Return 1 if OP is a DImode const we want to handle inline.
621 This must match the code in the movdi pattern.
622 It is used by the 'G' constraint. */
623
624 int
625 easy_di_const (rtx op)
626 {
627 rtx high_rtx, low_rtx;
628 HOST_WIDE_INT high, low;
629
630 split_double (op, &high_rtx, &low_rtx);
631 high = INTVAL (high_rtx);
632 low = INTVAL (low_rtx);
633 /* Pick constants loadable with 2 16-bit `ldi' insns. */
634 if (high >= -128 && high <= 127
635 && low >= -128 && low <= 127)
636 return 1;
637 return 0;
638 }
639
640 /* Return 1 if OP is a DFmode const we want to handle inline.
641 This must match the code in the movdf pattern.
642 It is used by the 'H' constraint. */
643
644 int
645 easy_df_const (rtx op)
646 {
647 REAL_VALUE_TYPE r;
648 long l[2];
649
650 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
651 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
652 if (l[0] == 0 && l[1] == 0)
653 return 1;
654 if ((l[0] & 0xffff) == 0 && l[1] == 0)
655 return 1;
656 return 0;
657 }
658
659 /* Return 1 if OP is (mem (reg ...)).
660 This is used in insn length calcs. */
661
662 int
663 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
664 {
665 return MEM_P (op) && REG_P (XEXP (op, 0));
666 }
667
668 /* Return nonzero if TYPE must be passed by indirect reference. */
669
670 static bool
671 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
672 machine_mode mode, const_tree type,
673 bool named ATTRIBUTE_UNUSED)
674 {
675 int size;
676
677 if (type)
678 size = int_size_in_bytes (type);
679 else
680 size = GET_MODE_SIZE (mode);
681
682 return (size < 0 || size > 8);
683 }
684 \f
685 /* Comparisons. */
686
687 /* X and Y are two things to compare using CODE. Emit the compare insn and
688 return the rtx for compare [arg0 of the if_then_else].
689 If need_compare is true then the comparison insn must be generated, rather
690 than being subsumed into the following branch instruction. */
691
692 rtx
693 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
694 {
695 enum rtx_code compare_code;
696 enum rtx_code branch_code;
697 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
698 int must_swap = 0;
699
700 switch (code)
701 {
702 case EQ: compare_code = EQ; branch_code = NE; break;
703 case NE: compare_code = EQ; branch_code = EQ; break;
704 case LT: compare_code = LT; branch_code = NE; break;
705 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
706 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
707 case GE: compare_code = LT; branch_code = EQ; break;
708 case LTU: compare_code = LTU; branch_code = NE; break;
709 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
710 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
711 case GEU: compare_code = LTU; branch_code = EQ; break;
712
713 default:
714 gcc_unreachable ();
715 }
716
717 if (need_compare)
718 {
719 switch (compare_code)
720 {
721 case EQ:
722 if (satisfies_constraint_P (y) /* Reg equal to small const. */
723 && y != const0_rtx)
724 {
725 rtx tmp = gen_reg_rtx (SImode);
726
727 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
728 x = tmp;
729 y = const0_rtx;
730 }
731 else if (CONSTANT_P (y)) /* Reg equal to const. */
732 {
733 rtx tmp = force_reg (GET_MODE (x), y);
734 y = tmp;
735 }
736
737 if (register_operand (y, SImode) /* Reg equal to reg. */
738 || y == const0_rtx) /* Reg equal to zero. */
739 {
740 emit_insn (gen_cmp_eqsi_insn (x, y));
741
742 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
743 }
744 break;
745
746 case LT:
747 if (register_operand (y, SImode)
748 || satisfies_constraint_P (y))
749 {
750 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
751
752 switch (code)
753 {
754 case LT:
755 emit_insn (gen_cmp_ltsi_insn (x, y));
756 code = EQ;
757 break;
758 case LE:
759 if (y == const0_rtx)
760 tmp = const1_rtx;
761 else
762 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
763 emit_insn (gen_cmp_ltsi_insn (x, tmp));
764 code = EQ;
765 break;
766 case GT:
767 if (CONST_INT_P (y))
768 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
769 else
770 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
771 emit_insn (gen_cmp_ltsi_insn (x, tmp));
772 code = NE;
773 break;
774 case GE:
775 emit_insn (gen_cmp_ltsi_insn (x, y));
776 code = NE;
777 break;
778 default:
779 gcc_unreachable ();
780 }
781
782 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
783 }
784 break;
785
786 case LTU:
787 if (register_operand (y, SImode)
788 || satisfies_constraint_P (y))
789 {
790 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
791
792 switch (code)
793 {
794 case LTU:
795 emit_insn (gen_cmp_ltusi_insn (x, y));
796 code = EQ;
797 break;
798 case LEU:
799 if (y == const0_rtx)
800 tmp = const1_rtx;
801 else
802 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
803 emit_insn (gen_cmp_ltusi_insn (x, tmp));
804 code = EQ;
805 break;
806 case GTU:
807 if (CONST_INT_P (y))
808 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
809 else
810 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
811 emit_insn (gen_cmp_ltusi_insn (x, tmp));
812 code = NE;
813 break;
814 case GEU:
815 emit_insn (gen_cmp_ltusi_insn (x, y));
816 code = NE;
817 break;
818 default:
819 gcc_unreachable ();
820 }
821
822 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
823 }
824 break;
825
826 default:
827 gcc_unreachable ();
828 }
829 }
830 else
831 {
832 /* Reg/reg equal comparison. */
833 if (compare_code == EQ
834 && register_operand (y, SImode))
835 return gen_rtx_fmt_ee (code, CCmode, x, y);
836
837 /* Reg/zero signed comparison. */
838 if ((compare_code == EQ || compare_code == LT)
839 && y == const0_rtx)
840 return gen_rtx_fmt_ee (code, CCmode, x, y);
841
842 /* Reg/smallconst equal comparison. */
843 if (compare_code == EQ
844 && satisfies_constraint_P (y))
845 {
846 rtx tmp = gen_reg_rtx (SImode);
847
848 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
849 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
850 }
851
852 /* Reg/const equal comparison. */
853 if (compare_code == EQ
854 && CONSTANT_P (y))
855 {
856 rtx tmp = force_reg (GET_MODE (x), y);
857
858 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
859 }
860 }
861
862 if (CONSTANT_P (y))
863 {
864 if (must_swap)
865 y = force_reg (GET_MODE (x), y);
866 else
867 {
868 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
869
870 if (! ok_const)
871 y = force_reg (GET_MODE (x), y);
872 }
873 }
874
875 switch (compare_code)
876 {
877 case EQ :
878 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
879 break;
880 case LT :
881 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
882 break;
883 case LTU :
884 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
885 break;
886
887 default:
888 gcc_unreachable ();
889 }
890
891 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
892 }
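
/* Editorial note, not part of the original sources: as a worked example of
   the mapping in gen_compare above, a register-register GE comparison with
   need_compare == 0 selects compare_code = LT, branch_code = EQ and
   must_swap = 0, so the function emits the cmp_ltsi pattern for (x, y) and
   returns (eq (reg CC) (const_int 0)); the intent is that the condition bit
   is set when x < y, so branching when it is clear implements x >= y.  */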
893
894 bool
895 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
896 {
897 machine_mode mode = GET_MODE (op0);
898
899 gcc_assert (mode == SImode);
900 switch (code)
901 {
902 case EQ:
903 if (!register_operand (op1, mode))
904 op1 = force_reg (mode, op1);
905
906 if (TARGET_M32RX || TARGET_M32R2)
907 {
908 if (!reg_or_zero_operand (op2, mode))
909 op2 = force_reg (mode, op2);
910
911 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
912 return true;
913 }
914 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
915 {
916 emit_insn (gen_seq_zero_insn (op0, op1));
917 return true;
918 }
919
920 if (!reg_or_eq_int16_operand (op2, mode))
921 op2 = force_reg (mode, op2);
922
923 emit_insn (gen_seq_insn (op0, op1, op2));
924 return true;
925
926 case NE:
927 if (!CONST_INT_P (op2)
928 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
929 {
930 rtx reg;
931
932 if (reload_completed || reload_in_progress)
933 return false;
934
935 reg = gen_reg_rtx (SImode);
936 emit_insn (gen_xorsi3 (reg, op1, op2));
937 op1 = reg;
938
939 if (!register_operand (op1, mode))
940 op1 = force_reg (mode, op1);
941
942 emit_insn (gen_sne_zero_insn (op0, op1));
943 return true;
944 }
945 return false;
946
947 case LT:
948 case GT:
949 if (code == GT)
950 {
951 rtx tmp = op2;
952 op2 = op1;
953 op1 = tmp;
954 code = LT;
955 }
956
957 if (!register_operand (op1, mode))
958 op1 = force_reg (mode, op1);
959
960 if (!reg_or_int16_operand (op2, mode))
961 op2 = force_reg (mode, op2);
962
963 emit_insn (gen_slt_insn (op0, op1, op2));
964 return true;
965
966 case LTU:
967 case GTU:
968 if (code == GTU)
969 {
970 rtx tmp = op2;
971 op2 = op1;
972 op1 = tmp;
973 code = LTU;
974 }
975
976 if (!register_operand (op1, mode))
977 op1 = force_reg (mode, op1);
978
979 if (!reg_or_int16_operand (op2, mode))
980 op2 = force_reg (mode, op2);
981
982 emit_insn (gen_sltu_insn (op0, op1, op2));
983 return true;
984
985 case GE:
986 case GEU:
987 if (!register_operand (op1, mode))
988 op1 = force_reg (mode, op1);
989
990 if (!reg_or_int16_operand (op2, mode))
991 op2 = force_reg (mode, op2);
992
993 if (code == GE)
994 emit_insn (gen_sge_insn (op0, op1, op2));
995 else
996 emit_insn (gen_sgeu_insn (op0, op1, op2));
997 return true;
998
999 case LE:
1000 case LEU:
1001 if (!register_operand (op1, mode))
1002 op1 = force_reg (mode, op1);
1003
1004 if (CONST_INT_P (op2))
1005 {
1006 HOST_WIDE_INT value = INTVAL (op2);
1007 if (value >= 2147483647)
1008 {
1009 emit_move_insn (op0, const1_rtx);
1010 return true;
1011 }
1012
1013 op2 = GEN_INT (value + 1);
1014 if (value < -32768 || value >= 32767)
1015 op2 = force_reg (mode, op2);
1016
1017 if (code == LEU)
1018 emit_insn (gen_sltu_insn (op0, op1, op2));
1019 else
1020 emit_insn (gen_slt_insn (op0, op1, op2));
1021 return true;
1022 }
1023
1024 if (!register_operand (op2, mode))
1025 op2 = force_reg (mode, op2);
1026
1027 if (code == LEU)
1028 emit_insn (gen_sleu_insn (op0, op1, op2));
1029 else
1030 emit_insn (gen_sle_insn (op0, op1, op2));
1031 return true;
1032
1033 default:
1034 gcc_unreachable ();
1035 }
1036 }
1037
1038 \f
1039 /* Split a 2 word move (DI or DF) into component parts. */
1040
1041 rtx
1042 gen_split_move_double (rtx operands[])
1043 {
1044 machine_mode mode = GET_MODE (operands[0]);
1045 rtx dest = operands[0];
1046 rtx src = operands[1];
1047 rtx val;
1048
1049 /* We might have (SUBREG (MEM)) here, so just get rid of the
1050 subregs to make this code simpler. It is safe to call
1051 alter_subreg any time after reload. */
1052 if (GET_CODE (dest) == SUBREG)
1053 alter_subreg (&dest, true);
1054 if (GET_CODE (src) == SUBREG)
1055 alter_subreg (&src, true);
1056
1057 start_sequence ();
1058 if (REG_P (dest))
1059 {
1060 int dregno = REGNO (dest);
1061
1062 /* Reg = reg. */
1063 if (REG_P (src))
1064 {
1065 int sregno = REGNO (src);
1066
1067 int reverse = (dregno == sregno + 1);
1068
1069 /* We normally copy the low-numbered register first. However, if
 1070 the first register of operand 0 is the same as the second register of
1071 operand 1, we must copy in the opposite order. */
1072 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1073 operand_subword (src, reverse, TRUE, mode)));
1074
1075 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1076 operand_subword (src, !reverse, TRUE, mode)));
1077 }
1078
1079 /* Reg = constant. */
1080 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1081 {
1082 rtx words[2];
1083 split_double (src, &words[0], &words[1]);
1084 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1085 words[0]));
1086
1087 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1088 words[1]));
1089 }
1090
1091 /* Reg = mem. */
1092 else if (MEM_P (src))
1093 {
1094 /* If the high-address word is used in the address, we must load it
1095 last. Otherwise, load it first. */
1096 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1097
 1098 /* We used to optimize loads through a single address register as
1099
1100 ld r1,r3+; ld r2,r3
1101
1102 if r3 were not used subsequently. However, the REG_NOTES aren't
1103 propagated correctly by the reload phase, and it can cause bad
1104 code to be generated. We could still try:
1105
1106 ld r1,r3+; ld r2,r3; addi r3,-4
1107
1108 which saves 2 bytes and doesn't force longword alignment. */
1109 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1110 adjust_address (src, SImode,
1111 reverse * UNITS_PER_WORD)));
1112
1113 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1114 adjust_address (src, SImode,
1115 !reverse * UNITS_PER_WORD)));
1116 }
1117 else
1118 gcc_unreachable ();
1119 }
1120
1121 /* Mem = reg. */
 1122 /* We used to optimize stores through a single address register as
1123
1124 st r1,r3; st r2,+r3
1125
1126 if r3 were not used subsequently. However, the REG_NOTES aren't
1127 propagated correctly by the reload phase, and it can cause bad
1128 code to be generated. We could still try:
1129
1130 st r1,r3; st r2,+r3; addi r3,-4
1131
1132 which saves 2 bytes and doesn't force longword alignment. */
1133 else if (MEM_P (dest) && REG_P (src))
1134 {
1135 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1136 operand_subword (src, 0, TRUE, mode)));
1137
1138 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1139 operand_subword (src, 1, TRUE, mode)));
1140 }
1141
1142 else
1143 gcc_unreachable ();
1144
1145 val = get_insns ();
1146 end_sequence ();
1147 return val;
1148 }
1149
1150 \f
1151 static int
1152 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1153 tree type, bool named ATTRIBUTE_UNUSED)
1154 {
1155 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1156
1157 int words;
1158 unsigned int size =
1159 (((mode == BLKmode && type)
1160 ? (unsigned int) int_size_in_bytes (type)
1161 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1162 / UNITS_PER_WORD;
1163
1164 if (*cum >= M32R_MAX_PARM_REGS)
1165 words = 0;
1166 else if (*cum + size > M32R_MAX_PARM_REGS)
1167 words = (*cum + size) - M32R_MAX_PARM_REGS;
1168 else
1169 words = 0;
1170
1171 return words * UNITS_PER_WORD;
1172 }
1173
1174 /* The ROUND_ADVANCE* macros are local to this file. */
1175 /* Round SIZE up to a word boundary. */
1176 #define ROUND_ADVANCE(SIZE) \
1177 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1178
1179 /* Round arg MODE/TYPE up to the next word boundary. */
1180 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1181 ((MODE) == BLKmode \
1182 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1183 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1184
1185 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1186 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1187
1188 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1189 a reg. This includes arguments that have to be passed by reference as the
1190 pointer to them is passed in a reg if one is available (and that is what
1191 we're given).
1192 This macro is only used in this file. */
1193 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1194 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
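
/* Editorial worked example, not part of the original sources, assuming the
   4-byte UNITS_PER_WORD of this port: a 10-byte BLKmode argument gives
   ROUND_ADVANCE_ARG = (10 + 4 - 1) / 4 = 3 words.  PASS_IN_REG_P only asks
   whether any parameter register is left (CUM < M32R_MAX_PARM_REGS);
   m32r_arg_partial_bytes above reports how many of those words spill to the
   stack, and m32r_function_arg_advance below then bumps CUM by 3.  */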
1195
1196 /* Determine where to put an argument to a function.
1197 Value is zero to push the argument on the stack,
1198 or a hard register in which to store the argument.
1199
1200 MODE is the argument's machine mode.
1201 TYPE is the data type of the argument (as a tree).
1202 This is null for libcalls where that information may
1203 not be available.
1204 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1205 the preceding args and about the function being called.
1206 NAMED is nonzero if this argument is a named parameter
1207 (otherwise it is an extra parameter matching an ellipsis). */
1208 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1209 and the rest are pushed. */
1210
1211 static rtx
1212 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1213 const_tree type ATTRIBUTE_UNUSED,
1214 bool named ATTRIBUTE_UNUSED)
1215 {
1216 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1217
1218 return (PASS_IN_REG_P (*cum, mode, type)
1219 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1220 : NULL_RTX);
1221 }
1222
1223 /* Update the data in CUM to advance over an argument
1224 of mode MODE and data type TYPE.
1225 (TYPE is null for libcalls where that information may not be available.) */
1226
1227 static void
1228 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1229 const_tree type, bool named ATTRIBUTE_UNUSED)
1230 {
1231 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1232
1233 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1234 + ROUND_ADVANCE_ARG (mode, type));
1235 }
1236
1237 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1238
1239 static bool
1240 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1241 {
1242 cumulative_args_t dummy = pack_cumulative_args (NULL);
1243
1244 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1245 }
1246
1247 /* Worker function for TARGET_FUNCTION_VALUE. */
1248
1249 static rtx
1250 m32r_function_value (const_tree valtype,
1251 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1252 bool outgoing ATTRIBUTE_UNUSED)
1253 {
1254 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1255 }
1256
1257 /* Worker function for TARGET_LIBCALL_VALUE. */
1258
1259 static rtx
1260 m32r_libcall_value (machine_mode mode,
1261 const_rtx fun ATTRIBUTE_UNUSED)
1262 {
1263 return gen_rtx_REG (mode, 0);
1264 }
1265
1266 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1267
1268 ??? What about r1 in DI/DF values. */
1269
1270 static bool
1271 m32r_function_value_regno_p (const unsigned int regno)
1272 {
1273 return (regno == 0);
1274 }
1275
1276 /* Do any needed setup for a variadic function. For the M32R, we must
1277 create a register parameter block, and then copy any anonymous arguments
1278 in registers to memory.
1279
1280 CUM has not been updated for the last named argument which has type TYPE
1281 and mode MODE, and we rely on this fact. */
1282
1283 static void
1284 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1285 tree type, int *pretend_size, int no_rtl)
1286 {
1287 int first_anon_arg;
1288
1289 if (no_rtl)
1290 return;
1291
1292 /* All BLKmode values are passed by reference. */
1293 gcc_assert (mode != BLKmode);
1294
1295 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1296 + ROUND_ADVANCE_ARG (mode, type));
1297
1298 if (first_anon_arg < M32R_MAX_PARM_REGS)
1299 {
1300 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1301 int first_reg_offset = first_anon_arg;
1302 /* Size in words to "pretend" allocate. */
1303 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1304 rtx regblock;
1305
1306 regblock = gen_frame_mem (BLKmode,
1307 plus_constant (Pmode, arg_pointer_rtx,
1308 FIRST_PARM_OFFSET (0)));
1309 set_mem_alias_set (regblock, get_varargs_alias_set ());
1310 move_block_from_reg (first_reg_offset, regblock, size);
1311
1312 *pretend_size = (size * UNITS_PER_WORD);
1313 }
1314 }
1315
1316 \f
 1317 /* Return true if INSN is a real instruction (not a USE or CLOBBER). */
1318
1319 static int
1320 m32r_is_insn (rtx insn)
1321 {
1322 return (NONDEBUG_INSN_P (insn)
1323 && GET_CODE (PATTERN (insn)) != USE
1324 && GET_CODE (PATTERN (insn)) != CLOBBER);
1325 }
1326
1327 /* Increase the priority of long instructions so that the
1328 short instructions are scheduled ahead of the long ones. */
1329
1330 static int
1331 m32r_adjust_priority (rtx_insn *insn, int priority)
1332 {
1333 if (m32r_is_insn (insn)
1334 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1335 priority <<= 3;
1336
1337 return priority;
1338 }
1339
1340 \f
1341 /* Indicate how many instructions can be issued at the same time.
1342 This is sort of a lie. The m32r can issue only 1 long insn at
1343 once, but it can issue 2 short insns. The default therefore is
1344 set at 2, but this can be overridden by the command line option
1345 -missue-rate=1. */
1346
1347 static int
1348 m32r_issue_rate (void)
1349 {
1350 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1351 }
1352 \f
1353 /* Cost functions. */
1354 /* Memory is 3 times as expensive as registers.
1355 ??? Is that the right way to look at it? */
1356
1357 static int
1358 m32r_memory_move_cost (machine_mode mode,
1359 reg_class_t rclass ATTRIBUTE_UNUSED,
1360 bool in ATTRIBUTE_UNUSED)
1361 {
1362 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1363 return 6;
1364 else
1365 return 12;
1366 }
1367
1368 static bool
1369 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1370 int opno ATTRIBUTE_UNUSED, int *total,
1371 bool speed ATTRIBUTE_UNUSED)
1372 {
1373 switch (code)
1374 {
1375 /* Small integers are as cheap as registers. 4 byte values can be
1376 fetched as immediate constants - let's give that the cost of an
1377 extra insn. */
1378 case CONST_INT:
1379 if (INT16_P (INTVAL (x)))
1380 {
1381 *total = 0;
1382 return true;
1383 }
1384 /* FALLTHRU */
1385
1386 case CONST:
1387 case LABEL_REF:
1388 case SYMBOL_REF:
1389 *total = COSTS_N_INSNS (1);
1390 return true;
1391
1392 case CONST_DOUBLE:
1393 {
1394 rtx high, low;
1395
1396 split_double (x, &high, &low);
1397 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1398 + !INT16_P (INTVAL (low)));
1399 return true;
1400 }
1401
1402 case MULT:
1403 *total = COSTS_N_INSNS (3);
1404 return true;
1405
1406 case DIV:
1407 case UDIV:
1408 case MOD:
1409 case UMOD:
1410 *total = COSTS_N_INSNS (10);
1411 return true;
1412
1413 default:
1414 return false;
1415 }
1416 }
1417 \f
1418 /* Type of function DECL.
1419
1420 The result is cached. To reset the cache at the end of a function,
1421 call with DECL = NULL_TREE. */
1422
1423 enum m32r_function_type
1424 m32r_compute_function_type (tree decl)
1425 {
1426 /* Cached value. */
1427 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1428 /* Last function we were called for. */
1429 static tree last_fn = NULL_TREE;
1430
1431 /* Resetting the cached value? */
1432 if (decl == NULL_TREE)
1433 {
1434 fn_type = M32R_FUNCTION_UNKNOWN;
1435 last_fn = NULL_TREE;
1436 return fn_type;
1437 }
1438
1439 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1440 return fn_type;
1441
1442 /* Compute function type. */
1443 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1444 ? M32R_FUNCTION_INTERRUPT
1445 : M32R_FUNCTION_NORMAL);
1446
1447 last_fn = decl;
1448 return fn_type;
1449 }
1450 \f/* Function prologue/epilogue handlers. */
1451
1452 /* M32R stack frames look like:
1453
1454 Before call After call
1455 +-----------------------+ +-----------------------+
1456 | | | |
1457 high | local variables, | | local variables, |
1458 mem | reg save area, etc. | | reg save area, etc. |
1459 | | | |
1460 +-----------------------+ +-----------------------+
1461 | | | |
1462 | arguments on stack. | | arguments on stack. |
1463 | | | |
1464 SP+0->+-----------------------+ +-----------------------+
1465 | reg parm save area, |
1466 | only created for |
1467 | variable argument |
1468 | functions |
1469 +-----------------------+
1470 | previous frame ptr |
1471 +-----------------------+
1472 | |
1473 | register save area |
1474 | |
1475 +-----------------------+
1476 | return address |
1477 +-----------------------+
1478 | |
1479 | local variables |
1480 | |
1481 +-----------------------+
1482 | |
1483 | alloca allocations |
1484 | |
1485 +-----------------------+
1486 | |
1487 low | arguments on stack |
1488 memory | |
1489 SP+0->+-----------------------+
1490
1491 Notes:
1492 1) The "reg parm save area" does not exist for non variable argument fns.
1493 2) The "reg parm save area" can be eliminated completely if we saved regs
1494 containing anonymous args separately but that complicates things too
1495 much (so it's not done).
1496 3) The return address is saved after the register save area so as to have as
1497 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1498
1499 /* Structure to be filled in by m32r_compute_frame_size with register
1500 save masks, and offsets for the current function. */
1501 struct m32r_frame_info
1502 {
1503 unsigned int total_size; /* # bytes that the entire frame takes up. */
1504 unsigned int extra_size; /* # bytes of extra stuff. */
1505 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1506 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1507 unsigned int reg_size; /* # bytes needed to store regs. */
1508 unsigned int var_size; /* # bytes that variables take up. */
1509 unsigned int gmask; /* Mask of saved gp registers. */
1510 unsigned int save_fp; /* Nonzero if fp must be saved. */
1511 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1512 int initialized; /* Nonzero if frame size already calculated. */
1513 };
1514
1515 /* Current frame information calculated by m32r_compute_frame_size. */
1516 static struct m32r_frame_info current_frame_info;
1517
1518 /* Zero structure to initialize current_frame_info. */
1519 static struct m32r_frame_info zero_frame_info;
1520
1521 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1522 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1523
1524 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1525 The return address and frame pointer are treated separately.
1526 Don't consider them here. */
1527 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1528 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1529 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1530
1531 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1532 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1533
1534 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1535 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1536
1537 /* Return the bytes needed to compute the frame pointer from the current
1538 stack pointer.
1539
1540 SIZE is the size needed for local variables. */
1541
1542 unsigned int
1543 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1544 {
1545 unsigned int regno;
1546 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1547 unsigned int reg_size;
1548 unsigned int gmask;
1549 enum m32r_function_type fn_type;
1550 int interrupt_p;
1551 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1552 | crtl->profile);
1553
1554 var_size = M32R_STACK_ALIGN (size);
1555 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1556 pretend_size = crtl->args.pretend_args_size;
1557 extra_size = FIRST_PARM_OFFSET (0);
1558 total_size = extra_size + pretend_size + args_size + var_size;
1559 reg_size = 0;
1560 gmask = 0;
1561
1562 /* See if this is an interrupt handler. Call used registers must be saved
1563 for them too. */
1564 fn_type = m32r_compute_function_type (current_function_decl);
1565 interrupt_p = M32R_INTERRUPT_P (fn_type);
1566
1567 /* Calculate space needed for registers. */
1568 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1569 {
1570 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1571 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1572 {
1573 reg_size += UNITS_PER_WORD;
1574 gmask |= 1 << regno;
1575 }
1576 }
1577
1578 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1579 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1580
1581 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1582 * UNITS_PER_WORD);
1583 total_size += reg_size;
1584
1585 /* ??? Not sure this is necessary, and I don't think the epilogue
1586 handler will do the right thing if this changes total_size. */
1587 total_size = M32R_STACK_ALIGN (total_size);
1588
1589 /* frame_size = total_size - (pretend_size + reg_size); */
1590
1591 /* Save computed information. */
1592 current_frame_info.total_size = total_size;
1593 current_frame_info.extra_size = extra_size;
1594 current_frame_info.pretend_size = pretend_size;
1595 current_frame_info.var_size = var_size;
1596 current_frame_info.args_size = args_size;
1597 current_frame_info.reg_size = reg_size;
1598 current_frame_info.gmask = gmask;
1599 current_frame_info.initialized = reload_completed;
1600
1601 /* Ok, we're done. */
1602 return total_size;
1603 }
1604
1605 /* Worker function for TARGET_CAN_ELIMINATE. */
1606
1607 bool
1608 m32r_can_eliminate (const int from, const int to)
1609 {
1610 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1611 ? ! frame_pointer_needed
1612 : true);
1613 }
1614
1615 \f
1616 /* The table we use to reference PIC data. */
1617 static rtx global_offset_table;
1618
1619 static void
1620 m32r_reload_lr (rtx sp, int size)
1621 {
1622 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1623
1624 if (size == 0)
1625 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1626 else if (size < 32768)
1627 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1628 gen_rtx_PLUS (Pmode, sp,
1629 GEN_INT (size)))));
1630 else
1631 {
1632 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1633
1634 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1635 emit_insn (gen_addsi3 (tmp, tmp, sp));
1636 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1637 }
1638
1639 emit_use (lr);
1640 }
1641
1642 void
1643 m32r_load_pic_register (void)
1644 {
1645 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1646 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1647 GEN_INT (TARGET_MODEL_SMALL)));
1648
1649 /* Need to emit this whether or not we obey regdecls,
1650 since setjmp/longjmp can cause life info to screw up. */
1651 emit_use (pic_offset_table_rtx);
1652 }
1653
1654 /* Expand the m32r prologue as a series of insns. */
1655
1656 void
1657 m32r_expand_prologue (void)
1658 {
1659 int regno;
1660 int frame_size;
1661 unsigned int gmask;
1662 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1663 | crtl->profile);
1664
1665 if (! current_frame_info.initialized)
1666 m32r_compute_frame_size (get_frame_size ());
1667
1668 if (flag_stack_usage_info)
1669 current_function_static_stack_size = current_frame_info.total_size;
1670
1671 gmask = current_frame_info.gmask;
1672
1673 /* These cases shouldn't happen. Catch them now. */
1674 gcc_assert (current_frame_info.total_size || !gmask);
1675
1676 /* Allocate space for register arguments if this is a variadic function. */
1677 if (current_frame_info.pretend_size != 0)
1678 {
1679 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1680 the wrong result on a 64-bit host. */
1681 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1682 emit_insn (gen_addsi3 (stack_pointer_rtx,
1683 stack_pointer_rtx,
1684 GEN_INT (-pretend_size)));
1685 }
1686
1687 /* Save any registers we need to and set up fp. */
1688 if (current_frame_info.save_fp)
1689 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1690
1691 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1692
1693 /* Save any needed call-saved regs (and call-used if this is an
1694 interrupt handler). */
1695 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1696 {
1697 if ((gmask & (1 << regno)) != 0)
1698 emit_insn (gen_movsi_push (stack_pointer_rtx,
1699 gen_rtx_REG (Pmode, regno)));
1700 }
1701
1702 if (current_frame_info.save_lr)
1703 emit_insn (gen_movsi_push (stack_pointer_rtx,
1704 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1705
1706 /* Allocate the stack frame. */
1707 frame_size = (current_frame_info.total_size
1708 - (current_frame_info.pretend_size
1709 + current_frame_info.reg_size));
1710
1711 if (frame_size == 0)
1712 ; /* Nothing to do. */
1713 else if (frame_size <= 32768)
1714 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1715 GEN_INT (-frame_size)));
1716 else
1717 {
1718 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1719
1720 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1721 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1722 }
1723
1724 if (frame_pointer_needed)
1725 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1726
1727 if (crtl->profile)
1728 /* Push lr for mcount (form_pc, x). */
1729 emit_insn (gen_movsi_push (stack_pointer_rtx,
1730 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1731
1732 if (pic_reg_used)
1733 {
1734 m32r_load_pic_register ();
1735 m32r_reload_lr (stack_pointer_rtx,
1736 (crtl->profile ? 0 : frame_size));
1737 }
1738
1739 if (crtl->profile && !pic_reg_used)
1740 emit_insn (gen_blockage ());
1741 }
1742
1743 \f
1744 /* Set up the stack and frame pointer (if desired) for the function.
1745 Note, if this is changed, you need to mirror the changes in
 1746 m32r_compute_frame_size which calculates the prologue size. */
1747
1748 static void
1749 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1750 {
1751 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1752
1753 /* If this is an interrupt handler, mark it as such. */
1754 if (M32R_INTERRUPT_P (fn_type))
1755 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1756
1757 if (! current_frame_info.initialized)
1758 m32r_compute_frame_size (size);
1759
1760 /* This is only for the human reader. */
1761 fprintf (file,
1762 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1763 ASM_COMMENT_START,
1764 current_frame_info.var_size,
1765 current_frame_info.reg_size / 4,
1766 current_frame_info.args_size,
1767 current_frame_info.extra_size);
1768 }
1769 \f
1770 /* Output RTL to pop register REGNO from the stack. */
1771
1772 static void
1773 pop (int regno)
1774 {
1775 rtx x;
1776
1777 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1778 stack_pointer_rtx));
1779 add_reg_note (x, REG_INC, stack_pointer_rtx);
1780 }
1781
1782 /* Expand the m32r epilogue as a series of insns. */
1783
1784 void
1785 m32r_expand_epilogue (void)
1786 {
1787 int regno;
1788 int noepilogue = FALSE;
1789 int total_size;
1790
1791 gcc_assert (current_frame_info.initialized);
1792 total_size = current_frame_info.total_size;
1793
1794 if (total_size == 0)
1795 {
1796 rtx insn = get_last_insn ();
1797
1798 /* If the last insn was a BARRIER, we don't have to write any code
1799 because a jump (aka return) was put there. */
1800 if (insn && NOTE_P (insn))
1801 insn = prev_nonnote_insn (insn);
1802 if (insn && BARRIER_P (insn))
1803 noepilogue = TRUE;
1804 }
1805
1806 if (!noepilogue)
1807 {
1808 unsigned int var_size = current_frame_info.var_size;
1809 unsigned int args_size = current_frame_info.args_size;
1810 unsigned int gmask = current_frame_info.gmask;
1811 int can_trust_sp_p = !cfun->calls_alloca;
1812
1813 if (flag_exceptions)
1814 emit_insn (gen_blockage ());
1815
1816 /* The first thing to do is point the sp at the bottom of the register
1817 save area. */
1818 if (can_trust_sp_p)
1819 {
1820 unsigned int reg_offset = var_size + args_size;
1821
1822 if (reg_offset == 0)
1823 ; /* Nothing to do. */
1824 else if (reg_offset < 32768)
1825 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1826 GEN_INT (reg_offset)));
1827 else
1828 {
1829 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1830
1831 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1832 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1833 tmp));
1834 }
1835 }
1836 else if (frame_pointer_needed)
1837 {
1838 unsigned int reg_offset = var_size + args_size;
1839
1840 if (reg_offset == 0)
1841 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1842 else if (reg_offset < 32768)
1843 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1844 GEN_INT (reg_offset)));
1845 else
1846 {
1847 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1848
1849 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1850 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1851 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1852 tmp));
1853 }
1854 }
1855 else
1856 gcc_unreachable ();
1857
1858 if (current_frame_info.save_lr)
1859 pop (RETURN_ADDR_REGNUM);
1860
1861 /* Restore any saved registers, in reverse order of course. */
1862 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1863 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1864 {
1865 if ((gmask & (1L << regno)) != 0)
1866 pop (regno);
1867 }
1868
1869 if (current_frame_info.save_fp)
1870 pop (FRAME_POINTER_REGNUM);
1871
1872 /* Remove varargs area if present. */
1873 if (current_frame_info.pretend_size != 0)
1874 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1875 GEN_INT (current_frame_info.pretend_size)));
1876
1877 emit_insn (gen_blockage ());
1878 }
1879 }
1880
1881 /* Do any necessary cleanup after a function to restore stack, frame,
1882 and regs. */
1883
1884 static void
1885 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1886 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1887 {
1888 /* Reset state info for each function. */
1889 current_frame_info = zero_frame_info;
1890 m32r_compute_function_type (NULL_TREE);
1891 }
1892 \f
1893 /* Return nonzero if this function is known to have a null or 1 instruction
1894 epilogue. */
1895
1896 int
1897 direct_return (void)
1898 {
1899 if (!reload_completed)
1900 return FALSE;
1901
1902 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1903 return FALSE;
1904
1905 if (! current_frame_info.initialized)
1906 m32r_compute_frame_size (get_frame_size ());
1907
1908 return current_frame_info.total_size == 0;
1909 }
1910
1911 \f
1912 /* PIC. */
1913
1914 int
1915 m32r_legitimate_pic_operand_p (rtx x)
1916 {
1917 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1918 return 0;
1919
1920 if (GET_CODE (x) == CONST
1921 && GET_CODE (XEXP (x, 0)) == PLUS
1922 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1923 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1924 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1925 return 0;
1926
1927 return 1;
1928 }
1929
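/* Legitimize ORIG, an address involving a SYMBOL_REF, LABEL_REF or
   CONST, for -fpic, using REG (or a fresh pseudo when REG is zero)
   as a scratch.  Local symbols and labels are computed as an offset
   from the GOT pointer (a gotoff-style address load followed by an
   add of pic_offset_table_rtx); all other symbols have their address
   loaded from the corresponding GOT entry.  */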
1930 rtx
1931 m32r_legitimize_pic_address (rtx orig, rtx reg)
1932 {
1933 #ifdef DEBUG_PIC
1934 printf("m32r_legitimize_pic_address()\n");
1935 #endif
1936
1937 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1938 {
1939 rtx pic_ref, address;
1940 int subregs = 0;
1941
1942 if (reg == 0)
1943 {
1944 gcc_assert (!reload_in_progress && !reload_completed);
1945 reg = gen_reg_rtx (Pmode);
1946
1947 subregs = 1;
1948 }
1949
1950 if (subregs)
1951 address = gen_reg_rtx (Pmode);
1952 else
1953 address = reg;
1954
1955 crtl->uses_pic_offset_table = 1;
1956
1957 if (GET_CODE (orig) == LABEL_REF
1958 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1959 {
1960 emit_insn (gen_gotoff_load_addr (reg, orig));
1961 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1962 return reg;
1963 }
1964
1965 emit_insn (gen_pic_load_addr (address, orig));
1966
1967 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1968 pic_ref = gen_const_mem (Pmode, address);
1969 emit_move_insn (reg, pic_ref);
1970 return reg;
1971 }
1972 else if (GET_CODE (orig) == CONST)
1973 {
1974 rtx base, offset;
1975
1976 if (GET_CODE (XEXP (orig, 0)) == PLUS
1977 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1978 return orig;
1979
1980 if (reg == 0)
1981 {
1982 gcc_assert (!reload_in_progress && !reload_completed);
1983 reg = gen_reg_rtx (Pmode);
1984 }
1985
1986 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1987 {
1988 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1989 if (base == reg)
1990 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1991 else
1992 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1993 }
1994 else
1995 return orig;
1996
1997 if (CONST_INT_P (offset))
1998 {
1999 if (INT16_P (INTVAL (offset)))
2000 return plus_constant (Pmode, base, INTVAL (offset));
2001 else
2002 {
2003 gcc_assert (! reload_in_progress && ! reload_completed);
2004 offset = force_reg (Pmode, offset);
2005 }
2006 }
2007
2008 return gen_rtx_PLUS (Pmode, base, offset);
2009 }
2010
2011 return orig;
2012 }
2013
2014 static rtx
2015 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2016 machine_mode mode ATTRIBUTE_UNUSED)
2017 {
2018 if (flag_pic)
2019 return m32r_legitimize_pic_address (x, NULL_RTX);
2020 else
2021 return x;
2022 }
2023
2024 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2025
2026 static bool
2027 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2028 {
2029 if (GET_CODE (addr) == LO_SUM)
2030 return true;
2031
2032 return false;
2033 }
2034 \f
2035 /* Nested function support. */
2036
2037 /* Emit RTL insns to initialize the variable parts of a trampoline.
2038 FNADDR is an RTX for the address of the function's pure code.
2039 CXT is an RTX for the static chain value for the function. */
2040
2041 void
2042 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2043 rtx fnaddr ATTRIBUTE_UNUSED,
2044 rtx cxt ATTRIBUTE_UNUSED)
2045 {
2046 }
2047 \f
2048 static void
2049 m32r_file_start (void)
2050 {
2051 default_file_start ();
2052
2053 if (flag_verbose_asm)
2054 fprintf (asm_out_file,
2055 "%s M32R/D special options: -G %d\n",
2056 ASM_COMMENT_START, g_switch_value);
2057
2058 if (TARGET_LITTLE_ENDIAN)
2059 fprintf (asm_out_file, "\t.little\n");
2060 }
2061 \f
2062 /* Print operand X (an rtx) in assembler syntax to file FILE.
2063 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2064 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2065
2066 static void
2067 m32r_print_operand (FILE * file, rtx x, int code)
2068 {
2069 rtx addr;
2070
2071 switch (code)
2072 {
2073 /* The 's' and 'p' codes are used by output_block_move() to
2074 indicate pre-increment 's'tores and 'p'ost-increment loads. */
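/* For example, output_block_move() below emits "ld r5, @r3+" via the
   template "ld\t%5, %p1" and "st r5, @+r2" via "st\t%5, %s0"
   (register numbers here are only illustrative).  */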
2075 case 's':
2076 if (REG_P (x))
2077 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2078 else
2079 output_operand_lossage ("invalid operand to %%s code");
2080 return;
2081
2082 case 'p':
2083 if (REG_P (x))
2084 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2085 else
2086 output_operand_lossage ("invalid operand to %%p code");
2087 return;
2088
2089 case 'R' :
2090 /* Write second word of DImode or DFmode reference,
2091 register or memory. */
2092 if (REG_P (x))
2093 fputs (reg_names[REGNO (x)+1], file);
2094 else if (MEM_P (x))
2095 {
2096 fprintf (file, "@(");
2097 /* Handle possible auto-increment. Since it is pre-increment and
2098 we have already done it, we can just use an offset of four. */
2099 /* ??? This is taken from rs6000.c I think. I don't think it is
2100 currently necessary, but keep it around. */
2101 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2102 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2103 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2104 else
2105 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2106 fputc (')', file);
2107 }
2108 else
2109 output_operand_lossage ("invalid operand to %%R code");
2110 return;
2111
2112 case 'H' : /* High word. */
2113 case 'L' : /* Low word. */
2114 if (REG_P (x))
2115 {
2116 /* L = least significant word, H = most significant word. */
2117 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2118 fputs (reg_names[REGNO (x)], file);
2119 else
2120 fputs (reg_names[REGNO (x)+1], file);
2121 }
2122 else if (CONST_INT_P (x)
2123 || GET_CODE (x) == CONST_DOUBLE)
2124 {
2125 rtx first, second;
2126
2127 split_double (x, &first, &second);
2128 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2129 code == 'L' ? INTVAL (first) : INTVAL (second));
2130 }
2131 else
2132 output_operand_lossage ("invalid operand to %%H/%%L code");
2133 return;
2134
2135 case 'A' :
2136 {
2137 char str[30];
2138
2139 if (GET_CODE (x) != CONST_DOUBLE
2140 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2141 fatal_insn ("bad insn for 'A'", x);
2142
2143 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2144 fprintf (file, "%s", str);
2145 return;
2146 }
2147
2148 case 'B' : /* Bottom half. */
2149 case 'T' : /* Top half. */
2150 /* Output the argument to a `seth' insn (sets the Top half-word).
2151 For constants output arguments to a seth/or3 pair to set Top and
2152 Bottom halves. For symbols output arguments to a seth/add3 pair to
2153 set Top and Bottom halves. The difference exists because for
2154 constants seth/or3 is more readable but for symbols we need to use
2155 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
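/* For example, for the constant 0x12345678, %T prints "0x1234" and
   %B prints "0x5678", giving a pair such as "seth r4,#0x1234" /
   "or3 r4,r4,#0x5678"; for an ordinary symbol %T prints "shigh(foo)"
   and %B prints "low(foo)" (used with add3), while %B prints
   "sda(foo)" when the symbol lives in small data.  Register and
   symbol names here are only illustrative.  */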
2156 switch (GET_CODE (x))
2157 {
2158 case CONST_INT :
2159 case CONST_DOUBLE :
2160 {
2161 rtx first, second;
2162
2163 split_double (x, &first, &second);
2164 x = WORDS_BIG_ENDIAN ? second : first;
2165 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2166 (code == 'B'
2167 ? INTVAL (x) & 0xffff
2168 : (INTVAL (x) >> 16) & 0xffff));
2169 }
2170 return;
2171 case CONST :
2172 case SYMBOL_REF :
2173 if (code == 'B'
2174 && small_data_operand (x, VOIDmode))
2175 {
2176 fputs ("sda(", file);
2177 output_addr_const (file, x);
2178 fputc (')', file);
2179 return;
2180 }
2181 /* fall through */
2182 case LABEL_REF :
2183 fputs (code == 'T' ? "shigh(" : "low(", file);
2184 output_addr_const (file, x);
2185 fputc (')', file);
2186 return;
2187 default :
2188 output_operand_lossage ("invalid operand to %%T/%%B code");
2189 return;
2190 }
2191 break;
2192
2193 case 'U' :
2194 /* ??? wip */
2195 /* Output a load/store with update indicator if appropriate. */
2196 if (MEM_P (x))
2197 {
2198 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2199 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2200 fputs (".a", file);
2201 }
2202 else
2203 output_operand_lossage ("invalid operand to %%U code");
2204 return;
2205
2206 case 'N' :
2207 /* Print a constant value negated. */
2208 if (CONST_INT_P (x))
2209 output_addr_const (file, GEN_INT (- INTVAL (x)));
2210 else
2211 output_operand_lossage ("invalid operand to %%N code");
2212 return;
2213
2214 case 'X' :
2215 /* Print a const_int in hex. Used in comments. */
2216 if (CONST_INT_P (x))
2217 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2218 return;
2219
2220 case '#' :
2221 fputs (IMMEDIATE_PREFIX, file);
2222 return;
2223
2224 case 0 :
2225 /* Do nothing special. */
2226 break;
2227
2228 default :
2229 /* Unknown flag. */
2230 output_operand_lossage ("invalid operand output code");
2231 }
2232
2233 switch (GET_CODE (x))
2234 {
2235 case REG :
2236 fputs (reg_names[REGNO (x)], file);
2237 break;
2238
2239 case MEM :
2240 addr = XEXP (x, 0);
2241 if (GET_CODE (addr) == PRE_INC)
2242 {
2243 if (!REG_P (XEXP (addr, 0)))
2244 fatal_insn ("pre-increment address is not a register", x);
2245
2246 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2247 }
2248 else if (GET_CODE (addr) == PRE_DEC)
2249 {
2250 if (!REG_P (XEXP (addr, 0)))
2251 fatal_insn ("pre-decrement address is not a register", x);
2252
2253 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2254 }
2255 else if (GET_CODE (addr) == POST_INC)
2256 {
2257 if (!REG_P (XEXP (addr, 0)))
2258 fatal_insn ("post-increment address is not a register", x);
2259
2260 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2261 }
2262 else
2263 {
2264 fputs ("@(", file);
2265 output_address (XEXP (x, 0));
2266 fputc (')', file);
2267 }
2268 break;
2269
2270 case CONST_DOUBLE :
2271 /* We handle SFmode constants here as output_addr_const doesn't. */
2272 if (GET_MODE (x) == SFmode)
2273 {
2274 REAL_VALUE_TYPE d;
2275 long l;
2276
2277 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2278 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2279 fprintf (file, "0x%08lx", l);
2280 break;
2281 }
2282
2283 /* Fall through. Let output_addr_const deal with it. */
2284
2285 default :
2286 output_addr_const (file, x);
2287 break;
2288 }
2289 }
2290
2291 /* Print a memory address as an operand to reference that memory location. */
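/* For example, (plus (reg fp) (const_int 4)) prints as "4,fp",
   (lo_sum (reg r12) (symbol_ref "foo")) prints as "low(foo),r12"
   (or "sda(foo),r12" for a small-data symbol), and (post_inc (reg r2))
   prints as "r2+".  The surrounding "@(...)" syntax is added by
   m32r_print_operand()'s MEM case.  Names here are illustrative.  */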
2292
2293 static void
2294 m32r_print_operand_address (FILE * file, rtx addr)
2295 {
2296 rtx base;
2297 rtx index = 0;
2298 int offset = 0;
2299
2300 switch (GET_CODE (addr))
2301 {
2302 case REG :
2303 fputs (reg_names[REGNO (addr)], file);
2304 break;
2305
2306 case PLUS :
2307 if (CONST_INT_P (XEXP (addr, 0)))
2308 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2309 else if (CONST_INT_P (XEXP (addr, 1)))
2310 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2311 else
2312 base = XEXP (addr, 0), index = XEXP (addr, 1);
2313 if (REG_P (base))
2314 {
2315 /* Print the offset first (if present) to conform to the manual. */
2316 if (index == 0)
2317 {
2318 if (offset != 0)
2319 fprintf (file, "%d,", offset);
2320 fputs (reg_names[REGNO (base)], file);
2321 }
2322 /* The chip doesn't support this, but left in for generality. */
2323 else if (REG_P (index))
2324 fprintf (file, "%s,%s",
2325 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2326 /* Not sure this can happen, but leave in for now. */
2327 else if (GET_CODE (index) == SYMBOL_REF)
2328 {
2329 output_addr_const (file, index);
2330 fputc (',', file);
2331 fputs (reg_names[REGNO (base)], file);
2332 }
2333 else
2334 fatal_insn ("bad address", addr);
2335 }
2336 else if (GET_CODE (base) == LO_SUM)
2337 {
2338 gcc_assert (!index && REG_P (XEXP (base, 0)));
2339 if (small_data_operand (XEXP (base, 1), VOIDmode))
2340 fputs ("sda(", file);
2341 else
2342 fputs ("low(", file);
2343 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2344 offset));
2345 fputs ("),", file);
2346 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2347 }
2348 else
2349 fatal_insn ("bad address", addr);
2350 break;
2351
2352 case LO_SUM :
2353 if (!REG_P (XEXP (addr, 0)))
2354 fatal_insn ("lo_sum not of register", addr);
2355 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2356 fputs ("sda(", file);
2357 else
2358 fputs ("low(", file);
2359 output_addr_const (file, XEXP (addr, 1));
2360 fputs ("),", file);
2361 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2362 break;
2363
2364 case PRE_INC : /* Assume SImode. */
2365 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2366 break;
2367
2368 case PRE_DEC : /* Assume SImode. */
2369 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2370 break;
2371
2372 case POST_INC : /* Assume SImode. */
2373 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2374 break;
2375
2376 default :
2377 output_addr_const (file, addr);
2378 break;
2379 }
2380 }
2381
2382 static bool
2383 m32r_print_operand_punct_valid_p (unsigned char code)
2384 {
2385 return m32r_punct_chars[code];
2386 }
2387
2388 /* Return true if the operands are the constants 0 and 1. */
2389
2390 int
2391 zero_and_one (rtx operand1, rtx operand2)
2392 {
2393 return
2394 CONST_INT_P (operand1)
2395 && CONST_INT_P (operand2)
2396 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2397 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2398 }
2399
2400 /* Generate the correct assembler code to handle the conditional loading of a
2401 value into a register. It is known that the operands satisfy the
2402 conditional_move_operand() function above. The destination is operand[0].
2403 The condition is operand [1]. The 'true' value is operand [2] and the
2404 'false' value is operand [3]. */
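/* For example, with a destination of r4, an EQ condition and a 'true'
   value of 1, this returns "mvfc r4, cbr"; if the 'true' value is 0
   the result is inverted, giving "mvfc r4, cbr\n\txor3 r4, r4, #1".
   An NE condition simply swaps the 'true' and 'false' operands first.
   (The register name is illustrative.)  */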
2405
2406 char *
2407 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2408 {
2409 static char buffer [100];
2410 const char * dest = reg_names [REGNO (operands [0])];
2411
2412 buffer [0] = 0;
2413
2414 /* Destination must be a register. */
2415 gcc_assert (REG_P (operands [0]));
2416 gcc_assert (conditional_move_operand (operands [2], SImode));
2417 gcc_assert (conditional_move_operand (operands [3], SImode));
2418
2419 /* Check to see if the test is reversed. */
2420 if (GET_CODE (operands [1]) == NE)
2421 {
2422 rtx tmp = operands [2];
2423 operands [2] = operands [3];
2424 operands [3] = tmp;
2425 }
2426
2427 sprintf (buffer, "mvfc %s, cbr", dest);
2428
2429 /* If the true value was '0' then we need to invert the results of the move. */
2430 if (INTVAL (operands [2]) == 0)
2431 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2432 dest, dest);
2433
2434 return buffer;
2435 }
2436
2437 /* Returns true if the registers contained in the two
2438 rtl expressions are different. */
2439
2440 int
2441 m32r_not_same_reg (rtx a, rtx b)
2442 {
2443 int reg_a = -1;
2444 int reg_b = -2;
2445
2446 while (GET_CODE (a) == SUBREG)
2447 a = SUBREG_REG (a);
2448
2449 if (REG_P (a))
2450 reg_a = REGNO (a);
2451
2452 while (GET_CODE (b) == SUBREG)
2453 b = SUBREG_REG (b);
2454
2455 if (REG_P (b))
2456 reg_b = REGNO (b);
2457
2458 return reg_a != reg_b;
2459 }
2460
2461 \f
2462 rtx
2463 m32r_function_symbol (const char *name)
2464 {
2465 int extra_flags = 0;
2466 enum m32r_model model;
2467 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2468
2469 if (TARGET_MODEL_SMALL)
2470 model = M32R_MODEL_SMALL;
2471 else if (TARGET_MODEL_MEDIUM)
2472 model = M32R_MODEL_MEDIUM;
2473 else if (TARGET_MODEL_LARGE)
2474 model = M32R_MODEL_LARGE;
2475 else
2476 gcc_unreachable (); /* Shouldn't happen. */
2477 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2478
2479 if (extra_flags)
2480 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2481
2482 return sym;
2483 }
2484
2485 /* Use a library function to move some bytes. */
2486
2487 static void
2488 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2489 {
2490 /* We want to pass the size as Pmode, which will normally be SImode
2491 but will be DImode if we are using 64-bit longs and pointers. */
2492 if (GET_MODE (bytes_rtx) != VOIDmode
2493 && GET_MODE (bytes_rtx) != Pmode)
2494 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2495
2496 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2497 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2498 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2499 TYPE_UNSIGNED (sizetype)),
2500 TYPE_MODE (sizetype));
2501 }
2502
2503 /* Expand string/block move operations.
2504
2505 operands[0] is the pointer to the destination.
2506 operands[1] is the pointer to the source.
2507 operands[2] is the number of bytes to move.
2508 operands[3] is the alignment.
2509
2510 Returns 1 upon success, 0 otherwise. */
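/* Sketch of the strategy (the details follow in the code): the two
   addresses are first copied into scratch registers; if the length is
   not a compile-time constant, we are optimizing for size, or the
   operands are not word aligned, the copy is simply handed to memcpy
   via block_move_call().  Otherwise the bulk of the block is copied
   MAX_MOVE_BYTES at a time with movmemsi_internal insns, wrapped in a
   loop when more than one chunk is needed, and a final
   movmemsi_internal handles any leftover tail bytes.  */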
2511
2512 int
2513 m32r_expand_block_move (rtx operands[])
2514 {
2515 rtx orig_dst = operands[0];
2516 rtx orig_src = operands[1];
2517 rtx bytes_rtx = operands[2];
2518 rtx align_rtx = operands[3];
2519 int constp = CONST_INT_P (bytes_rtx);
2520 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2521 int align = INTVAL (align_rtx);
2522 int leftover;
2523 rtx src_reg;
2524 rtx dst_reg;
2525
2526 if (constp && bytes <= 0)
2527 return 1;
2528
2529 /* Move the address into scratch registers. */
2530 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2531 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2532
2533 if (align > UNITS_PER_WORD)
2534 align = UNITS_PER_WORD;
2535
2536 /* If we prefer size over speed, always use a function call.
2537 If we do not know the size, use a function call.
2538 If the blocks are not word aligned, use a function call. */
2539 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2540 {
2541 block_move_call (dst_reg, src_reg, bytes_rtx);
2542 return 0;
2543 }
2544
2545 leftover = bytes % MAX_MOVE_BYTES;
2546 bytes -= leftover;
2547
2548 /* If necessary, generate a loop to handle the bulk of the copy. */
2549 if (bytes)
2550 {
2551 rtx_code_label *label = NULL;
2552 rtx final_src = NULL_RTX;
2553 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2554 rtx rounded_total = GEN_INT (bytes);
2555 rtx new_dst_reg = gen_reg_rtx (SImode);
2556 rtx new_src_reg = gen_reg_rtx (SImode);
2557
2558 /* If we are going to have to perform this loop more than
2559 once, then generate a label and compute the address the
2560 source register will contain upon completion of the final
2561 iteration. */
2562 if (bytes > MAX_MOVE_BYTES)
2563 {
2564 final_src = gen_reg_rtx (Pmode);
2565
2566 if (INT16_P(bytes))
2567 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2568 else
2569 {
2570 emit_insn (gen_movsi (final_src, rounded_total));
2571 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2572 }
2573
2574 label = gen_label_rtx ();
2575 emit_label (label);
2576 }
2577
2578 /* It is known that output_block_move() will update src_reg to point
2579 to the word after the end of the source block, and dst_reg to point
2580 to the last word of the destination block, provided that the block
2581 is MAX_MOVE_BYTES long. */
2582 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2583 new_dst_reg, new_src_reg));
2584 emit_move_insn (dst_reg, new_dst_reg);
2585 emit_move_insn (src_reg, new_src_reg);
2586 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2587
2588 if (bytes > MAX_MOVE_BYTES)
2589 {
2590 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2591 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2592 }
2593 }
2594
2595 if (leftover)
2596 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2597 gen_reg_rtx (SImode),
2598 gen_reg_rtx (SImode)));
2599 return 1;
2600 }
2601
2602 \f
2603 /* Emit load/stores for a small constant word aligned block_move.
2604
2605 operands[0] is the memory address of the destination.
2606 operands[1] is the memory address of the source.
2607 operands[2] is the number of bytes to move.
2608 operands[3] and operands[4] receive the updated destination and source addresses.
2609 operands[5] and operands[6] are temp registers. */
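/* For example, with the destination address in r2, the source in r3
   and temporaries r5/r6 (illustrative choices), each full 8-byte
   chunk comes out as

       ld r5, @r3+
       ld r6, @r3+
       st r5, @r2          ; first chunk only: no increment
       st r6, @+r2

   while later chunks use "st r5, @+r2" for both stores, since the
   M32R has no post-increment store.  */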
2610
2611 void
2612 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2613 {
2614 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2615 int first_time;
2616 int got_extra = 0;
2617
2618 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2619
2620 /* We do not have a post-increment store available, so the first set of
2621 stores is done without any increment, then the remaining ones can use
2622 the pre-increment addressing mode.
2623
2624 Note: expand_block_move() also relies upon this behavior when building
2625 loops to copy large blocks. */
2626 first_time = 1;
2627
2628 while (bytes > 0)
2629 {
2630 if (bytes >= 8)
2631 {
2632 if (first_time)
2633 {
2634 output_asm_insn ("ld\t%5, %p1", operands);
2635 output_asm_insn ("ld\t%6, %p1", operands);
2636 output_asm_insn ("st\t%5, @%0", operands);
2637 output_asm_insn ("st\t%6, %s0", operands);
2638 }
2639 else
2640 {
2641 output_asm_insn ("ld\t%5, %p1", operands);
2642 output_asm_insn ("ld\t%6, %p1", operands);
2643 output_asm_insn ("st\t%5, %s0", operands);
2644 output_asm_insn ("st\t%6, %s0", operands);
2645 }
2646
2647 bytes -= 8;
2648 }
2649 else if (bytes >= 4)
2650 {
2651 if (bytes > 4)
2652 got_extra = 1;
2653
2654 output_asm_insn ("ld\t%5, %p1", operands);
2655
2656 if (got_extra)
2657 output_asm_insn ("ld\t%6, %p1", operands);
2658
2659 if (first_time)
2660 output_asm_insn ("st\t%5, @%0", operands);
2661 else
2662 output_asm_insn ("st\t%5, %s0", operands);
2663
2664 bytes -= 4;
2665 }
2666 else
2667 {
2668 /* Get the entire next word, even though we do not want all of it.
2669 This saves us from doing several smaller loads, and we assume that
2670 we cannot cause a page fault when at least part of the word is in
2671 valid memory [since we don't get called if things aren't properly
2672 aligned]. */
2673 int dst_offset = first_time ? 0 : 4;
2674 /* The amount of increment we have to make to the
2675 destination pointer. */
2676 int dst_inc_amount = dst_offset + bytes - 4;
2677 /* The same for the source pointer. */
2678 int src_inc_amount = bytes;
2679 int last_shift;
2680 rtx my_operands[3];
2681
2682 /* If got_extra is true then we have already loaded
2683 the next word as part of loading and storing the previous word. */
2684 if (! got_extra)
2685 output_asm_insn ("ld\t%6, @%1", operands);
2686
2687 if (bytes >= 2)
2688 {
2689 bytes -= 2;
2690
2691 output_asm_insn ("sra3\t%5, %6, #16", operands);
2692 my_operands[0] = operands[5];
2693 my_operands[1] = GEN_INT (dst_offset);
2694 my_operands[2] = operands[0];
2695 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2696
2697 /* If there is a byte left to store then increment the
2698 destination address and shift the contents of the source
2699 register down by 8 bits. We could not do the address
2700 increment in the store half word instruction, because it does
2701 not have an auto increment mode. */
2702 if (bytes > 0) /* assert (bytes == 1) */
2703 {
2704 dst_offset += 2;
2705 last_shift = 8;
2706 }
2707 }
2708 else
2709 last_shift = 24;
2710
2711 if (bytes > 0)
2712 {
2713 my_operands[0] = operands[6];
2714 my_operands[1] = GEN_INT (last_shift);
2715 output_asm_insn ("srai\t%0, #%1", my_operands);
2716 my_operands[0] = operands[6];
2717 my_operands[1] = GEN_INT (dst_offset);
2718 my_operands[2] = operands[0];
2719 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2720 }
2721
2722 /* Update the destination pointer if needed. We have to do
2723 this so that the pattern matches what we output in this
2724 function. */
2725 if (dst_inc_amount
2726 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2727 {
2728 my_operands[0] = operands[0];
2729 my_operands[1] = GEN_INT (dst_inc_amount);
2730 output_asm_insn ("addi\t%0, #%1", my_operands);
2731 }
2732
2733 /* Update the source pointer if needed. We have to do this
2734 so that the pattern matches what we output in this
2735 function. */
2736 if (src_inc_amount
2737 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2738 {
2739 my_operands[0] = operands[1];
2740 my_operands[1] = GEN_INT (src_inc_amount);
2741 output_asm_insn ("addi\t%0, #%1", my_operands);
2742 }
2743
2744 bytes = 0;
2745 }
2746
2747 first_time = 0;
2748 }
2749 }
2750
2751 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2752
2753 int
2754 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2755 unsigned int new_reg)
2756 {
2757 /* Interrupt routines can't clobber any register that isn't already used. */
2758 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2759 && !df_regs_ever_live_p (new_reg))
2760 return 0;
2761
2762 return 1;
2763 }
2764
2765 rtx
2766 m32r_return_addr (int count)
2767 {
2768 if (count != 0)
2769 return const0_rtx;
2770
2771 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2772 }
2773
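/* Implement TARGET_TRAMPOLINE_INIT.  The first 16 bytes of M_TRAMP
   receive the fixed trampoline code (stored as four words with
   endian-specific values); offset 16 receives the static chain value
   and offset 20 the address of FNDECL.  The instruction cache is then
   flushed, either through the configured trap number or by calling
   the configured cache flush function.  */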
2774 static void
2775 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2776 {
2777 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2778 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2779 0x017e8e17 : 0x178e7e01, SImode));
2780 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2781 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2782 0x0c00ae86 : 0x86ae000c, SImode));
2783 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2784 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2785 0xe627871e : 0x1e8727e6, SImode));
2786 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2787 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2788 0xc61fc626 : 0x26c61fc6, SImode));
2789 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2790 chain_value);
2791 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2792 XEXP (DECL_RTL (fndecl), 0));
2793
2794 if (m32r_cache_flush_trap >= 0)
2795 emit_insn (gen_flush_icache
2796 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2797 gen_int_mode (m32r_cache_flush_trap, SImode)));
2798 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2799 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2800 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2801 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2802 GEN_INT (3), SImode);
2803 }
2804
2805 /* True if X is a reg that can be used as a base reg. */
2806
2807 static bool
2808 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2809 {
2810 if (! REG_P (x))
2811 return false;
2812
2813 if (strict)
2814 {
2815 if (GPR_P (REGNO (x)))
2816 return true;
2817 }
2818 else
2819 {
2820 if (GPR_P (REGNO (x))
2821 || REGNO (x) == ARG_POINTER_REGNUM
2822 || ! HARD_REGISTER_P (x))
2823 return true;
2824 }
2825
2826 return false;
2827 }
2828
2829 static inline bool
2830 m32r_rtx_ok_for_offset_p (const_rtx x)
2831 {
2832 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2833 }
2834
2835 static inline bool
2836 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2837 const_rtx x, bool strict)
2838 {
2839 if (GET_CODE (x) == PLUS
2840 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2841 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2842 return true;
2843
2844 return false;
2845 }
2846
2847 /* Do not allow LO_SUM addresses if the MODE is more than one word wide,
2848 since more than one instruction will be required. */
2849
2850 static inline bool
2851 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2852 bool strict)
2853 {
2854 if (GET_CODE (x) == LO_SUM
2855 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2856 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2857 && CONSTANT_P (XEXP (x, 1)))
2858 return true;
2859
2860 return false;
2861 }
2862
2863 /* Is this a load with post-increment addressing? */
2864
2865 static inline bool
2866 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2867 {
2868 if ((mode == SImode || mode == SFmode)
2869 && GET_CODE (x) == POST_INC
2870 && REG_P (XEXP (x, 0))
2871 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2872 return true;
2873
2874 return false;
2875 }
2876
2877 /* Is this a store with pre-increment or pre-decrement addressing? */
2878
2879 static inline bool
2880 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2881 {
2882 if ((mode == SImode || mode == SFmode)
2883 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2884 && REG_P (XEXP (x, 0))
2885 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2886 return true;
2887
2888 return false;
2889 }
2890
2891 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2892
2893 static bool
2894 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2895 {
2896 if (m32r_rtx_ok_for_base_p (x, strict)
2897 || m32r_legitimate_offset_addres_p (mode, x, strict)
2898 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2899 || m32r_load_postinc_p (mode, x, strict)
2900 || m32r_store_preinc_predec_p (mode, x, strict))
2901 return true;
2902
2903 return false;
2904 }
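/* In assembler terms the accepted forms are "@R" (register indirect),
   "@(disp16,R)" (register plus a signed 16-bit displacement),
   "@(low(sym),R)" / "@(sda(sym),R)" (LO_SUM, at most word-sized
   accesses), "@R+" (post-increment SImode/SFmode load) and
   "@+R" / "@-R" (pre-increment/pre-decrement SImode/SFmode store).  */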
2905
2906 static void
2907 m32r_conditional_register_usage (void)
2908 {
2909 if (flag_pic)
2910 {
2911 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2912 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2913 }
2914 }
2915
2916 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2917
2918 We don't allow (plus symbol large-constant) as the relocations can't
2919 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2920 We allow all CONST_DOUBLE's as the md file patterns will force the
2921 constant to memory if they can't handle them. */
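/* For example, (const (plus (symbol_ref "foo") (const_int 4))) is a
   legitimate constant, while (const (plus (symbol_ref "foo")
   (const_int 100000))) is not.  */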
2922
2923 static bool
2924 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2925 {
2926 return !(GET_CODE (x) == CONST
2927 && GET_CODE (XEXP (x, 0)) == PLUS
2928 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2929 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2930 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2931 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2932 }