gcc/config/m32r/m32r.c
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "stringpool.h"
28 #include "calls.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "dbxout.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "expr.h"
39 #include "hashtab.h"
40 #include "hash-set.h"
41 #include "vec.h"
42 #include "machmode.h"
43 #include "input.h"
44 #include "function.h"
45 #include "recog.h"
46 #include "diagnostic-core.h"
47 #include "ggc.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "cfgrtl.h"
51 #include "cfganal.h"
52 #include "lcm.h"
53 #include "cfgbuild.h"
54 #include "cfgcleanup.h"
55 #include "predict.h"
56 #include "basic-block.h"
57 #include "df.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "target-def.h"
61 #include "tm-constrs.h"
62 #include "opts.h"
63 #include "builtins.h"
64
65 /* Array of valid operand punctuation characters. */
66 static char m32r_punct_chars[256];
67
68 /* Machine-specific symbol_ref flags. */
69 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
70 #define SYMBOL_REF_MODEL(X) \
71 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
72
73 /* For string literals, etc. */
74 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
75
76 /* Forward declaration. */
77 static void m32r_option_override (void);
78 static void init_reg_tables (void);
79 static void block_move_call (rtx, rtx, rtx);
80 static int m32r_is_insn (rtx);
81 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
82 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
83 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
84 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
85 static void m32r_print_operand (FILE *, rtx, int);
86 static void m32r_print_operand_address (FILE *, rtx);
87 static bool m32r_print_operand_punct_valid_p (unsigned char code);
88 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
89 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
90
91 static void m32r_file_start (void);
92
93 static int m32r_adjust_priority (rtx_insn *, int);
94 static int m32r_issue_rate (void);
95
96 static void m32r_encode_section_info (tree, rtx, int);
97 static bool m32r_in_small_data_p (const_tree);
98 static bool m32r_return_in_memory (const_tree, const_tree);
99 static rtx m32r_function_value (const_tree, const_tree, bool);
100 static rtx m32r_libcall_value (machine_mode, const_rtx);
101 static bool m32r_function_value_regno_p (const unsigned int);
102 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
103 tree, int *, int);
104 static void init_idents (void);
105 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
106 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
107 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
108 const_tree, bool);
109 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
110 tree, bool);
111 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
112 const_tree, bool);
113 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
114 const_tree, bool);
115 static bool m32r_can_eliminate (const int, const int);
116 static void m32r_conditional_register_usage (void);
117 static void m32r_trampoline_init (rtx, tree, rtx);
118 static bool m32r_legitimate_constant_p (machine_mode, rtx);
119 \f
120 /* M32R specific attributes. */
121
122 static const struct attribute_spec m32r_attribute_table[] =
123 {
124 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
125 affects_type_identity } */
126 { "interrupt", 0, 0, true, false, false, NULL, false },
127 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
128 false },
129 { NULL, 0, 0, false, false, false, NULL, false }
130 };
131 \f
132 /* Initialize the GCC target structure. */
133 #undef TARGET_ATTRIBUTE_TABLE
134 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
135
136 #undef TARGET_LEGITIMATE_ADDRESS_P
137 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
138 #undef TARGET_LEGITIMIZE_ADDRESS
139 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
140 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
141 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
142
143 #undef TARGET_ASM_ALIGNED_HI_OP
144 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
145 #undef TARGET_ASM_ALIGNED_SI_OP
146 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
147
148 #undef TARGET_PRINT_OPERAND
149 #define TARGET_PRINT_OPERAND m32r_print_operand
150 #undef TARGET_PRINT_OPERAND_ADDRESS
151 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
152 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
153 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
154
155 #undef TARGET_ASM_FUNCTION_PROLOGUE
156 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
157 #undef TARGET_ASM_FUNCTION_EPILOGUE
158 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
159
160 #undef TARGET_ASM_FILE_START
161 #define TARGET_ASM_FILE_START m32r_file_start
162
163 #undef TARGET_SCHED_ADJUST_PRIORITY
164 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
165 #undef TARGET_SCHED_ISSUE_RATE
166 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
167
168 #undef TARGET_OPTION_OVERRIDE
169 #define TARGET_OPTION_OVERRIDE m32r_option_override
170
171 #undef TARGET_ENCODE_SECTION_INFO
172 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
173 #undef TARGET_IN_SMALL_DATA_P
174 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
175
176
177 #undef TARGET_MEMORY_MOVE_COST
178 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
179 #undef TARGET_RTX_COSTS
180 #define TARGET_RTX_COSTS m32r_rtx_costs
181 #undef TARGET_ADDRESS_COST
182 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
183
184 #undef TARGET_PROMOTE_PROTOTYPES
185 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
186 #undef TARGET_RETURN_IN_MEMORY
187 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
188
189 #undef TARGET_FUNCTION_VALUE
190 #define TARGET_FUNCTION_VALUE m32r_function_value
191 #undef TARGET_LIBCALL_VALUE
192 #define TARGET_LIBCALL_VALUE m32r_libcall_value
193 #undef TARGET_FUNCTION_VALUE_REGNO_P
194 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
195
196 #undef TARGET_SETUP_INCOMING_VARARGS
197 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
198 #undef TARGET_MUST_PASS_IN_STACK
199 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
200 #undef TARGET_PASS_BY_REFERENCE
201 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
202 #undef TARGET_ARG_PARTIAL_BYTES
203 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
204 #undef TARGET_FUNCTION_ARG
205 #define TARGET_FUNCTION_ARG m32r_function_arg
206 #undef TARGET_FUNCTION_ARG_ADVANCE
207 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
208
209 #undef TARGET_CAN_ELIMINATE
210 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
211
212 #undef TARGET_CONDITIONAL_REGISTER_USAGE
213 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
214
215 #undef TARGET_TRAMPOLINE_INIT
216 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
217
218 #undef TARGET_LEGITIMATE_CONSTANT_P
219 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
220
221 struct gcc_target targetm = TARGET_INITIALIZER;
222 \f
223 /* Called by m32r_option_override to initialize various things. */
224
225 void
226 m32r_init (void)
227 {
228 init_reg_tables ();
229
230 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
231 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
232 m32r_punct_chars['#'] = 1;
233 m32r_punct_chars['@'] = 1; /* ??? no longer used */
234
235 /* Provide default value if not specified. */
236 if (!global_options_set.x_g_switch_value)
237 g_switch_value = SDATA_DEFAULT_SIZE;
238 }
239
240 static void
241 m32r_option_override (void)
242 {
243 /* These need to be done at start up.
244 It's convenient to do them here. */
245 m32r_init ();
246 SUBTARGET_OVERRIDE_OPTIONS;
247 }
248
249 /* Vectors to keep interesting information about registers where it can easily
250 be got. We used to use the actual mode value as the bit number, but there
251 are (or may be) more than 32 modes now. Instead we use two tables: one
252 indexed by hard register number, and one indexed by mode. */
253
254 /* The purpose of m32r_mode_class is to shrink the range of modes so that
255 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
256 mapped into one m32r_mode_class mode. */
257
258 enum m32r_mode_class
259 {
260 C_MODE,
261 S_MODE, D_MODE, T_MODE, O_MODE,
262 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
263 };
264
265 /* Modes for condition codes. */
266 #define C_MODES (1 << (int) C_MODE)
267
268 /* Modes for single-word and smaller quantities. */
269 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
270
271 /* Modes for double-word and smaller quantities. */
272 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
273
274 /* Modes for quad-word and smaller quantities. */
275 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
276
277 /* Modes for accumulators. */
278 #define A_MODES (1 << (int) A_MODE)
279
280 /* Value is 1 if register/mode pair is acceptable on the M32R. */
281
282 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
283 {
284 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
285 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
286 S_MODES, C_MODES, A_MODES, A_MODES
287 };
288
289 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
290
291 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
292
293 static void
294 init_reg_tables (void)
295 {
296 int i;
297
298 for (i = 0; i < NUM_MACHINE_MODES; i++)
299 {
300 machine_mode m = (machine_mode) i;
301
302 switch (GET_MODE_CLASS (m))
303 {
304 case MODE_INT:
305 case MODE_PARTIAL_INT:
306 case MODE_COMPLEX_INT:
307 if (GET_MODE_SIZE (m) <= 4)
308 m32r_mode_class[i] = 1 << (int) S_MODE;
309 else if (GET_MODE_SIZE (m) == 8)
310 m32r_mode_class[i] = 1 << (int) D_MODE;
311 else if (GET_MODE_SIZE (m) == 16)
312 m32r_mode_class[i] = 1 << (int) T_MODE;
313 else if (GET_MODE_SIZE (m) == 32)
314 m32r_mode_class[i] = 1 << (int) O_MODE;
315 else
316 m32r_mode_class[i] = 0;
317 break;
318 case MODE_FLOAT:
319 case MODE_COMPLEX_FLOAT:
320 if (GET_MODE_SIZE (m) <= 4)
321 m32r_mode_class[i] = 1 << (int) SF_MODE;
322 else if (GET_MODE_SIZE (m) == 8)
323 m32r_mode_class[i] = 1 << (int) DF_MODE;
324 else if (GET_MODE_SIZE (m) == 16)
325 m32r_mode_class[i] = 1 << (int) TF_MODE;
326 else if (GET_MODE_SIZE (m) == 32)
327 m32r_mode_class[i] = 1 << (int) OF_MODE;
328 else
329 m32r_mode_class[i] = 0;
330 break;
331 case MODE_CC:
332 m32r_mode_class[i] = 1 << (int) C_MODE;
333 break;
334 default:
335 m32r_mode_class[i] = 0;
336 break;
337 }
338 }
339
340 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
341 {
342 if (GPR_P (i))
343 m32r_regno_reg_class[i] = GENERAL_REGS;
344 else if (i == ARG_POINTER_REGNUM)
345 m32r_regno_reg_class[i] = GENERAL_REGS;
346 else
347 m32r_regno_reg_class[i] = NO_REGS;
348 }
349 }
350 \f
351 /* M32R specific attribute support.
352
353 interrupt - for interrupt functions
354
355 model - select code model used to access object
356
357 small: addresses use 24 bits, use bl to make calls
358 medium: addresses use 32 bits, use bl to make calls
359 large: addresses use 32 bits, use seth/add3/jl to make calls
360
361 Grep for MODEL in m32r.h for more info. */
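/* Illustrative usage in user code (these declarations are hypothetical
   examples, not taken from this file):

       void handler (void) __attribute__ ((interrupt));
       extern int big_table[] __attribute__ ((model ("large")));
       static char buf[16] __attribute__ ((model ("small")));

   "small", "medium" and "large" (or the __small__/__medium__/__large__
   spellings accepted by init_idents below) select how the object's
   address is formed; "interrupt" marks an interrupt handler.  */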
362
363 static tree small_ident1;
364 static tree small_ident2;
365 static tree medium_ident1;
366 static tree medium_ident2;
367 static tree large_ident1;
368 static tree large_ident2;
369
370 static void
371 init_idents (void)
372 {
373 if (small_ident1 == 0)
374 {
375 small_ident1 = get_identifier ("small");
376 small_ident2 = get_identifier ("__small__");
377 medium_ident1 = get_identifier ("medium");
378 medium_ident2 = get_identifier ("__medium__");
379 large_ident1 = get_identifier ("large");
380 large_ident2 = get_identifier ("__large__");
381 }
382 }
383
384 /* Handle a "model" attribute; arguments as in
385 struct attribute_spec.handler. */
386 static tree
387 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
388 tree args, int flags ATTRIBUTE_UNUSED,
389 bool *no_add_attrs)
390 {
391 tree arg;
392
393 init_idents ();
394 arg = TREE_VALUE (args);
395
396 if (arg != small_ident1
397 && arg != small_ident2
398 && arg != medium_ident1
399 && arg != medium_ident2
400 && arg != large_ident1
401 && arg != large_ident2)
402 {
403 warning (OPT_Wattributes, "invalid argument of %qs attribute",
404 IDENTIFIER_POINTER (name));
405 *no_add_attrs = true;
406 }
407
408 return NULL_TREE;
409 }
410 \f
411 /* Encode section information of DECL, which is either a VAR_DECL,
412 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
413
414 For the M32R we want to record:
415
416 - whether the object lives in .sdata/.sbss.
417 - what code model should be used to access the object
418 */
419
420 static void
421 m32r_encode_section_info (tree decl, rtx rtl, int first)
422 {
423 int extra_flags = 0;
424 tree model_attr;
425 enum m32r_model model;
426
427 default_encode_section_info (decl, rtl, first);
428
429 if (!DECL_P (decl))
430 return;
431
432 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
433 if (model_attr)
434 {
435 tree id;
436
437 init_idents ();
438
439 id = TREE_VALUE (TREE_VALUE (model_attr));
440
441 if (id == small_ident1 || id == small_ident2)
442 model = M32R_MODEL_SMALL;
443 else if (id == medium_ident1 || id == medium_ident2)
444 model = M32R_MODEL_MEDIUM;
445 else if (id == large_ident1 || id == large_ident2)
446 model = M32R_MODEL_LARGE;
447 else
448 gcc_unreachable (); /* shouldn't happen */
449 }
450 else
451 {
452 if (TARGET_MODEL_SMALL)
453 model = M32R_MODEL_SMALL;
454 else if (TARGET_MODEL_MEDIUM)
455 model = M32R_MODEL_MEDIUM;
456 else if (TARGET_MODEL_LARGE)
457 model = M32R_MODEL_LARGE;
458 else
459 gcc_unreachable (); /* shouldn't happen */
460 }
461 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
462
463 if (extra_flags)
464 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
465 }
466
467 /* Only mark the object as being small data area addressable if
468 it hasn't been explicitly marked with a code model.
469
470 The user can explicitly put an object in the small data area with the
471 section attribute. If the object is in sdata/sbss and marked with a
472 code model, do both: put the object in .sdata and mark it as being
473 addressed with a specific code model (but don't mark it as being
474 addressed with an SDA reloc). This is OK and might be useful at times. If
475 the object doesn't fit the linker will give an error. */
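/* A worked example (illustrative; assumes the default -G value of
   SDATA_DEFAULT_SIZE, 8 bytes, from m32r.h): a writable `int counter;'
   with no section or model attribute passes the size check in
   m32r_in_small_data_p below and is treated as small data, while
   `const int limit = 10;' is rejected because it is TREE_READONLY.  */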
476
477 static bool
478 m32r_in_small_data_p (const_tree decl)
479 {
480 const char *section;
481
482 if (TREE_CODE (decl) != VAR_DECL)
483 return false;
484
485 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
486 return false;
487
488 section = DECL_SECTION_NAME (decl);
489 if (section)
490 {
491 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
492 return true;
493 }
494 else
495 {
496 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
497 {
498 int size = int_size_in_bytes (TREE_TYPE (decl));
499
500 if (size > 0 && size <= g_switch_value)
501 return true;
502 }
503 }
504
505 return false;
506 }
507
508 /* Do anything needed before RTL is emitted for each function. */
509
510 void
511 m32r_init_expanders (void)
512 {
513 /* ??? At one point there was code here. The function is left in
514 to make it easy to experiment. */
515 }
516 \f
517 int
518 call_operand (rtx op, machine_mode mode)
519 {
520 if (!MEM_P (op))
521 return 0;
522 op = XEXP (op, 0);
523 return call_address_operand (op, mode);
524 }
525
526 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
527
528 int
529 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
530 {
531 if (! TARGET_SDATA_USE)
532 return 0;
533
534 if (GET_CODE (op) == SYMBOL_REF)
535 return SYMBOL_REF_SMALL_P (op);
536
537 if (GET_CODE (op) == CONST
538 && GET_CODE (XEXP (op, 0)) == PLUS
539 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
540 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
541 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
542
543 return 0;
544 }
545
546 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
547
548 int
549 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
550 {
551 rtx sym;
552
553 if (flag_pic)
554 return 0;
555
556 if (GET_CODE (op) == LABEL_REF)
557 return TARGET_ADDR24;
558
559 if (GET_CODE (op) == SYMBOL_REF)
560 sym = op;
561 else if (GET_CODE (op) == CONST
562 && GET_CODE (XEXP (op, 0)) == PLUS
563 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
564 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
565 sym = XEXP (XEXP (op, 0), 0);
566 else
567 return 0;
568
569 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
570 return 1;
571
572 if (TARGET_ADDR24
573 && (CONSTANT_POOL_ADDRESS_P (sym)
574 || LIT_NAME_P (XSTR (sym, 0))))
575 return 1;
576
577 return 0;
578 }
579
580 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
581
582 int
583 addr32_operand (rtx op, machine_mode mode)
584 {
585 rtx sym;
586
587 if (GET_CODE (op) == LABEL_REF)
588 return TARGET_ADDR32;
589
590 if (GET_CODE (op) == SYMBOL_REF)
591 sym = op;
592 else if (GET_CODE (op) == CONST
593 && GET_CODE (XEXP (op, 0)) == PLUS
594 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
595 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
596 && ! flag_pic)
597 sym = XEXP (XEXP (op, 0), 0);
598 else
599 return 0;
600
601 return (! addr24_operand (sym, mode)
602 && ! small_data_operand (sym, mode));
603 }
604
605 /* Return 1 if OP is a function that can be called with the `bl' insn. */
606
607 int
608 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
609 {
610 if (flag_pic)
611 return 1;
612
613 if (GET_CODE (op) == SYMBOL_REF)
614 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
615
616 return TARGET_CALL26;
617 }
618
619 /* Return 1 if OP is a DImode const we want to handle inline.
620 This must match the code in the movdi pattern.
621 It is used by the 'G' constraint. */
622
623 int
624 easy_di_const (rtx op)
625 {
626 rtx high_rtx, low_rtx;
627 HOST_WIDE_INT high, low;
628
629 split_double (op, &high_rtx, &low_rtx);
630 high = INTVAL (high_rtx);
631 low = INTVAL (low_rtx);
632 /* Pick constants loadable with 2 16-bit `ldi' insns. */
633 if (high >= -128 && high <= 127
634 && low >= -128 && low <= 127)
635 return 1;
636 return 0;
637 }
638
639 /* Return 1 if OP is a DFmode const we want to handle inline.
640 This must match the code in the movdf pattern.
641 It is used by the 'H' constraint. */
642
643 int
644 easy_df_const (rtx op)
645 {
646 REAL_VALUE_TYPE r;
647 long l[2];
648
649 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
650 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
651 if (l[0] == 0 && l[1] == 0)
652 return 1;
653 if ((l[0] & 0xffff) == 0 && l[1] == 0)
654 return 1;
655 return 0;
656 }
657
658 /* Return 1 if OP is (mem (reg ...)).
659 This is used in insn length calcs. */
660
661 int
662 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
663 {
664 return MEM_P (op) && REG_P (XEXP (op, 0));
665 }
666
667 /* Return nonzero if TYPE must be passed by indirect reference. */
668
669 static bool
670 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
671 machine_mode mode, const_tree type,
672 bool named ATTRIBUTE_UNUSED)
673 {
674 int size;
675
676 if (type)
677 size = int_size_in_bytes (type);
678 else
679 size = GET_MODE_SIZE (mode);
680
681 return (size < 0 || size > 8);
682 }
683 \f
684 /* Comparisons. */
685
686 /* X and Y are two things to compare using CODE. Emit the compare insn and
687 return the rtx for compare [arg0 of the if_then_else].
688 If need_compare is true then the comparison insn must be generated, rather
689 than being subsumed into the following branch instruction. */
690
691 rtx
692 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
693 {
694 enum rtx_code compare_code;
695 enum rtx_code branch_code;
696 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
697 int must_swap = 0;
698
699 switch (code)
700 {
701 case EQ: compare_code = EQ; branch_code = NE; break;
702 case NE: compare_code = EQ; branch_code = EQ; break;
703 case LT: compare_code = LT; branch_code = NE; break;
704 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
705 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
706 case GE: compare_code = LT; branch_code = EQ; break;
707 case LTU: compare_code = LTU; branch_code = NE; break;
708 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
709 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
710 case GEU: compare_code = LTU; branch_code = EQ; break;
711
712 default:
713 gcc_unreachable ();
714 }
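  /* Illustrative reading of the mapping above (assuming the cmp patterns
     set the condition bit when their first operand is less than their
     second): LE, for instance, becomes an LT compare with the operands
     swapped (must_swap) and a branch_code of EQ, because !(y < x) is
     exactly x <= y; GT works the same way with branch_code NE.  */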
715
716 if (need_compare)
717 {
718 switch (compare_code)
719 {
720 case EQ:
721 if (satisfies_constraint_P (y) /* Reg equal to small const. */
722 && y != const0_rtx)
723 {
724 rtx tmp = gen_reg_rtx (SImode);
725
726 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
727 x = tmp;
728 y = const0_rtx;
729 }
730 else if (CONSTANT_P (y)) /* Reg equal to const. */
731 {
732 rtx tmp = force_reg (GET_MODE (x), y);
733 y = tmp;
734 }
735
736 if (register_operand (y, SImode) /* Reg equal to reg. */
737 || y == const0_rtx) /* Reg equal to zero. */
738 {
739 emit_insn (gen_cmp_eqsi_insn (x, y));
740
741 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
742 }
743 break;
744
745 case LT:
746 if (register_operand (y, SImode)
747 || satisfies_constraint_P (y))
748 {
749 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
750
751 switch (code)
752 {
753 case LT:
754 emit_insn (gen_cmp_ltsi_insn (x, y));
755 code = EQ;
756 break;
757 case LE:
758 if (y == const0_rtx)
759 tmp = const1_rtx;
760 else
761 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
762 emit_insn (gen_cmp_ltsi_insn (x, tmp));
763 code = EQ;
764 break;
765 case GT:
766 if (CONST_INT_P (y))
767 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
768 else
769 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
770 emit_insn (gen_cmp_ltsi_insn (x, tmp));
771 code = NE;
772 break;
773 case GE:
774 emit_insn (gen_cmp_ltsi_insn (x, y));
775 code = NE;
776 break;
777 default:
778 gcc_unreachable ();
779 }
780
781 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
782 }
783 break;
784
785 case LTU:
786 if (register_operand (y, SImode)
787 || satisfies_constraint_P (y))
788 {
789 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
790
791 switch (code)
792 {
793 case LTU:
794 emit_insn (gen_cmp_ltusi_insn (x, y));
795 code = EQ;
796 break;
797 case LEU:
798 if (y == const0_rtx)
799 tmp = const1_rtx;
800 else
801 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
802 emit_insn (gen_cmp_ltusi_insn (x, tmp));
803 code = EQ;
804 break;
805 case GTU:
806 if (CONST_INT_P (y))
807 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
808 else
809 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
810 emit_insn (gen_cmp_ltusi_insn (x, tmp));
811 code = NE;
812 break;
813 case GEU:
814 emit_insn (gen_cmp_ltusi_insn (x, y));
815 code = NE;
816 break;
817 default:
818 gcc_unreachable ();
819 }
820
821 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
822 }
823 break;
824
825 default:
826 gcc_unreachable ();
827 }
828 }
829 else
830 {
831 /* Reg/reg equal comparison. */
832 if (compare_code == EQ
833 && register_operand (y, SImode))
834 return gen_rtx_fmt_ee (code, CCmode, x, y);
835
836 /* Reg/zero signed comparison. */
837 if ((compare_code == EQ || compare_code == LT)
838 && y == const0_rtx)
839 return gen_rtx_fmt_ee (code, CCmode, x, y);
840
841 /* Reg/smallconst equal comparison. */
842 if (compare_code == EQ
843 && satisfies_constraint_P (y))
844 {
845 rtx tmp = gen_reg_rtx (SImode);
846
847 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
848 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
849 }
850
851 /* Reg/const equal comparison. */
852 if (compare_code == EQ
853 && CONSTANT_P (y))
854 {
855 rtx tmp = force_reg (GET_MODE (x), y);
856
857 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
858 }
859 }
860
861 if (CONSTANT_P (y))
862 {
863 if (must_swap)
864 y = force_reg (GET_MODE (x), y);
865 else
866 {
867 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
868
869 if (! ok_const)
870 y = force_reg (GET_MODE (x), y);
871 }
872 }
873
874 switch (compare_code)
875 {
876 case EQ :
877 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
878 break;
879 case LT :
880 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
881 break;
882 case LTU :
883 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
884 break;
885
886 default:
887 gcc_unreachable ();
888 }
889
890 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
891 }
892
893 bool
894 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
895 {
896 machine_mode mode = GET_MODE (op0);
897
898 gcc_assert (mode == SImode);
899 switch (code)
900 {
901 case EQ:
902 if (!register_operand (op1, mode))
903 op1 = force_reg (mode, op1);
904
905 if (TARGET_M32RX || TARGET_M32R2)
906 {
907 if (!reg_or_zero_operand (op2, mode))
908 op2 = force_reg (mode, op2);
909
910 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
911 return true;
912 }
913 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
914 {
915 emit_insn (gen_seq_zero_insn (op0, op1));
916 return true;
917 }
918
919 if (!reg_or_eq_int16_operand (op2, mode))
920 op2 = force_reg (mode, op2);
921
922 emit_insn (gen_seq_insn (op0, op1, op2));
923 return true;
924
925 case NE:
926 if (!CONST_INT_P (op2)
927 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
928 {
929 rtx reg;
930
931 if (reload_completed || reload_in_progress)
932 return false;
933
934 reg = gen_reg_rtx (SImode);
935 emit_insn (gen_xorsi3 (reg, op1, op2));
936 op1 = reg;
937
938 if (!register_operand (op1, mode))
939 op1 = force_reg (mode, op1);
940
941 emit_insn (gen_sne_zero_insn (op0, op1));
942 return true;
943 }
944 return false;
945
946 case LT:
947 case GT:
948 if (code == GT)
949 {
950 rtx tmp = op2;
951 op2 = op1;
952 op1 = tmp;
953 code = LT;
954 }
955
956 if (!register_operand (op1, mode))
957 op1 = force_reg (mode, op1);
958
959 if (!reg_or_int16_operand (op2, mode))
960 op2 = force_reg (mode, op2);
961
962 emit_insn (gen_slt_insn (op0, op1, op2));
963 return true;
964
965 case LTU:
966 case GTU:
967 if (code == GTU)
968 {
969 rtx tmp = op2;
970 op2 = op1;
971 op1 = tmp;
972 code = LTU;
973 }
974
975 if (!register_operand (op1, mode))
976 op1 = force_reg (mode, op1);
977
978 if (!reg_or_int16_operand (op2, mode))
979 op2 = force_reg (mode, op2);
980
981 emit_insn (gen_sltu_insn (op0, op1, op2));
982 return true;
983
984 case GE:
985 case GEU:
986 if (!register_operand (op1, mode))
987 op1 = force_reg (mode, op1);
988
989 if (!reg_or_int16_operand (op2, mode))
990 op2 = force_reg (mode, op2);
991
992 if (code == GE)
993 emit_insn (gen_sge_insn (op0, op1, op2));
994 else
995 emit_insn (gen_sgeu_insn (op0, op1, op2));
996 return true;
997
998 case LE:
999 case LEU:
1000 if (!register_operand (op1, mode))
1001 op1 = force_reg (mode, op1);
1002
1003 if (CONST_INT_P (op2))
1004 {
1005 HOST_WIDE_INT value = INTVAL (op2);
1006 if (value >= 2147483647)
1007 {
1008 emit_move_insn (op0, const1_rtx);
1009 return true;
1010 }
1011
1012 op2 = GEN_INT (value + 1);
1013 if (value < -32768 || value >= 32767)
1014 op2 = force_reg (mode, op2);
1015
1016 if (code == LEU)
1017 emit_insn (gen_sltu_insn (op0, op1, op2));
1018 else
1019 emit_insn (gen_slt_insn (op0, op1, op2));
1020 return true;
1021 }
1022
1023 if (!register_operand (op2, mode))
1024 op2 = force_reg (mode, op2);
1025
1026 if (code == LEU)
1027 emit_insn (gen_sleu_insn (op0, op1, op2));
1028 else
1029 emit_insn (gen_sle_insn (op0, op1, op2));
1030 return true;
1031
1032 default:
1033 gcc_unreachable ();
1034 }
1035 }
1036
1037 \f
1038 /* Split a 2 word move (DI or DF) into component parts. */
1039
1040 rtx
1041 gen_split_move_double (rtx operands[])
1042 {
1043 machine_mode mode = GET_MODE (operands[0]);
1044 rtx dest = operands[0];
1045 rtx src = operands[1];
1046 rtx val;
1047
1048 /* We might have (SUBREG (MEM)) here, so just get rid of the
1049 subregs to make this code simpler. It is safe to call
1050 alter_subreg any time after reload. */
1051 if (GET_CODE (dest) == SUBREG)
1052 alter_subreg (&dest, true);
1053 if (GET_CODE (src) == SUBREG)
1054 alter_subreg (&src, true);
1055
1056 start_sequence ();
1057 if (REG_P (dest))
1058 {
1059 int dregno = REGNO (dest);
1060
1061 /* Reg = reg. */
1062 if (REG_P (src))
1063 {
1064 int sregno = REGNO (src);
1065
1066 int reverse = (dregno == sregno + 1);
1067
1068 /* We normally copy the low-numbered register first. However, if
1069 the first register of operand 0 is the same as the second register of
1070 operand 1, we must copy in the opposite order. */
1071 emit_insn (gen_rtx_SET (VOIDmode,
1072 operand_subword (dest, reverse, TRUE, mode),
1073 operand_subword (src, reverse, TRUE, mode)));
1074
1075 emit_insn (gen_rtx_SET (VOIDmode,
1076 operand_subword (dest, !reverse, TRUE, mode),
1077 operand_subword (src, !reverse, TRUE, mode)));
1078 }
1079
1080 /* Reg = constant. */
1081 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1082 {
1083 rtx words[2];
1084 split_double (src, &words[0], &words[1]);
1085 emit_insn (gen_rtx_SET (VOIDmode,
1086 operand_subword (dest, 0, TRUE, mode),
1087 words[0]));
1088
1089 emit_insn (gen_rtx_SET (VOIDmode,
1090 operand_subword (dest, 1, TRUE, mode),
1091 words[1]));
1092 }
1093
1094 /* Reg = mem. */
1095 else if (MEM_P (src))
1096 {
1097 /* If the high-address word is used in the address, we must load it
1098 last. Otherwise, load it first. */
1099 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1100
1101 /* We used to optimize loads from single registers as
1102
1103 ld r1,r3+; ld r2,r3
1104
1105 if r3 were not used subsequently. However, the REG_NOTES aren't
1106 propagated correctly by the reload phase, and it can cause bad
1107 code to be generated. We could still try:
1108
1109 ld r1,r3+; ld r2,r3; addi r3,-4
1110
1111 which saves 2 bytes and doesn't force longword alignment. */
1112 emit_insn (gen_rtx_SET (VOIDmode,
1113 operand_subword (dest, reverse, TRUE, mode),
1114 adjust_address (src, SImode,
1115 reverse * UNITS_PER_WORD)));
1116
1117 emit_insn (gen_rtx_SET (VOIDmode,
1118 operand_subword (dest, !reverse, TRUE, mode),
1119 adjust_address (src, SImode,
1120 !reverse * UNITS_PER_WORD)));
1121 }
1122 else
1123 gcc_unreachable ();
1124 }
1125
1126 /* Mem = reg. */
1127 /* We used to optimize stores through single registers as
1128
1129 st r1,r3; st r2,+r3
1130
1131 if r3 were not used subsequently. However, the REG_NOTES aren't
1132 propagated correctly by the reload phase, and it can cause bad
1133 code to be generated. We could still try:
1134
1135 st r1,r3; st r2,+r3; addi r3,-4
1136
1137 which saves 2 bytes and doesn't force longword alignment. */
1138 else if (MEM_P (dest) && REG_P (src))
1139 {
1140 emit_insn (gen_rtx_SET (VOIDmode,
1141 adjust_address (dest, SImode, 0),
1142 operand_subword (src, 0, TRUE, mode)));
1143
1144 emit_insn (gen_rtx_SET (VOIDmode,
1145 adjust_address (dest, SImode, UNITS_PER_WORD),
1146 operand_subword (src, 1, TRUE, mode)));
1147 }
1148
1149 else
1150 gcc_unreachable ();
1151
1152 val = get_insns ();
1153 end_sequence ();
1154 return val;
1155 }
1156
1157 \f
1158 static int
1159 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1160 tree type, bool named ATTRIBUTE_UNUSED)
1161 {
1162 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1163
1164 int words;
1165 unsigned int size =
1166 (((mode == BLKmode && type)
1167 ? (unsigned int) int_size_in_bytes (type)
1168 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1169 / UNITS_PER_WORD;
1170
1171 if (*cum >= M32R_MAX_PARM_REGS)
1172 words = 0;
1173 else if (*cum + size > M32R_MAX_PARM_REGS)
1174 words = (*cum + size) - M32R_MAX_PARM_REGS;
1175 else
1176 words = 0;
1177
1178 return words * UNITS_PER_WORD;
1179 }
1180
1181 /* The ROUND_ADVANCE* macros are local to this file. */
1182 /* Round SIZE up to a word boundary. */
1183 #define ROUND_ADVANCE(SIZE) \
1184 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1185
1186 /* Round arg MODE/TYPE up to the next word boundary. */
1187 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1188 ((MODE) == BLKmode \
1189 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1190 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1191
1192 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1193 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1194
1195 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1196 a reg. This includes arguments that have to be passed by reference as the
1197 pointer to them is passed in a reg if one is available (and that is what
1198 we're given).
1199 This macro is only used in this file. */
1200 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1201 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
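/* A worked example (illustrative; assumes M32R_MAX_PARM_REGS == 4 and
   UNITS_PER_WORD == 4, per m32r.h, with parameter registers r0-r3): for
   a 6-byte BLKmode argument arriving with CUM == 3, ROUND_ADVANCE_ARG is
   (6 + 3) / 4 == 2 words and PASS_IN_REG_P holds (3 < 4), so the argument
   starts in r3 and m32r_arg_partial_bytes above reports (3 + 2) - 4 == 1
   word, i.e. 4 bytes, passed on the stack.  */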
1202
1203 /* Determine where to put an argument to a function.
1204 Value is zero to push the argument on the stack,
1205 or a hard register in which to store the argument.
1206
1207 MODE is the argument's machine mode.
1208 TYPE is the data type of the argument (as a tree).
1209 This is null for libcalls where that information may
1210 not be available.
1211 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1212 the preceding args and about the function being called.
1213 NAMED is nonzero if this argument is a named parameter
1214 (otherwise it is an extra parameter matching an ellipsis). */
1215 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1216 and the rest are pushed. */
1217
1218 static rtx
1219 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1220 const_tree type ATTRIBUTE_UNUSED,
1221 bool named ATTRIBUTE_UNUSED)
1222 {
1223 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1224
1225 return (PASS_IN_REG_P (*cum, mode, type)
1226 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1227 : NULL_RTX);
1228 }
1229
1230 /* Update the data in CUM to advance over an argument
1231 of mode MODE and data type TYPE.
1232 (TYPE is null for libcalls where that information may not be available.) */
1233
1234 static void
1235 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1236 const_tree type, bool named ATTRIBUTE_UNUSED)
1237 {
1238 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1239
1240 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1241 + ROUND_ADVANCE_ARG (mode, type));
1242 }
1243
1244 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1245
1246 static bool
1247 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1248 {
1249 cumulative_args_t dummy = pack_cumulative_args (NULL);
1250
1251 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1252 }
1253
1254 /* Worker function for TARGET_FUNCTION_VALUE. */
1255
1256 static rtx
1257 m32r_function_value (const_tree valtype,
1258 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1259 bool outgoing ATTRIBUTE_UNUSED)
1260 {
1261 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1262 }
1263
1264 /* Worker function for TARGET_LIBCALL_VALUE. */
1265
1266 static rtx
1267 m32r_libcall_value (machine_mode mode,
1268 const_rtx fun ATTRIBUTE_UNUSED)
1269 {
1270 return gen_rtx_REG (mode, 0);
1271 }
1272
1273 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1274
1275 ??? What about r1 in DI/DF values. */
1276
1277 static bool
1278 m32r_function_value_regno_p (const unsigned int regno)
1279 {
1280 return (regno == 0);
1281 }
1282
1283 /* Do any needed setup for a variadic function. For the M32R, we must
1284 create a register parameter block, and then copy any anonymous arguments
1285 in registers to memory.
1286
1287 CUM has not been updated for the last named argument which has type TYPE
1288 and mode MODE, and we rely on this fact. */
1289
1290 static void
1291 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1292 tree type, int *pretend_size, int no_rtl)
1293 {
1294 int first_anon_arg;
1295
1296 if (no_rtl)
1297 return;
1298
1299 /* All BLKmode values are passed by reference. */
1300 gcc_assert (mode != BLKmode);
1301
1302 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1303 + ROUND_ADVANCE_ARG (mode, type));
1304
1305 if (first_anon_arg < M32R_MAX_PARM_REGS)
1306 {
1307 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1308 int first_reg_offset = first_anon_arg;
1309 /* Size in words to "pretend" allocate. */
1310 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1311 rtx regblock;
1312
1313 regblock = gen_frame_mem (BLKmode,
1314 plus_constant (Pmode, arg_pointer_rtx,
1315 FIRST_PARM_OFFSET (0)));
1316 set_mem_alias_set (regblock, get_varargs_alias_set ());
1317 move_block_from_reg (first_reg_offset, regblock, size);
1318
1319 *pretend_size = (size * UNITS_PER_WORD);
1320 }
1321 }
1322
1323 \f
1324 /* Return true if INSN is a real, instruction-bearing insn. */
1325
1326 static int
1327 m32r_is_insn (rtx insn)
1328 {
1329 return (NONDEBUG_INSN_P (insn)
1330 && GET_CODE (PATTERN (insn)) != USE
1331 && GET_CODE (PATTERN (insn)) != CLOBBER);
1332 }
1333
1334 /* Increase the priority of long instructions so that the
1335 short instructions are scheduled ahead of the long ones. */
1336
1337 static int
1338 m32r_adjust_priority (rtx_insn *insn, int priority)
1339 {
1340 if (m32r_is_insn (insn)
1341 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1342 priority <<= 3;
1343
1344 return priority;
1345 }
1346
1347 \f
1348 /* Indicate how many instructions can be issued at the same time.
1349 This is sort of a lie. The m32r can issue only 1 long insn at
1350 once, but it can issue 2 short insns. The default therefore is
1351 set at 2, but this can be overridden by the command line option
1352 -missue-rate=1. */
1353
1354 static int
1355 m32r_issue_rate (void)
1356 {
1357 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1358 }
1359 \f
1360 /* Cost functions. */
1361 /* Memory is 3 times as expensive as registers.
1362 ??? Is that the right way to look at it? */
1363
1364 static int
1365 m32r_memory_move_cost (machine_mode mode,
1366 reg_class_t rclass ATTRIBUTE_UNUSED,
1367 bool in ATTRIBUTE_UNUSED)
1368 {
1369 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1370 return 6;
1371 else
1372 return 12;
1373 }
1374
1375 static bool
1376 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1377 int opno ATTRIBUTE_UNUSED, int *total,
1378 bool speed ATTRIBUTE_UNUSED)
1379 {
1380 switch (code)
1381 {
1382 /* Small integers are as cheap as registers. 4-byte values can be
1383 fetched as immediate constants - let's give that the cost of an
1384 extra insn. */
1385 case CONST_INT:
1386 if (INT16_P (INTVAL (x)))
1387 {
1388 *total = 0;
1389 return true;
1390 }
1391 /* FALLTHRU */
1392
1393 case CONST:
1394 case LABEL_REF:
1395 case SYMBOL_REF:
1396 *total = COSTS_N_INSNS (1);
1397 return true;
1398
1399 case CONST_DOUBLE:
1400 {
1401 rtx high, low;
1402
1403 split_double (x, &high, &low);
1404 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1405 + !INT16_P (INTVAL (low)));
1406 return true;
1407 }
1408
1409 case MULT:
1410 *total = COSTS_N_INSNS (3);
1411 return true;
1412
1413 case DIV:
1414 case UDIV:
1415 case MOD:
1416 case UMOD:
1417 *total = COSTS_N_INSNS (10);
1418 return true;
1419
1420 default:
1421 return false;
1422 }
1423 }
1424 \f
1425 /* Type of function DECL.
1426
1427 The result is cached. To reset the cache at the end of a function,
1428 call with DECL = NULL_TREE. */
1429
1430 enum m32r_function_type
1431 m32r_compute_function_type (tree decl)
1432 {
1433 /* Cached value. */
1434 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1435 /* Last function we were called for. */
1436 static tree last_fn = NULL_TREE;
1437
1438 /* Resetting the cached value? */
1439 if (decl == NULL_TREE)
1440 {
1441 fn_type = M32R_FUNCTION_UNKNOWN;
1442 last_fn = NULL_TREE;
1443 return fn_type;
1444 }
1445
1446 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1447 return fn_type;
1448
1449 /* Compute function type. */
1450 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1451 ? M32R_FUNCTION_INTERRUPT
1452 : M32R_FUNCTION_NORMAL);
1453
1454 last_fn = decl;
1455 return fn_type;
1456 }
1457 \f/* Function prologue/epilogue handlers. */
1458
1459 /* M32R stack frames look like:
1460
1461 Before call After call
1462 +-----------------------+ +-----------------------+
1463 | | | |
1464 high | local variables, | | local variables, |
1465 mem | reg save area, etc. | | reg save area, etc. |
1466 | | | |
1467 +-----------------------+ +-----------------------+
1468 | | | |
1469 | arguments on stack. | | arguments on stack. |
1470 | | | |
1471 SP+0->+-----------------------+ +-----------------------+
1472 | reg parm save area, |
1473 | only created for |
1474 | variable argument |
1475 | functions |
1476 +-----------------------+
1477 | previous frame ptr |
1478 +-----------------------+
1479 | |
1480 | register save area |
1481 | |
1482 +-----------------------+
1483 | return address |
1484 +-----------------------+
1485 | |
1486 | local variables |
1487 | |
1488 +-----------------------+
1489 | |
1490 | alloca allocations |
1491 | |
1492 +-----------------------+
1493 | |
1494 low | arguments on stack |
1495 memory | |
1496 SP+0->+-----------------------+
1497
1498 Notes:
1499 1) The "reg parm save area" does not exist for non variable argument fns.
1500 2) The "reg parm save area" can be eliminated completely if we saved regs
1501 containing anonymous args separately but that complicates things too
1502 much (so it's not done).
1503 3) The return address is saved after the register save area so as to have as
1504 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1505
1506 /* Structure to be filled in by m32r_compute_frame_size with register
1507 save masks, and offsets for the current function. */
1508 struct m32r_frame_info
1509 {
1510 unsigned int total_size; /* # bytes that the entire frame takes up. */
1511 unsigned int extra_size; /* # bytes of extra stuff. */
1512 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1513 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1514 unsigned int reg_size; /* # bytes needed to store regs. */
1515 unsigned int var_size; /* # bytes that variables take up. */
1516 unsigned int gmask; /* Mask of saved gp registers. */
1517 unsigned int save_fp; /* Nonzero if fp must be saved. */
1518 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1519 int initialized; /* Nonzero if frame size already calculated. */
1520 };
1521
1522 /* Current frame information calculated by m32r_compute_frame_size. */
1523 static struct m32r_frame_info current_frame_info;
1524
1525 /* Zero structure to initialize current_frame_info. */
1526 static struct m32r_frame_info zero_frame_info;
1527
1528 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1529 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1530
1531 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1532 The return address and frame pointer are treated separately.
1533 Don't consider them here. */
1534 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1535 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1536 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1537
1538 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1539 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1540
1541 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1542 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1543
1544 /* Return the bytes needed to compute the frame pointer from the current
1545 stack pointer.
1546
1547 SIZE is the size needed for local variables. */
1548
1549 unsigned int
1550 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1551 {
1552 unsigned int regno;
1553 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1554 unsigned int reg_size;
1555 unsigned int gmask;
1556 enum m32r_function_type fn_type;
1557 int interrupt_p;
1558 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1559 | crtl->profile);
1560
1561 var_size = M32R_STACK_ALIGN (size);
1562 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1563 pretend_size = crtl->args.pretend_args_size;
1564 extra_size = FIRST_PARM_OFFSET (0);
1565 total_size = extra_size + pretend_size + args_size + var_size;
1566 reg_size = 0;
1567 gmask = 0;
1568
1569 /* See if this is an interrupt handler. Call used registers must be saved
1570 for them too. */
1571 fn_type = m32r_compute_function_type (current_function_decl);
1572 interrupt_p = M32R_INTERRUPT_P (fn_type);
1573
1574 /* Calculate space needed for registers. */
1575 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1576 {
1577 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1578 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1579 {
1580 reg_size += UNITS_PER_WORD;
1581 gmask |= 1 << regno;
1582 }
1583 }
1584
1585 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1586 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1587
1588 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1589 * UNITS_PER_WORD);
1590 total_size += reg_size;
1591
1592 /* ??? Not sure this is necessary, and I don't think the epilogue
1593 handler will do the right thing if this changes total_size. */
1594 total_size = M32R_STACK_ALIGN (total_size);
1595
1596 /* frame_size = total_size - (pretend_size + reg_size); */
1597
1598 /* Save computed information. */
1599 current_frame_info.total_size = total_size;
1600 current_frame_info.extra_size = extra_size;
1601 current_frame_info.pretend_size = pretend_size;
1602 current_frame_info.var_size = var_size;
1603 current_frame_info.args_size = args_size;
1604 current_frame_info.reg_size = reg_size;
1605 current_frame_info.gmask = gmask;
1606 current_frame_info.initialized = reload_completed;
1607
1608 /* Ok, we're done. */
1609 return total_size;
1610 }
1611
1612 /* Worker function for TARGET_CAN_ELIMINATE. */
1613
1614 bool
1615 m32r_can_eliminate (const int from, const int to)
1616 {
1617 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1618 ? ! frame_pointer_needed
1619 : true);
1620 }
1621
1622 \f
1623 /* The table we use to reference PIC data. */
1624 static rtx global_offset_table;
1625
1626 static void
1627 m32r_reload_lr (rtx sp, int size)
1628 {
1629 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1630
1631 if (size == 0)
1632 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1633 else if (size < 32768)
1634 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1635 gen_rtx_PLUS (Pmode, sp,
1636 GEN_INT (size)))));
1637 else
1638 {
1639 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1640
1641 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1642 emit_insn (gen_addsi3 (tmp, tmp, sp));
1643 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1644 }
1645
1646 emit_use (lr);
1647 }
1648
1649 void
1650 m32r_load_pic_register (void)
1651 {
1652 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1653 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1654 GEN_INT (TARGET_MODEL_SMALL)));
1655
1656 /* Need to emit this whether or not we obey regdecls,
1657 since setjmp/longjmp can cause life info to screw up. */
1658 emit_use (pic_offset_table_rtx);
1659 }
1660
1661 /* Expand the m32r prologue as a series of insns. */
1662
1663 void
1664 m32r_expand_prologue (void)
1665 {
1666 int regno;
1667 int frame_size;
1668 unsigned int gmask;
1669 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1670 | crtl->profile);
1671
1672 if (! current_frame_info.initialized)
1673 m32r_compute_frame_size (get_frame_size ());
1674
1675 gmask = current_frame_info.gmask;
1676
1677 /* These cases shouldn't happen. Catch them now. */
1678 gcc_assert (current_frame_info.total_size || !gmask);
1679
1680 /* Allocate space for register arguments if this is a variadic function. */
1681 if (current_frame_info.pretend_size != 0)
1682 {
1683 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1684 the wrong result on a 64-bit host. */
1685 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1686 emit_insn (gen_addsi3 (stack_pointer_rtx,
1687 stack_pointer_rtx,
1688 GEN_INT (-pretend_size)));
1689 }
1690
1691 /* Save any registers we need to and set up fp. */
1692 if (current_frame_info.save_fp)
1693 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1694
1695 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1696
1697 /* Save any needed call-saved regs (and call-used if this is an
1698 interrupt handler). */
1699 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1700 {
1701 if ((gmask & (1 << regno)) != 0)
1702 emit_insn (gen_movsi_push (stack_pointer_rtx,
1703 gen_rtx_REG (Pmode, regno)));
1704 }
1705
1706 if (current_frame_info.save_lr)
1707 emit_insn (gen_movsi_push (stack_pointer_rtx,
1708 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1709
1710 /* Allocate the stack frame. */
1711 frame_size = (current_frame_info.total_size
1712 - (current_frame_info.pretend_size
1713 + current_frame_info.reg_size));
1714
1715 if (frame_size == 0)
1716 ; /* Nothing to do. */
1717 else if (frame_size <= 32768)
1718 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1719 GEN_INT (-frame_size)));
1720 else
1721 {
1722 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1723
1724 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1725 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1726 }
1727
1728 if (frame_pointer_needed)
1729 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1730
1731 if (crtl->profile)
1732 /* Push lr for mcount (form_pc, x). */
1733 emit_insn (gen_movsi_push (stack_pointer_rtx,
1734 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1735
1736 if (pic_reg_used)
1737 {
1738 m32r_load_pic_register ();
1739 m32r_reload_lr (stack_pointer_rtx,
1740 (crtl->profile ? 0 : frame_size));
1741 }
1742
1743 if (crtl->profile && !pic_reg_used)
1744 emit_insn (gen_blockage ());
1745 }
1746
1747 \f
1748 /* Set up the stack and frame pointer (if desired) for the function.
1749 Note, if this is changed, you need to mirror the changes in
1750 m32r_compute_frame_size which calculates the prolog size. */
1751
1752 static void
1753 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1754 {
1755 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1756
1757 /* If this is an interrupt handler, mark it as such. */
1758 if (M32R_INTERRUPT_P (fn_type))
1759 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1760
1761 if (! current_frame_info.initialized)
1762 m32r_compute_frame_size (size);
1763
1764 /* This is only for the human reader. */
1765 fprintf (file,
1766 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1767 ASM_COMMENT_START,
1768 current_frame_info.var_size,
1769 current_frame_info.reg_size / 4,
1770 current_frame_info.args_size,
1771 current_frame_info.extra_size);
1772 }
1773 \f
1774 /* Output RTL to pop register REGNO from the stack. */
1775
1776 static void
1777 pop (int regno)
1778 {
1779 rtx x;
1780
1781 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1782 stack_pointer_rtx));
1783 add_reg_note (x, REG_INC, stack_pointer_rtx);
1784 }
1785
1786 /* Expand the m32r epilogue as a series of insns. */
1787
1788 void
1789 m32r_expand_epilogue (void)
1790 {
1791 int regno;
1792 int noepilogue = FALSE;
1793 int total_size;
1794
1795 gcc_assert (current_frame_info.initialized);
1796 total_size = current_frame_info.total_size;
1797
1798 if (total_size == 0)
1799 {
1800 rtx insn = get_last_insn ();
1801
1802 /* If the last insn was a BARRIER, we don't have to write any code
1803 because a jump (aka return) was put there. */
1804 if (insn && NOTE_P (insn))
1805 insn = prev_nonnote_insn (insn);
1806 if (insn && BARRIER_P (insn))
1807 noepilogue = TRUE;
1808 }
1809
1810 if (!noepilogue)
1811 {
1812 unsigned int var_size = current_frame_info.var_size;
1813 unsigned int args_size = current_frame_info.args_size;
1814 unsigned int gmask = current_frame_info.gmask;
1815 int can_trust_sp_p = !cfun->calls_alloca;
1816
1817 if (flag_exceptions)
1818 emit_insn (gen_blockage ());
1819
1820 /* The first thing to do is point the sp at the bottom of the register
1821 save area. */
1822 if (can_trust_sp_p)
1823 {
1824 unsigned int reg_offset = var_size + args_size;
1825
1826 if (reg_offset == 0)
1827 ; /* Nothing to do. */
1828 else if (reg_offset < 32768)
1829 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1830 GEN_INT (reg_offset)));
1831 else
1832 {
1833 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1834
1835 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1836 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1837 tmp));
1838 }
1839 }
1840 else if (frame_pointer_needed)
1841 {
1842 unsigned int reg_offset = var_size + args_size;
1843
1844 if (reg_offset == 0)
1845 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1846 else if (reg_offset < 32768)
1847 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1848 GEN_INT (reg_offset)));
1849 else
1850 {
1851 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1852
1853 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1854 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1855 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1856 tmp));
1857 }
1858 }
1859 else
1860 gcc_unreachable ();
1861
1862 if (current_frame_info.save_lr)
1863 pop (RETURN_ADDR_REGNUM);
1864
1865 /* Restore any saved registers, in reverse order of course. */
1866 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1867 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1868 {
1869 if ((gmask & (1L << regno)) != 0)
1870 pop (regno);
1871 }
1872
1873 if (current_frame_info.save_fp)
1874 pop (FRAME_POINTER_REGNUM);
1875
1876 /* Remove varargs area if present. */
1877 if (current_frame_info.pretend_size != 0)
1878 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1879 GEN_INT (current_frame_info.pretend_size)));
1880
1881 emit_insn (gen_blockage ());
1882 }
1883 }
1884
1885 /* Do any necessary cleanup after a function to restore stack, frame,
1886 and regs. */
1887
1888 static void
1889 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1890 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1891 {
1892 /* Reset state info for each function. */
1893 current_frame_info = zero_frame_info;
1894 m32r_compute_function_type (NULL_TREE);
1895 }
1896 \f
1897 /* Return nonzero if this function is known to have a null or one-instruction
1898 epilogue. */
1899
1900 int
1901 direct_return (void)
1902 {
1903 if (!reload_completed)
1904 return FALSE;
1905
1906 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1907 return FALSE;
1908
1909 if (! current_frame_info.initialized)
1910 m32r_compute_frame_size (get_frame_size ());
1911
1912 return current_frame_info.total_size == 0;
1913 }
1914
1915 \f
1916 /* PIC. */
1917
1918 int
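/* Return nonzero if X can be used directly as an operand when generating
   PIC. Symbolic references (and symbol-plus-offset constants) cannot;
   they must first be legitimized, typically through the GOT (see
   m32r_legitimize_pic_address below). */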
1919 m32r_legitimate_pic_operand_p (rtx x)
1920 {
1921 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1922 return 0;
1923
1924 if (GET_CODE (x) == CONST
1925 && GET_CODE (XEXP (x, 0)) == PLUS
1926 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1927 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1928 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1929 return 0;
1930
1931 return 1;
1932 }
1933
1934 rtx
1935 m32r_legitimize_pic_address (rtx orig, rtx reg)
1936 {
1937 #ifdef DEBUG_PIC
1938 printf("m32r_legitimize_pic_address()\n");
1939 #endif
1940
1941 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1942 {
1943 rtx pic_ref, address;
1944 int subregs = 0;
1945
1946 if (reg == 0)
1947 {
1948 gcc_assert (!reload_in_progress && !reload_completed);
1949 reg = gen_reg_rtx (Pmode);
1950
1951 subregs = 1;
1952 }
1953
1954 if (subregs)
1955 address = gen_reg_rtx (Pmode);
1956 else
1957 address = reg;
1958
1959 crtl->uses_pic_offset_table = 1;
1960
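/* Local symbols and labels are reached as a GOTOFF offset added to the
   PIC base register; any other symbol is loaded indirectly through its
   GOT entry. */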
1961 if (GET_CODE (orig) == LABEL_REF
1962 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1963 {
1964 emit_insn (gen_gotoff_load_addr (reg, orig));
1965 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1966 return reg;
1967 }
1968
1969 emit_insn (gen_pic_load_addr (address, orig));
1970
1971 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1972 pic_ref = gen_const_mem (Pmode, address);
1973 emit_move_insn (reg, pic_ref);
1974 return reg;
1975 }
1976 else if (GET_CODE (orig) == CONST)
1977 {
1978 rtx base, offset;
1979
1980 if (GET_CODE (XEXP (orig, 0)) == PLUS
1981 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1982 return orig;
1983
1984 if (reg == 0)
1985 {
1986 gcc_assert (!reload_in_progress && !reload_completed);
1987 reg = gen_reg_rtx (Pmode);
1988 }
1989
1990 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1991 {
1992 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1993 if (base == reg)
1994 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1995 else
1996 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1997 }
1998 else
1999 return orig;
2000
2001 if (CONST_INT_P (offset))
2002 {
2003 if (INT16_P (INTVAL (offset)))
2004 return plus_constant (Pmode, base, INTVAL (offset));
2005 else
2006 {
2007 gcc_assert (! reload_in_progress && ! reload_completed);
2008 offset = force_reg (Pmode, offset);
2009 }
2010 }
2011
2012 return gen_rtx_PLUS (Pmode, base, offset);
2013 }
2014
2015 return orig;
2016 }
2017
2018 static rtx
2019 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2020 machine_mode mode ATTRIBUTE_UNUSED)
2021 {
2022 if (flag_pic)
2023 return m32r_legitimize_pic_address (x, NULL_RTX);
2024 else
2025 return x;
2026 }
2027
2028 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2029
2030 static bool
2031 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2032 {
2033 if (GET_CODE (addr) == LO_SUM)
2034 return true;
2035
2036 return false;
2037 }
2038 \f
2039 /* Nested function support. */
2040
2041 /* Emit RTL insns to initialize the variable parts of a trampoline.
2042 FNADDR is an RTX for the address of the function's pure code.
2043 CXT is an RTX for the static chain value for the function. */
2044
2045 void
2046 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2047 rtx fnaddr ATTRIBUTE_UNUSED,
2048 rtx cxt ATTRIBUTE_UNUSED)
2049 {
2050 }
2051 \f
2052 static void
2053 m32r_file_start (void)
2054 {
2055 default_file_start ();
2056
2057 if (flag_verbose_asm)
2058 fprintf (asm_out_file,
2059 "%s M32R/D special options: -G %d\n",
2060 ASM_COMMENT_START, g_switch_value);
2061
2062 if (TARGET_LITTLE_ENDIAN)
2063 fprintf (asm_out_file, "\t.little\n");
2064 }
2065 \f
2066 /* Print operand X (an rtx) in assembler syntax to file FILE.
2067 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2068 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2069
2070 static void
2071 m32r_print_operand (FILE * file, rtx x, int code)
2072 {
2073 rtx addr;
2074
2075 switch (code)
2076 {
2077 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2078 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2079 case 's':
2080 if (REG_P (x))
2081 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2082 else
2083 output_operand_lossage ("invalid operand to %%s code");
2084 return;
2085
2086 case 'p':
2087 if (REG_P (x))
2088 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2089 else
2090 output_operand_lossage ("invalid operand to %%p code");
2091 return;
2092
2093 case 'R' :
2094 /* Write second word of DImode or DFmode reference,
2095 register or memory. */
2096 if (REG_P (x))
2097 fputs (reg_names[REGNO (x)+1], file);
2098 else if (MEM_P (x))
2099 {
2100 fprintf (file, "@(");
2101 /* Handle possible auto-increment. Since it is pre-increment and
2102 we have already done it, we can just use an offset of four. */
2103 /* ??? This is taken from rs6000.c I think. I don't think it is
2104 currently necessary, but keep it around. */
2105 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2106 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2107 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2108 else
2109 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2110 fputc (')', file);
2111 }
2112 else
2113 output_operand_lossage ("invalid operand to %%R code");
2114 return;
2115
2116 case 'H' : /* High word. */
2117 case 'L' : /* Low word. */
2118 if (REG_P (x))
2119 {
2120 /* L = least significant word, H = most significant word. */
2121 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2122 fputs (reg_names[REGNO (x)], file);
2123 else
2124 fputs (reg_names[REGNO (x)+1], file);
2125 }
2126 else if (CONST_INT_P (x)
2127 || GET_CODE (x) == CONST_DOUBLE)
2128 {
2129 rtx first, second;
2130
2131 split_double (x, &first, &second);
2132 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2133 code == 'L' ? INTVAL (first) : INTVAL (second));
2134 }
2135 else
2136 output_operand_lossage ("invalid operand to %%H/%%L code");
2137 return;
2138
2139 case 'A' :
2140 {
2141 char str[30];
2142
2143 if (GET_CODE (x) != CONST_DOUBLE
2144 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2145 fatal_insn ("bad insn for 'A'", x);
2146
2147 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2148 fprintf (file, "%s", str);
2149 return;
2150 }
2151
2152 case 'B' : /* Bottom half. */
2153 case 'T' : /* Top half. */
2154 /* Output the argument to a `seth' insn (sets the Top half-word).
2155 For constants output arguments to a seth/or3 pair to set Top and
2156 Bottom halves. For symbols output arguments to a seth/add3 pair to
2157 set Top and Bottom halves. The difference exists because for
2158 constants seth/or3 is more readable but for symbols we need to use
2159 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
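/* For example, for the constant 0x12345678, %T prints 0x1234 and %B
   prints 0x5678; for a symbol, %T prints shigh(sym) and %B prints
   low(sym), or sda(sym) when the symbol lives in the small data area. */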
2160 switch (GET_CODE (x))
2161 {
2162 case CONST_INT :
2163 case CONST_DOUBLE :
2164 {
2165 rtx first, second;
2166
2167 split_double (x, &first, &second);
2168 x = WORDS_BIG_ENDIAN ? second : first;
2169 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2170 (code == 'B'
2171 ? INTVAL (x) & 0xffff
2172 : (INTVAL (x) >> 16) & 0xffff));
2173 }
2174 return;
2175 case CONST :
2176 case SYMBOL_REF :
2177 if (code == 'B'
2178 && small_data_operand (x, VOIDmode))
2179 {
2180 fputs ("sda(", file);
2181 output_addr_const (file, x);
2182 fputc (')', file);
2183 return;
2184 }
2185 /* fall through */
2186 case LABEL_REF :
2187 fputs (code == 'T' ? "shigh(" : "low(", file);
2188 output_addr_const (file, x);
2189 fputc (')', file);
2190 return;
2191 default :
2192 output_operand_lossage ("invalid operand to %%T/%%B code");
2193 return;
2194 }
2195 break;
2196
2197 case 'U' :
2198 /* ??? wip */
2199 /* Output a load/store with update indicator if appropriate. */
2200 if (MEM_P (x))
2201 {
2202 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2203 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2204 fputs (".a", file);
2205 }
2206 else
2207 output_operand_lossage ("invalid operand to %%U code");
2208 return;
2209
2210 case 'N' :
2211 /* Print a constant value negated. */
2212 if (CONST_INT_P (x))
2213 output_addr_const (file, GEN_INT (- INTVAL (x)));
2214 else
2215 output_operand_lossage ("invalid operand to %%N code");
2216 return;
2217
2218 case 'X' :
2219 /* Print a const_int in hex. Used in comments. */
2220 if (CONST_INT_P (x))
2221 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2222 return;
2223
2224 case '#' :
2225 fputs (IMMEDIATE_PREFIX, file);
2226 return;
2227
2228 case 0 :
2229 /* Do nothing special. */
2230 break;
2231
2232 default :
2233 /* Unknown flag. */
2234 output_operand_lossage ("invalid operand output code");
2235 }
2236
2237 switch (GET_CODE (x))
2238 {
2239 case REG :
2240 fputs (reg_names[REGNO (x)], file);
2241 break;
2242
2243 case MEM :
2244 addr = XEXP (x, 0);
2245 if (GET_CODE (addr) == PRE_INC)
2246 {
2247 if (!REG_P (XEXP (addr, 0)))
2248 fatal_insn ("pre-increment address is not a register", x);
2249
2250 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2251 }
2252 else if (GET_CODE (addr) == PRE_DEC)
2253 {
2254 if (!REG_P (XEXP (addr, 0)))
2255 fatal_insn ("pre-decrement address is not a register", x);
2256
2257 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2258 }
2259 else if (GET_CODE (addr) == POST_INC)
2260 {
2261 if (!REG_P (XEXP (addr, 0)))
2262 fatal_insn ("post-increment address is not a register", x);
2263
2264 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2265 }
2266 else
2267 {
2268 fputs ("@(", file);
2269 output_address (XEXP (x, 0));
2270 fputc (')', file);
2271 }
2272 break;
2273
2274 case CONST_DOUBLE :
2275 /* We handle SFmode constants here as output_addr_const doesn't. */
2276 if (GET_MODE (x) == SFmode)
2277 {
2278 REAL_VALUE_TYPE d;
2279 long l;
2280
2281 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2282 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2283 fprintf (file, "0x%08lx", l);
2284 break;
2285 }
2286
2287 /* Fall through. Let output_addr_const deal with it. */
2288
2289 default :
2290 output_addr_const (file, x);
2291 break;
2292 }
2293 }
2294
2295 /* Print a memory address as an operand to reference that memory location. */
2296
2297 static void
2298 m32r_print_operand_address (FILE * file, rtx addr)
2299 {
2300 rtx base;
2301 rtx index = 0;
2302 int offset = 0;
2303
2304 switch (GET_CODE (addr))
2305 {
2306 case REG :
2307 fputs (reg_names[REGNO (addr)], file);
2308 break;
2309
2310 case PLUS :
2311 if (CONST_INT_P (XEXP (addr, 0)))
2312 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2313 else if (CONST_INT_P (XEXP (addr, 1)))
2314 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2315 else
2316 base = XEXP (addr, 0), index = XEXP (addr, 1);
2317 if (REG_P (base))
2318 {
2319 /* Print the offset first (if present) to conform to the manual. */
2320 if (index == 0)
2321 {
2322 if (offset != 0)
2323 fprintf (file, "%d,", offset);
2324 fputs (reg_names[REGNO (base)], file);
2325 }
2326 /* The chip doesn't support this, but left in for generality. */
2327 else if (REG_P (index))
2328 fprintf (file, "%s,%s",
2329 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2330 /* Not sure this can happen, but leave in for now. */
2331 else if (GET_CODE (index) == SYMBOL_REF)
2332 {
2333 output_addr_const (file, index);
2334 fputc (',', file);
2335 fputs (reg_names[REGNO (base)], file);
2336 }
2337 else
2338 fatal_insn ("bad address", addr);
2339 }
2340 else if (GET_CODE (base) == LO_SUM)
2341 {
2342 gcc_assert (!index && REG_P (XEXP (base, 0)));
2343 if (small_data_operand (XEXP (base, 1), VOIDmode))
2344 fputs ("sda(", file);
2345 else
2346 fputs ("low(", file);
2347 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2348 offset));
2349 fputs ("),", file);
2350 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2351 }
2352 else
2353 fatal_insn ("bad address", addr);
2354 break;
2355
2356 case LO_SUM :
2357 if (!REG_P (XEXP (addr, 0)))
2358 fatal_insn ("lo_sum not of register", addr);
2359 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2360 fputs ("sda(", file);
2361 else
2362 fputs ("low(", file);
2363 output_addr_const (file, XEXP (addr, 1));
2364 fputs ("),", file);
2365 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2366 break;
2367
2368 case PRE_INC : /* Assume SImode. */
2369 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2370 break;
2371
2372 case PRE_DEC : /* Assume SImode. */
2373 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2374 break;
2375
2376 case POST_INC : /* Assume SImode. */
2377 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2378 break;
2379
2380 default :
2381 output_addr_const (file, addr);
2382 break;
2383 }
2384 }
2385
2386 static bool
2387 m32r_print_operand_punct_valid_p (unsigned char code)
2388 {
2389 return m32r_punct_chars[code];
2390 }
2391
2392 /* Return true if the operands are the constants 0 and 1. */
2393
2394 int
2395 zero_and_one (rtx operand1, rtx operand2)
2396 {
2397 return
2398 CONST_INT_P (operand1)
2399 && CONST_INT_P (operand2)
2400 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2401 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2402 }
2403
2404 /* Generate the correct assembler code to handle the conditional loading of a
2405 value into a register. It is known that the operands satisfy the
2406 conditional_move_operand() function above. The destination is operand[0].
2407 The condition is operand [1]. The 'true' value is operand [2] and the
2408 'false' value is operand [3]. */
2409
2410 char *
2411 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2412 {
2413 static char buffer [100];
2414 const char * dest = reg_names [REGNO (operands [0])];
2415
2416 buffer [0] = 0;
2417
2418 /* Destination must be a register. */
2419 gcc_assert (REG_P (operands [0]));
2420 gcc_assert (conditional_move_operand (operands [2], SImode));
2421 gcc_assert (conditional_move_operand (operands [3], SImode));
2422
2423 /* Check to see if the test is reversed. */
2424 if (GET_CODE (operands [1]) == NE)
2425 {
2426 rtx tmp = operands [2];
2427 operands [2] = operands [3];
2428 operands [3] = tmp;
2429 }
2430
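/* The condition bit register (cbr) holds the result of the preceding
   compare; mvfc copies it into the destination as 0 or 1. */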
2431 sprintf (buffer, "mvfc %s, cbr", dest);
2432
2433 /* If the true value was '0' then we need to invert the results of the move. */
2434 if (INTVAL (operands [2]) == 0)
2435 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2436 dest, dest);
2437
2438 return buffer;
2439 }
2440
2441 /* Returns true if the registers contained in the two
2442 rtl expressions are different. */
2443
2444 int
2445 m32r_not_same_reg (rtx a, rtx b)
2446 {
2447 int reg_a = -1;
2448 int reg_b = -2;
2449
2450 while (GET_CODE (a) == SUBREG)
2451 a = SUBREG_REG (a);
2452
2453 if (REG_P (a))
2454 reg_a = REGNO (a);
2455
2456 while (GET_CODE (b) == SUBREG)
2457 b = SUBREG_REG (b);
2458
2459 if (REG_P (b))
2460 reg_b = REGNO (b);
2461
2462 return reg_a != reg_b;
2463 }
2464
2465 \f
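/* Build a SYMBOL_REF for NAME and record the selected code model in its
   machine-dependent flags (see SYMBOL_REF_MODEL) so that later uses of
   the symbol can tell which addressing sequence is needed. */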
2466 rtx
2467 m32r_function_symbol (const char *name)
2468 {
2469 int extra_flags = 0;
2470 enum m32r_model model;
2471 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2472
2473 if (TARGET_MODEL_SMALL)
2474 model = M32R_MODEL_SMALL;
2475 else if (TARGET_MODEL_MEDIUM)
2476 model = M32R_MODEL_MEDIUM;
2477 else if (TARGET_MODEL_LARGE)
2478 model = M32R_MODEL_LARGE;
2479 else
2480 gcc_unreachable (); /* Shouldn't happen. */
2481 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2482
2483 if (extra_flags)
2484 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2485
2486 return sym;
2487 }
2488
2489 /* Use a library function to move some bytes. */
2490
2491 static void
2492 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2493 {
2494 /* We want to pass the size as Pmode, which will normally be SImode
2495 but will be DImode if we are using 64-bit longs and pointers. */
2496 if (GET_MODE (bytes_rtx) != VOIDmode
2497 && GET_MODE (bytes_rtx) != Pmode)
2498 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2499
2500 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2501 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2502 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2503 TYPE_UNSIGNED (sizetype)),
2504 TYPE_MODE (sizetype));
2505 }
2506
2507 /* Expand string/block move operations.
2508
2509 operands[0] is the pointer to the destination.
2510 operands[1] is the pointer to the source.
2511 operands[2] is the number of bytes to move.
2512 operands[3] is the alignment.
2513
2514 Returns 1 upon success, 0 otherwise. */
2515
2516 int
2517 m32r_expand_block_move (rtx operands[])
2518 {
2519 rtx orig_dst = operands[0];
2520 rtx orig_src = operands[1];
2521 rtx bytes_rtx = operands[2];
2522 rtx align_rtx = operands[3];
2523 int constp = CONST_INT_P (bytes_rtx);
2524 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2525 int align = INTVAL (align_rtx);
2526 int leftover;
2527 rtx src_reg;
2528 rtx dst_reg;
2529
2530 if (constp && bytes <= 0)
2531 return 1;
2532
2533 /* Move the address into scratch registers. */
2534 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2535 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2536
2537 if (align > UNITS_PER_WORD)
2538 align = UNITS_PER_WORD;
2539
2540 /* If we prefer size over speed, always use a function call.
2541 If we do not know the size, use a function call.
2542 If the blocks are not word aligned, use a function call. */
2543 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2544 {
2545 block_move_call (dst_reg, src_reg, bytes_rtx);
2546 return 0;
2547 }
2548
2549 leftover = bytes % MAX_MOVE_BYTES;
2550 bytes -= leftover;
2551
2552 /* If necessary, generate a loop to handle the bulk of the copy. */
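/* The generated RTL has roughly this shape (the label and final_src are
   omitted when a single iteration is enough):

       final_src = src_reg + rounded_total
     label:
       copy MAX_MOVE_BYTES via movmemsi_internal, which leaves src_reg
       just past the copied block and dst_reg at its last word
       dst_reg += 4
       if (src_reg != final_src) goto label  */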
2553 if (bytes)
2554 {
2555 rtx_code_label *label = NULL;
2556 rtx final_src = NULL_RTX;
2557 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2558 rtx rounded_total = GEN_INT (bytes);
2559 rtx new_dst_reg = gen_reg_rtx (SImode);
2560 rtx new_src_reg = gen_reg_rtx (SImode);
2561
2562 /* If we are going to have to perform this loop more than
2563 once, then generate a label and compute the address the
2564 source register will contain upon completion of the final
2565 iteration. */
2566 if (bytes > MAX_MOVE_BYTES)
2567 {
2568 final_src = gen_reg_rtx (Pmode);
2569
2570 if (INT16_P(bytes))
2571 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2572 else
2573 {
2574 emit_insn (gen_movsi (final_src, rounded_total));
2575 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2576 }
2577
2578 label = gen_label_rtx ();
2579 emit_label (label);
2580 }
2581
2582 /* It is known that m32r_output_block_move() will update src_reg to point
2583 to the word after the end of the source block, and dst_reg to point
2584 to the last word of the destination block, provided that the block
2585 is MAX_MOVE_BYTES long. */
2586 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2587 new_dst_reg, new_src_reg));
2588 emit_move_insn (dst_reg, new_dst_reg);
2589 emit_move_insn (src_reg, new_src_reg);
2590 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2591
2592 if (bytes > MAX_MOVE_BYTES)
2593 {
2594 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2595 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2596 }
2597 }
2598
2599 if (leftover)
2600 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2601 gen_reg_rtx (SImode),
2602 gen_reg_rtx (SImode)));
2603 return 1;
2604 }
2605
2606 \f
2607 /* Emit load/stores for a small constant word aligned block_move.
2608
2609 operands[0] is the memory address of the destination.
2610 operands[1] is the memory address of the source.
2611 operands[2] is the number of bytes to move.
2612 operands[5] is a temp register.
2613 operands[6] is a temp register. */
2614
2615 void
2616 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2617 {
2618 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2619 int first_time;
2620 int got_extra = 0;
2621
2622 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2623
2624 /* We do not have a post-increment store available, so the first set of
2625 stores is done without any increment, then the remaining ones can use
2626 the pre-increment addressing mode.
2627
2628 Note: m32r_expand_block_move() also relies upon this behavior when building
2629 loops to copy large blocks. */
2630 first_time = 1;
2631
2632 while (bytes > 0)
2633 {
2634 if (bytes >= 8)
2635 {
2636 if (first_time)
2637 {
2638 output_asm_insn ("ld\t%5, %p1", operands);
2639 output_asm_insn ("ld\t%6, %p1", operands);
2640 output_asm_insn ("st\t%5, @%0", operands);
2641 output_asm_insn ("st\t%6, %s0", operands);
2642 }
2643 else
2644 {
2645 output_asm_insn ("ld\t%5, %p1", operands);
2646 output_asm_insn ("ld\t%6, %p1", operands);
2647 output_asm_insn ("st\t%5, %s0", operands);
2648 output_asm_insn ("st\t%6, %s0", operands);
2649 }
2650
2651 bytes -= 8;
2652 }
2653 else if (bytes >= 4)
2654 {
2655 if (bytes > 4)
2656 got_extra = 1;
2657
2658 output_asm_insn ("ld\t%5, %p1", operands);
2659
2660 if (got_extra)
2661 output_asm_insn ("ld\t%6, %p1", operands);
2662
2663 if (first_time)
2664 output_asm_insn ("st\t%5, @%0", operands);
2665 else
2666 output_asm_insn ("st\t%5, %s0", operands);
2667
2668 bytes -= 4;
2669 }
2670 else
2671 {
2672 /* Get the entire next word, even though we do not want all of it.
2673 This saves us from doing several smaller loads, and we assume that
2674 we cannot cause a page fault when at least part of the word is in
2675 valid memory [since we don't get called if things aren't properly
2676 aligned]. */
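/* Illustrative example, assuming the default big-endian byte order:
   with three leftover bytes the word is loaded into %6 and stored as a
   halfword followed by a byte (off is the current dst_offset):

       ld    %6, @%1           ; skipped if already loaded (got_extra)
       sra3  %5, %6, #16
       sth   %5, @(off,%0)     ; first two bytes
       srai  %6, #8
       stb   %6, @(off+2,%0)   ; third byte  */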
2677 int dst_offset = first_time ? 0 : 4;
2678 /* The amount of increment we have to make to the
2679 destination pointer. */
2680 int dst_inc_amount = dst_offset + bytes - 4;
2681 /* The same for the source pointer. */
2682 int src_inc_amount = bytes;
2683 int last_shift;
2684 rtx my_operands[3];
2685
2686 /* If got_extra is true then we have already loaded
2687 the next word as part of loading and storing the previous word. */
2688 if (! got_extra)
2689 output_asm_insn ("ld\t%6, @%1", operands);
2690
2691 if (bytes >= 2)
2692 {
2693 bytes -= 2;
2694
2695 output_asm_insn ("sra3\t%5, %6, #16", operands);
2696 my_operands[0] = operands[5];
2697 my_operands[1] = GEN_INT (dst_offset);
2698 my_operands[2] = operands[0];
2699 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2700
2701 /* If there is a byte left to store then increment the
2702 destination address and shift the contents of the source
2703 register down by 8 bits. We could not do the address
2704 increment in the store half word instruction, because it does
2705 not have an auto increment mode. */
2706 if (bytes > 0) /* assert (bytes == 1) */
2707 {
2708 dst_offset += 2;
2709 last_shift = 8;
2710 }
2711 }
2712 else
2713 last_shift = 24;
2714
2715 if (bytes > 0)
2716 {
2717 my_operands[0] = operands[6];
2718 my_operands[1] = GEN_INT (last_shift);
2719 output_asm_insn ("srai\t%0, #%1", my_operands);
2720 my_operands[0] = operands[6];
2721 my_operands[1] = GEN_INT (dst_offset);
2722 my_operands[2] = operands[0];
2723 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2724 }
2725
2726 /* Update the destination pointer if needed. We have to do
2727 this so that the pattern matches what we output in this
2728 function. */
2729 if (dst_inc_amount
2730 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2731 {
2732 my_operands[0] = operands[0];
2733 my_operands[1] = GEN_INT (dst_inc_amount);
2734 output_asm_insn ("addi\t%0, #%1", my_operands);
2735 }
2736
2737 /* Update the source pointer if needed. We have to do this
2738 so that the pattern matches what we output in this
2739 function. */
2740 if (src_inc_amount
2741 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2742 {
2743 my_operands[0] = operands[1];
2744 my_operands[1] = GEN_INT (src_inc_amount);
2745 output_asm_insn ("addi\t%0, #%1", my_operands);
2746 }
2747
2748 bytes = 0;
2749 }
2750
2751 first_time = 0;
2752 }
2753 }
2754
2755 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2756
2757 int
2758 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2759 unsigned int new_reg)
2760 {
2761 /* Interrupt routines can't clobber any register that isn't already used. */
2762 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2763 && !df_regs_ever_live_p (new_reg))
2764 return 0;
2765
2766 return 1;
2767 }
2768
2769 rtx
2770 m32r_return_addr (int count)
2771 {
2772 if (count != 0)
2773 return const0_rtx;
2774
2775 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2776 }
2777
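/* Worker for the trampoline initialization hook. The first 16 bytes of
   the trampoline are a fixed instruction template, written as four
   constant words chosen for the target endianness; the static chain
   value is stored at offset 16 and the target function's address at
   offset 20. The instruction cache is then flushed, either via the
   configured cache-flush trap or through the cache-flush library
   function. */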
2778 static void
2779 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2780 {
2781 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2782 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2783 0x017e8e17 : 0x178e7e01, SImode));
2784 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2785 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2786 0x0c00ae86 : 0x86ae000c, SImode));
2787 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2788 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2789 0xe627871e : 0x1e8727e6, SImode));
2790 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2791 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2792 0xc616c626 : 0x26c61fc6, SImode));
2793 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2794 chain_value);
2795 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2796 XEXP (DECL_RTL (fndecl), 0));
2797
2798 if (m32r_cache_flush_trap >= 0)
2799 emit_insn (gen_flush_icache
2800 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2801 gen_int_mode (m32r_cache_flush_trap, SImode)));
2802 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2803 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2804 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2805 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2806 GEN_INT (3), SImode);
2807 }
2808
2809 /* True if X is a reg that can be used as a base reg. */
2810
2811 static bool
2812 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2813 {
2814 if (! REG_P (x))
2815 return false;
2816
2817 if (strict)
2818 {
2819 if (GPR_P (REGNO (x)))
2820 return true;
2821 }
2822 else
2823 {
2824 if (GPR_P (REGNO (x))
2825 || REGNO (x) == ARG_POINTER_REGNUM
2826 || ! HARD_REGISTER_P (x))
2827 return true;
2828 }
2829
2830 return false;
2831 }
2832
2833 static inline bool
2834 m32r_rtx_ok_for_offset_p (const_rtx x)
2835 {
2836 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2837 }
2838
2839 static inline bool
2840 m32r_legitimate_offset_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2841 const_rtx x, bool strict)
2842 {
2843 if (GET_CODE (x) == PLUS
2844 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2845 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2846 return true;
2847
2848 return false;
2849 }
2850
2851 /* Do not allow LO_SUM addresses when MODE is wider than one word,
2852 since more than one instruction would be required. */
2853
2854 static inline bool
2855 m32r_legitimate_lo_sum_address_p (machine_mode mode, const_rtx x,
2856 bool strict)
2857 {
2858 if (GET_CODE (x) == LO_SUM
2859 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2860 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2861 && CONSTANT_P (XEXP (x, 1)))
2862 return true;
2863
2864 return false;
2865 }
2866
2867 /* Is this a load with post-increment addressing? */
2868
2869 static inline bool
2870 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2871 {
2872 if ((mode == SImode || mode == SFmode)
2873 && GET_CODE (x) == POST_INC
2874 && REG_P (XEXP (x, 0))
2875 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2876 return true;
2877
2878 return false;
2879 }
2880
2881 /* Is this a store with pre-increment/pre-decrement addressing? */
2882
2883 static inline bool
2884 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2885 {
2886 if ((mode == SImode || mode == SFmode)
2887 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2888 && REG_P (XEXP (x, 0))
2889 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2890 return true;
2891
2892 return false;
2893 }
2894
2895 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
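/* The address forms accepted by the helpers above are: a base register;
   base register plus a signed 16-bit offset; a LO_SUM of a base register
   and a constant (only for modes no wider than a word); a POST_INC load;
   and a PRE_INC/PRE_DEC store (the last two only for SImode/SFmode). */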
2896
2897 static bool
2898 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2899 {
2900 if (m32r_rtx_ok_for_base_p (x, strict)
2901 || m32r_legitimate_offset_address_p (mode, x, strict)
2902 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2903 || m32r_load_postinc_p (mode, x, strict)
2904 || m32r_store_preinc_predec_p (mode, x, strict))
2905 return true;
2906
2907 return false;
2908 }
2909
2910 static void
2911 m32r_conditional_register_usage (void)
2912 {
2913 if (flag_pic)
2914 {
2915 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2916 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2917 }
2918 }
2919
2920 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2921
2922 We don't allow (plus symbol large-constant) as the relocations can't
2923 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2924 We allow all CONST_DOUBLE's as the md file patterns will force the
2925 constant to memory if they can't handle them. */
2926
2927 static bool
2928 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2929 {
2930 return !(GET_CODE (x) == CONST
2931 && GET_CODE (XEXP (x, 0)) == PLUS
2932 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2933 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2934 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2935 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2936 }