1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "attribs.h"
32 #include "insn-config.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "alias.h"
37 #include "stor-layout.h"
38 #include "varasm.h"
39 #include "calls.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "explow.h"
43 #include "expr.h"
44 #include "tm-constrs.h"
45 #include "builtins.h"
46
47 /* This file should be included last. */
48 #include "target-def.h"
49
50 /* Array of valid operand punctuation characters. */
51 static char m32r_punct_chars[256];
52
53 /* Machine-specific symbol_ref flags. */
54 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
55 #define SYMBOL_REF_MODEL(X) \
56 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
57
58 /* For string literals, etc. */
59 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
60
61 /* Forward declarations. */
62 static void m32r_option_override (void);
63 static void init_reg_tables (void);
64 static void block_move_call (rtx, rtx, rtx);
65 static int m32r_is_insn (rtx);
66 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
67 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
68 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
69 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
70 static void m32r_print_operand (FILE *, rtx, int);
71 static void m32r_print_operand_address (FILE *, machine_mode, rtx);
72 static bool m32r_print_operand_punct_valid_p (unsigned char code);
73 static void m32r_output_function_prologue (FILE *);
74 static void m32r_output_function_epilogue (FILE *);
75
76 static void m32r_file_start (void);
77
78 static int m32r_adjust_priority (rtx_insn *, int);
79 static int m32r_issue_rate (void);
80
81 static void m32r_encode_section_info (tree, rtx, int);
82 static bool m32r_in_small_data_p (const_tree);
83 static bool m32r_return_in_memory (const_tree, const_tree);
84 static rtx m32r_function_value (const_tree, const_tree, bool);
85 static rtx m32r_libcall_value (machine_mode, const_rtx);
86 static bool m32r_function_value_regno_p (const unsigned int);
87 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
88 tree, int *, int);
89 static void init_idents (void);
90 static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
91 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
92 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
93 const_tree, bool);
94 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
95 tree, bool);
96 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
97 const_tree, bool);
98 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
99 const_tree, bool);
100 static bool m32r_can_eliminate (const int, const int);
101 static void m32r_conditional_register_usage (void);
102 static void m32r_trampoline_init (rtx, tree, rtx);
103 static bool m32r_legitimate_constant_p (machine_mode, rtx);
104 static bool m32r_attribute_identifier (const_tree);
105 \f
106 /* M32R specific attributes. */
107
108 static const struct attribute_spec m32r_attribute_table[] =
109 {
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "interrupt", 0, 0, true, false, false, NULL, false },
113 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
114 false },
115 { NULL, 0, 0, false, false, false, NULL, false }
116 };
117 \f
118 /* Initialize the GCC target structure. */
119 #undef TARGET_ATTRIBUTE_TABLE
120 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
121 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
122 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
123
124 #undef TARGET_LRA_P
125 #define TARGET_LRA_P hook_bool_void_false
126
127 #undef TARGET_LEGITIMATE_ADDRESS_P
128 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
129 #undef TARGET_LEGITIMIZE_ADDRESS
130 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
131 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
132 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
133
134 #undef TARGET_ASM_ALIGNED_HI_OP
135 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
136 #undef TARGET_ASM_ALIGNED_SI_OP
137 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
138
139 #undef TARGET_PRINT_OPERAND
140 #define TARGET_PRINT_OPERAND m32r_print_operand
141 #undef TARGET_PRINT_OPERAND_ADDRESS
142 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
143 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
144 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
145
146 #undef TARGET_ASM_FUNCTION_PROLOGUE
147 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
148 #undef TARGET_ASM_FUNCTION_EPILOGUE
149 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
150
151 #undef TARGET_ASM_FILE_START
152 #define TARGET_ASM_FILE_START m32r_file_start
153
154 #undef TARGET_SCHED_ADJUST_PRIORITY
155 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
156 #undef TARGET_SCHED_ISSUE_RATE
157 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
158
159 #undef TARGET_OPTION_OVERRIDE
160 #define TARGET_OPTION_OVERRIDE m32r_option_override
161
162 #undef TARGET_ENCODE_SECTION_INFO
163 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
164 #undef TARGET_IN_SMALL_DATA_P
165 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
166
167
168 #undef TARGET_MEMORY_MOVE_COST
169 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
170 #undef TARGET_RTX_COSTS
171 #define TARGET_RTX_COSTS m32r_rtx_costs
172 #undef TARGET_ADDRESS_COST
173 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
174
175 #undef TARGET_PROMOTE_PROTOTYPES
176 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
177 #undef TARGET_RETURN_IN_MEMORY
178 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
179
180 #undef TARGET_FUNCTION_VALUE
181 #define TARGET_FUNCTION_VALUE m32r_function_value
182 #undef TARGET_LIBCALL_VALUE
183 #define TARGET_LIBCALL_VALUE m32r_libcall_value
184 #undef TARGET_FUNCTION_VALUE_REGNO_P
185 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
186
187 #undef TARGET_SETUP_INCOMING_VARARGS
188 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
189 #undef TARGET_MUST_PASS_IN_STACK
190 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
191 #undef TARGET_PASS_BY_REFERENCE
192 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
193 #undef TARGET_ARG_PARTIAL_BYTES
194 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
195 #undef TARGET_FUNCTION_ARG
196 #define TARGET_FUNCTION_ARG m32r_function_arg
197 #undef TARGET_FUNCTION_ARG_ADVANCE
198 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
199
200 #undef TARGET_CAN_ELIMINATE
201 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
202
203 #undef TARGET_CONDITIONAL_REGISTER_USAGE
204 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
205
206 #undef TARGET_TRAMPOLINE_INIT
207 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
208
209 #undef TARGET_LEGITIMATE_CONSTANT_P
210 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
211
212 struct gcc_target targetm = TARGET_INITIALIZER;
213 \f
214 /* Called by m32r_option_override to initialize various things. */
215
216 void
217 m32r_init (void)
218 {
219 init_reg_tables ();
220
221 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
222 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
223 m32r_punct_chars['#'] = 1;
224 m32r_punct_chars['@'] = 1; /* ??? no longer used */
225
226 /* Provide default value if not specified. */
227 if (!global_options_set.x_g_switch_value)
228 g_switch_value = SDATA_DEFAULT_SIZE;
229 }
230
231 static void
232 m32r_option_override (void)
233 {
234 /* These need to be done at start up.
235 It's convenient to do them here. */
236 m32r_init ();
237 SUBTARGET_OVERRIDE_OPTIONS;
238 }
239
240 /* Vectors to keep interesting information about registers where it can easily
241 be found. We used to use the actual mode value as the bit number, but there
242 is (or may be) more than 32 modes now. Instead we use two tables: one
243 indexed by hard register number, and one indexed by mode. */
244
245 /* The purpose of m32r_mode_class is to shrink the range of modes so that
246 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
247 mapped into one m32r_mode_class mode. */
248
249 enum m32r_mode_class
250 {
251 C_MODE,
252 S_MODE, D_MODE, T_MODE, O_MODE,
253 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
254 };
255
256 /* Modes for condition codes. */
257 #define C_MODES (1 << (int) C_MODE)
258
259 /* Modes for single-word and smaller quantities. */
260 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
261
262 /* Modes for double-word and smaller quantities. */
263 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
264
265 /* Modes for quad-word and smaller quantities. */
266 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
267
268 /* Modes for accumulators. */
269 #define A_MODES (1 << (int) A_MODE)
270
271 /* Value is 1 if the register/mode pair is acceptable on the M32R. */
272
273 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
274 {
275 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
276 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
277 S_MODES, C_MODES, A_MODES, A_MODES
278 };
279
280 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
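/* Illustrative note (a sketch, not code from this file): m32r.h is expected
   to combine the two tables above along the lines of

     (m32r_hard_regno_mode_ok[REGNO] & m32r_mode_class[MODE]) != 0

   so each entry of m32r_hard_regno_mode_ok is a mask of the mode classes
   that a given hard register accepts.  */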
281
282 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
283
284 static void
285 init_reg_tables (void)
286 {
287 int i;
288
289 for (i = 0; i < NUM_MACHINE_MODES; i++)
290 {
291 machine_mode m = (machine_mode) i;
292
293 switch (GET_MODE_CLASS (m))
294 {
295 case MODE_INT:
296 case MODE_PARTIAL_INT:
297 case MODE_COMPLEX_INT:
298 if (GET_MODE_SIZE (m) <= 4)
299 m32r_mode_class[i] = 1 << (int) S_MODE;
300 else if (GET_MODE_SIZE (m) == 8)
301 m32r_mode_class[i] = 1 << (int) D_MODE;
302 else if (GET_MODE_SIZE (m) == 16)
303 m32r_mode_class[i] = 1 << (int) T_MODE;
304 else if (GET_MODE_SIZE (m) == 32)
305 m32r_mode_class[i] = 1 << (int) O_MODE;
306 else
307 m32r_mode_class[i] = 0;
308 break;
309 case MODE_FLOAT:
310 case MODE_COMPLEX_FLOAT:
311 if (GET_MODE_SIZE (m) <= 4)
312 m32r_mode_class[i] = 1 << (int) SF_MODE;
313 else if (GET_MODE_SIZE (m) == 8)
314 m32r_mode_class[i] = 1 << (int) DF_MODE;
315 else if (GET_MODE_SIZE (m) == 16)
316 m32r_mode_class[i] = 1 << (int) TF_MODE;
317 else if (GET_MODE_SIZE (m) == 32)
318 m32r_mode_class[i] = 1 << (int) OF_MODE;
319 else
320 m32r_mode_class[i] = 0;
321 break;
322 case MODE_CC:
323 m32r_mode_class[i] = 1 << (int) C_MODE;
324 break;
325 default:
326 m32r_mode_class[i] = 0;
327 break;
328 }
329 }
330
331 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
332 {
333 if (GPR_P (i))
334 m32r_regno_reg_class[i] = GENERAL_REGS;
335 else if (i == ARG_POINTER_REGNUM)
336 m32r_regno_reg_class[i] = GENERAL_REGS;
337 else
338 m32r_regno_reg_class[i] = NO_REGS;
339 }
340 }
341 \f
342 /* M32R specific attribute support.
343
344 interrupt - for interrupt functions
345
346 model - select code model used to access object
347
348 small: addresses use 24 bits, use bl to make calls
349 medium: addresses use 32 bits, use bl to make calls
350 large: addresses use 32 bits, use seth/add3/jl to make calls
351
352 Grep for MODEL in m32r.h for more info. */
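/* Example usage (illustrative only; the attribute spellings come from the
   table above, the declarations are hypothetical):

     int big_table[1024] __attribute__ ((model ("large")));
     void timer_tick (void) __attribute__ ((interrupt));

   The "model" argument is checked by m32r_handle_model_attribute and later
   encoded into the symbol_ref flags by m32r_encode_section_info.  */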
353
354 static tree small_ident1;
355 static tree small_ident2;
356 static tree medium_ident1;
357 static tree medium_ident2;
358 static tree large_ident1;
359 static tree large_ident2;
360
361 static void
362 init_idents (void)
363 {
364 if (small_ident1 == 0)
365 {
366 small_ident1 = get_identifier ("small");
367 small_ident2 = get_identifier ("__small__");
368 medium_ident1 = get_identifier ("medium");
369 medium_ident2 = get_identifier ("__medium__");
370 large_ident1 = get_identifier ("large");
371 large_ident2 = get_identifier ("__large__");
372 }
373 }
374
375 /* Handle a "model" attribute; arguments as in
376 struct attribute_spec.handler. */
377 static tree
378 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
379 tree args, int flags ATTRIBUTE_UNUSED,
380 bool *no_add_attrs)
381 {
382 tree arg;
383
384 init_idents ();
385 arg = TREE_VALUE (args);
386
387 if (arg != small_ident1
388 && arg != small_ident2
389 && arg != medium_ident1
390 && arg != medium_ident2
391 && arg != large_ident1
392 && arg != large_ident2)
393 {
394 warning (OPT_Wattributes, "invalid argument of %qs attribute",
395 IDENTIFIER_POINTER (name));
396 *no_add_attrs = true;
397 }
398
399 return NULL_TREE;
400 }
401
402 static bool
403 m32r_attribute_identifier (const_tree name)
404 {
405 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
406 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
407 }
408 \f
409 /* Encode section information of DECL, which is either a VAR_DECL,
410 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
411
412 For the M32R we want to record:
413
414 - whether the object lives in .sdata/.sbss.
415 - what code model should be used to access the object
416 */
417
418 static void
419 m32r_encode_section_info (tree decl, rtx rtl, int first)
420 {
421 int extra_flags = 0;
422 tree model_attr;
423 enum m32r_model model;
424
425 default_encode_section_info (decl, rtl, first);
426
427 if (!DECL_P (decl))
428 return;
429
430 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
431 if (model_attr)
432 {
433 tree id;
434
435 init_idents ();
436
437 id = TREE_VALUE (TREE_VALUE (model_attr));
438
439 if (id == small_ident1 || id == small_ident2)
440 model = M32R_MODEL_SMALL;
441 else if (id == medium_ident1 || id == medium_ident2)
442 model = M32R_MODEL_MEDIUM;
443 else if (id == large_ident1 || id == large_ident2)
444 model = M32R_MODEL_LARGE;
445 else
446 gcc_unreachable (); /* shouldn't happen */
447 }
448 else
449 {
450 if (TARGET_MODEL_SMALL)
451 model = M32R_MODEL_SMALL;
452 else if (TARGET_MODEL_MEDIUM)
453 model = M32R_MODEL_MEDIUM;
454 else if (TARGET_MODEL_LARGE)
455 model = M32R_MODEL_LARGE;
456 else
457 gcc_unreachable (); /* shouldn't happen */
458 }
459 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
460
461 if (extra_flags)
462 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
463 }
464
465 /* Only mark the object as being small data area addressable if
466 it hasn't been explicitly marked with a code model.
467
468 The user can explicitly put an object in the small data area with the
469 section attribute. If the object is in sdata/sbss and marked with a
470 code model do both [put the object in .sdata and mark it as being
471 addressed with a specific code model - don't mark it as being addressed
472 with an SDA reloc though]. This is ok and might be useful at times. If
473 the object doesn't fit the linker will give an error. */
474
475 static bool
476 m32r_in_small_data_p (const_tree decl)
477 {
478 const char *section;
479
480 if (TREE_CODE (decl) != VAR_DECL)
481 return false;
482
483 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
484 return false;
485
486 section = DECL_SECTION_NAME (decl);
487 if (section)
488 {
489 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
490 return true;
491 }
492 else
493 {
494 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
495 {
496 int size = int_size_in_bytes (TREE_TYPE (decl));
497
498 if (size > 0 && size <= g_switch_value)
499 return true;
500 }
501 }
502
503 return false;
504 }
505
506 /* Do anything needed before RTL is emitted for each function. */
507
508 void
509 m32r_init_expanders (void)
510 {
511 /* ??? At one point there was code here. The function is left in
512 to make it easy to experiment. */
513 }
514 \f
515 int
516 call_operand (rtx op, machine_mode mode)
517 {
518 if (!MEM_P (op))
519 return 0;
520 op = XEXP (op, 0);
521 return call_address_operand (op, mode);
522 }
523
524 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
525
526 int
527 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
528 {
529 if (! TARGET_SDATA_USE)
530 return 0;
531
532 if (GET_CODE (op) == SYMBOL_REF)
533 return SYMBOL_REF_SMALL_P (op);
534
535 if (GET_CODE (op) == CONST
536 && GET_CODE (XEXP (op, 0)) == PLUS
537 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
538 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
539 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
540
541 return 0;
542 }
543
544 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
545
546 int
547 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
548 {
549 rtx sym;
550
551 if (flag_pic)
552 return 0;
553
554 if (GET_CODE (op) == LABEL_REF)
555 return TARGET_ADDR24;
556
557 if (GET_CODE (op) == SYMBOL_REF)
558 sym = op;
559 else if (GET_CODE (op) == CONST
560 && GET_CODE (XEXP (op, 0)) == PLUS
561 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
562 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
563 sym = XEXP (XEXP (op, 0), 0);
564 else
565 return 0;
566
567 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
568 return 1;
569
570 if (TARGET_ADDR24
571 && (CONSTANT_POOL_ADDRESS_P (sym)
572 || LIT_NAME_P (XSTR (sym, 0))))
573 return 1;
574
575 return 0;
576 }
577
578 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
579
580 int
581 addr32_operand (rtx op, machine_mode mode)
582 {
583 rtx sym;
584
585 if (GET_CODE (op) == LABEL_REF)
586 return TARGET_ADDR32;
587
588 if (GET_CODE (op) == SYMBOL_REF)
589 sym = op;
590 else if (GET_CODE (op) == CONST
591 && GET_CODE (XEXP (op, 0)) == PLUS
592 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
593 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
594 && ! flag_pic)
595 sym = XEXP (XEXP (op, 0), 0);
596 else
597 return 0;
598
599 return (! addr24_operand (sym, mode)
600 && ! small_data_operand (sym, mode));
601 }
602
603 /* Return 1 if OP is a function that can be called with the `bl' insn. */
604
605 int
606 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
607 {
608 if (flag_pic)
609 return 1;
610
611 if (GET_CODE (op) == SYMBOL_REF)
612 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
613
614 return TARGET_CALL26;
615 }
616
617 /* Return 1 if OP is a DImode const we want to handle inline.
618 This must match the code in the movdi pattern.
619 It is used by the 'G' constraint. */
620
621 int
622 easy_di_const (rtx op)
623 {
624 rtx high_rtx, low_rtx;
625 HOST_WIDE_INT high, low;
626
627 split_double (op, &high_rtx, &low_rtx);
628 high = INTVAL (high_rtx);
629 low = INTVAL (low_rtx);
630 /* Pick constants loadable with 2 16-bit `ldi' insns. */
631 if (high >= -128 && high <= 127
632 && low >= -128 && low <= 127)
633 return 1;
634 return 0;
635 }
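/* Illustrative example: a DImode constant such as 0x0000007fffffff80 splits
   into a high word of 127 and a low word of -128, so both halves fit the
   8-bit `ldi' immediate range and the constant is handled inline.  */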
636
637 /* Return 1 if OP is a DFmode const we want to handle inline.
638 This must match the code in the movdf pattern.
639 It is used by the 'H' constraint. */
640
641 int
642 easy_df_const (rtx op)
643 {
644 long l[2];
645
646 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
647 if (l[0] == 0 && l[1] == 0)
648 return 1;
649 if ((l[0] & 0xffff) == 0 && l[1] == 0)
650 return 1;
651 return 0;
652 }
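/* Illustrative example: 0.0 passes the first test, and simple powers of two
   such as 1.0 (0x3ff00000_00000000) pass the second, since only the upper
   16 bits of the high word are nonzero.  */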
653
654 /* Return 1 if OP is (mem (reg ...)).
655 This is used in insn length calcs. */
656
657 int
658 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
659 {
660 return MEM_P (op) && REG_P (XEXP (op, 0));
661 }
662
663 /* Return nonzero if TYPE must be passed by indirect reference. */
664
665 static bool
666 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
667 machine_mode mode, const_tree type,
668 bool named ATTRIBUTE_UNUSED)
669 {
670 int size;
671
672 if (type)
673 size = int_size_in_bytes (type);
674 else
675 size = GET_MODE_SIZE (mode);
676
677 return (size < 0 || size > 8);
678 }
679 \f
680 /* Comparisons. */
681
682 /* X and Y are two things to compare using CODE. Emit the compare insn and
683 return the rtx for compare [arg0 of the if_then_else].
684 If need_compare is true then the comparison insn must be generated, rather
685 than being subsumed into the following branch instruction. */
686
687 rtx
688 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
689 {
690 enum rtx_code compare_code;
691 enum rtx_code branch_code;
692 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
693 int must_swap = 0;
694
695 switch (code)
696 {
697 case EQ: compare_code = EQ; branch_code = NE; break;
698 case NE: compare_code = EQ; branch_code = EQ; break;
699 case LT: compare_code = LT; branch_code = NE; break;
700 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
701 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
702 case GE: compare_code = LT; branch_code = EQ; break;
703 case LTU: compare_code = LTU; branch_code = NE; break;
704 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
705 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
706 case GEU: compare_code = LTU; branch_code = EQ; break;
707
708 default:
709 gcc_unreachable ();
710 }
711
712 if (need_compare)
713 {
714 switch (compare_code)
715 {
716 case EQ:
717 if (satisfies_constraint_P (y) /* Reg equal to small const. */
718 && y != const0_rtx)
719 {
720 rtx tmp = gen_reg_rtx (SImode);
721
722 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
723 x = tmp;
724 y = const0_rtx;
725 }
726 else if (CONSTANT_P (y)) /* Reg equal to const. */
727 {
728 rtx tmp = force_reg (GET_MODE (x), y);
729 y = tmp;
730 }
731
732 if (register_operand (y, SImode) /* Reg equal to reg. */
733 || y == const0_rtx) /* Reg equal to zero. */
734 {
735 emit_insn (gen_cmp_eqsi_insn (x, y));
736
737 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
738 }
739 break;
740
741 case LT:
742 if (register_operand (y, SImode)
743 || satisfies_constraint_P (y))
744 {
745 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
746
747 switch (code)
748 {
749 case LT:
750 emit_insn (gen_cmp_ltsi_insn (x, y));
751 code = EQ;
752 break;
753 case LE:
754 if (y == const0_rtx)
755 tmp = const1_rtx;
756 else
757 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
758 emit_insn (gen_cmp_ltsi_insn (x, tmp));
759 code = EQ;
760 break;
761 case GT:
762 if (CONST_INT_P (y))
763 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
764 else
765 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
766 emit_insn (gen_cmp_ltsi_insn (x, tmp));
767 code = NE;
768 break;
769 case GE:
770 emit_insn (gen_cmp_ltsi_insn (x, y));
771 code = NE;
772 break;
773 default:
774 gcc_unreachable ();
775 }
776
777 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
778 }
779 break;
780
781 case LTU:
782 if (register_operand (y, SImode)
783 || satisfies_constraint_P (y))
784 {
785 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
786
787 switch (code)
788 {
789 case LTU:
790 emit_insn (gen_cmp_ltusi_insn (x, y));
791 code = EQ;
792 break;
793 case LEU:
794 if (y == const0_rtx)
795 tmp = const1_rtx;
796 else
797 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
798 emit_insn (gen_cmp_ltusi_insn (x, tmp));
799 code = EQ;
800 break;
801 case GTU:
802 if (CONST_INT_P (y))
803 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
804 else
805 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
806 emit_insn (gen_cmp_ltusi_insn (x, tmp));
807 code = NE;
808 break;
809 case GEU:
810 emit_insn (gen_cmp_ltusi_insn (x, y));
811 code = NE;
812 break;
813 default:
814 gcc_unreachable ();
815 }
816
817 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
818 }
819 break;
820
821 default:
822 gcc_unreachable ();
823 }
824 }
825 else
826 {
827 /* Reg/reg equal comparison. */
828 if (compare_code == EQ
829 && register_operand (y, SImode))
830 return gen_rtx_fmt_ee (code, CCmode, x, y);
831
832 /* Reg/zero signed comparison. */
833 if ((compare_code == EQ || compare_code == LT)
834 && y == const0_rtx)
835 return gen_rtx_fmt_ee (code, CCmode, x, y);
836
837 /* Reg/smallconst equal comparison. */
838 if (compare_code == EQ
839 && satisfies_constraint_P (y))
840 {
841 rtx tmp = gen_reg_rtx (SImode);
842
843 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
844 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
845 }
846
847 /* Reg/const equal comparison. */
848 if (compare_code == EQ
849 && CONSTANT_P (y))
850 {
851 rtx tmp = force_reg (GET_MODE (x), y);
852
853 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
854 }
855 }
856
857 if (CONSTANT_P (y))
858 {
859 if (must_swap)
860 y = force_reg (GET_MODE (x), y);
861 else
862 {
863 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
864
865 if (! ok_const)
866 y = force_reg (GET_MODE (x), y);
867 }
868 }
869
870 switch (compare_code)
871 {
872 case EQ :
873 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
874 break;
875 case LT :
876 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
877 break;
878 case LTU :
879 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
880 break;
881
882 default:
883 gcc_unreachable ();
884 }
885
886 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
887 }
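/* Sketch of typical use (not code from this port): the rtx returned by
   gen_compare becomes the condition of a conditional branch, roughly

     rtx cond = gen_compare (code, x, y, FALSE);
     rtx lab = gen_rtx_LABEL_REF (VOIDmode, label);
     emit_jump_insn (gen_rtx_SET (pc_rtx,
                                  gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
                                                        lab, pc_rtx)));

   where `label' is the branch target assumed to exist in the caller.  */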
888
889 bool
890 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
891 {
892 machine_mode mode = GET_MODE (op0);
893
894 gcc_assert (mode == SImode);
895 switch (code)
896 {
897 case EQ:
898 if (!register_operand (op1, mode))
899 op1 = force_reg (mode, op1);
900
901 if (TARGET_M32RX || TARGET_M32R2)
902 {
903 if (!reg_or_zero_operand (op2, mode))
904 op2 = force_reg (mode, op2);
905
906 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
907 return true;
908 }
909 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
910 {
911 emit_insn (gen_seq_zero_insn (op0, op1));
912 return true;
913 }
914
915 if (!reg_or_eq_int16_operand (op2, mode))
916 op2 = force_reg (mode, op2);
917
918 emit_insn (gen_seq_insn (op0, op1, op2));
919 return true;
920
921 case NE:
922 if (!CONST_INT_P (op2)
923 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
924 {
925 rtx reg;
926
927 if (reload_completed || reload_in_progress)
928 return false;
929
930 reg = gen_reg_rtx (SImode);
931 emit_insn (gen_xorsi3 (reg, op1, op2));
932 op1 = reg;
933
934 if (!register_operand (op1, mode))
935 op1 = force_reg (mode, op1);
936
937 emit_insn (gen_sne_zero_insn (op0, op1));
938 return true;
939 }
940 return false;
941
942 case LT:
943 case GT:
944 if (code == GT)
945 {
946 rtx tmp = op2;
947 op2 = op1;
948 op1 = tmp;
949 code = LT;
950 }
951
952 if (!register_operand (op1, mode))
953 op1 = force_reg (mode, op1);
954
955 if (!reg_or_int16_operand (op2, mode))
956 op2 = force_reg (mode, op2);
957
958 emit_insn (gen_slt_insn (op0, op1, op2));
959 return true;
960
961 case LTU:
962 case GTU:
963 if (code == GTU)
964 {
965 rtx tmp = op2;
966 op2 = op1;
967 op1 = tmp;
968 code = LTU;
969 }
970
971 if (!register_operand (op1, mode))
972 op1 = force_reg (mode, op1);
973
974 if (!reg_or_int16_operand (op2, mode))
975 op2 = force_reg (mode, op2);
976
977 emit_insn (gen_sltu_insn (op0, op1, op2));
978 return true;
979
980 case GE:
981 case GEU:
982 if (!register_operand (op1, mode))
983 op1 = force_reg (mode, op1);
984
985 if (!reg_or_int16_operand (op2, mode))
986 op2 = force_reg (mode, op2);
987
988 if (code == GE)
989 emit_insn (gen_sge_insn (op0, op1, op2));
990 else
991 emit_insn (gen_sgeu_insn (op0, op1, op2));
992 return true;
993
994 case LE:
995 case LEU:
996 if (!register_operand (op1, mode))
997 op1 = force_reg (mode, op1);
998
999 if (CONST_INT_P (op2))
1000 {
1001 HOST_WIDE_INT value = INTVAL (op2);
1002 if (value >= 2147483647)
1003 {
1004 emit_move_insn (op0, const1_rtx);
1005 return true;
1006 }
1007
1008 op2 = GEN_INT (value + 1);
1009 if (value < -32768 || value >= 32767)
1010 op2 = force_reg (mode, op2);
1011
1012 if (code == LEU)
1013 emit_insn (gen_sltu_insn (op0, op1, op2));
1014 else
1015 emit_insn (gen_slt_insn (op0, op1, op2));
1016 return true;
1017 }
1018
1019 if (!register_operand (op2, mode))
1020 op2 = force_reg (mode, op2);
1021
1022 if (code == LEU)
1023 emit_insn (gen_sleu_insn (op0, op1, op2));
1024 else
1025 emit_insn (gen_sle_insn (op0, op1, op2));
1026 return true;
1027
1028 default:
1029 gcc_unreachable ();
1030 }
1031 }
1032
1033 \f
1034 /* Split a 2 word move (DI or DF) into component parts. */
1035
1036 rtx
1037 gen_split_move_double (rtx operands[])
1038 {
1039 machine_mode mode = GET_MODE (operands[0]);
1040 rtx dest = operands[0];
1041 rtx src = operands[1];
1042 rtx val;
1043
1044 /* We might have (SUBREG (MEM)) here, so just get rid of the
1045 subregs to make this code simpler. It is safe to call
1046 alter_subreg any time after reload. */
1047 if (GET_CODE (dest) == SUBREG)
1048 alter_subreg (&dest, true);
1049 if (GET_CODE (src) == SUBREG)
1050 alter_subreg (&src, true);
1051
1052 start_sequence ();
1053 if (REG_P (dest))
1054 {
1055 int dregno = REGNO (dest);
1056
1057 /* Reg = reg. */
1058 if (REG_P (src))
1059 {
1060 int sregno = REGNO (src);
1061
1062 int reverse = (dregno == sregno + 1);
1063
1064 /* We normally copy the low-numbered register first. However, if
1065 the first register of operand 0 is the same as the second register of
1066 operand 1, we must copy in the opposite order. */
1067 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1068 operand_subword (src, reverse, TRUE, mode)));
1069
1070 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1071 operand_subword (src, !reverse, TRUE, mode)));
1072 }
1073
1074 /* Reg = constant. */
1075 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1076 {
1077 rtx words[2];
1078 split_double (src, &words[0], &words[1]);
1079 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1080 words[0]));
1081
1082 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1083 words[1]));
1084 }
1085
1086 /* Reg = mem. */
1087 else if (MEM_P (src))
1088 {
1089 /* If the high-address word is used in the address, we must load it
1090 last. Otherwise, load it first. */
1091 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1092
1093 /* We used to optimize loads from single registers as
1094
1095 ld r1,r3+; ld r2,r3
1096
1097 if r3 were not used subsequently. However, the REG_NOTES aren't
1098 propagated correctly by the reload phase, and it can cause bad
1099 code to be generated. We could still try:
1100
1101 ld r1,r3+; ld r2,r3; addi r3,-4
1102
1103 which saves 2 bytes and doesn't force longword alignment. */
1104 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1105 adjust_address (src, SImode,
1106 reverse * UNITS_PER_WORD)));
1107
1108 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1109 adjust_address (src, SImode,
1110 !reverse * UNITS_PER_WORD)));
1111 }
1112 else
1113 gcc_unreachable ();
1114 }
1115
1116 /* Mem = reg. */
1117 /* We used to optimize stores to single registers as
1118
1119 st r1,r3; st r2,+r3
1120
1121 if r3 were not used subsequently. However, the REG_NOTES aren't
1122 propagated correctly by the reload phase, and it can cause bad
1123 code to be generated. We could still try:
1124
1125 st r1,r3; st r2,+r3; addi r3,-4
1126
1127 which saves 2 bytes and doesn't force longword alignment. */
1128 else if (MEM_P (dest) && REG_P (src))
1129 {
1130 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1131 operand_subword (src, 0, TRUE, mode)));
1132
1133 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1134 operand_subword (src, 1, TRUE, mode)));
1135 }
1136
1137 else
1138 gcc_unreachable ();
1139
1140 val = get_insns ();
1141 end_sequence ();
1142 return val;
1143 }
1144
1145 \f
1146 static int
1147 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1148 tree type, bool named ATTRIBUTE_UNUSED)
1149 {
1150 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1151
1152 int words;
1153 unsigned int size =
1154 (((mode == BLKmode && type)
1155 ? (unsigned int) int_size_in_bytes (type)
1156 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1157 / UNITS_PER_WORD;
1158
1159 if (*cum >= M32R_MAX_PARM_REGS)
1160 words = 0;
1161 else if (*cum + size > M32R_MAX_PARM_REGS)
1162 words = (*cum + size) - M32R_MAX_PARM_REGS;
1163 else
1164 words = 0;
1165
1166 return words * UNITS_PER_WORD;
1167 }
1168
1169 /* The ROUND_ADVANCE* macros are local to this file. */
1170 /* Round SIZE up to a word boundary. */
1171 #define ROUND_ADVANCE(SIZE) \
1172 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
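/* E.g. with the 4-byte words of this port, ROUND_ADVANCE (1) through
   ROUND_ADVANCE (4) all yield 1 and ROUND_ADVANCE (5) yields 2, i.e. a size
   in bytes becomes a word count rounded up (illustrative note).  */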
1173
1174 /* Round arg MODE/TYPE up to the next word boundary. */
1175 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1176 ((MODE) == BLKmode \
1177 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1178 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1179
1180 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1181 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1182
1183 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1184 a reg. This includes arguments that have to be passed by reference as the
1185 pointer to them is passed in a reg if one is available (and that is what
1186 we're given).
1187 This macro is only used in this file. */
1188 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1189 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
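/* Illustrative note, assuming the usual four argument registers r0-r3
   (M32R_MAX_PARM_REGS comes from m32r.h): a word-sized argument with a
   rounded CUM of 0..3 is passed in a register, while a CUM of 4 or more
   means it is pushed on the stack.  */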
1190
1191 /* Determine where to put an argument to a function.
1192 Value is zero to push the argument on the stack,
1193 or a hard register in which to store the argument.
1194
1195 MODE is the argument's machine mode.
1196 TYPE is the data type of the argument (as a tree).
1197 This is null for libcalls where that information may
1198 not be available.
1199 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1200 the preceding args and about the function being called.
1201 NAMED is nonzero if this argument is a named parameter
1202 (otherwise it is an extra parameter matching an ellipsis). */
1203 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1204 and the rest are pushed. */
1205
1206 static rtx
1207 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1208 const_tree type ATTRIBUTE_UNUSED,
1209 bool named ATTRIBUTE_UNUSED)
1210 {
1211 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1212
1213 return (PASS_IN_REG_P (*cum, mode, type)
1214 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1215 : NULL_RTX);
1216 }
1217
1218 /* Update the data in CUM to advance over an argument
1219 of mode MODE and data type TYPE.
1220 (TYPE is null for libcalls where that information may not be available.) */
1221
1222 static void
1223 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1224 const_tree type, bool named ATTRIBUTE_UNUSED)
1225 {
1226 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1227
1228 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1229 + ROUND_ADVANCE_ARG (mode, type));
1230 }
1231
1232 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1233
1234 static bool
1235 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1236 {
1237 cumulative_args_t dummy = pack_cumulative_args (NULL);
1238
1239 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1240 }
1241
1242 /* Worker function for TARGET_FUNCTION_VALUE. */
1243
1244 static rtx
1245 m32r_function_value (const_tree valtype,
1246 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1247 bool outgoing ATTRIBUTE_UNUSED)
1248 {
1249 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1250 }
1251
1252 /* Worker function for TARGET_LIBCALL_VALUE. */
1253
1254 static rtx
1255 m32r_libcall_value (machine_mode mode,
1256 const_rtx fun ATTRIBUTE_UNUSED)
1257 {
1258 return gen_rtx_REG (mode, 0);
1259 }
1260
1261 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1262
1263 ??? What about r1 in DI/DF values. */
1264
1265 static bool
1266 m32r_function_value_regno_p (const unsigned int regno)
1267 {
1268 return (regno == 0);
1269 }
1270
1271 /* Do any needed setup for a variadic function. For the M32R, we must
1272 create a register parameter block, and then copy any anonymous arguments
1273 in registers to memory.
1274
1275 CUM has not been updated for the last named argument which has type TYPE
1276 and mode MODE, and we rely on this fact. */
1277
1278 static void
1279 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1280 tree type, int *pretend_size, int no_rtl)
1281 {
1282 int first_anon_arg;
1283
1284 if (no_rtl)
1285 return;
1286
1287 /* All BLKmode values are passed by reference. */
1288 gcc_assert (mode != BLKmode);
1289
1290 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1291 + ROUND_ADVANCE_ARG (mode, type));
1292
1293 if (first_anon_arg < M32R_MAX_PARM_REGS)
1294 {
1295 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1296 int first_reg_offset = first_anon_arg;
1297 /* Size in words to "pretend" allocate. */
1298 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1299 rtx regblock;
1300
1301 regblock = gen_frame_mem (BLKmode,
1302 plus_constant (Pmode, arg_pointer_rtx,
1303 FIRST_PARM_OFFSET (0)));
1304 set_mem_alias_set (regblock, get_varargs_alias_set ());
1305 move_block_from_reg (first_reg_offset, regblock, size);
1306
1307 *pretend_size = (size * UNITS_PER_WORD);
1308 }
1309 }
1310
1311 \f
1312 /* Return true if INSN is a real instruction, i.e. not a USE, CLOBBER or debug insn. */
1313
1314 static int
1315 m32r_is_insn (rtx insn)
1316 {
1317 return (NONDEBUG_INSN_P (insn)
1318 && GET_CODE (PATTERN (insn)) != USE
1319 && GET_CODE (PATTERN (insn)) != CLOBBER);
1320 }
1321
1322 /* Increase the priority of long instructions so that the
1323 short instructions are scheduled ahead of the long ones. */
1324
1325 static int
1326 m32r_adjust_priority (rtx_insn *insn, int priority)
1327 {
1328 if (m32r_is_insn (insn)
1329 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1330 priority <<= 3;
1331
1332 return priority;
1333 }
1334
1335 \f
1336 /* Indicate how many instructions can be issued at the same time.
1337 This is sort of a lie. The m32r can issue only 1 long insn at
1338 once, but it can issue 2 short insns. The default therefore is
1339 set at 2, but this can be overridden by the command line option
1340 -missue-rate=1. */
1341
1342 static int
1343 m32r_issue_rate (void)
1344 {
1345 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1346 }
1347 \f
1348 /* Cost functions. */
1349 /* Memory is 3 times as expensive as registers.
1350 ??? Is that the right way to look at it? */
1351
1352 static int
1353 m32r_memory_move_cost (machine_mode mode,
1354 reg_class_t rclass ATTRIBUTE_UNUSED,
1355 bool in ATTRIBUTE_UNUSED)
1356 {
1357 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1358 return 6;
1359 else
1360 return 12;
1361 }
1362
1363 static bool
1364 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1365 int outer_code ATTRIBUTE_UNUSED,
1366 int opno ATTRIBUTE_UNUSED, int *total,
1367 bool speed ATTRIBUTE_UNUSED)
1368 {
1369 int code = GET_CODE (x);
1370
1371 switch (code)
1372 {
1373 /* Small integers are as cheap as registers. 4 byte values can be
1374 fetched as immediate constants - let's give that the cost of an
1375 extra insn. */
1376 case CONST_INT:
1377 if (INT16_P (INTVAL (x)))
1378 {
1379 *total = 0;
1380 return true;
1381 }
1382 /* FALLTHRU */
1383
1384 case CONST:
1385 case LABEL_REF:
1386 case SYMBOL_REF:
1387 *total = COSTS_N_INSNS (1);
1388 return true;
1389
1390 case CONST_DOUBLE:
1391 {
1392 rtx high, low;
1393
1394 split_double (x, &high, &low);
1395 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1396 + !INT16_P (INTVAL (low)));
1397 return true;
1398 }
1399
1400 case MULT:
1401 *total = COSTS_N_INSNS (3);
1402 return true;
1403
1404 case DIV:
1405 case UDIV:
1406 case MOD:
1407 case UMOD:
1408 *total = COSTS_N_INSNS (10);
1409 return true;
1410
1411 default:
1412 return false;
1413 }
1414 }
1415 \f
1416 /* Type of function DECL.
1417
1418 The result is cached. To reset the cache at the end of a function,
1419 call with DECL = NULL_TREE. */
1420
1421 enum m32r_function_type
1422 m32r_compute_function_type (tree decl)
1423 {
1424 /* Cached value. */
1425 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1426 /* Last function we were called for. */
1427 static tree last_fn = NULL_TREE;
1428
1429 /* Resetting the cached value? */
1430 if (decl == NULL_TREE)
1431 {
1432 fn_type = M32R_FUNCTION_UNKNOWN;
1433 last_fn = NULL_TREE;
1434 return fn_type;
1435 }
1436
1437 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1438 return fn_type;
1439
1440 /* Compute function type. */
1441 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1442 ? M32R_FUNCTION_INTERRUPT
1443 : M32R_FUNCTION_NORMAL);
1444
1445 last_fn = decl;
1446 return fn_type;
1447 }
1448 \f/* Function prologue/epilogue handlers. */
1449
1450 /* M32R stack frames look like:
1451
1452 Before call After call
1453 +-----------------------+ +-----------------------+
1454 | | | |
1455 high | local variables, | | local variables, |
1456 mem | reg save area, etc. | | reg save area, etc. |
1457 | | | |
1458 +-----------------------+ +-----------------------+
1459 | | | |
1460 | arguments on stack. | | arguments on stack. |
1461 | | | |
1462 SP+0->+-----------------------+ +-----------------------+
1463 | reg parm save area, |
1464 | only created for |
1465 | variable argument |
1466 | functions |
1467 +-----------------------+
1468 | previous frame ptr |
1469 +-----------------------+
1470 | |
1471 | register save area |
1472 | |
1473 +-----------------------+
1474 | return address |
1475 +-----------------------+
1476 | |
1477 | local variables |
1478 | |
1479 +-----------------------+
1480 | |
1481 | alloca allocations |
1482 | |
1483 +-----------------------+
1484 | |
1485 low | arguments on stack |
1486 memory | |
1487 SP+0->+-----------------------+
1488
1489 Notes:
1490 1) The "reg parm save area" does not exist for non-variable-argument fns.
1491 2) The "reg parm save area" can be eliminated completely if we saved regs
1492 containing anonymous args separately but that complicates things too
1493 much (so it's not done).
1494 3) The return address is saved after the register save area so as to have as
1495 many insns as possible between the restoration of `lr' and the `jmp lr'. */
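/* As a rough sketch of how m32r_compute_frame_size below combines these
   pieces:

     total_size = extra_size + pretend_size + args_size + var_size + reg_size

   rounded up to the stack alignment, where reg_size covers the gmask
   registers plus fp and lr when they must be saved.  */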
1496
1497 /* Structure to be filled in by m32r_compute_frame_size with register
1498 save masks, and offsets for the current function. */
1499 struct m32r_frame_info
1500 {
1501 unsigned int total_size; /* # bytes that the entire frame takes up. */
1502 unsigned int extra_size; /* # bytes of extra stuff. */
1503 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1504 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1505 unsigned int reg_size; /* # bytes needed to store regs. */
1506 unsigned int var_size; /* # bytes that variables take up. */
1507 unsigned int gmask; /* Mask of saved gp registers. */
1508 unsigned int save_fp; /* Nonzero if fp must be saved. */
1509 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1510 int initialized; /* Nonzero if frame size already calculated. */
1511 };
1512
1513 /* Current frame information calculated by m32r_compute_frame_size. */
1514 static struct m32r_frame_info current_frame_info;
1515
1516 /* Zero structure to initialize current_frame_info. */
1517 static struct m32r_frame_info zero_frame_info;
1518
1519 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1520 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1521
1522 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1523 The return address and frame pointer are treated separately.
1524 Don't consider them here. */
1525 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1526 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1527 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1528
1529 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1530 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1531
1532 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1533 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1534
1535 /* Return the bytes needed to compute the frame pointer from the current
1536 stack pointer.
1537
1538 SIZE is the size needed for local variables. */
1539
1540 unsigned int
1541 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1542 {
1543 unsigned int regno;
1544 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1545 unsigned int reg_size;
1546 unsigned int gmask;
1547 enum m32r_function_type fn_type;
1548 int interrupt_p;
1549 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1550 | crtl->profile);
1551
1552 var_size = M32R_STACK_ALIGN (size);
1553 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1554 pretend_size = crtl->args.pretend_args_size;
1555 extra_size = FIRST_PARM_OFFSET (0);
1556 total_size = extra_size + pretend_size + args_size + var_size;
1557 reg_size = 0;
1558 gmask = 0;
1559
1560 /* See if this is an interrupt handler. Call used registers must be saved
1561 for them too. */
1562 fn_type = m32r_compute_function_type (current_function_decl);
1563 interrupt_p = M32R_INTERRUPT_P (fn_type);
1564
1565 /* Calculate space needed for registers. */
1566 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1567 {
1568 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1569 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1570 {
1571 reg_size += UNITS_PER_WORD;
1572 gmask |= 1 << regno;
1573 }
1574 }
1575
1576 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1577 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1578
1579 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1580 * UNITS_PER_WORD);
1581 total_size += reg_size;
1582
1583 /* ??? Not sure this is necessary, and I don't think the epilogue
1584 handler will do the right thing if this changes total_size. */
1585 total_size = M32R_STACK_ALIGN (total_size);
1586
1587 /* frame_size = total_size - (pretend_size + reg_size); */
1588
1589 /* Save computed information. */
1590 current_frame_info.total_size = total_size;
1591 current_frame_info.extra_size = extra_size;
1592 current_frame_info.pretend_size = pretend_size;
1593 current_frame_info.var_size = var_size;
1594 current_frame_info.args_size = args_size;
1595 current_frame_info.reg_size = reg_size;
1596 current_frame_info.gmask = gmask;
1597 current_frame_info.initialized = reload_completed;
1598
1599 /* Ok, we're done. */
1600 return total_size;
1601 }
1602
1603 /* Worker function for TARGET_CAN_ELIMINATE. */
1604
1605 bool
1606 m32r_can_eliminate (const int from, const int to)
1607 {
1608 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1609 ? ! frame_pointer_needed
1610 : true);
1611 }
1612
1613 \f
1614 /* The table we use to reference PIC data. */
1615 static rtx global_offset_table;
1616
1617 static void
1618 m32r_reload_lr (rtx sp, int size)
1619 {
1620 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1621
1622 if (size == 0)
1623 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1624 else if (size < 32768)
1625 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1626 gen_rtx_PLUS (Pmode, sp,
1627 GEN_INT (size)))));
1628 else
1629 {
1630 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1631
1632 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1633 emit_insn (gen_addsi3 (tmp, tmp, sp));
1634 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1635 }
1636
1637 emit_use (lr);
1638 }
1639
1640 void
1641 m32r_load_pic_register (void)
1642 {
1643 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1644 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1645 GEN_INT (TARGET_MODEL_SMALL)));
1646
1647 /* Need to emit this whether or not we obey regdecls,
1648 since setjmp/longjmp can cause life info to screw up. */
1649 emit_use (pic_offset_table_rtx);
1650 }
1651
1652 /* Expand the m32r prologue as a series of insns. */
1653
1654 void
1655 m32r_expand_prologue (void)
1656 {
1657 int regno;
1658 int frame_size;
1659 unsigned int gmask;
1660 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1661 | crtl->profile);
1662
1663 if (! current_frame_info.initialized)
1664 m32r_compute_frame_size (get_frame_size ());
1665
1666 if (flag_stack_usage_info)
1667 current_function_static_stack_size = current_frame_info.total_size;
1668
1669 gmask = current_frame_info.gmask;
1670
1671 /* These cases shouldn't happen. Catch them now. */
1672 gcc_assert (current_frame_info.total_size || !gmask);
1673
1674 /* Allocate space for register arguments if this is a variadic function. */
1675 if (current_frame_info.pretend_size != 0)
1676 {
1677 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1678 the wrong result on a 64-bit host. */
1679 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1680 emit_insn (gen_addsi3 (stack_pointer_rtx,
1681 stack_pointer_rtx,
1682 GEN_INT (-pretend_size)));
1683 }
1684
1685 /* Save any registers we need to and set up fp. */
1686 if (current_frame_info.save_fp)
1687 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1688
1689 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1690
1691 /* Save any needed call-saved regs (and call-used if this is an
1692 interrupt handler). */
1693 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1694 {
1695 if ((gmask & (1 << regno)) != 0)
1696 emit_insn (gen_movsi_push (stack_pointer_rtx,
1697 gen_rtx_REG (Pmode, regno)));
1698 }
1699
1700 if (current_frame_info.save_lr)
1701 emit_insn (gen_movsi_push (stack_pointer_rtx,
1702 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1703
1704 /* Allocate the stack frame. */
1705 frame_size = (current_frame_info.total_size
1706 - (current_frame_info.pretend_size
1707 + current_frame_info.reg_size));
1708
1709 if (frame_size == 0)
1710 ; /* Nothing to do. */
1711 else if (frame_size <= 32768)
1712 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1713 GEN_INT (-frame_size)));
1714 else
1715 {
1716 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1717
1718 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1719 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1720 }
1721
1722 if (frame_pointer_needed)
1723 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1724
1725 if (crtl->profile)
1726 /* Push lr for mcount (form_pc, x). */
1727 emit_insn (gen_movsi_push (stack_pointer_rtx,
1728 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1729
1730 if (pic_reg_used)
1731 {
1732 m32r_load_pic_register ();
1733 m32r_reload_lr (stack_pointer_rtx,
1734 (crtl->profile ? 0 : frame_size));
1735 }
1736
1737 if (crtl->profile && !pic_reg_used)
1738 emit_insn (gen_blockage ());
1739 }
1740
1741 \f
1742 /* Set up the stack and frame pointer (if desired) for the function.
1743 Note, if this is changed, you need to mirror the changes in
1744 m32r_compute_frame_size which calculates the prolog size. */
1745
1746 static void
1747 m32r_output_function_prologue (FILE * file)
1748 {
1749 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1750
1751 /* If this is an interrupt handler, mark it as such. */
1752 if (M32R_INTERRUPT_P (fn_type))
1753 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1754
1755 if (! current_frame_info.initialized)
1756 m32r_compute_frame_size (get_frame_size ());
1757
1758 /* This is only for the human reader. */
1759 fprintf (file,
1760 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1761 ASM_COMMENT_START,
1762 current_frame_info.var_size,
1763 current_frame_info.reg_size / 4,
1764 current_frame_info.args_size,
1765 current_frame_info.extra_size);
1766 }
1767 \f
1768 /* Output RTL to pop register REGNO from the stack. */
1769
1770 static void
1771 pop (int regno)
1772 {
1773 rtx x;
1774
1775 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1776 stack_pointer_rtx));
1777 add_reg_note (x, REG_INC, stack_pointer_rtx);
1778 }
1779
1780 /* Expand the m32r epilogue as a series of insns. */
1781
1782 void
1783 m32r_expand_epilogue (void)
1784 {
1785 int regno;
1786 int noepilogue = FALSE;
1787 int total_size;
1788
1789 gcc_assert (current_frame_info.initialized);
1790 total_size = current_frame_info.total_size;
1791
1792 if (total_size == 0)
1793 {
1794 rtx_insn *insn = get_last_insn ();
1795
1796 /* If the last insn was a BARRIER, we don't have to write any code
1797 because a jump (aka return) was put there. */
1798 if (insn && NOTE_P (insn))
1799 insn = prev_nonnote_insn (insn);
1800 if (insn && BARRIER_P (insn))
1801 noepilogue = TRUE;
1802 }
1803
1804 if (!noepilogue)
1805 {
1806 unsigned int var_size = current_frame_info.var_size;
1807 unsigned int args_size = current_frame_info.args_size;
1808 unsigned int gmask = current_frame_info.gmask;
1809 int can_trust_sp_p = !cfun->calls_alloca;
1810
1811 if (flag_exceptions)
1812 emit_insn (gen_blockage ());
1813
1814 /* The first thing to do is point the sp at the bottom of the register
1815 save area. */
1816 if (can_trust_sp_p)
1817 {
1818 unsigned int reg_offset = var_size + args_size;
1819
1820 if (reg_offset == 0)
1821 ; /* Nothing to do. */
1822 else if (reg_offset < 32768)
1823 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1824 GEN_INT (reg_offset)));
1825 else
1826 {
1827 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1828
1829 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1830 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1831 tmp));
1832 }
1833 }
1834 else if (frame_pointer_needed)
1835 {
1836 unsigned int reg_offset = var_size + args_size;
1837
1838 if (reg_offset == 0)
1839 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1840 else if (reg_offset < 32768)
1841 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1842 GEN_INT (reg_offset)));
1843 else
1844 {
1845 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1846
1847 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1848 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1849 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1850 tmp));
1851 }
1852 }
1853 else
1854 gcc_unreachable ();
1855
1856 if (current_frame_info.save_lr)
1857 pop (RETURN_ADDR_REGNUM);
1858
1859 /* Restore any saved registers, in reverse order of course. */
1860 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1861 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1862 {
1863 if ((gmask & (1L << regno)) != 0)
1864 pop (regno);
1865 }
1866
1867 if (current_frame_info.save_fp)
1868 pop (FRAME_POINTER_REGNUM);
1869
1870 /* Remove varargs area if present. */
1871 if (current_frame_info.pretend_size != 0)
1872 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1873 GEN_INT (current_frame_info.pretend_size)));
1874
1875 emit_insn (gen_blockage ());
1876 }
1877 }
1878
1879 /* Do any necessary cleanup after a function to restore stack, frame,
1880 and regs. */
1881
1882 static void
1883 m32r_output_function_epilogue (FILE *)
1884 {
1885 /* Reset state info for each function. */
1886 current_frame_info = zero_frame_info;
1887 m32r_compute_function_type (NULL_TREE);
1888 }
1889 \f
1890 /* Return nonzero if this function is known to have a null or 1 instruction
1891 epilogue. */
1892
1893 int
1894 direct_return (void)
1895 {
1896 if (!reload_completed)
1897 return FALSE;
1898
1899 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1900 return FALSE;
1901
1902 if (! current_frame_info.initialized)
1903 m32r_compute_frame_size (get_frame_size ());
1904
1905 return current_frame_info.total_size == 0;
1906 }
1907
1908 \f
1909 /* PIC. */
1910
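/* Return 1 if X can be used as-is in PIC code, i.e. it contains no bare
   SYMBOL_REF, LABEL_REF or (const (plus (symbol_ref) (const_int))) that
   would have to be resolved through the GOT. */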
1911 int
1912 m32r_legitimate_pic_operand_p (rtx x)
1913 {
1914 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1915 return 0;
1916
1917 if (GET_CODE (x) == CONST
1918 && GET_CODE (XEXP (x, 0)) == PLUS
1919 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1920 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1921 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1922 return 0;
1923
1924 return 1;
1925 }
1926
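/* Legitimize address ORIG for PIC code.  Labels and local symbols are
   computed as GOTOFF offsets from the PIC base register; other symbols
   are loaded through their GOT entry.  REG, if nonnull, is used as the
   result/scratch register. */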
1927 rtx
1928 m32r_legitimize_pic_address (rtx orig, rtx reg)
1929 {
1930 #ifdef DEBUG_PIC
1931 printf ("m32r_legitimize_pic_address()\n");
1932 #endif
1933
1934 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1935 {
1936 rtx pic_ref, address;
1937 int subregs = 0;
1938
1939 if (reg == 0)
1940 {
1941 gcc_assert (!reload_in_progress && !reload_completed);
1942 reg = gen_reg_rtx (Pmode);
1943
1944 subregs = 1;
1945 }
1946
1947 if (subregs)
1948 address = gen_reg_rtx (Pmode);
1949 else
1950 address = reg;
1951
1952 crtl->uses_pic_offset_table = 1;
1953
1954 if (GET_CODE (orig) == LABEL_REF
1955 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1956 {
1957 emit_insn (gen_gotoff_load_addr (reg, orig));
1958 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1959 return reg;
1960 }
1961
1962 emit_insn (gen_pic_load_addr (address, orig));
1963
1964 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1965 pic_ref = gen_const_mem (Pmode, address);
1966 emit_move_insn (reg, pic_ref);
1967 return reg;
1968 }
1969 else if (GET_CODE (orig) == CONST)
1970 {
1971 rtx base, offset;
1972
1973 if (GET_CODE (XEXP (orig, 0)) == PLUS
1974 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1975 return orig;
1976
1977 if (reg == 0)
1978 {
1979 gcc_assert (!reload_in_progress && !reload_completed);
1980 reg = gen_reg_rtx (Pmode);
1981 }
1982
1983 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1984 {
1985 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1986 if (base == reg)
1987 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1988 else
1989 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1990 }
1991 else
1992 return orig;
1993
1994 if (CONST_INT_P (offset))
1995 {
1996 if (INT16_P (INTVAL (offset)))
1997 return plus_constant (Pmode, base, INTVAL (offset));
1998 else
1999 {
2000 gcc_assert (! reload_in_progress && ! reload_completed);
2001 offset = force_reg (Pmode, offset);
2002 }
2003 }
2004
2005 return gen_rtx_PLUS (Pmode, base, offset);
2006 }
2007
2008 return orig;
2009 }
2010
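/* Worker for TARGET_LEGITIMIZE_ADDRESS.  When generating PIC, hand the
   address to the PIC legitimizer above; otherwise return X unchanged. */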
2011 static rtx
2012 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2013 machine_mode mode ATTRIBUTE_UNUSED)
2014 {
2015 if (flag_pic)
2016 return m32r_legitimize_pic_address (x, NULL_RTX);
2017 else
2018 return x;
2019 }
2020
2021 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2022
2023 static bool
2024 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2025 {
2026 if (GET_CODE (addr) == LO_SUM)
2027 return true;
2028
2029 return false;
2030 }
2031 \f
2032 /* Nested function support. */
2033
2034 /* Emit RTL insns to initialize the variable parts of a trampoline.
2035 FNADDR is an RTX for the address of the function's pure code.
2036 CXT is an RTX for the static chain value for the function. */
2037
2038 void
2039 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2040 rtx fnaddr ATTRIBUTE_UNUSED,
2041 rtx cxt ATTRIBUTE_UNUSED)
2042 {
2043 }
2044 \f
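/* Worker for TARGET_ASM_FILE_START.  Record the -G threshold in an
   assembler comment and emit ".little" when targeting little-endian. */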
2045 static void
2046 m32r_file_start (void)
2047 {
2048 default_file_start ();
2049
2050 if (flag_verbose_asm)
2051 fprintf (asm_out_file,
2052 "%s M32R/D special options: -G %d\n",
2053 ASM_COMMENT_START, g_switch_value);
2054
2055 if (TARGET_LITTLE_ENDIAN)
2056 fprintf (asm_out_file, "\t.little\n");
2057 }
2058 \f
2059 /* Print operand X (an rtx) in assembler syntax to file FILE.
2060 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2061 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2062
2063 static void
2064 m32r_print_operand (FILE * file, rtx x, int code)
2065 {
2066 rtx addr;
2067
2068 switch (code)
2069 {
2070 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2071 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2072 case 's':
2073 if (REG_P (x))
2074 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2075 else
2076 output_operand_lossage ("invalid operand to %%s code");
2077 return;
2078
2079 case 'p':
2080 if (REG_P (x))
2081 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2082 else
2083 output_operand_lossage ("invalid operand to %%p code");
2084 return;
2085
2086 case 'R' :
2087 /* Write second word of DImode or DFmode reference,
2088 register or memory. */
2089 if (REG_P (x))
2090 fputs (reg_names[REGNO (x)+1], file);
2091 else if (MEM_P (x))
2092 {
2093 machine_mode mode = GET_MODE (x);
2094
2095 fprintf (file, "@(");
2096 /* Handle possible auto-increment. Since it is pre-increment and
2097 we have already done it, we can just use an offset of four. */
2098 /* ??? This is taken from rs6000.c I think. I don't think it is
2099 currently necessary, but keep it around. */
2100 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2101 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2102 output_address (mode, plus_constant (Pmode,
2103 XEXP (XEXP (x, 0), 0), 4));
2104 else
2105 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4));
2106 fputc (')', file);
2107 }
2108 else
2109 output_operand_lossage ("invalid operand to %%R code");
2110 return;
2111
2112 case 'H' : /* High word. */
2113 case 'L' : /* Low word. */
2114 if (REG_P (x))
2115 {
2116 /* L = least significant word, H = most significant word. */
2117 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2118 fputs (reg_names[REGNO (x)], file);
2119 else
2120 fputs (reg_names[REGNO (x)+1], file);
2121 }
2122 else if (CONST_INT_P (x)
2123 || GET_CODE (x) == CONST_DOUBLE)
2124 {
2125 rtx first, second;
2126
2127 split_double (x, &first, &second);
2128 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2129 code == 'L' ? INTVAL (first) : INTVAL (second));
2130 }
2131 else
2132 output_operand_lossage ("invalid operand to %%H/%%L code");
2133 return;
2134
2135 case 'A' :
2136 {
2137 char str[30];
2138
2139 if (GET_CODE (x) != CONST_DOUBLE
2140 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2141 fatal_insn ("bad insn for 'A'", x);
2142
2143 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2144 fprintf (file, "%s", str);
2145 return;
2146 }
2147
2148 case 'B' : /* Bottom half. */
2149 case 'T' : /* Top half. */
2150 /* Output the argument to a `seth' insn (sets the Top half-word).
2151 For constants output arguments to a seth/or3 pair to set Top and
2152 Bottom halves. For symbols output arguments to a seth/add3 pair to
2153 set Top and Bottom halves. The difference exists because for
2154 constants seth/or3 is more readable but for symbols we need to use
2155 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2156 switch (GET_CODE (x))
2157 {
2158 case CONST_INT :
2159 case CONST_DOUBLE :
2160 {
2161 rtx first, second;
2162
2163 split_double (x, &first, &second);
2164 x = WORDS_BIG_ENDIAN ? second : first;
2165 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2166 (code == 'B'
2167 ? INTVAL (x) & 0xffff
2168 : (INTVAL (x) >> 16) & 0xffff));
2169 }
2170 return;
2171 case CONST :
2172 case SYMBOL_REF :
2173 if (code == 'B'
2174 && small_data_operand (x, VOIDmode))
2175 {
2176 fputs ("sda(", file);
2177 output_addr_const (file, x);
2178 fputc (')', file);
2179 return;
2180 }
2181 /* fall through */
2182 case LABEL_REF :
2183 fputs (code == 'T' ? "shigh(" : "low(", file);
2184 output_addr_const (file, x);
2185 fputc (')', file);
2186 return;
2187 default :
2188 output_operand_lossage ("invalid operand to %%T/%%B code");
2189 return;
2190 }
2191 break;
2192
2193 case 'U' :
2194 /* ??? wip */
2195 /* Output a load/store with update indicator if appropriate. */
2196 if (MEM_P (x))
2197 {
2198 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2199 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2200 fputs (".a", file);
2201 }
2202 else
2203 output_operand_lossage ("invalid operand to %%U code");
2204 return;
2205
2206 case 'N' :
2207 /* Print a constant value negated. */
2208 if (CONST_INT_P (x))
2209 output_addr_const (file, GEN_INT (- INTVAL (x)));
2210 else
2211 output_operand_lossage ("invalid operand to %%N code");
2212 return;
2213
2214 case 'X' :
2215 /* Print a const_int in hex. Used in comments. */
2216 if (CONST_INT_P (x))
2217 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2218 return;
2219
2220 case '#' :
2221 fputs (IMMEDIATE_PREFIX, file);
2222 return;
2223
2224 case 0 :
2225 /* Do nothing special. */
2226 break;
2227
2228 default :
2229 /* Unknown flag. */
2230 output_operand_lossage ("invalid operand output code");
2231 }
2232
2233 switch (GET_CODE (x))
2234 {
2235 case REG :
2236 fputs (reg_names[REGNO (x)], file);
2237 break;
2238
2239 case MEM :
2240 addr = XEXP (x, 0);
2241 if (GET_CODE (addr) == PRE_INC)
2242 {
2243 if (!REG_P (XEXP (addr, 0)))
2244 fatal_insn ("pre-increment address is not a register", x);
2245
2246 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2247 }
2248 else if (GET_CODE (addr) == PRE_DEC)
2249 {
2250 if (!REG_P (XEXP (addr, 0)))
2251 fatal_insn ("pre-decrement address is not a register", x);
2252
2253 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2254 }
2255 else if (GET_CODE (addr) == POST_INC)
2256 {
2257 if (!REG_P (XEXP (addr, 0)))
2258 fatal_insn ("post-increment address is not a register", x);
2259
2260 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2261 }
2262 else
2263 {
2264 fputs ("@(", file);
2265 output_address (GET_MODE (x), addr);
2266 fputc (')', file);
2267 }
2268 break;
2269
2270 case CONST_DOUBLE :
2271 /* We handle SFmode constants here as output_addr_const doesn't. */
2272 if (GET_MODE (x) == SFmode)
2273 {
2274 long l;
2275
2276 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2277 fprintf (file, "0x%08lx", l);
2278 break;
2279 }
2280
2281 /* FALLTHRU */
2282 /* Let output_addr_const deal with it. */
2283
2284 default :
2285 output_addr_const (file, x);
2286 break;
2287 }
2288 }
2289
2290 /* Print a memory address as an operand to reference that memory location. */
2291
2292 static void
2293 m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
2294 {
2295 rtx base;
2296 rtx index = 0;
2297 int offset = 0;
2298
2299 switch (GET_CODE (addr))
2300 {
2301 case REG :
2302 fputs (reg_names[REGNO (addr)], file);
2303 break;
2304
2305 case PLUS :
2306 if (CONST_INT_P (XEXP (addr, 0)))
2307 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2308 else if (CONST_INT_P (XEXP (addr, 1)))
2309 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2310 else
2311 base = XEXP (addr, 0), index = XEXP (addr, 1);
2312 if (REG_P (base))
2313 {
2314 /* Print the offset first (if present) to conform to the manual. */
2315 if (index == 0)
2316 {
2317 if (offset != 0)
2318 fprintf (file, "%d,", offset);
2319 fputs (reg_names[REGNO (base)], file);
2320 }
2321 /* The chip doesn't support this, but left in for generality. */
2322 else if (REG_P (index))
2323 fprintf (file, "%s,%s",
2324 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2325 /* Not sure this can happen, but leave in for now. */
2326 else if (GET_CODE (index) == SYMBOL_REF)
2327 {
2328 output_addr_const (file, index);
2329 fputc (',', file);
2330 fputs (reg_names[REGNO (base)], file);
2331 }
2332 else
2333 fatal_insn ("bad address", addr);
2334 }
2335 else if (GET_CODE (base) == LO_SUM)
2336 {
2337 gcc_assert (!index && REG_P (XEXP (base, 0)));
2338 if (small_data_operand (XEXP (base, 1), VOIDmode))
2339 fputs ("sda(", file);
2340 else
2341 fputs ("low(", file);
2342 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2343 offset));
2344 fputs ("),", file);
2345 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2346 }
2347 else
2348 fatal_insn ("bad address", addr);
2349 break;
2350
2351 case LO_SUM :
2352 if (!REG_P (XEXP (addr, 0)))
2353 fatal_insn ("lo_sum not of register", addr);
2354 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2355 fputs ("sda(", file);
2356 else
2357 fputs ("low(", file);
2358 output_addr_const (file, XEXP (addr, 1));
2359 fputs ("),", file);
2360 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2361 break;
2362
2363 case PRE_INC : /* Assume SImode. */
2364 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2365 break;
2366
2367 case PRE_DEC : /* Assume SImode. */
2368 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2369 break;
2370
2371 case POST_INC : /* Assume SImode. */
2372 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2373 break;
2374
2375 default :
2376 output_addr_const (file, addr);
2377 break;
2378 }
2379 }
2380
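/* Return true if CODE is a punctuation character accepted by
   m32r_print_operand. */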
2381 static bool
2382 m32r_print_operand_punct_valid_p (unsigned char code)
2383 {
2384 return m32r_punct_chars[code];
2385 }
2386
2387 /* Return true if the operands are the constants 0 and 1. */
2388
2389 int
2390 zero_and_one (rtx operand1, rtx operand2)
2391 {
2392 return
2393 CONST_INT_P (operand1)
2394 && CONST_INT_P (operand2)
2395 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2396 || ((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2397 }
2398
2399 /* Generate the correct assembler code to handle the conditional loading of a
2400 value into a register. It is known that the operands satisfy the
2401 conditional_move_operand() function above. The destination is operand[0].
2402 The condition is operand [1]. The 'true' value is operand [2] and the
2403 'false' value is operand [3]. */
2404
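/* For example, with destination register r4 (purely illustrative) and an
   EQ test whose 'true' value is 1, this emits just "mvfc r4, cbr"; if the
   'true' value is 0 instead, an "xor3 r4, r4, #1" is appended to invert
   the value moved from the condition bit register. */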
2405 char *
2406 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2407 {
2408 static char buffer [100];
2409 const char * dest = reg_names [REGNO (operands [0])];
2410
2411 buffer [0] = 0;
2412
2413 /* Destination must be a register. */
2414 gcc_assert (REG_P (operands [0]));
2415 gcc_assert (conditional_move_operand (operands [2], SImode));
2416 gcc_assert (conditional_move_operand (operands [3], SImode));
2417
2418 /* Check to see if the test is reversed. */
2419 if (GET_CODE (operands [1]) == NE)
2420 {
2421 rtx tmp = operands [2];
2422 operands [2] = operands [3];
2423 operands [3] = tmp;
2424 }
2425
2426 sprintf (buffer, "mvfc %s, cbr", dest);
2427
2428 /* If the true value was '0' then we need to invert the results of the move. */
2429 if (INTVAL (operands [2]) == 0)
2430 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2431 dest, dest);
2432
2433 return buffer;
2434 }
2435
2436 /* Returns true if the registers contained in the two
2437 rtl expressions are different. */
2438
2439 int
2440 m32r_not_same_reg (rtx a, rtx b)
2441 {
2442 int reg_a = -1;
2443 int reg_b = -2;
2444
2445 while (GET_CODE (a) == SUBREG)
2446 a = SUBREG_REG (a);
2447
2448 if (REG_P (a))
2449 reg_a = REGNO (a);
2450
2451 while (GET_CODE (b) == SUBREG)
2452 b = SUBREG_REG (b);
2453
2454 if (REG_P (b))
2455 reg_b = REGNO (b);
2456
2457 return reg_a != reg_b;
2458 }
2459
2460 \f
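/* Return a SYMBOL_REF for NAME with the currently selected code model
   (small, medium or large) recorded in its SYMBOL_REF flags. */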
2461 rtx
2462 m32r_function_symbol (const char *name)
2463 {
2464 int extra_flags = 0;
2465 enum m32r_model model;
2466 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2467
2468 if (TARGET_MODEL_SMALL)
2469 model = M32R_MODEL_SMALL;
2470 else if (TARGET_MODEL_MEDIUM)
2471 model = M32R_MODEL_MEDIUM;
2472 else if (TARGET_MODEL_LARGE)
2473 model = M32R_MODEL_LARGE;
2474 else
2475 gcc_unreachable (); /* Shouldn't happen. */
2476 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2477
2478 if (extra_flags)
2479 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2480
2481 return sym;
2482 }
2483
2484 /* Use a library function to move some bytes. */
2485
2486 static void
2487 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2488 {
2489 /* We want to pass the size as Pmode, which will normally be SImode
2490 but will be DImode if we are using 64-bit longs and pointers. */
2491 if (GET_MODE (bytes_rtx) != VOIDmode
2492 && GET_MODE (bytes_rtx) != Pmode)
2493 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2494
2495 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2496 VOIDmode, dest_reg, Pmode, src_reg, Pmode,
2497 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2498 TYPE_UNSIGNED (sizetype)),
2499 TYPE_MODE (sizetype));
2500 }
2501
2502 /* Expand string/block move operations.
2503
2504 operands[0] is the pointer to the destination.
2505 operands[1] is the pointer to the source.
2506 operands[2] is the number of bytes to move.
2507 operands[3] is the alignment.
2508
2509 Returns 1 upon success, 0 otherwise. */
2510
2511 int
2512 m32r_expand_block_move (rtx operands[])
2513 {
2514 rtx orig_dst = operands[0];
2515 rtx orig_src = operands[1];
2516 rtx bytes_rtx = operands[2];
2517 rtx align_rtx = operands[3];
2518 int constp = CONST_INT_P (bytes_rtx);
2519 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2520 int align = INTVAL (align_rtx);
2521 int leftover;
2522 rtx src_reg;
2523 rtx dst_reg;
2524
2525 if (constp && bytes <= 0)
2526 return 1;
2527
2528 /* Move the address into scratch registers. */
2529 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2530 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2531
2532 if (align > UNITS_PER_WORD)
2533 align = UNITS_PER_WORD;
2534
2535 /* If we prefer size over speed, always use a function call.
2536 If we do not know the size, use a function call.
2537 If the blocks are not word aligned, use a function call. */
2538 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2539 {
2540 block_move_call (dst_reg, src_reg, bytes_rtx);
2541 return 0;
2542 }
2543
2544 leftover = bytes % MAX_MOVE_BYTES;
2545 bytes -= leftover;
2546
2547 /* If necessary, generate a loop to handle the bulk of the copy. */
2548 if (bytes)
2549 {
2550 rtx_code_label *label = NULL;
2551 rtx final_src = NULL_RTX;
2552 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2553 rtx rounded_total = GEN_INT (bytes);
2554 rtx new_dst_reg = gen_reg_rtx (SImode);
2555 rtx new_src_reg = gen_reg_rtx (SImode);
2556
2557 /* If we are going to have to perform this loop more than
2558 once, then generate a label and compute the address the
2559 source register will contain upon completion of the final
2560 iteration. */
2561 if (bytes > MAX_MOVE_BYTES)
2562 {
2563 final_src = gen_reg_rtx (Pmode);
2564
2565 if (INT16_P(bytes))
2566 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2567 else
2568 {
2569 emit_insn (gen_movsi (final_src, rounded_total));
2570 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2571 }
2572
2573 label = gen_label_rtx ();
2574 emit_label (label);
2575 }
2576
2577 /* It is known that m32r_output_block_move() will update src_reg to point
2578 to the word after the end of the source block, and dst_reg to point
2579 to the last word of the destination block, provided that the block
2580 is MAX_MOVE_BYTES long. */
2581 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2582 new_dst_reg, new_src_reg));
2583 emit_move_insn (dst_reg, new_dst_reg);
2584 emit_move_insn (src_reg, new_src_reg);
2585 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2586
2587 if (bytes > MAX_MOVE_BYTES)
2588 {
2589 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2590 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2591 }
2592 }
2593
2594 if (leftover)
2595 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2596 gen_reg_rtx (SImode),
2597 gen_reg_rtx (SImode)));
2598 return 1;
2599 }
2600
2601 \f
2602 /* Emit load/stores for a small constant word aligned block_move.
2603
2604 operands[0] is the memory address of the destination.
2605 operands[1] is the memory address of the source.
2606 operands[2] is the number of bytes to move.
2607 operands[3] and operands[4] receive the updated addresses.
2608 operands[5] and operands[6] are temp registers. */
2609
2610 void
2611 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2612 {
2613 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2614 int first_time;
2615 int got_extra = 0;
2616
2617 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2618
2619 /* We do not have a post-increment store available, so the first set of
2620 stores are done without any increment, then the remaining ones can use
2621 the pre-increment addressing mode.
2622
2623 Note: m32r_expand_block_move() also relies upon this behavior when building
2624 loops to copy large blocks. */
2625 first_time = 1;
2626
2627 while (bytes > 0)
2628 {
2629 if (bytes >= 8)
2630 {
2631 if (first_time)
2632 {
2633 output_asm_insn ("ld\t%5, %p1", operands);
2634 output_asm_insn ("ld\t%6, %p1", operands);
2635 output_asm_insn ("st\t%5, @%0", operands);
2636 output_asm_insn ("st\t%6, %s0", operands);
2637 }
2638 else
2639 {
2640 output_asm_insn ("ld\t%5, %p1", operands);
2641 output_asm_insn ("ld\t%6, %p1", operands);
2642 output_asm_insn ("st\t%5, %s0", operands);
2643 output_asm_insn ("st\t%6, %s0", operands);
2644 }
2645
2646 bytes -= 8;
2647 }
2648 else if (bytes >= 4)
2649 {
2650 if (bytes > 4)
2651 got_extra = 1;
2652
2653 output_asm_insn ("ld\t%5, %p1", operands);
2654
2655 if (got_extra)
2656 output_asm_insn ("ld\t%6, %p1", operands);
2657
2658 if (first_time)
2659 output_asm_insn ("st\t%5, @%0", operands);
2660 else
2661 output_asm_insn ("st\t%5, %s0", operands);
2662
2663 bytes -= 4;
2664 }
2665 else
2666 {
2667 /* Get the entire next word, even though we do not want all of it.
2668 This saves us from doing several smaller loads, and we assume that
2669 we cannot cause a page fault when at least part of the word is in
2670 valid memory [since we don't get called if things aren't properly
2671 aligned]. */
2672 int dst_offset = first_time ? 0 : 4;
2673 /* The amount of increment we have to make to the
2674 destination pointer. */
2675 int dst_inc_amount = dst_offset + bytes - 4;
2676 /* The same for the source pointer. */
2677 int src_inc_amount = bytes;
2678 int last_shift;
2679 rtx my_operands[3];
2680
2681 /* If got_extra is true then we have already loaded
2682 the next word as part of loading and storing the previous word. */
2683 if (! got_extra)
2684 output_asm_insn ("ld\t%6, @%1", operands);
2685
2686 if (bytes >= 2)
2687 {
2688 bytes -= 2;
2689
2690 output_asm_insn ("sra3\t%5, %6, #16", operands);
2691 my_operands[0] = operands[5];
2692 my_operands[1] = GEN_INT (dst_offset);
2693 my_operands[2] = operands[0];
2694 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2695
2696 /* If there is a byte left to store then increment the
2697 destination address and shift the contents of the source
2698 register down by 8 bits. We could not do the address
2699 increment in the store half word instruction, because it does
2700 not have an auto increment mode. */
2701 if (bytes > 0) /* assert (bytes == 1) */
2702 {
2703 dst_offset += 2;
2704 last_shift = 8;
2705 }
2706 }
2707 else
2708 last_shift = 24;
2709
2710 if (bytes > 0)
2711 {
2712 my_operands[0] = operands[6];
2713 my_operands[1] = GEN_INT (last_shift);
2714 output_asm_insn ("srai\t%0, #%1", my_operands);
2715 my_operands[0] = operands[6];
2716 my_operands[1] = GEN_INT (dst_offset);
2717 my_operands[2] = operands[0];
2718 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2719 }
2720
2721 /* Update the destination pointer if needed. We have to do
2722 this so that the pattern matches what we output in this
2723 function. */
2724 if (dst_inc_amount
2725 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2726 {
2727 my_operands[0] = operands[0];
2728 my_operands[1] = GEN_INT (dst_inc_amount);
2729 output_asm_insn ("addi\t%0, #%1", my_operands);
2730 }
2731
2732 /* Update the source pointer if needed. We have to do this
2733 so that the pattern matches what we output in this
2734 function. */
2735 if (src_inc_amount
2736 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2737 {
2738 my_operands[0] = operands[1];
2739 my_operands[1] = GEN_INT (src_inc_amount);
2740 output_asm_insn ("addi\t%0, #%1", my_operands);
2741 }
2742
2743 bytes = 0;
2744 }
2745
2746 first_time = 0;
2747 }
2748 }
2749
2750 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2751
2752 int
2753 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2754 unsigned int new_reg)
2755 {
2756 /* Interrupt routines can't clobber any register that isn't already used. */
2757 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2758 && !df_regs_ever_live_p (new_reg))
2759 return 0;
2760
2761 return 1;
2762 }
2763
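/* Return the RTX for the return address of the current function.
   Only COUNT == 0 (the current frame) is supported; otherwise return
   const0_rtx. */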
2764 rtx
2765 m32r_return_addr (int count)
2766 {
2767 if (count != 0)
2768 return const0_rtx;
2769
2770 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2771 }
2772
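/* Worker for TARGET_TRAMPOLINE_INIT.  Copy the four instruction words of
   the trampoline (byte-swapped when targeting little-endian), store the
   static chain value and the address of FNDECL after them, then flush the
   instruction cache either via the configured trap number or by calling
   the cache-flush library function. */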
2773 static void
2774 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2775 {
2776 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2777 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2778 0x017e8e17 : 0x178e7e01, SImode));
2779 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2780 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2781 0x0c00ae86 : 0x86ae000c, SImode));
2782 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2783 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2784 0xe627871e : 0x1e8727e6, SImode));
2785 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2786 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2787 0xc616c626 : 0x26c61fc6, SImode));
2788 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2789 chain_value);
2790 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2791 XEXP (DECL_RTL (fndecl), 0));
2792
2793 if (m32r_cache_flush_trap >= 0)
2794 emit_insn (gen_flush_icache
2795 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2796 gen_int_mode (m32r_cache_flush_trap, SImode)));
2797 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2798 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2799 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode,
2800 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2801 GEN_INT (3), SImode);
2802 }
2803
2804 /* True if X is a reg that can be used as a base reg. */
2805
2806 static bool
2807 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2808 {
2809 if (! REG_P (x))
2810 return false;
2811
2812 if (strict)
2813 {
2814 if (GPR_P (REGNO (x)))
2815 return true;
2816 }
2817 else
2818 {
2819 if (GPR_P (REGNO (x))
2820 || REGNO (x) == ARG_POINTER_REGNUM
2821 || ! HARD_REGISTER_P (x))
2822 return true;
2823 }
2824
2825 return false;
2826 }
2827
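/* Return true if X is a constant that fits in the signed 16-bit
   displacement field of a load/store address. */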
2828 static inline bool
2829 m32r_rtx_ok_for_offset_p (const_rtx x)
2830 {
2831 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2832 }
2833
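/* Return true if X is a (plus base-register 16-bit-displacement)
   address. */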
2834 static inline bool
2835 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2836 const_rtx x, bool strict)
2837 {
2838 if (GET_CODE (x) == PLUS
2839 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2840 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2841 return true;
2842
2843 return false;
2844 }
2845
2846 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2847 since more than one instruction will be required. */
2848
2849 static inline bool
2850 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2851 bool strict)
2852 {
2853 if (GET_CODE (x) == LO_SUM
2854 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2855 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2856 && CONSTANT_P (XEXP (x, 1)))
2857 return true;
2858
2859 return false;
2860 }
2861
2862 /* Is this a post-increment load operation? */
2863
2864 static inline bool
2865 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2866 {
2867 if ((mode == SImode || mode == SFmode)
2868 && GET_CODE (x) == POST_INC
2869 && REG_P (XEXP (x, 0))
2870 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2871 return true;
2872
2873 return false;
2874 }
2875
2876 /* Is this a pre-increment/pre-decrement store operation? */
2877
2878 static inline bool
2879 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2880 {
2881 if ((mode == SImode || mode == SFmode)
2882 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2883 && REG_P (XEXP (x, 0))
2884 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2885 return true;
2886
2887 return false;
2888 }
2889
2890 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2891
2892 static bool
2893 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2894 {
2895 if (m32r_rtx_ok_for_base_p (x, strict)
2896 || m32r_legitimate_offset_addres_p (mode, x, strict)
2897 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2898 || m32r_load_postinc_p (mode, x, strict)
2899 || m32r_store_preinc_predec_p (mode, x, strict))
2900 return true;
2901
2902 return false;
2903 }
2904
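/* Worker for TARGET_CONDITIONAL_REGISTER_USAGE.  When generating
   position-independent code, the PIC base register must not be
   allocated for other purposes. */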
2905 static void
2906 m32r_conditional_register_usage (void)
2907 {
2908 if (flag_pic)
2909 {
2910 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2911 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2912 }
2913 }
2914
2915 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2916
2917 We don't allow (plus symbol large-constant) as the relocations can't
2918 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2919 We allow all CONST_DOUBLE's as the md file patterns will force the
2920 constant to memory if they can't handle them. */
2921
2922 static bool
2923 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2924 {
2925 return !(GET_CODE (x) == CONST
2926 && GET_CODE (XEXP (x, 0)) == PLUS
2927 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2928 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2929 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2930 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2931 }