1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "recog.h"
34 #include "diagnostic-core.h"
35 #include "alias.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "calls.h"
39 #include "output.h"
40 #include "insn-attr.h"
41 #include "explow.h"
42 #include "expr.h"
43 #include "tm-constrs.h"
44 #include "builtins.h"
45
46 /* This file should be included last. */
47 #include "target-def.h"
48
49 /* Array of valid operand punctuation characters. */
50 static char m32r_punct_chars[256];
51
52 /* Machine-specific symbol_ref flags. */
53 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
54 #define SYMBOL_REF_MODEL(X) \
55 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
56
57 /* For string literals, etc. */
58 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
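/* The '*' prefix marks a verbatim assembler name, so this test typically
   matches internally generated constant pool labels such as "*.LC0"
   (an illustrative example, not a name taken from this file).  */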
59
60 /* Forward declarations. */
61 static void m32r_option_override (void);
62 static void init_reg_tables (void);
63 static void block_move_call (rtx, rtx, rtx);
64 static int m32r_is_insn (rtx);
65 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
66 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
67 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
68 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
69 static void m32r_print_operand (FILE *, rtx, int);
70 static void m32r_print_operand_address (FILE *, machine_mode, rtx);
71 static bool m32r_print_operand_punct_valid_p (unsigned char code);
72 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
73 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
74
75 static void m32r_file_start (void);
76
77 static int m32r_adjust_priority (rtx_insn *, int);
78 static int m32r_issue_rate (void);
79
80 static void m32r_encode_section_info (tree, rtx, int);
81 static bool m32r_in_small_data_p (const_tree);
82 static bool m32r_return_in_memory (const_tree, const_tree);
83 static rtx m32r_function_value (const_tree, const_tree, bool);
84 static rtx m32r_libcall_value (machine_mode, const_rtx);
85 static bool m32r_function_value_regno_p (const unsigned int);
86 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
87 tree, int *, int);
88 static void init_idents (void);
89 static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
90 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
91 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
92 const_tree, bool);
93 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
94 tree, bool);
95 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
96 const_tree, bool);
97 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
98 const_tree, bool);
99 static bool m32r_can_eliminate (const int, const int);
100 static void m32r_conditional_register_usage (void);
101 static void m32r_trampoline_init (rtx, tree, rtx);
102 static bool m32r_legitimate_constant_p (machine_mode, rtx);
103 static bool m32r_attribute_identifier (const_tree);
104 \f
105 /* M32R specific attributes. */
106
107 static const struct attribute_spec m32r_attribute_table[] =
108 {
109 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
110 affects_type_identity } */
111 { "interrupt", 0, 0, true, false, false, NULL, false },
112 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
113 false },
114 { NULL, 0, 0, false, false, false, NULL, false }
115 };
116 \f
117 /* Initialize the GCC target structure. */
118 #undef TARGET_ATTRIBUTE_TABLE
119 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
120 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
121 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
122
123 #undef TARGET_LRA_P
124 #define TARGET_LRA_P hook_bool_void_false
125
126 #undef TARGET_LEGITIMATE_ADDRESS_P
127 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
128 #undef TARGET_LEGITIMIZE_ADDRESS
129 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
130 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
131 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
132
133 #undef TARGET_ASM_ALIGNED_HI_OP
134 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
135 #undef TARGET_ASM_ALIGNED_SI_OP
136 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
137
138 #undef TARGET_PRINT_OPERAND
139 #define TARGET_PRINT_OPERAND m32r_print_operand
140 #undef TARGET_PRINT_OPERAND_ADDRESS
141 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
142 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
143 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
144
145 #undef TARGET_ASM_FUNCTION_PROLOGUE
146 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
147 #undef TARGET_ASM_FUNCTION_EPILOGUE
148 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
149
150 #undef TARGET_ASM_FILE_START
151 #define TARGET_ASM_FILE_START m32r_file_start
152
153 #undef TARGET_SCHED_ADJUST_PRIORITY
154 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
155 #undef TARGET_SCHED_ISSUE_RATE
156 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
157
158 #undef TARGET_OPTION_OVERRIDE
159 #define TARGET_OPTION_OVERRIDE m32r_option_override
160
161 #undef TARGET_ENCODE_SECTION_INFO
162 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
163 #undef TARGET_IN_SMALL_DATA_P
164 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
165
166
167 #undef TARGET_MEMORY_MOVE_COST
168 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
169 #undef TARGET_RTX_COSTS
170 #define TARGET_RTX_COSTS m32r_rtx_costs
171 #undef TARGET_ADDRESS_COST
172 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
173
174 #undef TARGET_PROMOTE_PROTOTYPES
175 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
176 #undef TARGET_RETURN_IN_MEMORY
177 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
178
179 #undef TARGET_FUNCTION_VALUE
180 #define TARGET_FUNCTION_VALUE m32r_function_value
181 #undef TARGET_LIBCALL_VALUE
182 #define TARGET_LIBCALL_VALUE m32r_libcall_value
183 #undef TARGET_FUNCTION_VALUE_REGNO_P
184 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
185
186 #undef TARGET_SETUP_INCOMING_VARARGS
187 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
188 #undef TARGET_MUST_PASS_IN_STACK
189 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
190 #undef TARGET_PASS_BY_REFERENCE
191 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
192 #undef TARGET_ARG_PARTIAL_BYTES
193 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
194 #undef TARGET_FUNCTION_ARG
195 #define TARGET_FUNCTION_ARG m32r_function_arg
196 #undef TARGET_FUNCTION_ARG_ADVANCE
197 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
198
199 #undef TARGET_CAN_ELIMINATE
200 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
201
202 #undef TARGET_CONDITIONAL_REGISTER_USAGE
203 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
204
205 #undef TARGET_TRAMPOLINE_INIT
206 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
207
208 #undef TARGET_LEGITIMATE_CONSTANT_P
209 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
210
211 struct gcc_target targetm = TARGET_INITIALIZER;
212 \f
213 /* Called by m32r_option_override to initialize various things. */
214
215 void
216 m32r_init (void)
217 {
218 init_reg_tables ();
219
220 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
221 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
222 m32r_punct_chars['#'] = 1;
223 m32r_punct_chars['@'] = 1; /* ??? no longer used */
224
225 /* Provide default value if not specified. */
226 if (!global_options_set.x_g_switch_value)
227 g_switch_value = SDATA_DEFAULT_SIZE;
228 }
229
230 static void
231 m32r_option_override (void)
232 {
233 /* These need to be done at start up.
234 It's convenient to do them here. */
235 m32r_init ();
236 SUBTARGET_OVERRIDE_OPTIONS;
237 }
238
239 /* Vectors to keep interesting information about registers where it can easily
240    be found.  We used to use the actual mode value as the bit number, but there
241    are (or may be) more than 32 modes now.  Instead we use two tables: one
242 indexed by hard register number, and one indexed by mode. */
243
244 /* The purpose of m32r_mode_class is to shrink the range of modes so that
245 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
246 mapped into one m32r_mode_class mode. */
247
248 enum m32r_mode_class
249 {
250 C_MODE,
251 S_MODE, D_MODE, T_MODE, O_MODE,
252 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
253 };
254
255 /* Modes for condition codes. */
256 #define C_MODES (1 << (int) C_MODE)
257
258 /* Modes for single-word and smaller quantities. */
259 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
260
261 /* Modes for double-word and smaller quantities. */
262 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
263
264 /* Modes for quad-word and smaller quantities. */
265 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
266
267 /* Modes for accumulators. */
268 #define A_MODES (1 << (int) A_MODE)
269
270 /* Value is 1 if register/mode pair is acceptable on the M32R.  */
271
272 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
273 {
274 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
275 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
276 S_MODES, C_MODES, A_MODES, A_MODES
277 };
278
279 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
280
281 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
282
283 static void
284 init_reg_tables (void)
285 {
286 int i;
287
288 for (i = 0; i < NUM_MACHINE_MODES; i++)
289 {
290 machine_mode m = (machine_mode) i;
291
292 switch (GET_MODE_CLASS (m))
293 {
294 case MODE_INT:
295 case MODE_PARTIAL_INT:
296 case MODE_COMPLEX_INT:
297 if (GET_MODE_SIZE (m) <= 4)
298 m32r_mode_class[i] = 1 << (int) S_MODE;
299 else if (GET_MODE_SIZE (m) == 8)
300 m32r_mode_class[i] = 1 << (int) D_MODE;
301 else if (GET_MODE_SIZE (m) == 16)
302 m32r_mode_class[i] = 1 << (int) T_MODE;
303 else if (GET_MODE_SIZE (m) == 32)
304 m32r_mode_class[i] = 1 << (int) O_MODE;
305 else
306 m32r_mode_class[i] = 0;
307 break;
308 case MODE_FLOAT:
309 case MODE_COMPLEX_FLOAT:
310 if (GET_MODE_SIZE (m) <= 4)
311 m32r_mode_class[i] = 1 << (int) SF_MODE;
312 else if (GET_MODE_SIZE (m) == 8)
313 m32r_mode_class[i] = 1 << (int) DF_MODE;
314 else if (GET_MODE_SIZE (m) == 16)
315 m32r_mode_class[i] = 1 << (int) TF_MODE;
316 else if (GET_MODE_SIZE (m) == 32)
317 m32r_mode_class[i] = 1 << (int) OF_MODE;
318 else
319 m32r_mode_class[i] = 0;
320 break;
321 case MODE_CC:
322 m32r_mode_class[i] = 1 << (int) C_MODE;
323 break;
324 default:
325 m32r_mode_class[i] = 0;
326 break;
327 }
328 }
329
330 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
331 {
332 if (GPR_P (i))
333 m32r_regno_reg_class[i] = GENERAL_REGS;
334 else if (i == ARG_POINTER_REGNUM)
335 m32r_regno_reg_class[i] = GENERAL_REGS;
336 else
337 m32r_regno_reg_class[i] = NO_REGS;
338 }
339 }
340 \f
341 /* M32R specific attribute support.
342
343 interrupt - for interrupt functions
344
345 model - select code model used to access object
346
347 small: addresses use 24 bits, use bl to make calls
348 medium: addresses use 32 bits, use bl to make calls
349 large: addresses use 32 bits, use seth/add3/jl to make calls
350
351 Grep for MODEL in m32r.h for more info. */
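/* Illustrative uses in user source (examples, not taken from this file;
   the attribute argument is an identifier, cf. m32r_attribute_identifier
   below):

     void handler (void) __attribute__ ((interrupt));
     extern int big_table[100] __attribute__ ((model (large)));
     void near_func (void) __attribute__ ((model (small)));  */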
352
353 static tree small_ident1;
354 static tree small_ident2;
355 static tree medium_ident1;
356 static tree medium_ident2;
357 static tree large_ident1;
358 static tree large_ident2;
359
360 static void
361 init_idents (void)
362 {
363 if (small_ident1 == 0)
364 {
365 small_ident1 = get_identifier ("small");
366 small_ident2 = get_identifier ("__small__");
367 medium_ident1 = get_identifier ("medium");
368 medium_ident2 = get_identifier ("__medium__");
369 large_ident1 = get_identifier ("large");
370 large_ident2 = get_identifier ("__large__");
371 }
372 }
373
374 /* Handle a "model" attribute; arguments as in
375 struct attribute_spec.handler. */
376 static tree
377 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
378 tree args, int flags ATTRIBUTE_UNUSED,
379 bool *no_add_attrs)
380 {
381 tree arg;
382
383 init_idents ();
384 arg = TREE_VALUE (args);
385
386 if (arg != small_ident1
387 && arg != small_ident2
388 && arg != medium_ident1
389 && arg != medium_ident2
390 && arg != large_ident1
391 && arg != large_ident2)
392 {
393 warning (OPT_Wattributes, "invalid argument of %qs attribute",
394 IDENTIFIER_POINTER (name));
395 *no_add_attrs = true;
396 }
397
398 return NULL_TREE;
399 }
400
401 static bool
402 m32r_attribute_identifier (const_tree name)
403 {
404 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
405 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
406 }
407 \f
408 /* Encode section information of DECL, which is either a VAR_DECL,
409 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
410
411 For the M32R we want to record:
412
413 - whether the object lives in .sdata/.sbss.
414 - what code model should be used to access the object
415 */
416
417 static void
418 m32r_encode_section_info (tree decl, rtx rtl, int first)
419 {
420 int extra_flags = 0;
421 tree model_attr;
422 enum m32r_model model;
423
424 default_encode_section_info (decl, rtl, first);
425
426 if (!DECL_P (decl))
427 return;
428
429 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
430 if (model_attr)
431 {
432 tree id;
433
434 init_idents ();
435
436 id = TREE_VALUE (TREE_VALUE (model_attr));
437
438 if (id == small_ident1 || id == small_ident2)
439 model = M32R_MODEL_SMALL;
440 else if (id == medium_ident1 || id == medium_ident2)
441 model = M32R_MODEL_MEDIUM;
442 else if (id == large_ident1 || id == large_ident2)
443 model = M32R_MODEL_LARGE;
444 else
445 gcc_unreachable (); /* shouldn't happen */
446 }
447 else
448 {
449 if (TARGET_MODEL_SMALL)
450 model = M32R_MODEL_SMALL;
451 else if (TARGET_MODEL_MEDIUM)
452 model = M32R_MODEL_MEDIUM;
453 else if (TARGET_MODEL_LARGE)
454 model = M32R_MODEL_LARGE;
455 else
456 gcc_unreachable (); /* shouldn't happen */
457 }
458 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
459
460 if (extra_flags)
461 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
462 }
463
464 /* Only mark the object as being small data area addressable if
465 it hasn't been explicitly marked with a code model.
466
467 The user can explicitly put an object in the small data area with the
468 section attribute. If the object is in sdata/sbss and marked with a
469    code model, do both [put the object in .sdata and mark it as being
470    addressed with a specific code model - don't mark it as being addressed
471    with an SDA reloc though].  This is OK and might be useful at times.  If
472    the object doesn't fit, the linker will give an error.  */
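/* Illustrative examples (not from this file): an object declared as
     int counter __attribute__ ((section (".sdata")));
   is small-data addressable via the explicit section name, while a plain
     static char buf[32];
   qualifies through the size test below when -G (g_switch_value) is at
   least 32 and -msdata is not "none".  */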
473
474 static bool
475 m32r_in_small_data_p (const_tree decl)
476 {
477 const char *section;
478
479 if (TREE_CODE (decl) != VAR_DECL)
480 return false;
481
482 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
483 return false;
484
485 section = DECL_SECTION_NAME (decl);
486 if (section)
487 {
488 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
489 return true;
490 }
491 else
492 {
493 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
494 {
495 int size = int_size_in_bytes (TREE_TYPE (decl));
496
497 if (size > 0 && size <= g_switch_value)
498 return true;
499 }
500 }
501
502 return false;
503 }
504
505 /* Do anything needed before RTL is emitted for each function. */
506
507 void
508 m32r_init_expanders (void)
509 {
510 /* ??? At one point there was code here. The function is left in
511 to make it easy to experiment. */
512 }
513 \f
514 int
515 call_operand (rtx op, machine_mode mode)
516 {
517 if (!MEM_P (op))
518 return 0;
519 op = XEXP (op, 0);
520 return call_address_operand (op, mode);
521 }
522
523 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
524
525 int
526 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
527 {
528 if (! TARGET_SDATA_USE)
529 return 0;
530
531 if (GET_CODE (op) == SYMBOL_REF)
532 return SYMBOL_REF_SMALL_P (op);
533
534 if (GET_CODE (op) == CONST
535 && GET_CODE (XEXP (op, 0)) == PLUS
536 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
537 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
538 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
539
540 return 0;
541 }
542
543 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
544
545 int
546 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
547 {
548 rtx sym;
549
550 if (flag_pic)
551 return 0;
552
553 if (GET_CODE (op) == LABEL_REF)
554 return TARGET_ADDR24;
555
556 if (GET_CODE (op) == SYMBOL_REF)
557 sym = op;
558 else if (GET_CODE (op) == CONST
559 && GET_CODE (XEXP (op, 0)) == PLUS
560 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
561 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
562 sym = XEXP (XEXP (op, 0), 0);
563 else
564 return 0;
565
566 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
567 return 1;
568
569 if (TARGET_ADDR24
570 && (CONSTANT_POOL_ADDRESS_P (sym)
571 || LIT_NAME_P (XSTR (sym, 0))))
572 return 1;
573
574 return 0;
575 }
576
577 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
578
579 int
580 addr32_operand (rtx op, machine_mode mode)
581 {
582 rtx sym;
583
584 if (GET_CODE (op) == LABEL_REF)
585 return TARGET_ADDR32;
586
587 if (GET_CODE (op) == SYMBOL_REF)
588 sym = op;
589 else if (GET_CODE (op) == CONST
590 && GET_CODE (XEXP (op, 0)) == PLUS
591 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
592 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
593 && ! flag_pic)
594 sym = XEXP (XEXP (op, 0), 0);
595 else
596 return 0;
597
598 return (! addr24_operand (sym, mode)
599 && ! small_data_operand (sym, mode));
600 }
601
602 /* Return 1 if OP is a function that can be called with the `bl' insn. */
603
604 int
605 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
606 {
607 if (flag_pic)
608 return 1;
609
610 if (GET_CODE (op) == SYMBOL_REF)
611 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
612
613 return TARGET_CALL26;
614 }
615
616 /* Return 1 if OP is a DImode const we want to handle inline.
617 This must match the code in the movdi pattern.
618 It is used by the 'G' constraint. */
619
620 int
621 easy_di_const (rtx op)
622 {
623 rtx high_rtx, low_rtx;
624 HOST_WIDE_INT high, low;
625
626 split_double (op, &high_rtx, &low_rtx);
627 high = INTVAL (high_rtx);
628 low = INTVAL (low_rtx);
629 /* Pick constants loadable with 2 16-bit `ldi' insns. */
630 if (high >= -128 && high <= 127
631 && low >= -128 && low <= 127)
632 return 1;
633 return 0;
634 }
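/* For example, the DImode constant 0x100000000 splits into word halves 1
   and 0, both within the [-128, 127] range tested above, so it is handled
   inline; 0x10000 splits into 0 and 65536, so it is not.  */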
635
636 /* Return 1 if OP is a DFmode const we want to handle inline.
637 This must match the code in the movdf pattern.
638 It is used by the 'H' constraint. */
639
640 int
641 easy_df_const (rtx op)
642 {
643 long l[2];
644
645 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
646 if (l[0] == 0 && l[1] == 0)
647 return 1;
648 if ((l[0] & 0xffff) == 0 && l[1] == 0)
649 return 1;
650 return 0;
651 }
652
653 /* Return 1 if OP is (mem (reg ...)).
654 This is used in insn length calcs. */
655
656 int
657 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
658 {
659 return MEM_P (op) && REG_P (XEXP (op, 0));
660 }
661
662 /* Return nonzero if TYPE must be passed by indirect reference. */
663
664 static bool
665 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
666 machine_mode mode, const_tree type,
667 bool named ATTRIBUTE_UNUSED)
668 {
669 int size;
670
671 if (type)
672 size = int_size_in_bytes (type);
673 else
674 size = GET_MODE_SIZE (mode);
675
676 return (size < 0 || size > 8);
677 }
678 \f
679 /* Comparisons. */
680
681 /* X and Y are two things to compare using CODE. Emit the compare insn and
682 return the rtx for compare [arg0 of the if_then_else].
683 If need_compare is true then the comparison insn must be generated, rather
684 than being subsumed into the following branch instruction. */
685
686 rtx
687 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
688 {
689 enum rtx_code compare_code;
690 enum rtx_code branch_code;
691 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
692 int must_swap = 0;
693
694 switch (code)
695 {
696 case EQ: compare_code = EQ; branch_code = NE; break;
697 case NE: compare_code = EQ; branch_code = EQ; break;
698 case LT: compare_code = LT; branch_code = NE; break;
699 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
700 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
701 case GE: compare_code = LT; branch_code = EQ; break;
702 case LTU: compare_code = LTU; branch_code = NE; break;
703 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
704 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
705 case GEU: compare_code = LTU; branch_code = EQ; break;
706
707 default:
708 gcc_unreachable ();
709 }
710
711 if (need_compare)
712 {
713 switch (compare_code)
714 {
715 case EQ:
716 if (satisfies_constraint_P (y) /* Reg equal to small const. */
717 && y != const0_rtx)
718 {
719 rtx tmp = gen_reg_rtx (SImode);
720
721 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
722 x = tmp;
723 y = const0_rtx;
724 }
725 else if (CONSTANT_P (y)) /* Reg equal to const. */
726 {
727 rtx tmp = force_reg (GET_MODE (x), y);
728 y = tmp;
729 }
730
731 if (register_operand (y, SImode) /* Reg equal to reg. */
732 || y == const0_rtx) /* Reg equal to zero. */
733 {
734 emit_insn (gen_cmp_eqsi_insn (x, y));
735
736 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
737 }
738 break;
739
740 case LT:
741 if (register_operand (y, SImode)
742 || satisfies_constraint_P (y))
743 {
744 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
745
746 switch (code)
747 {
748 case LT:
749 emit_insn (gen_cmp_ltsi_insn (x, y));
750 code = EQ;
751 break;
752 case LE:
753 if (y == const0_rtx)
754 tmp = const1_rtx;
755 else
756 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
757 emit_insn (gen_cmp_ltsi_insn (x, tmp));
758 code = EQ;
759 break;
760 case GT:
761 if (CONST_INT_P (y))
762 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
763 else
764 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
765 emit_insn (gen_cmp_ltsi_insn (x, tmp));
766 code = NE;
767 break;
768 case GE:
769 emit_insn (gen_cmp_ltsi_insn (x, y));
770 code = NE;
771 break;
772 default:
773 gcc_unreachable ();
774 }
775
776 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
777 }
778 break;
779
780 case LTU:
781 if (register_operand (y, SImode)
782 || satisfies_constraint_P (y))
783 {
784 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
785
786 switch (code)
787 {
788 case LTU:
789 emit_insn (gen_cmp_ltusi_insn (x, y));
790 code = EQ;
791 break;
792 case LEU:
793 if (y == const0_rtx)
794 tmp = const1_rtx;
795 else
796 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
797 emit_insn (gen_cmp_ltusi_insn (x, tmp));
798 code = EQ;
799 break;
800 case GTU:
801 if (CONST_INT_P (y))
802 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
803 else
804 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
805 emit_insn (gen_cmp_ltusi_insn (x, tmp));
806 code = NE;
807 break;
808 case GEU:
809 emit_insn (gen_cmp_ltusi_insn (x, y));
810 code = NE;
811 break;
812 default:
813 gcc_unreachable ();
814 }
815
816 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
817 }
818 break;
819
820 default:
821 gcc_unreachable ();
822 }
823 }
824 else
825 {
826 /* Reg/reg equal comparison. */
827 if (compare_code == EQ
828 && register_operand (y, SImode))
829 return gen_rtx_fmt_ee (code, CCmode, x, y);
830
831 /* Reg/zero signed comparison. */
832 if ((compare_code == EQ || compare_code == LT)
833 && y == const0_rtx)
834 return gen_rtx_fmt_ee (code, CCmode, x, y);
835
836 /* Reg/smallconst equal comparison. */
837 if (compare_code == EQ
838 && satisfies_constraint_P (y))
839 {
840 rtx tmp = gen_reg_rtx (SImode);
841
842 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
843 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
844 }
845
846 /* Reg/const equal comparison. */
847 if (compare_code == EQ
848 && CONSTANT_P (y))
849 {
850 rtx tmp = force_reg (GET_MODE (x), y);
851
852 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
853 }
854 }
855
856 if (CONSTANT_P (y))
857 {
858 if (must_swap)
859 y = force_reg (GET_MODE (x), y);
860 else
861 {
862 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
863
864 if (! ok_const)
865 y = force_reg (GET_MODE (x), y);
866 }
867 }
868
869 switch (compare_code)
870 {
871 case EQ :
872 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
873 break;
874 case LT :
875 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
876 break;
877 case LTU :
878 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
879 break;
880
881 default:
882 gcc_unreachable ();
883 }
884
885 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
886 }
887
888 bool
889 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
890 {
891 machine_mode mode = GET_MODE (op0);
892
893 gcc_assert (mode == SImode);
894 switch (code)
895 {
896 case EQ:
897 if (!register_operand (op1, mode))
898 op1 = force_reg (mode, op1);
899
900 if (TARGET_M32RX || TARGET_M32R2)
901 {
902 if (!reg_or_zero_operand (op2, mode))
903 op2 = force_reg (mode, op2);
904
905 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
906 return true;
907 }
908 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
909 {
910 emit_insn (gen_seq_zero_insn (op0, op1));
911 return true;
912 }
913
914 if (!reg_or_eq_int16_operand (op2, mode))
915 op2 = force_reg (mode, op2);
916
917 emit_insn (gen_seq_insn (op0, op1, op2));
918 return true;
919
920 case NE:
921 if (!CONST_INT_P (op2)
922 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
923 {
924 rtx reg;
925
926 if (reload_completed || reload_in_progress)
927 return false;
928
929 reg = gen_reg_rtx (SImode);
930 emit_insn (gen_xorsi3 (reg, op1, op2));
931 op1 = reg;
932
933 if (!register_operand (op1, mode))
934 op1 = force_reg (mode, op1);
935
936 emit_insn (gen_sne_zero_insn (op0, op1));
937 return true;
938 }
939 return false;
940
941 case LT:
942 case GT:
943 if (code == GT)
944 {
945 rtx tmp = op2;
946 op2 = op1;
947 op1 = tmp;
948 code = LT;
949 }
950
951 if (!register_operand (op1, mode))
952 op1 = force_reg (mode, op1);
953
954 if (!reg_or_int16_operand (op2, mode))
955 op2 = force_reg (mode, op2);
956
957 emit_insn (gen_slt_insn (op0, op1, op2));
958 return true;
959
960 case LTU:
961 case GTU:
962 if (code == GTU)
963 {
964 rtx tmp = op2;
965 op2 = op1;
966 op1 = tmp;
967 code = LTU;
968 }
969
970 if (!register_operand (op1, mode))
971 op1 = force_reg (mode, op1);
972
973 if (!reg_or_int16_operand (op2, mode))
974 op2 = force_reg (mode, op2);
975
976 emit_insn (gen_sltu_insn (op0, op1, op2));
977 return true;
978
979 case GE:
980 case GEU:
981 if (!register_operand (op1, mode))
982 op1 = force_reg (mode, op1);
983
984 if (!reg_or_int16_operand (op2, mode))
985 op2 = force_reg (mode, op2);
986
987 if (code == GE)
988 emit_insn (gen_sge_insn (op0, op1, op2));
989 else
990 emit_insn (gen_sgeu_insn (op0, op1, op2));
991 return true;
992
993 case LE:
994 case LEU:
995 if (!register_operand (op1, mode))
996 op1 = force_reg (mode, op1);
997
998 if (CONST_INT_P (op2))
999 {
1000 HOST_WIDE_INT value = INTVAL (op2);
1001 if (value >= 2147483647)
1002 {
1003 emit_move_insn (op0, const1_rtx);
1004 return true;
1005 }
1006
1007 op2 = GEN_INT (value + 1);
1008 if (value < -32768 || value >= 32767)
1009 op2 = force_reg (mode, op2);
1010
1011 if (code == LEU)
1012 emit_insn (gen_sltu_insn (op0, op1, op2));
1013 else
1014 emit_insn (gen_slt_insn (op0, op1, op2));
1015 return true;
1016 }
1017
1018 if (!register_operand (op2, mode))
1019 op2 = force_reg (mode, op2);
1020
1021 if (code == LEU)
1022 emit_insn (gen_sleu_insn (op0, op1, op2));
1023 else
1024 emit_insn (gen_sle_insn (op0, op1, op2));
1025 return true;
1026
1027 default:
1028 gcc_unreachable ();
1029 }
1030 }
1031
1032 \f
1033 /* Split a 2 word move (DI or DF) into component parts. */
1034
1035 rtx
1036 gen_split_move_double (rtx operands[])
1037 {
1038 machine_mode mode = GET_MODE (operands[0]);
1039 rtx dest = operands[0];
1040 rtx src = operands[1];
1041 rtx val;
1042
1043 /* We might have (SUBREG (MEM)) here, so just get rid of the
1044 subregs to make this code simpler. It is safe to call
1045 alter_subreg any time after reload. */
1046 if (GET_CODE (dest) == SUBREG)
1047 alter_subreg (&dest, true);
1048 if (GET_CODE (src) == SUBREG)
1049 alter_subreg (&src, true);
1050
1051 start_sequence ();
1052 if (REG_P (dest))
1053 {
1054 int dregno = REGNO (dest);
1055
1056 /* Reg = reg. */
1057 if (REG_P (src))
1058 {
1059 int sregno = REGNO (src);
1060
1061 int reverse = (dregno == sregno + 1);
1062
1063 /* We normally copy the low-numbered register first. However, if
1064          the first register of operand 0 is the same as the second register of
1065 operand 1, we must copy in the opposite order. */
1066 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1067 operand_subword (src, reverse, TRUE, mode)));
1068
1069 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1070 operand_subword (src, !reverse, TRUE, mode)));
1071 }
1072
1073 /* Reg = constant. */
1074 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1075 {
1076 rtx words[2];
1077 split_double (src, &words[0], &words[1]);
1078 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1079 words[0]));
1080
1081 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1082 words[1]));
1083 }
1084
1085 /* Reg = mem. */
1086 else if (MEM_P (src))
1087 {
1088 /* If the high-address word is used in the address, we must load it
1089 last. Otherwise, load it first. */
1090 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1091
1092 /* We used to optimize loads from single registers as
1093
1094 ld r1,r3+; ld r2,r3
1095
1096 if r3 were not used subsequently. However, the REG_NOTES aren't
1097 propagated correctly by the reload phase, and it can cause bad
1098 code to be generated. We could still try:
1099
1100 ld r1,r3+; ld r2,r3; addi r3,-4
1101
1102 which saves 2 bytes and doesn't force longword alignment. */
1103 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1104 adjust_address (src, SImode,
1105 reverse * UNITS_PER_WORD)));
1106
1107 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1108 adjust_address (src, SImode,
1109 !reverse * UNITS_PER_WORD)));
1110 }
1111 else
1112 gcc_unreachable ();
1113 }
1114
1115 /* Mem = reg. */
1116   /* We used to optimize stores through a single address register as
1117
1118 st r1,r3; st r2,+r3
1119
1120 if r3 were not used subsequently. However, the REG_NOTES aren't
1121 propagated correctly by the reload phase, and it can cause bad
1122 code to be generated. We could still try:
1123
1124 st r1,r3; st r2,+r3; addi r3,-4
1125
1126 which saves 2 bytes and doesn't force longword alignment. */
1127 else if (MEM_P (dest) && REG_P (src))
1128 {
1129 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1130 operand_subword (src, 0, TRUE, mode)));
1131
1132 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1133 operand_subword (src, 1, TRUE, mode)));
1134 }
1135
1136 else
1137 gcc_unreachable ();
1138
1139 val = get_insns ();
1140 end_sequence ();
1141 return val;
1142 }
1143
1144 \f
1145 static int
1146 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1147 tree type, bool named ATTRIBUTE_UNUSED)
1148 {
1149 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1150
1151 int words;
1152 unsigned int size =
1153 (((mode == BLKmode && type)
1154 ? (unsigned int) int_size_in_bytes (type)
1155 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1156 / UNITS_PER_WORD;
1157
1158 if (*cum >= M32R_MAX_PARM_REGS)
1159 words = 0;
1160 else if (*cum + size > M32R_MAX_PARM_REGS)
1161 words = (*cum + size) - M32R_MAX_PARM_REGS;
1162 else
1163 words = 0;
1164
1165 return words * UNITS_PER_WORD;
1166 }
1167
1168 /* The ROUND_ADVANCE* macros are local to this file. */
1169 /* Round SIZE up to a word boundary. */
1170 #define ROUND_ADVANCE(SIZE) \
1171 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
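/* E.g. with UNITS_PER_WORD == 4, ROUND_ADVANCE (1) == ROUND_ADVANCE (4) == 1
   and ROUND_ADVANCE (5) == 2, i.e. a size in bytes rounded up to words.  */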
1172
1173 /* Round arg MODE/TYPE up to the next word boundary. */
1174 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1175 ((MODE) == BLKmode \
1176 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1177 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1178
1179 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1180 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1181
1182 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1183 a reg. This includes arguments that have to be passed by reference as the
1184 pointer to them is passed in a reg if one is available (and that is what
1185 we're given).
1186 This macro is only used in this file. */
1187 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1188 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
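/* A minimal worked example, assuming M32R_MAX_PARM_REGS is 4 and arguments
   are passed in r0-r3: for a call f (int a, long long b, int c), `a' goes
   in r0, `b' occupies r1/r2, `c' goes in r3, and any further argument fails
   PASS_IN_REG_P and is pushed on the stack (m32r_function_arg returns
   NULL_RTX for it).  */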
1189
1190 /* Determine where to put an argument to a function.
1191 Value is zero to push the argument on the stack,
1192 or a hard register in which to store the argument.
1193
1194 MODE is the argument's machine mode.
1195 TYPE is the data type of the argument (as a tree).
1196 This is null for libcalls where that information may
1197 not be available.
1198 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1199 the preceding args and about the function being called.
1200 NAMED is nonzero if this argument is a named parameter
1201 (otherwise it is an extra parameter matching an ellipsis). */
1202 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1203 and the rest are pushed. */
1204
1205 static rtx
1206 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1207 const_tree type ATTRIBUTE_UNUSED,
1208 bool named ATTRIBUTE_UNUSED)
1209 {
1210 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1211
1212 return (PASS_IN_REG_P (*cum, mode, type)
1213 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1214 : NULL_RTX);
1215 }
1216
1217 /* Update the data in CUM to advance over an argument
1218 of mode MODE and data type TYPE.
1219 (TYPE is null for libcalls where that information may not be available.) */
1220
1221 static void
1222 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1223 const_tree type, bool named ATTRIBUTE_UNUSED)
1224 {
1225 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1226
1227 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1228 + ROUND_ADVANCE_ARG (mode, type));
1229 }
1230
1231 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1232
1233 static bool
1234 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1235 {
1236 cumulative_args_t dummy = pack_cumulative_args (NULL);
1237
1238 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1239 }
1240
1241 /* Worker function for TARGET_FUNCTION_VALUE. */
1242
1243 static rtx
1244 m32r_function_value (const_tree valtype,
1245 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1246 bool outgoing ATTRIBUTE_UNUSED)
1247 {
1248 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1249 }
1250
1251 /* Worker function for TARGET_LIBCALL_VALUE. */
1252
1253 static rtx
1254 m32r_libcall_value (machine_mode mode,
1255 const_rtx fun ATTRIBUTE_UNUSED)
1256 {
1257 return gen_rtx_REG (mode, 0);
1258 }
1259
1260 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1261
1262 ??? What about r1 in DI/DF values. */
1263
1264 static bool
1265 m32r_function_value_regno_p (const unsigned int regno)
1266 {
1267 return (regno == 0);
1268 }
1269
1270 /* Do any needed setup for a variadic function. For the M32R, we must
1271 create a register parameter block, and then copy any anonymous arguments
1272 in registers to memory.
1273
1274 CUM has not been updated for the last named argument which has type TYPE
1275 and mode MODE, and we rely on this fact. */
1276
1277 static void
1278 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1279 tree type, int *pretend_size, int no_rtl)
1280 {
1281 int first_anon_arg;
1282
1283 if (no_rtl)
1284 return;
1285
1286 /* All BLKmode values are passed by reference. */
1287 gcc_assert (mode != BLKmode);
1288
1289 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1290 + ROUND_ADVANCE_ARG (mode, type));
1291
1292 if (first_anon_arg < M32R_MAX_PARM_REGS)
1293 {
1294 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1295 int first_reg_offset = first_anon_arg;
1296 /* Size in words to "pretend" allocate. */
1297 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1298 rtx regblock;
1299
1300 regblock = gen_frame_mem (BLKmode,
1301 plus_constant (Pmode, arg_pointer_rtx,
1302 FIRST_PARM_OFFSET (0)));
1303 set_mem_alias_set (regblock, get_varargs_alias_set ());
1304 move_block_from_reg (first_reg_offset, regblock, size);
1305
1306 *pretend_size = (size * UNITS_PER_WORD);
1307 }
1308 }
1309
1310 \f
1311 /* Return true if INSN is a real, instruction-bearing insn.  */
1312
1313 static int
1314 m32r_is_insn (rtx insn)
1315 {
1316 return (NONDEBUG_INSN_P (insn)
1317 && GET_CODE (PATTERN (insn)) != USE
1318 && GET_CODE (PATTERN (insn)) != CLOBBER);
1319 }
1320
1321 /* Increase the priority of long instructions so that the
1322 short instructions are scheduled ahead of the long ones. */
1323
1324 static int
1325 m32r_adjust_priority (rtx_insn *insn, int priority)
1326 {
1327 if (m32r_is_insn (insn)
1328 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1329 priority <<= 3;
1330
1331 return priority;
1332 }
1333
1334 \f
1335 /* Indicate how many instructions can be issued at the same time.
1336 This is sort of a lie. The m32r can issue only 1 long insn at
1337 once, but it can issue 2 short insns. The default therefore is
1338 set at 2, but this can be overridden by the command line option
1339 -missue-rate=1. */
1340
1341 static int
1342 m32r_issue_rate (void)
1343 {
1344 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1345 }
1346 \f
1347 /* Cost functions. */
1348 /* Memory is 3 times as expensive as registers.
1349 ??? Is that the right way to look at it? */
1350
1351 static int
1352 m32r_memory_move_cost (machine_mode mode,
1353 reg_class_t rclass ATTRIBUTE_UNUSED,
1354 bool in ATTRIBUTE_UNUSED)
1355 {
1356 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1357 return 6;
1358 else
1359 return 12;
1360 }
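/* A note on the numbers: if the register move cost is the GCC default of 2
   (an assumption; this port may override it elsewhere), 6 corresponds to the
   "3 times as expensive" estimate above for word-sized moves, and 12 to a
   two-word move.  */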
1361
1362 static bool
1363 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1364 int outer_code ATTRIBUTE_UNUSED,
1365 int opno ATTRIBUTE_UNUSED, int *total,
1366 bool speed ATTRIBUTE_UNUSED)
1367 {
1368 int code = GET_CODE (x);
1369
1370 switch (code)
1371 {
1372 /* Small integers are as cheap as registers. 4 byte values can be
1373 fetched as immediate constants - let's give that the cost of an
1374 extra insn. */
1375 case CONST_INT:
1376 if (INT16_P (INTVAL (x)))
1377 {
1378 *total = 0;
1379 return true;
1380 }
1381 /* FALLTHRU */
1382
1383 case CONST:
1384 case LABEL_REF:
1385 case SYMBOL_REF:
1386 *total = COSTS_N_INSNS (1);
1387 return true;
1388
1389 case CONST_DOUBLE:
1390 {
1391 rtx high, low;
1392
1393 split_double (x, &high, &low);
1394 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1395 + !INT16_P (INTVAL (low)));
1396 return true;
1397 }
1398
1399 case MULT:
1400 *total = COSTS_N_INSNS (3);
1401 return true;
1402
1403 case DIV:
1404 case UDIV:
1405 case MOD:
1406 case UMOD:
1407 *total = COSTS_N_INSNS (10);
1408 return true;
1409
1410 default:
1411 return false;
1412 }
1413 }
1414 \f
1415 /* Type of function DECL.
1416
1417 The result is cached. To reset the cache at the end of a function,
1418 call with DECL = NULL_TREE. */
1419
1420 enum m32r_function_type
1421 m32r_compute_function_type (tree decl)
1422 {
1423 /* Cached value. */
1424 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1425 /* Last function we were called for. */
1426 static tree last_fn = NULL_TREE;
1427
1428 /* Resetting the cached value? */
1429 if (decl == NULL_TREE)
1430 {
1431 fn_type = M32R_FUNCTION_UNKNOWN;
1432 last_fn = NULL_TREE;
1433 return fn_type;
1434 }
1435
1436 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1437 return fn_type;
1438
1439 /* Compute function type. */
1440 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1441 ? M32R_FUNCTION_INTERRUPT
1442 : M32R_FUNCTION_NORMAL);
1443
1444 last_fn = decl;
1445 return fn_type;
1446 }
1447 \f/* Function prologue/epilogue handlers. */
1448
1449 /* M32R stack frames look like:
1450
1451 Before call After call
1452 +-----------------------+ +-----------------------+
1453 | | | |
1454 high | local variables, | | local variables, |
1455 mem | reg save area, etc. | | reg save area, etc. |
1456 | | | |
1457 +-----------------------+ +-----------------------+
1458 | | | |
1459 | arguments on stack. | | arguments on stack. |
1460 | | | |
1461 SP+0->+-----------------------+ +-----------------------+
1462 | reg parm save area, |
1463 | only created for |
1464 | variable argument |
1465 | functions |
1466 +-----------------------+
1467 | previous frame ptr |
1468 +-----------------------+
1469 | |
1470 | register save area |
1471 | |
1472 +-----------------------+
1473 | return address |
1474 +-----------------------+
1475 | |
1476 | local variables |
1477 | |
1478 +-----------------------+
1479 | |
1480 | alloca allocations |
1481 | |
1482 +-----------------------+
1483 | |
1484 low | arguments on stack |
1485 memory | |
1486 SP+0->+-----------------------+
1487
1488 Notes:
1489 1) The "reg parm save area" does not exist for non-variable-argument fns.
1490 2) The "reg parm save area" can be eliminated completely if we saved regs
1491 containing anonymous args separately but that complicates things too
1492 much (so it's not done).
1493 3) The return address is saved after the register save area so as to have as
1494 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1495
1496 /* Structure to be filled in by m32r_compute_frame_size with register
1497 save masks, and offsets for the current function. */
1498 struct m32r_frame_info
1499 {
1500 unsigned int total_size; /* # bytes that the entire frame takes up. */
1501 unsigned int extra_size; /* # bytes of extra stuff. */
1502 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1503 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1504 unsigned int reg_size; /* # bytes needed to store regs. */
1505 unsigned int var_size; /* # bytes that variables take up. */
1506 unsigned int gmask; /* Mask of saved gp registers. */
1507 unsigned int save_fp; /* Nonzero if fp must be saved. */
1508 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1509 int initialized; /* Nonzero if frame size already calculated. */
1510 };
1511
1512 /* Current frame information calculated by m32r_compute_frame_size. */
1513 static struct m32r_frame_info current_frame_info;
1514
1515 /* Zero structure to initialize current_frame_info. */
1516 static struct m32r_frame_info zero_frame_info;
1517
1518 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1519 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1520
1521 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1522 The return address and frame pointer are treated separately.
1523 Don't consider them here. */
1524 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1525 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1526 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1527
1528 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1529 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1530
1531 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1532 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1533
1534 /* Return the bytes needed to compute the frame pointer from the current
1535 stack pointer.
1536
1537 SIZE is the size needed for local variables. */
1538
1539 unsigned int
1540 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1541 {
1542 unsigned int regno;
1543 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1544 unsigned int reg_size;
1545 unsigned int gmask;
1546 enum m32r_function_type fn_type;
1547 int interrupt_p;
1548 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1549 | crtl->profile);
1550
1551 var_size = M32R_STACK_ALIGN (size);
1552 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1553 pretend_size = crtl->args.pretend_args_size;
1554 extra_size = FIRST_PARM_OFFSET (0);
1555 total_size = extra_size + pretend_size + args_size + var_size;
1556 reg_size = 0;
1557 gmask = 0;
1558
1559 /* See if this is an interrupt handler. Call used registers must be saved
1560 for them too. */
1561 fn_type = m32r_compute_function_type (current_function_decl);
1562 interrupt_p = M32R_INTERRUPT_P (fn_type);
1563
1564 /* Calculate space needed for registers. */
1565 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1566 {
1567 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1568 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1569 {
1570 reg_size += UNITS_PER_WORD;
1571 gmask |= 1 << regno;
1572 }
1573 }
1574
1575 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1576 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1577
1578 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1579 * UNITS_PER_WORD);
1580 total_size += reg_size;
1581
1582 /* ??? Not sure this is necessary, and I don't think the epilogue
1583 handler will do the right thing if this changes total_size. */
1584 total_size = M32R_STACK_ALIGN (total_size);
1585
1586 /* frame_size = total_size - (pretend_size + reg_size); */
1587
1588 /* Save computed information. */
1589 current_frame_info.total_size = total_size;
1590 current_frame_info.extra_size = extra_size;
1591 current_frame_info.pretend_size = pretend_size;
1592 current_frame_info.var_size = var_size;
1593 current_frame_info.args_size = args_size;
1594 current_frame_info.reg_size = reg_size;
1595 current_frame_info.gmask = gmask;
1596 current_frame_info.initialized = reload_completed;
1597
1598 /* Ok, we're done. */
1599 return total_size;
1600 }
1601
1602 /* Worker function for TARGET_CAN_ELIMINATE. */
1603
1604 bool
1605 m32r_can_eliminate (const int from, const int to)
1606 {
1607 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1608 ? ! frame_pointer_needed
1609 : true);
1610 }
1611
1612 \f
1613 /* The table we use to reference PIC data. */
1614 static rtx global_offset_table;
1615
1616 static void
1617 m32r_reload_lr (rtx sp, int size)
1618 {
1619 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1620
1621 if (size == 0)
1622 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1623 else if (size < 32768)
1624 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1625 gen_rtx_PLUS (Pmode, sp,
1626 GEN_INT (size)))));
1627 else
1628 {
1629 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1630
1631 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1632 emit_insn (gen_addsi3 (tmp, tmp, sp));
1633 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1634 }
1635
1636 emit_use (lr);
1637 }
1638
1639 void
1640 m32r_load_pic_register (void)
1641 {
1642 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1643 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1644 GEN_INT (TARGET_MODEL_SMALL)));
1645
1646 /* Need to emit this whether or not we obey regdecls,
1647 since setjmp/longjmp can cause life info to screw up. */
1648 emit_use (pic_offset_table_rtx);
1649 }
1650
1651 /* Expand the m32r prologue as a series of insns. */
1652
1653 void
1654 m32r_expand_prologue (void)
1655 {
1656 int regno;
1657 int frame_size;
1658 unsigned int gmask;
1659 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1660 | crtl->profile);
1661
1662 if (! current_frame_info.initialized)
1663 m32r_compute_frame_size (get_frame_size ());
1664
1665 if (flag_stack_usage_info)
1666 current_function_static_stack_size = current_frame_info.total_size;
1667
1668 gmask = current_frame_info.gmask;
1669
1670 /* These cases shouldn't happen. Catch them now. */
1671 gcc_assert (current_frame_info.total_size || !gmask);
1672
1673 /* Allocate space for register arguments if this is a variadic function. */
1674 if (current_frame_info.pretend_size != 0)
1675 {
1676 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1677 the wrong result on a 64-bit host. */
1678 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1679 emit_insn (gen_addsi3 (stack_pointer_rtx,
1680 stack_pointer_rtx,
1681 GEN_INT (-pretend_size)));
1682 }
1683
1684 /* Save any registers we need to and set up fp. */
1685 if (current_frame_info.save_fp)
1686 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1687
1688 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1689
1690 /* Save any needed call-saved regs (and call-used if this is an
1691 interrupt handler). */
1692 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1693 {
1694 if ((gmask & (1 << regno)) != 0)
1695 emit_insn (gen_movsi_push (stack_pointer_rtx,
1696 gen_rtx_REG (Pmode, regno)));
1697 }
1698
1699 if (current_frame_info.save_lr)
1700 emit_insn (gen_movsi_push (stack_pointer_rtx,
1701 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1702
1703 /* Allocate the stack frame. */
1704 frame_size = (current_frame_info.total_size
1705 - (current_frame_info.pretend_size
1706 + current_frame_info.reg_size));
1707
1708 if (frame_size == 0)
1709 ; /* Nothing to do. */
1710 else if (frame_size <= 32768)
1711 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1712 GEN_INT (-frame_size)));
1713 else
1714 {
1715 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1716
1717 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1718 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1719 }
1720
1721 if (frame_pointer_needed)
1722 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1723
1724 if (crtl->profile)
1725 /* Push lr for mcount (form_pc, x). */
1726 emit_insn (gen_movsi_push (stack_pointer_rtx,
1727 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1728
1729 if (pic_reg_used)
1730 {
1731 m32r_load_pic_register ();
1732 m32r_reload_lr (stack_pointer_rtx,
1733 (crtl->profile ? 0 : frame_size));
1734 }
1735
1736 if (crtl->profile && !pic_reg_used)
1737 emit_insn (gen_blockage ());
1738 }
1739
1740 \f
1741 /* Set up the stack and frame pointer (if desired) for the function.
1742 Note, if this is changed, you need to mirror the changes in
1743 m32r_compute_frame_size which calculates the prolog size. */
1744
1745 static void
1746 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1747 {
1748 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1749
1750 /* If this is an interrupt handler, mark it as such. */
1751 if (M32R_INTERRUPT_P (fn_type))
1752 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1753
1754 if (! current_frame_info.initialized)
1755 m32r_compute_frame_size (size);
1756
1757 /* This is only for the human reader. */
1758 fprintf (file,
1759 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1760 ASM_COMMENT_START,
1761 current_frame_info.var_size,
1762 current_frame_info.reg_size / 4,
1763 current_frame_info.args_size,
1764 current_frame_info.extra_size);
1765 }
1766 \f
1767 /* Output RTL to pop register REGNO from the stack. */
1768
1769 static void
1770 pop (int regno)
1771 {
1772 rtx x;
1773
1774 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1775 stack_pointer_rtx));
1776 add_reg_note (x, REG_INC, stack_pointer_rtx);
1777 }
1778
1779 /* Expand the m32r epilogue as a series of insns. */
1780
1781 void
1782 m32r_expand_epilogue (void)
1783 {
1784 int regno;
1785 int noepilogue = FALSE;
1786 int total_size;
1787
1788 gcc_assert (current_frame_info.initialized);
1789 total_size = current_frame_info.total_size;
1790
1791 if (total_size == 0)
1792 {
1793 rtx_insn *insn = get_last_insn ();
1794
1795 /* If the last insn was a BARRIER, we don't have to write any code
1796 because a jump (aka return) was put there. */
1797 if (insn && NOTE_P (insn))
1798 insn = prev_nonnote_insn (insn);
1799 if (insn && BARRIER_P (insn))
1800 noepilogue = TRUE;
1801 }
1802
1803 if (!noepilogue)
1804 {
1805 unsigned int var_size = current_frame_info.var_size;
1806 unsigned int args_size = current_frame_info.args_size;
1807 unsigned int gmask = current_frame_info.gmask;
1808 int can_trust_sp_p = !cfun->calls_alloca;
1809
1810 if (flag_exceptions)
1811 emit_insn (gen_blockage ());
1812
1813 /* The first thing to do is point the sp at the bottom of the register
1814 save area. */
1815 if (can_trust_sp_p)
1816 {
1817 unsigned int reg_offset = var_size + args_size;
1818
1819 if (reg_offset == 0)
1820 ; /* Nothing to do. */
1821 else if (reg_offset < 32768)
1822 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1823 GEN_INT (reg_offset)));
1824 else
1825 {
1826 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1827
1828 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1829 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1830 tmp));
1831 }
1832 }
1833 else if (frame_pointer_needed)
1834 {
1835 unsigned int reg_offset = var_size + args_size;
1836
1837 if (reg_offset == 0)
1838 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1839 else if (reg_offset < 32768)
1840 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1841 GEN_INT (reg_offset)));
1842 else
1843 {
1844 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1845
1846 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1847 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1848 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1849 tmp));
1850 }
1851 }
1852 else
1853 gcc_unreachable ();
1854
1855 if (current_frame_info.save_lr)
1856 pop (RETURN_ADDR_REGNUM);
1857
1858 /* Restore any saved registers, in reverse order of course. */
1859 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1860 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1861 {
1862 if ((gmask & (1L << regno)) != 0)
1863 pop (regno);
1864 }
1865
1866 if (current_frame_info.save_fp)
1867 pop (FRAME_POINTER_REGNUM);
1868
1869 /* Remove varargs area if present. */
1870 if (current_frame_info.pretend_size != 0)
1871 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1872 GEN_INT (current_frame_info.pretend_size)));
1873
1874 emit_insn (gen_blockage ());
1875 }
1876 }
1877
1878 /* Do any necessary cleanup after a function to restore stack, frame,
1879 and regs. */
1880
1881 static void
1882 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1883 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1884 {
1885 /* Reset state info for each function. */
1886 current_frame_info = zero_frame_info;
1887 m32r_compute_function_type (NULL_TREE);
1888 }
1889 \f
1890 /* Return nonzero if this function is known to have a null or 1 instruction
1891 epilogue. */
1892
1893 int
1894 direct_return (void)
1895 {
1896 if (!reload_completed)
1897 return FALSE;
1898
1899 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1900 return FALSE;
1901
1902 if (! current_frame_info.initialized)
1903 m32r_compute_frame_size (get_frame_size ());
1904
1905 return current_frame_info.total_size == 0;
1906 }
1907
1908 \f
1909 /* PIC. */
1910
1911 int
1912 m32r_legitimate_pic_operand_p (rtx x)
1913 {
1914 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1915 return 0;
1916
1917 if (GET_CODE (x) == CONST
1918 && GET_CODE (XEXP (x, 0)) == PLUS
1919 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1920 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1921 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1922 return 0;
1923
1924 return 1;
1925 }
1926
1927 rtx
1928 m32r_legitimize_pic_address (rtx orig, rtx reg)
1929 {
1930 #ifdef DEBUG_PIC
1931 printf ("m32r_legitimize_pic_address()\n");
1932 #endif
1933
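/* Sketch of the cases handled below: a LABEL_REF, or a SYMBOL_REF known to
be local, can be reached GOT-relative (GOTOFF), i.e. load the offset and
add the PIC register; any other symbol goes through a GOT slot, i.e. load
the slot's offset, add the PIC register to form the slot address, then
load the final address from that GOT slot. */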
1934 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1935 {
1936 rtx pic_ref, address;
1937 int subregs = 0;
1938
1939 if (reg == 0)
1940 {
1941 gcc_assert (!reload_in_progress && !reload_completed);
1942 reg = gen_reg_rtx (Pmode);
1943
1944 subregs = 1;
1945 }
1946
1947 if (subregs)
1948 address = gen_reg_rtx (Pmode);
1949 else
1950 address = reg;
1951
1952 crtl->uses_pic_offset_table = 1;
1953
1954 if (GET_CODE (orig) == LABEL_REF
1955 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1956 {
1957 emit_insn (gen_gotoff_load_addr (reg, orig));
1958 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1959 return reg;
1960 }
1961
1962 emit_insn (gen_pic_load_addr (address, orig));
1963
1964 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1965 pic_ref = gen_const_mem (Pmode, address);
1966 emit_move_insn (reg, pic_ref);
1967 return reg;
1968 }
1969 else if (GET_CODE (orig) == CONST)
1970 {
1971 rtx base, offset;
1972
1973 if (GET_CODE (XEXP (orig, 0)) == PLUS
1974 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1975 return orig;
1976
1977 if (reg == 0)
1978 {
1979 gcc_assert (!reload_in_progress && !reload_completed);
1980 reg = gen_reg_rtx (Pmode);
1981 }
1982
1983 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1984 {
1985 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1986 if (base == reg)
1987 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1988 else
1989 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1990 }
1991 else
1992 return orig;
1993
1994 if (CONST_INT_P (offset))
1995 {
1996 if (INT16_P (INTVAL (offset)))
1997 return plus_constant (Pmode, base, INTVAL (offset));
1998 else
1999 {
2000 gcc_assert (! reload_in_progress && ! reload_completed);
2001 offset = force_reg (Pmode, offset);
2002 }
2003 }
2004
2005 return gen_rtx_PLUS (Pmode, base, offset);
2006 }
2007
2008 return orig;
2009 }
2010
2011 static rtx
2012 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2013 machine_mode mode ATTRIBUTE_UNUSED)
2014 {
2015 if (flag_pic)
2016 return m32r_legitimize_pic_address (x, NULL_RTX);
2017 else
2018 return x;
2019 }
2020
2021 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2022
2023 static bool
2024 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2025 {
2026 if (GET_CODE (addr) == LO_SUM)
2027 return true;
2028
2029 return false;
2030 }
2031 \f
2032 /* Nested function support. */
2033
2034 /* Emit RTL insns to initialize the variable parts of a trampoline.
2035 FNADDR is an RTX for the address of the function's pure code.
2036 CXT is an RTX for the static chain value for the function. */
2037
2038 void
2039 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2040 rtx fnaddr ATTRIBUTE_UNUSED,
2041 rtx cxt ATTRIBUTE_UNUSED)
2042 {
2043 }
2044 \f
2045 static void
2046 m32r_file_start (void)
2047 {
2048 default_file_start ();
2049
2050 if (flag_verbose_asm)
2051 fprintf (asm_out_file,
2052 "%s M32R/D special options: -G %d\n",
2053 ASM_COMMENT_START, g_switch_value);
2054
2055 if (TARGET_LITTLE_ENDIAN)
2056 fprintf (asm_out_file, "\t.little\n");
2057 }
2058 \f
2059 /* Print operand X (an rtx) in assembler syntax to file FILE.
2060 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2061 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2062
2063 static void
2064 m32r_print_operand (FILE * file, rtx x, int code)
2065 {
2066 rtx addr;
2067
2068 switch (code)
2069 {
2070 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2071 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2072 case 's':
2073 if (REG_P (x))
2074 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2075 else
2076 output_operand_lossage ("invalid operand to %%s code");
2077 return;
2078
2079 case 'p':
2080 if (REG_P (x))
2081 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2082 else
2083 output_operand_lossage ("invalid operand to %%p code");
2084 return;
2085
2086 case 'R' :
2087 /* Write second word of DImode or DFmode reference,
2088 register or memory. */
2089 if (REG_P (x))
2090 fputs (reg_names[REGNO (x)+1], file);
2091 else if (MEM_P (x))
2092 {
2093 machine_mode mode = GET_MODE (x);
2094
2095 fprintf (file, "@(");
2096 /* Handle possible auto-increment. Since it is pre-increment and
2097 we have already done it, we can just use an offset of four. */
2098 /* ??? This is taken from rs6000.c I think. I don't think it is
2099 currently necessary, but keep it around. */
2100 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2101 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2102 output_address (mode, plus_constant (Pmode,
2103 XEXP (XEXP (x, 0), 0), 4));
2104 else
2105 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4));
2106 fputc (')', file);
2107 }
2108 else
2109 output_operand_lossage ("invalid operand to %%R code");
2110 return;
2111
2112 case 'H' : /* High word. */
2113 case 'L' : /* Low word. */
2114 if (REG_P (x))
2115 {
2116 /* L = least significant word, H = most significant word. */
2117 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2118 fputs (reg_names[REGNO (x)], file);
2119 else
2120 fputs (reg_names[REGNO (x)+1], file);
2121 }
2122 else if (CONST_INT_P (x)
2123 || GET_CODE (x) == CONST_DOUBLE)
2124 {
2125 rtx first, second;
2126
2127 split_double (x, &first, &second);
2128 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2129 code == 'L' ? INTVAL (first) : INTVAL (second));
2130 }
2131 else
2132 output_operand_lossage ("invalid operand to %%H/%%L code");
2133 return;
2134
2135 case 'A' :
2136 {
2137 char str[30];
2138
2139 if (GET_CODE (x) != CONST_DOUBLE
2140 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2141 fatal_insn ("bad insn for 'A'", x);
2142
2143 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2144 fprintf (file, "%s", str);
2145 return;
2146 }
2147
2148 case 'B' : /* Bottom half. */
2149 case 'T' : /* Top half. */
2150 /* Output the argument to a `seth' insn (sets the Top half-word).
2151 For constants output arguments to a seth/or3 pair to set Top and
2152 Bottom halves. For symbols output arguments to a seth/add3 pair to
2153 set Top and Bottom halves. The difference exists because for
2154 constants seth/or3 is more readable but for symbols we need to use
2155 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
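/* Illustrative sketch (register names are hypothetical):
constant 0x12345678: seth r0, #0x1234 ; or3 r0, r0, #0x5678
symbol sym: seth r0, #shigh(sym) ; add3 r0, r0, #low(sym) */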
2156 switch (GET_CODE (x))
2157 {
2158 case CONST_INT :
2159 case CONST_DOUBLE :
2160 {
2161 rtx first, second;
2162
2163 split_double (x, &first, &second);
2164 x = WORDS_BIG_ENDIAN ? second : first;
2165 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2166 (code == 'B'
2167 ? INTVAL (x) & 0xffff
2168 : (INTVAL (x) >> 16) & 0xffff));
2169 }
2170 return;
2171 case CONST :
2172 case SYMBOL_REF :
2173 if (code == 'B'
2174 && small_data_operand (x, VOIDmode))
2175 {
2176 fputs ("sda(", file);
2177 output_addr_const (file, x);
2178 fputc (')', file);
2179 return;
2180 }
2181 /* fall through */
2182 case LABEL_REF :
2183 fputs (code == 'T' ? "shigh(" : "low(", file);
2184 output_addr_const (file, x);
2185 fputc (')', file);
2186 return;
2187 default :
2188 output_operand_lossage ("invalid operand to %%T/%%B code");
2189 return;
2190 }
2191 break;
2192
2193 case 'U' :
2194 /* ??? wip */
2195 /* Output a load/store with update indicator if appropriate. */
2196 if (MEM_P (x))
2197 {
2198 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2199 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2200 fputs (".a", file);
2201 }
2202 else
2203 output_operand_lossage ("invalid operand to %%U code");
2204 return;
2205
2206 case 'N' :
2207 /* Print a constant value negated. */
2208 if (CONST_INT_P (x))
2209 output_addr_const (file, GEN_INT (- INTVAL (x)));
2210 else
2211 output_operand_lossage ("invalid operand to %%N code");
2212 return;
2213
2214 case 'X' :
2215 /* Print a const_int in hex. Used in comments. */
2216 if (CONST_INT_P (x))
2217 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2218 return;
2219
2220 case '#' :
2221 fputs (IMMEDIATE_PREFIX, file);
2222 return;
2223
2224 case 0 :
2225 /* Do nothing special. */
2226 break;
2227
2228 default :
2229 /* Unknown flag. */
2230 output_operand_lossage ("invalid operand output code");
2231 }
2232
2233 switch (GET_CODE (x))
2234 {
2235 case REG :
2236 fputs (reg_names[REGNO (x)], file);
2237 break;
2238
2239 case MEM :
2240 addr = XEXP (x, 0);
2241 if (GET_CODE (addr) == PRE_INC)
2242 {
2243 if (!REG_P (XEXP (addr, 0)))
2244 fatal_insn ("pre-increment address is not a register", x);
2245
2246 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2247 }
2248 else if (GET_CODE (addr) == PRE_DEC)
2249 {
2250 if (!REG_P (XEXP (addr, 0)))
2251 fatal_insn ("pre-decrement address is not a register", x);
2252
2253 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2254 }
2255 else if (GET_CODE (addr) == POST_INC)
2256 {
2257 if (!REG_P (XEXP (addr, 0)))
2258 fatal_insn ("post-increment address is not a register", x);
2259
2260 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2261 }
2262 else
2263 {
2264 fputs ("@(", file);
2265 output_address (GET_MODE (x), addr);
2266 fputc (')', file);
2267 }
2268 break;
2269
2270 case CONST_DOUBLE :
2271 /* We handle SFmode constants here as output_addr_const doesn't. */
2272 if (GET_MODE (x) == SFmode)
2273 {
2274 long l;
2275
2276 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2277 fprintf (file, "0x%08lx", l);
2278 break;
2279 }
2280
2281 /* Fall through. Let output_addr_const deal with it. */
2282
2283 default :
2284 output_addr_const (file, x);
2285 break;
2286 }
2287 }
2288
2289 /* Print a memory address as an operand to reference that memory location. */
2290
2291 static void
2292 m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
2293 {
2294 rtx base;
2295 rtx index = 0;
2296 int offset = 0;
2297
2298 switch (GET_CODE (addr))
2299 {
2300 case REG :
2301 fputs (reg_names[REGNO (addr)], file);
2302 break;
2303
2304 case PLUS :
2305 if (CONST_INT_P (XEXP (addr, 0)))
2306 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2307 else if (CONST_INT_P (XEXP (addr, 1)))
2308 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2309 else
2310 base = XEXP (addr, 0), index = XEXP (addr, 1);
2311 if (REG_P (base))
2312 {
2313 /* Print the offset first (if present) to conform to the manual. */
2314 if (index == 0)
2315 {
2316 if (offset != 0)
2317 fprintf (file, "%d,", offset);
2318 fputs (reg_names[REGNO (base)], file);
2319 }
2320 /* The chip doesn't support this, but left in for generality. */
2321 else if (REG_P (index))
2322 fprintf (file, "%s,%s",
2323 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2324 /* Not sure this can happen, but leave in for now. */
2325 else if (GET_CODE (index) == SYMBOL_REF)
2326 {
2327 output_addr_const (file, index);
2328 fputc (',', file);
2329 fputs (reg_names[REGNO (base)], file);
2330 }
2331 else
2332 fatal_insn ("bad address", addr);
2333 }
2334 else if (GET_CODE (base) == LO_SUM)
2335 {
2336 gcc_assert (!index && REG_P (XEXP (base, 0)));
2337 if (small_data_operand (XEXP (base, 1), VOIDmode))
2338 fputs ("sda(", file);
2339 else
2340 fputs ("low(", file);
2341 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2342 offset));
2343 fputs ("),", file);
2344 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2345 }
2346 else
2347 fatal_insn ("bad address", addr);
2348 break;
2349
2350 case LO_SUM :
2351 if (!REG_P (XEXP (addr, 0)))
2352 fatal_insn ("lo_sum not of register", addr);
2353 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2354 fputs ("sda(", file);
2355 else
2356 fputs ("low(", file);
2357 output_addr_const (file, XEXP (addr, 1));
2358 fputs ("),", file);
2359 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2360 break;
2361
2362 case PRE_INC : /* Assume SImode. */
2363 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2364 break;
2365
2366 case PRE_DEC : /* Assume SImode. */
2367 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2368 break;
2369
2370 case POST_INC : /* Assume SImode. */
2371 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2372 break;
2373
2374 default :
2375 output_addr_const (file, addr);
2376 break;
2377 }
2378 }
2379
2380 static bool
2381 m32r_print_operand_punct_valid_p (unsigned char code)
2382 {
2383 return m32r_punct_chars[code];
2384 }
2385
2386 /* Return true if the operands are the constants 0 and 1. */
2387
2388 int
2389 zero_and_one (rtx operand1, rtx operand2)
2390 {
2391 return
2392 CONST_INT_P (operand1)
2393 && CONST_INT_P (operand2)
2394 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2395 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2396 }
2397
2398 /* Generate the correct assembler code to handle the conditional loading of a
2399 value into a register. It is known that the operands satisfy the
2400 conditional_move_operand() function above. The destination is operand[0].
2401 The condition is operand [1]. The 'true' value is operand [2] and the
2402 'false' value is operand [3]. */
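/* For example (a sketch, with r4 standing in for the destination register),
the code below emits "mvfc r4, cbr", and appends "xor3 r4, r4, #1" when
the 'true' value is 0 so that the condition-bit value is inverted. */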
2403
2404 char *
2405 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2406 {
2407 static char buffer [100];
2408 const char * dest = reg_names [REGNO (operands [0])];
2409
2410 buffer [0] = 0;
2411
2412 /* Destination must be a register. */
2413 gcc_assert (REG_P (operands [0]));
2414 gcc_assert (conditional_move_operand (operands [2], SImode));
2415 gcc_assert (conditional_move_operand (operands [3], SImode));
2416
2417 /* Check to see if the test is reversed. */
2418 if (GET_CODE (operands [1]) == NE)
2419 {
2420 rtx tmp = operands [2];
2421 operands [2] = operands [3];
2422 operands [3] = tmp;
2423 }
2424
2425 sprintf (buffer, "mvfc %s, cbr", dest);
2426
2427 /* If the true value was '0' then we need to invert the results of the move. */
2428 if (INTVAL (operands [2]) == 0)
2429 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2430 dest, dest);
2431
2432 return buffer;
2433 }
2434
2435 /* Returns true if the registers contained in the two
2436 rtl expressions are different. */
2437
2438 int
2439 m32r_not_same_reg (rtx a, rtx b)
2440 {
2441 int reg_a = -1;
2442 int reg_b = -2;
2443
2444 while (GET_CODE (a) == SUBREG)
2445 a = SUBREG_REG (a);
2446
2447 if (REG_P (a))
2448 reg_a = REGNO (a);
2449
2450 while (GET_CODE (b) == SUBREG)
2451 b = SUBREG_REG (b);
2452
2453 if (REG_P (b))
2454 reg_b = REGNO (b);
2455
2456 return reg_a != reg_b;
2457 }
2458
2459 \f
2460 rtx
2461 m32r_function_symbol (const char *name)
2462 {
2463 int extra_flags = 0;
2464 enum m32r_model model;
2465 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2466
2467 if (TARGET_MODEL_SMALL)
2468 model = M32R_MODEL_SMALL;
2469 else if (TARGET_MODEL_MEDIUM)
2470 model = M32R_MODEL_MEDIUM;
2471 else if (TARGET_MODEL_LARGE)
2472 model = M32R_MODEL_LARGE;
2473 else
2474 gcc_unreachable (); /* Shouldn't happen. */
2475 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2476
2477 if (extra_flags)
2478 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2479
2480 return sym;
2481 }
2482
2483 /* Use a library function to move some bytes. */
2484
2485 static void
2486 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2487 {
2488 /* We want to pass the size as Pmode, which will normally be SImode
2489 but will be DImode if we are using 64-bit longs and pointers. */
2490 if (GET_MODE (bytes_rtx) != VOIDmode
2491 && GET_MODE (bytes_rtx) != Pmode)
2492 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2493
2494 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2495 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2496 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2497 TYPE_UNSIGNED (sizetype)),
2498 TYPE_MODE (sizetype));
2499 }
2500
2501 /* Expand string/block move operations.
2502
2503 operands[0] is the pointer to the destination.
2504 operands[1] is the pointer to the source.
2505 operands[2] is the number of bytes to move.
2506 operands[3] is the alignment.
2507
2508 Returns 1 upon success, 0 otherwise. */
2509
2510 int
2511 m32r_expand_block_move (rtx operands[])
2512 {
2513 rtx orig_dst = operands[0];
2514 rtx orig_src = operands[1];
2515 rtx bytes_rtx = operands[2];
2516 rtx align_rtx = operands[3];
2517 int constp = CONST_INT_P (bytes_rtx);
2518 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2519 int align = INTVAL (align_rtx);
2520 int leftover;
2521 rtx src_reg;
2522 rtx dst_reg;
2523
2524 if (constp && bytes <= 0)
2525 return 1;
2526
2527 /* Move the addresses into scratch registers. */
2528 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2529 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2530
2531 if (align > UNITS_PER_WORD)
2532 align = UNITS_PER_WORD;
2533
2534 /* If we prefer size over speed, always use a function call.
2535 If we do not know the size, use a function call.
2536 If the blocks are not word aligned, use a function call. */
2537 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2538 {
2539 block_move_call (dst_reg, src_reg, bytes_rtx);
2540 return 0;
2541 }
2542
2543 leftover = bytes % MAX_MOVE_BYTES;
2544 bytes -= leftover;
2545
2546 /* If necessary, generate a loop to handle the bulk of the copy. */
2547 if (bytes)
2548 {
2549 rtx_code_label *label = NULL;
2550 rtx final_src = NULL_RTX;
2551 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2552 rtx rounded_total = GEN_INT (bytes);
2553 rtx new_dst_reg = gen_reg_rtx (SImode);
2554 rtx new_src_reg = gen_reg_rtx (SImode);
2555
2556 /* If we are going to have to perform this loop more than
2557 once, then generate a label and compute the address the
2558 source register will contain upon completion of the final
2559 iteration. */
2560 if (bytes > MAX_MOVE_BYTES)
2561 {
2562 final_src = gen_reg_rtx (Pmode);
2563
2564 if (INT16_P(bytes))
2565 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2566 else
2567 {
2568 emit_insn (gen_movsi (final_src, rounded_total));
2569 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2570 }
2571
2572 label = gen_label_rtx ();
2573 emit_label (label);
2574 }
2575
2576 /* It is known that m32r_output_block_move() will update src_reg to point
2577 to the word after the end of the source block, and dst_reg to point
2578 to the last word of the destination block, provided that the block
2579 is MAX_MOVE_BYTES long. */
2580 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2581 new_dst_reg, new_src_reg));
2582 emit_move_insn (dst_reg, new_dst_reg);
2583 emit_move_insn (src_reg, new_src_reg);
2584 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2585
2586 if (bytes > MAX_MOVE_BYTES)
2587 {
2588 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2589 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2590 }
2591 }
2592
2593 if (leftover)
2594 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2595 gen_reg_rtx (SImode),
2596 gen_reg_rtx (SImode)));
2597 return 1;
2598 }
2599
2600 \f
2601 /* Emit load/stores for a small constant word aligned block_move.
2602
2603 operands[0] is the memory address of the destination.
2604 operands[1] is the memory address of the source.
2605 operands[2] is the number of bytes to move.
2606 operands[3] is a temp register.
2607 operands[4] is a temp register. */
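/* As a rough sketch, the first 8-byte chunk of a copy emits (with r5/r6
standing in for the temporaries and r0/r1 for the destination/source
pointers):
ld r5, @r1+
ld r6, @r1+
st r5, @r0
st r6, @+r0
Subsequent stores all use the pre-increment form @+r0. */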
2608
2609 void
2610 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2611 {
2612 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2613 int first_time;
2614 int got_extra = 0;
2615
2616 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2617
2618 /* We do not have a post-increment store available, so the first set of
2619 stores is done without any increment, then the remaining ones can use
2620 the pre-increment addressing mode.
2621
2622 Note: m32r_expand_block_move() also relies upon this behavior when
2623 building loops to copy large blocks. */
2624 first_time = 1;
2625
2626 while (bytes > 0)
2627 {
2628 if (bytes >= 8)
2629 {
2630 if (first_time)
2631 {
2632 output_asm_insn ("ld\t%5, %p1", operands);
2633 output_asm_insn ("ld\t%6, %p1", operands);
2634 output_asm_insn ("st\t%5, @%0", operands);
2635 output_asm_insn ("st\t%6, %s0", operands);
2636 }
2637 else
2638 {
2639 output_asm_insn ("ld\t%5, %p1", operands);
2640 output_asm_insn ("ld\t%6, %p1", operands);
2641 output_asm_insn ("st\t%5, %s0", operands);
2642 output_asm_insn ("st\t%6, %s0", operands);
2643 }
2644
2645 bytes -= 8;
2646 }
2647 else if (bytes >= 4)
2648 {
2649 if (bytes > 4)
2650 got_extra = 1;
2651
2652 output_asm_insn ("ld\t%5, %p1", operands);
2653
2654 if (got_extra)
2655 output_asm_insn ("ld\t%6, %p1", operands);
2656
2657 if (first_time)
2658 output_asm_insn ("st\t%5, @%0", operands);
2659 else
2660 output_asm_insn ("st\t%5, %s0", operands);
2661
2662 bytes -= 4;
2663 }
2664 else
2665 {
2666 /* Get the entire next word, even though we do not want all of it.
2667 This saves us from doing several smaller loads, and we assume that
2668 we cannot cause a page fault when at least part of the word is in
2669 valid memory [since we don't get called if things aren't properly
2670 aligned]. */
2671 int dst_offset = first_time ? 0 : 4;
2672 /* The amount of increment we have to make to the
2673 destination pointer. */
2674 int dst_inc_amount = dst_offset + bytes - 4;
2675 /* The same for the source pointer. */
2676 int src_inc_amount = bytes;
2677 int last_shift;
2678 rtx my_operands[3];
2679
2680 /* If got_extra is true then we have already loaded
2681 the next word as part of loading and storing the previous word. */
2682 if (! got_extra)
2683 output_asm_insn ("ld\t%6, @%1", operands);
2684
2685 if (bytes >= 2)
2686 {
2687 bytes -= 2;
2688
2689 output_asm_insn ("sra3\t%5, %6, #16", operands);
2690 my_operands[0] = operands[5];
2691 my_operands[1] = GEN_INT (dst_offset);
2692 my_operands[2] = operands[0];
2693 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2694
2695 /* If there is a byte left to store then increment the
2696 destination address and shift the contents of the source
2697 register down by 8 bits. We could not do the address
2698 increment in the store half word instruction, because it does
2699 not have an auto increment mode. */
2700 if (bytes > 0) /* assert (bytes == 1) */
2701 {
2702 dst_offset += 2;
2703 last_shift = 8;
2704 }
2705 }
2706 else
2707 last_shift = 24;
2708
2709 if (bytes > 0)
2710 {
2711 my_operands[0] = operands[6];
2712 my_operands[1] = GEN_INT (last_shift);
2713 output_asm_insn ("srai\t%0, #%1", my_operands);
2714 my_operands[0] = operands[6];
2715 my_operands[1] = GEN_INT (dst_offset);
2716 my_operands[2] = operands[0];
2717 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2718 }
2719
2720 /* Update the destination pointer if needed. We have to do
2721 this so that the pattern matches what we output in this
2722 function. */
2723 if (dst_inc_amount
2724 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2725 {
2726 my_operands[0] = operands[0];
2727 my_operands[1] = GEN_INT (dst_inc_amount);
2728 output_asm_insn ("addi\t%0, #%1", my_operands);
2729 }
2730
2731 /* Update the source pointer if needed. We have to do this
2732 so that the pattern matches what we output in this
2733 function. */
2734 if (src_inc_amount
2735 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2736 {
2737 my_operands[0] = operands[1];
2738 my_operands[1] = GEN_INT (src_inc_amount);
2739 output_asm_insn ("addi\t%0, #%1", my_operands);
2740 }
2741
2742 bytes = 0;
2743 }
2744
2745 first_time = 0;
2746 }
2747 }
2748
2749 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2750
2751 int
2752 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2753 unsigned int new_reg)
2754 {
2755 /* Interrupt routines can't clobber any register that isn't already used. */
2756 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2757 && !df_regs_ever_live_p (new_reg))
2758 return 0;
2759
2760 return 1;
2761 }
2762
2763 rtx
2764 m32r_return_addr (int count)
2765 {
2766 if (count != 0)
2767 return const0_rtx;
2768
2769 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2770 }
2771
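/* Worker for TARGET_TRAMPOLINE_INIT. The first four word stores lay down
the (endianness-adjusted) code of the trampoline stub; the words at
offsets 16 and 20 then hold the static chain value and the address of the
target function, which the stub loads at run time. The instruction cache
is flushed afterwards, either via a trap or via a library call
(descriptive sketch of the code below). */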
2772 static void
2773 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2774 {
2775 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2776 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2777 0x017e8e17 : 0x178e7e01, SImode));
2778 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2779 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2780 0x0c00ae86 : 0x86ae000c, SImode));
2781 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2782 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2783 0xe627871e : 0x1e8727e6, SImode));
2784 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2785 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2786 0xc616c626 : 0x26c61fc6, SImode));
2787 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2788 chain_value);
2789 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2790 XEXP (DECL_RTL (fndecl), 0));
2791
2792 if (m32r_cache_flush_trap >= 0)
2793 emit_insn (gen_flush_icache
2794 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2795 gen_int_mode (m32r_cache_flush_trap, SImode)));
2796 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2797 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2798 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2799 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2800 GEN_INT (3), SImode);
2801 }
2802
2803 /* True if X is a reg that can be used as a base reg. */
2804
2805 static bool
2806 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2807 {
2808 if (! REG_P (x))
2809 return false;
2810
2811 if (strict)
2812 {
2813 if (GPR_P (REGNO (x)))
2814 return true;
2815 }
2816 else
2817 {
2818 if (GPR_P (REGNO (x))
2819 || REGNO (x) == ARG_POINTER_REGNUM
2820 || ! HARD_REGISTER_P (x))
2821 return true;
2822 }
2823
2824 return false;
2825 }
2826
2827 static inline bool
2828 m32r_rtx_ok_for_offset_p (const_rtx x)
2829 {
2830 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2831 }
2832
2833 static inline bool
2834 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2835 const_rtx x, bool strict)
2836 {
2837 if (GET_CODE (x) == PLUS
2838 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2839 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2840 return true;
2841
2842 return false;
2843 }
2844
2845 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2846 since more than one instruction will be required. */
2847
2848 static inline bool
2849 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2850 bool strict)
2851 {
2852 if (GET_CODE (x) == LO_SUM
2853 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2854 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2855 && CONSTANT_P (XEXP (x, 1)))
2856 return true;
2857
2858 return false;
2859 }
2860
2861 /* Is this a load and increment (post-increment) operation? */
2862
2863 static inline bool
2864 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2865 {
2866 if ((mode == SImode || mode == SFmode)
2867 && GET_CODE (x) == POST_INC
2868 && REG_P (XEXP (x, 0))
2869 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2870 return true;
2871
2872 return false;
2873 }
2874
2875 /* Is this an increment/decrement and store operation? */
2876
2877 static inline bool
2878 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2879 {
2880 if ((mode == SImode || mode == SFmode)
2881 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2882 && REG_P (XEXP (x, 0))
2883 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2884 return true;
2885
2886 return false;
2887 }
2888
2889 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
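/* In summary (per the helpers above), a legitimate address is: a base
register, base plus a signed 16-bit offset, a LO_SUM of a base register
and a constant for modes of at most one word, a POST_INC base register
for SImode/SFmode loads, or a PRE_INC/PRE_DEC base register for
SImode/SFmode stores. */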
2890
2891 static bool
2892 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2893 {
2894 if (m32r_rtx_ok_for_base_p (x, strict)
2895 || m32r_legitimate_offset_addres_p (mode, x, strict)
2896 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2897 || m32r_load_postinc_p (mode, x, strict)
2898 || m32r_store_preinc_predec_p (mode, x, strict))
2899 return true;
2900
2901 return false;
2902 }
2903
2904 static void
2905 m32r_conditional_register_usage (void)
2906 {
2907 if (flag_pic)
2908 {
2909 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2910 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2911 }
2912 }
2913
2914 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2915
2916 We don't allow (plus symbol large-constant) as the relocations can't
2917 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2918 We allow all CONST_DOUBLE's as the md file patterns will force the
2919 constant to memory if they can't handle them. */
2920
2921 static bool
2922 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2923 {
2924 return !(GET_CODE (x) == CONST
2925 && GET_CODE (XEXP (x, 0)) == PLUS
2926 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2927 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2928 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2929 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2930 }