1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "tm_p.h"
29 #include "stringpool.h"
30 #include "insn-config.h"
31 #include "emit-rtl.h"
32 #include "recog.h"
33 #include "diagnostic-core.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "calls.h"
38 #include "output.h"
39 #include "insn-attr.h"
40 #include "explow.h"
41 #include "expr.h"
42 #include "tm-constrs.h"
43 #include "builtins.h"
44
45 /* This file should be included last. */
46 #include "target-def.h"
47
48 /* Array of valid operand punctuation characters. */
49 static char m32r_punct_chars[256];
50
51 /* Machine-specific symbol_ref flags. */
52 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
53 #define SYMBOL_REF_MODEL(X) \
54 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
55
56 /* For string literals, etc. */
57 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
58
59 /* Forward declaration. */
60 static void m32r_option_override (void);
61 static void init_reg_tables (void);
62 static void block_move_call (rtx, rtx, rtx);
63 static int m32r_is_insn (rtx);
64 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
65 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
66 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
67 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
68 static void m32r_print_operand (FILE *, rtx, int);
69 static void m32r_print_operand_address (FILE *, rtx);
70 static bool m32r_print_operand_punct_valid_p (unsigned char code);
71 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
72 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
73
74 static void m32r_file_start (void);
75
76 static int m32r_adjust_priority (rtx_insn *, int);
77 static int m32r_issue_rate (void);
78
79 static void m32r_encode_section_info (tree, rtx, int);
80 static bool m32r_in_small_data_p (const_tree);
81 static bool m32r_return_in_memory (const_tree, const_tree);
82 static rtx m32r_function_value (const_tree, const_tree, bool);
83 static rtx m32r_libcall_value (machine_mode, const_rtx);
84 static bool m32r_function_value_regno_p (const unsigned int);
85 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
86 tree, int *, int);
87 static void init_idents (void);
88 static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
89 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
90 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
91 const_tree, bool);
92 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
93 tree, bool);
94 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
95 const_tree, bool);
96 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
97 const_tree, bool);
98 static bool m32r_can_eliminate (const int, const int);
99 static void m32r_conditional_register_usage (void);
100 static void m32r_trampoline_init (rtx, tree, rtx);
101 static bool m32r_legitimate_constant_p (machine_mode, rtx);
102 static bool m32r_attribute_identifier (const_tree);
103 \f
104 /* M32R specific attributes. */
105
106 static const struct attribute_spec m32r_attribute_table[] =
107 {
108 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
109 affects_type_identity } */
110 { "interrupt", 0, 0, true, false, false, NULL, false },
111 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
112 false },
113 { NULL, 0, 0, false, false, false, NULL, false }
114 };
115 \f
116 /* Initialize the GCC target structure. */
117 #undef TARGET_ATTRIBUTE_TABLE
118 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
119 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
120 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
121
122 #undef TARGET_LEGITIMATE_ADDRESS_P
123 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
124 #undef TARGET_LEGITIMIZE_ADDRESS
125 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
126 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
127 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
128
129 #undef TARGET_ASM_ALIGNED_HI_OP
130 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
131 #undef TARGET_ASM_ALIGNED_SI_OP
132 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
133
134 #undef TARGET_PRINT_OPERAND
135 #define TARGET_PRINT_OPERAND m32r_print_operand
136 #undef TARGET_PRINT_OPERAND_ADDRESS
137 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
138 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
139 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
140
141 #undef TARGET_ASM_FUNCTION_PROLOGUE
142 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
143 #undef TARGET_ASM_FUNCTION_EPILOGUE
144 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
145
146 #undef TARGET_ASM_FILE_START
147 #define TARGET_ASM_FILE_START m32r_file_start
148
149 #undef TARGET_SCHED_ADJUST_PRIORITY
150 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
151 #undef TARGET_SCHED_ISSUE_RATE
152 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
153
154 #undef TARGET_OPTION_OVERRIDE
155 #define TARGET_OPTION_OVERRIDE m32r_option_override
156
157 #undef TARGET_ENCODE_SECTION_INFO
158 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
159 #undef TARGET_IN_SMALL_DATA_P
160 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
161
162
163 #undef TARGET_MEMORY_MOVE_COST
164 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS m32r_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
169
170 #undef TARGET_PROMOTE_PROTOTYPES
171 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
174
175 #undef TARGET_FUNCTION_VALUE
176 #define TARGET_FUNCTION_VALUE m32r_function_value
177 #undef TARGET_LIBCALL_VALUE
178 #define TARGET_LIBCALL_VALUE m32r_libcall_value
179 #undef TARGET_FUNCTION_VALUE_REGNO_P
180 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
181
182 #undef TARGET_SETUP_INCOMING_VARARGS
183 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
184 #undef TARGET_MUST_PASS_IN_STACK
185 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
186 #undef TARGET_PASS_BY_REFERENCE
187 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
188 #undef TARGET_ARG_PARTIAL_BYTES
189 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
190 #undef TARGET_FUNCTION_ARG
191 #define TARGET_FUNCTION_ARG m32r_function_arg
192 #undef TARGET_FUNCTION_ARG_ADVANCE
193 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
194
195 #undef TARGET_CAN_ELIMINATE
196 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
197
198 #undef TARGET_CONDITIONAL_REGISTER_USAGE
199 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
200
201 #undef TARGET_TRAMPOLINE_INIT
202 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
203
204 #undef TARGET_LEGITIMATE_CONSTANT_P
205 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
206
207 struct gcc_target targetm = TARGET_INITIALIZER;
208 \f
209 /* Called by m32r_option_override to initialize various things. */
210
211 void
212 m32r_init (void)
213 {
214 init_reg_tables ();
215
216 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
217 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
218 m32r_punct_chars['#'] = 1;
219 m32r_punct_chars['@'] = 1; /* ??? no longer used */
220
221 /* Provide default value if not specified. */
222 if (!global_options_set.x_g_switch_value)
223 g_switch_value = SDATA_DEFAULT_SIZE;
224 }
225
226 static void
227 m32r_option_override (void)
228 {
229 /* These need to be done at start up.
230 It's convenient to do them here. */
231 m32r_init ();
232 SUBTARGET_OVERRIDE_OPTIONS;
233 }
234
235 /* Vectors to keep interesting information about registers where it can easily
236 be obtained. We used to use the actual mode value as the bit number, but there
237 is (or may be) more than 32 modes now. Instead we use two tables: one
238 indexed by hard register number, and one indexed by mode. */
239
240 /* The purpose of m32r_mode_class is to shrink the range of modes so that
241 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
242 mapped into one m32r_mode_class mode. */
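/* For instance, QImode, HImode and SImode all map to S_MODE below,
   DFmode to DF_MODE, and CCmode to C_MODE; see init_reg_tables for
   the exact mapping.  */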
243
244 enum m32r_mode_class
245 {
246 C_MODE,
247 S_MODE, D_MODE, T_MODE, O_MODE,
248 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
249 };
250
251 /* Modes for condition codes. */
252 #define C_MODES (1 << (int) C_MODE)
253
254 /* Modes for single-word and smaller quantities. */
255 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
256
257 /* Modes for double-word and smaller quantities. */
258 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
259
260 /* Modes for quad-word and smaller quantities. */
261 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
262
263 /* Modes for accumulators. */
264 #define A_MODES (1 << (int) A_MODE)
265
266 /* Value is 1 if register/mode pair is acceptable on the M32R. */
267
268 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
269 {
270 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
271 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
272 S_MODES, C_MODES, A_MODES, A_MODES
273 };
274
275 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
276
277 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
278
279 static void
280 init_reg_tables (void)
281 {
282 int i;
283
284 for (i = 0; i < NUM_MACHINE_MODES; i++)
285 {
286 machine_mode m = (machine_mode) i;
287
288 switch (GET_MODE_CLASS (m))
289 {
290 case MODE_INT:
291 case MODE_PARTIAL_INT:
292 case MODE_COMPLEX_INT:
293 if (GET_MODE_SIZE (m) <= 4)
294 m32r_mode_class[i] = 1 << (int) S_MODE;
295 else if (GET_MODE_SIZE (m) == 8)
296 m32r_mode_class[i] = 1 << (int) D_MODE;
297 else if (GET_MODE_SIZE (m) == 16)
298 m32r_mode_class[i] = 1 << (int) T_MODE;
299 else if (GET_MODE_SIZE (m) == 32)
300 m32r_mode_class[i] = 1 << (int) O_MODE;
301 else
302 m32r_mode_class[i] = 0;
303 break;
304 case MODE_FLOAT:
305 case MODE_COMPLEX_FLOAT:
306 if (GET_MODE_SIZE (m) <= 4)
307 m32r_mode_class[i] = 1 << (int) SF_MODE;
308 else if (GET_MODE_SIZE (m) == 8)
309 m32r_mode_class[i] = 1 << (int) DF_MODE;
310 else if (GET_MODE_SIZE (m) == 16)
311 m32r_mode_class[i] = 1 << (int) TF_MODE;
312 else if (GET_MODE_SIZE (m) == 32)
313 m32r_mode_class[i] = 1 << (int) OF_MODE;
314 else
315 m32r_mode_class[i] = 0;
316 break;
317 case MODE_CC:
318 m32r_mode_class[i] = 1 << (int) C_MODE;
319 break;
320 default:
321 m32r_mode_class[i] = 0;
322 break;
323 }
324 }
325
326 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
327 {
328 if (GPR_P (i))
329 m32r_regno_reg_class[i] = GENERAL_REGS;
330 else if (i == ARG_POINTER_REGNUM)
331 m32r_regno_reg_class[i] = GENERAL_REGS;
332 else
333 m32r_regno_reg_class[i] = NO_REGS;
334 }
335 }
336 \f
337 /* M32R specific attribute support.
338
339 interrupt - for interrupt functions
340
341 model - select code model used to access object
342
343 small: addresses use 24 bits, use bl to make calls
344 medium: addresses use 32 bits, use bl to make calls
345 large: addresses use 32 bits, use seth/add3/jl to make calls
346
347 Grep for MODEL in m32r.h for more info. */
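/* As an illustration (user-code usage, not part of this file), the
   attribute is typically written as

       int table[64] __attribute__ ((model ("small")));
       void handler (void) __attribute__ ((model ("large")));

   and the accepted spellings of the argument are the identifiers
   set up in init_idents below.  */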
348
349 static tree small_ident1;
350 static tree small_ident2;
351 static tree medium_ident1;
352 static tree medium_ident2;
353 static tree large_ident1;
354 static tree large_ident2;
355
356 static void
357 init_idents (void)
358 {
359 if (small_ident1 == 0)
360 {
361 small_ident1 = get_identifier ("small");
362 small_ident2 = get_identifier ("__small__");
363 medium_ident1 = get_identifier ("medium");
364 medium_ident2 = get_identifier ("__medium__");
365 large_ident1 = get_identifier ("large");
366 large_ident2 = get_identifier ("__large__");
367 }
368 }
369
370 /* Handle a "model" attribute; arguments as in
371 struct attribute_spec.handler. */
372 static tree
373 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
374 tree args, int flags ATTRIBUTE_UNUSED,
375 bool *no_add_attrs)
376 {
377 tree arg;
378
379 init_idents ();
380 arg = TREE_VALUE (args);
381
382 if (arg != small_ident1
383 && arg != small_ident2
384 && arg != medium_ident1
385 && arg != medium_ident2
386 && arg != large_ident1
387 && arg != large_ident2)
388 {
389 warning (OPT_Wattributes, "invalid argument of %qs attribute",
390 IDENTIFIER_POINTER (name));
391 *no_add_attrs = true;
392 }
393
394 return NULL_TREE;
395 }
396
397 static bool
398 m32r_attribute_identifier (const_tree name)
399 {
400 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
401 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
402 }
403 \f
404 /* Encode section information of DECL, which is either a VAR_DECL,
405 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
406
407 For the M32R we want to record:
408
409 - whether the object lives in .sdata/.sbss.
410 - what code model should be used to access the object
411 */
412
413 static void
414 m32r_encode_section_info (tree decl, rtx rtl, int first)
415 {
416 int extra_flags = 0;
417 tree model_attr;
418 enum m32r_model model;
419
420 default_encode_section_info (decl, rtl, first);
421
422 if (!DECL_P (decl))
423 return;
424
425 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
426 if (model_attr)
427 {
428 tree id;
429
430 init_idents ();
431
432 id = TREE_VALUE (TREE_VALUE (model_attr));
433
434 if (id == small_ident1 || id == small_ident2)
435 model = M32R_MODEL_SMALL;
436 else if (id == medium_ident1 || id == medium_ident2)
437 model = M32R_MODEL_MEDIUM;
438 else if (id == large_ident1 || id == large_ident2)
439 model = M32R_MODEL_LARGE;
440 else
441 gcc_unreachable (); /* shouldn't happen */
442 }
443 else
444 {
445 if (TARGET_MODEL_SMALL)
446 model = M32R_MODEL_SMALL;
447 else if (TARGET_MODEL_MEDIUM)
448 model = M32R_MODEL_MEDIUM;
449 else if (TARGET_MODEL_LARGE)
450 model = M32R_MODEL_LARGE;
451 else
452 gcc_unreachable (); /* shouldn't happen */
453 }
454 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
455
456 if (extra_flags)
457 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
458 }
459
460 /* Only mark the object as being small data area addressable if
461 it hasn't been explicitly marked with a code model.
462
463 The user can explicitly put an object in the small data area with the
464 section attribute. If the object is in sdata/sbss and marked with a
465 code model do both [put the object in .sdata and mark it as being
466 addressed with a specific code model - don't mark it as being addressed
467 with an SDA reloc though]. This is ok and might be useful at times. If
468 the object doesn't fit the linker will give an error. */
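/* For example, a user can force an object into the area with
       int counter __attribute__ ((section (".sdata")));
   while objects with no explicit section are judged below against
   g_switch_value, i.e. the -G size threshold (given a default in
   m32r_init).  */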
469
470 static bool
471 m32r_in_small_data_p (const_tree decl)
472 {
473 const char *section;
474
475 if (TREE_CODE (decl) != VAR_DECL)
476 return false;
477
478 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
479 return false;
480
481 section = DECL_SECTION_NAME (decl);
482 if (section)
483 {
484 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
485 return true;
486 }
487 else
488 {
489 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
490 {
491 int size = int_size_in_bytes (TREE_TYPE (decl));
492
493 if (size > 0 && size <= g_switch_value)
494 return true;
495 }
496 }
497
498 return false;
499 }
500
501 /* Do anything needed before RTL is emitted for each function. */
502
503 void
504 m32r_init_expanders (void)
505 {
506 /* ??? At one point there was code here. The function is left in
507 to make it easy to experiment. */
508 }
509 \f
510 int
511 call_operand (rtx op, machine_mode mode)
512 {
513 if (!MEM_P (op))
514 return 0;
515 op = XEXP (op, 0);
516 return call_address_operand (op, mode);
517 }
518
519 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
520
521 int
522 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
523 {
524 if (! TARGET_SDATA_USE)
525 return 0;
526
527 if (GET_CODE (op) == SYMBOL_REF)
528 return SYMBOL_REF_SMALL_P (op);
529
530 if (GET_CODE (op) == CONST
531 && GET_CODE (XEXP (op, 0)) == PLUS
532 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
533 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
534 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
535
536 return 0;
537 }
538
539 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
540
541 int
542 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
543 {
544 rtx sym;
545
546 if (flag_pic)
547 return 0;
548
549 if (GET_CODE (op) == LABEL_REF)
550 return TARGET_ADDR24;
551
552 if (GET_CODE (op) == SYMBOL_REF)
553 sym = op;
554 else if (GET_CODE (op) == CONST
555 && GET_CODE (XEXP (op, 0)) == PLUS
556 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
557 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
558 sym = XEXP (XEXP (op, 0), 0);
559 else
560 return 0;
561
562 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
563 return 1;
564
565 if (TARGET_ADDR24
566 && (CONSTANT_POOL_ADDRESS_P (sym)
567 || LIT_NAME_P (XSTR (sym, 0))))
568 return 1;
569
570 return 0;
571 }
572
573 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
574
575 int
576 addr32_operand (rtx op, machine_mode mode)
577 {
578 rtx sym;
579
580 if (GET_CODE (op) == LABEL_REF)
581 return TARGET_ADDR32;
582
583 if (GET_CODE (op) == SYMBOL_REF)
584 sym = op;
585 else if (GET_CODE (op) == CONST
586 && GET_CODE (XEXP (op, 0)) == PLUS
587 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
588 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
589 && ! flag_pic)
590 sym = XEXP (XEXP (op, 0), 0);
591 else
592 return 0;
593
594 return (! addr24_operand (sym, mode)
595 && ! small_data_operand (sym, mode));
596 }
597
598 /* Return 1 if OP is a function that can be called with the `bl' insn. */
599
600 int
601 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
602 {
603 if (flag_pic)
604 return 1;
605
606 if (GET_CODE (op) == SYMBOL_REF)
607 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
608
609 return TARGET_CALL26;
610 }
611
612 /* Return 1 if OP is a DImode const we want to handle inline.
613 This must match the code in the movdi pattern.
614 It is used by the 'G' constraint. */
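/* For example, the DImode constant 0x0000005500000023 splits into a
   high word of 0x55 and a low word of 0x23; both fit the signed 8-bit
   range tested below, so the value can be built with two `ldi's.  */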
615
616 int
617 easy_di_const (rtx op)
618 {
619 rtx high_rtx, low_rtx;
620 HOST_WIDE_INT high, low;
621
622 split_double (op, &high_rtx, &low_rtx);
623 high = INTVAL (high_rtx);
624 low = INTVAL (low_rtx);
625 /* Pick constants loadable with 2 16-bit `ldi' insns. */
626 if (high >= -128 && high <= 127
627 && low >= -128 && low <= 127)
628 return 1;
629 return 0;
630 }
631
632 /* Return 1 if OP is a DFmode const we want to handle inline.
633 This must match the code in the movdf pattern.
634 It is used by the 'H' constraint. */
635
636 int
637 easy_df_const (rtx op)
638 {
639 long l[2];
640
641 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
642 if (l[0] == 0 && l[1] == 0)
643 return 1;
644 if ((l[0] & 0xffff) == 0 && l[1] == 0)
645 return 1;
646 return 0;
647 }
648
649 /* Return 1 if OP is (mem (reg ...)).
650 This is used in insn length calcs. */
651
652 int
653 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
654 {
655 return MEM_P (op) && REG_P (XEXP (op, 0));
656 }
657
658 /* Return nonzero if TYPE must be passed by indirect reference. */
659
660 static bool
661 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
662 machine_mode mode, const_tree type,
663 bool named ATTRIBUTE_UNUSED)
664 {
665 int size;
666
667 if (type)
668 size = int_size_in_bytes (type);
669 else
670 size = GET_MODE_SIZE (mode);
671
672 return (size < 0 || size > 8);
673 }
674 \f
675 /* Comparisons. */
676
677 /* X and Y are two things to compare using CODE. Emit the compare insn and
678 return the rtx for compare [arg0 of the if_then_else].
679 If need_compare is true then the comparison insn must be generated, rather
680 than being subsumed into the following branch instruction. */
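/* For example, (le x y) maps in the table below to compare_code LT,
   branch_code EQ and must_swap, i.e. it is handled as "not (y < x)":
   the compare operands are swapped and the branch finally generated
   tests the condition register for equality with zero.  */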
681
682 rtx
683 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
684 {
685 enum rtx_code compare_code;
686 enum rtx_code branch_code;
687 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
688 int must_swap = 0;
689
690 switch (code)
691 {
692 case EQ: compare_code = EQ; branch_code = NE; break;
693 case NE: compare_code = EQ; branch_code = EQ; break;
694 case LT: compare_code = LT; branch_code = NE; break;
695 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
696 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
697 case GE: compare_code = LT; branch_code = EQ; break;
698 case LTU: compare_code = LTU; branch_code = NE; break;
699 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
700 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
701 case GEU: compare_code = LTU; branch_code = EQ; break;
702
703 default:
704 gcc_unreachable ();
705 }
706
707 if (need_compare)
708 {
709 switch (compare_code)
710 {
711 case EQ:
712 if (satisfies_constraint_P (y) /* Reg equal to small const. */
713 && y != const0_rtx)
714 {
715 rtx tmp = gen_reg_rtx (SImode);
716
717 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
718 x = tmp;
719 y = const0_rtx;
720 }
721 else if (CONSTANT_P (y)) /* Reg equal to const. */
722 {
723 rtx tmp = force_reg (GET_MODE (x), y);
724 y = tmp;
725 }
726
727 if (register_operand (y, SImode) /* Reg equal to reg. */
728 || y == const0_rtx) /* Reg equal to zero. */
729 {
730 emit_insn (gen_cmp_eqsi_insn (x, y));
731
732 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
733 }
734 break;
735
736 case LT:
737 if (register_operand (y, SImode)
738 || satisfies_constraint_P (y))
739 {
740 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
741
742 switch (code)
743 {
744 case LT:
745 emit_insn (gen_cmp_ltsi_insn (x, y));
746 code = EQ;
747 break;
748 case LE:
749 if (y == const0_rtx)
750 tmp = const1_rtx;
751 else
752 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
753 emit_insn (gen_cmp_ltsi_insn (x, tmp));
754 code = EQ;
755 break;
756 case GT:
757 if (CONST_INT_P (y))
758 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
759 else
760 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
761 emit_insn (gen_cmp_ltsi_insn (x, tmp));
762 code = NE;
763 break;
764 case GE:
765 emit_insn (gen_cmp_ltsi_insn (x, y));
766 code = NE;
767 break;
768 default:
769 gcc_unreachable ();
770 }
771
772 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
773 }
774 break;
775
776 case LTU:
777 if (register_operand (y, SImode)
778 || satisfies_constraint_P (y))
779 {
780 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
781
782 switch (code)
783 {
784 case LTU:
785 emit_insn (gen_cmp_ltusi_insn (x, y));
786 code = EQ;
787 break;
788 case LEU:
789 if (y == const0_rtx)
790 tmp = const1_rtx;
791 else
792 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
793 emit_insn (gen_cmp_ltusi_insn (x, tmp));
794 code = EQ;
795 break;
796 case GTU:
797 if (CONST_INT_P (y))
798 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
799 else
800 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
801 emit_insn (gen_cmp_ltusi_insn (x, tmp));
802 code = NE;
803 break;
804 case GEU:
805 emit_insn (gen_cmp_ltusi_insn (x, y));
806 code = NE;
807 break;
808 default:
809 gcc_unreachable ();
810 }
811
812 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
813 }
814 break;
815
816 default:
817 gcc_unreachable ();
818 }
819 }
820 else
821 {
822 /* Reg/reg equal comparison. */
823 if (compare_code == EQ
824 && register_operand (y, SImode))
825 return gen_rtx_fmt_ee (code, CCmode, x, y);
826
827 /* Reg/zero signed comparison. */
828 if ((compare_code == EQ || compare_code == LT)
829 && y == const0_rtx)
830 return gen_rtx_fmt_ee (code, CCmode, x, y);
831
832 /* Reg/smallconst equal comparison. */
833 if (compare_code == EQ
834 && satisfies_constraint_P (y))
835 {
836 rtx tmp = gen_reg_rtx (SImode);
837
838 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
839 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
840 }
841
842 /* Reg/const equal comparison. */
843 if (compare_code == EQ
844 && CONSTANT_P (y))
845 {
846 rtx tmp = force_reg (GET_MODE (x), y);
847
848 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
849 }
850 }
851
852 if (CONSTANT_P (y))
853 {
854 if (must_swap)
855 y = force_reg (GET_MODE (x), y);
856 else
857 {
858 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
859
860 if (! ok_const)
861 y = force_reg (GET_MODE (x), y);
862 }
863 }
864
865 switch (compare_code)
866 {
867 case EQ :
868 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
869 break;
870 case LT :
871 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
872 break;
873 case LTU :
874 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
875 break;
876
877 default:
878 gcc_unreachable ();
879 }
880
881 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
882 }
883
884 bool
885 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
886 {
887 machine_mode mode = GET_MODE (op0);
888
889 gcc_assert (mode == SImode);
890 switch (code)
891 {
892 case EQ:
893 if (!register_operand (op1, mode))
894 op1 = force_reg (mode, op1);
895
896 if (TARGET_M32RX || TARGET_M32R2)
897 {
898 if (!reg_or_zero_operand (op2, mode))
899 op2 = force_reg (mode, op2);
900
901 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
902 return true;
903 }
904 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
905 {
906 emit_insn (gen_seq_zero_insn (op0, op1));
907 return true;
908 }
909
910 if (!reg_or_eq_int16_operand (op2, mode))
911 op2 = force_reg (mode, op2);
912
913 emit_insn (gen_seq_insn (op0, op1, op2));
914 return true;
915
916 case NE:
917 if (!CONST_INT_P (op2)
918 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
919 {
920 rtx reg;
921
922 if (reload_completed || reload_in_progress)
923 return false;
924
925 reg = gen_reg_rtx (SImode);
926 emit_insn (gen_xorsi3 (reg, op1, op2));
927 op1 = reg;
928
929 if (!register_operand (op1, mode))
930 op1 = force_reg (mode, op1);
931
932 emit_insn (gen_sne_zero_insn (op0, op1));
933 return true;
934 }
935 return false;
936
937 case LT:
938 case GT:
939 if (code == GT)
940 {
941 rtx tmp = op2;
942 op2 = op1;
943 op1 = tmp;
944 code = LT;
945 }
946
947 if (!register_operand (op1, mode))
948 op1 = force_reg (mode, op1);
949
950 if (!reg_or_int16_operand (op2, mode))
951 op2 = force_reg (mode, op2);
952
953 emit_insn (gen_slt_insn (op0, op1, op2));
954 return true;
955
956 case LTU:
957 case GTU:
958 if (code == GTU)
959 {
960 rtx tmp = op2;
961 op2 = op1;
962 op1 = tmp;
963 code = LTU;
964 }
965
966 if (!register_operand (op1, mode))
967 op1 = force_reg (mode, op1);
968
969 if (!reg_or_int16_operand (op2, mode))
970 op2 = force_reg (mode, op2);
971
972 emit_insn (gen_sltu_insn (op0, op1, op2));
973 return true;
974
975 case GE:
976 case GEU:
977 if (!register_operand (op1, mode))
978 op1 = force_reg (mode, op1);
979
980 if (!reg_or_int16_operand (op2, mode))
981 op2 = force_reg (mode, op2);
982
983 if (code == GE)
984 emit_insn (gen_sge_insn (op0, op1, op2));
985 else
986 emit_insn (gen_sgeu_insn (op0, op1, op2));
987 return true;
988
989 case LE:
990 case LEU:
991 if (!register_operand (op1, mode))
992 op1 = force_reg (mode, op1);
993
994 if (CONST_INT_P (op2))
995 {
996 HOST_WIDE_INT value = INTVAL (op2);
997 if (value >= 2147483647)
998 {
999 emit_move_insn (op0, const1_rtx);
1000 return true;
1001 }
1002
1003 op2 = GEN_INT (value + 1);
1004 if (value < -32768 || value >= 32767)
1005 op2 = force_reg (mode, op2);
1006
1007 if (code == LEU)
1008 emit_insn (gen_sltu_insn (op0, op1, op2));
1009 else
1010 emit_insn (gen_slt_insn (op0, op1, op2));
1011 return true;
1012 }
1013
1014 if (!register_operand (op2, mode))
1015 op2 = force_reg (mode, op2);
1016
1017 if (code == LEU)
1018 emit_insn (gen_sleu_insn (op0, op1, op2));
1019 else
1020 emit_insn (gen_sle_insn (op0, op1, op2));
1021 return true;
1022
1023 default:
1024 gcc_unreachable ();
1025 }
1026 }
1027
1028 \f
1029 /* Split a 2 word move (DI or DF) into component parts. */
1030
1031 rtx
1032 gen_split_move_double (rtx operands[])
1033 {
1034 machine_mode mode = GET_MODE (operands[0]);
1035 rtx dest = operands[0];
1036 rtx src = operands[1];
1037 rtx val;
1038
1039 /* We might have (SUBREG (MEM)) here, so just get rid of the
1040 subregs to make this code simpler. It is safe to call
1041 alter_subreg any time after reload. */
1042 if (GET_CODE (dest) == SUBREG)
1043 alter_subreg (&dest, true);
1044 if (GET_CODE (src) == SUBREG)
1045 alter_subreg (&src, true);
1046
1047 start_sequence ();
1048 if (REG_P (dest))
1049 {
1050 int dregno = REGNO (dest);
1051
1052 /* Reg = reg. */
1053 if (REG_P (src))
1054 {
1055 int sregno = REGNO (src);
1056
1057 int reverse = (dregno == sregno + 1);
1058
1059 /* We normally copy the low-numbered register first. However, if
1060 the first register of operand 0 is the same as the second register of
1061 operand 1, we must copy in the opposite order. */
1062 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1063 operand_subword (src, reverse, TRUE, mode)));
1064
1065 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1066 operand_subword (src, !reverse, TRUE, mode)));
1067 }
1068
1069 /* Reg = constant. */
1070 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1071 {
1072 rtx words[2];
1073 split_double (src, &words[0], &words[1]);
1074 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1075 words[0]));
1076
1077 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1078 words[1]));
1079 }
1080
1081 /* Reg = mem. */
1082 else if (MEM_P (src))
1083 {
1084 /* If the high-address word is used in the address, we must load it
1085 last. Otherwise, load it first. */
1086 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1087
1088 /* We used to optimize loads through a single base register as
1089
1090 ld r1,r3+; ld r2,r3
1091
1092 if r3 were not used subsequently. However, the REG_NOTES aren't
1093 propagated correctly by the reload phase, and it can cause bad
1094 code to be generated. We could still try:
1095
1096 ld r1,r3+; ld r2,r3; addi r3,-4
1097
1098 which saves 2 bytes and doesn't force longword alignment. */
1099 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1100 adjust_address (src, SImode,
1101 reverse * UNITS_PER_WORD)));
1102
1103 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1104 adjust_address (src, SImode,
1105 !reverse * UNITS_PER_WORD)));
1106 }
1107 else
1108 gcc_unreachable ();
1109 }
1110
1111 /* Mem = reg. */
1112 /* We used to optimize stores through a single base register as
1113
1114 st r1,r3; st r2,+r3
1115
1116 if r3 were not used subsequently. However, the REG_NOTES aren't
1117 propagated correctly by the reload phase, and it can cause bad
1118 code to be generated. We could still try:
1119
1120 st r1,r3; st r2,+r3; addi r3,-4
1121
1122 which saves 2 bytes and doesn't force longword alignment. */
1123 else if (MEM_P (dest) && REG_P (src))
1124 {
1125 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1126 operand_subword (src, 0, TRUE, mode)));
1127
1128 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1129 operand_subword (src, 1, TRUE, mode)));
1130 }
1131
1132 else
1133 gcc_unreachable ();
1134
1135 val = get_insns ();
1136 end_sequence ();
1137 return val;
1138 }
1139
1140 \f
1141 static int
1142 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1143 tree type, bool named ATTRIBUTE_UNUSED)
1144 {
1145 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1146
1147 int words;
1148 unsigned int size =
1149 (((mode == BLKmode && type)
1150 ? (unsigned int) int_size_in_bytes (type)
1151 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1152 / UNITS_PER_WORD;
1153
1154 if (*cum >= M32R_MAX_PARM_REGS)
1155 words = 0;
1156 else if (*cum + size > M32R_MAX_PARM_REGS)
1157 words = (*cum + size) - M32R_MAX_PARM_REGS;
1158 else
1159 words = 0;
1160
1161 return words * UNITS_PER_WORD;
1162 }
1163
1164 /* The ROUND_ADVANCE* macros are local to this file. */
1165 /* Round SIZE up to a word boundary. */
1166 #define ROUND_ADVANCE(SIZE) \
1167 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1168
1169 /* Round arg MODE/TYPE up to the next word boundary. */
1170 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1171 ((MODE) == BLKmode \
1172 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1173 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1174
1175 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1176 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1177
1178 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1179 a reg. This includes arguments that have to be passed by reference as the
1180 pointer to them is passed in a reg if one is available (and that is what
1181 we're given).
1182 This macro is only used in this file. */
1183 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1184 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1185
1186 /* Determine where to put an argument to a function.
1187 Value is zero to push the argument on the stack,
1188 or a hard register in which to store the argument.
1189
1190 MODE is the argument's machine mode.
1191 TYPE is the data type of the argument (as a tree).
1192 This is null for libcalls where that information may
1193 not be available.
1194 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1195 the preceding args and about the function being called.
1196 NAMED is nonzero if this argument is a named parameter
1197 (otherwise it is an extra parameter matching an ellipsis). */
1198 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1199 and the rest are pushed. */
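/* Illustratively, if M32R_MAX_PARM_REGS is 4, a call f (a, b, c, d, e)
   passes a..d in the first four argument registers and e on the stack;
   an argument straddling that boundary is split, with the spilled tail
   accounted for by m32r_arg_partial_bytes above.  */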
1200
1201 static rtx
1202 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1203 const_tree type ATTRIBUTE_UNUSED,
1204 bool named ATTRIBUTE_UNUSED)
1205 {
1206 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1207
1208 return (PASS_IN_REG_P (*cum, mode, type)
1209 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1210 : NULL_RTX);
1211 }
1212
1213 /* Update the data in CUM to advance over an argument
1214 of mode MODE and data type TYPE.
1215 (TYPE is null for libcalls where that information may not be available.) */
1216
1217 static void
1218 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1219 const_tree type, bool named ATTRIBUTE_UNUSED)
1220 {
1221 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1222
1223 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1224 + ROUND_ADVANCE_ARG (mode, type));
1225 }
1226
1227 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1228
1229 static bool
1230 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1231 {
1232 cumulative_args_t dummy = pack_cumulative_args (NULL);
1233
1234 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1235 }
1236
1237 /* Worker function for TARGET_FUNCTION_VALUE. */
1238
1239 static rtx
1240 m32r_function_value (const_tree valtype,
1241 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1242 bool outgoing ATTRIBUTE_UNUSED)
1243 {
1244 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1245 }
1246
1247 /* Worker function for TARGET_LIBCALL_VALUE. */
1248
1249 static rtx
1250 m32r_libcall_value (machine_mode mode,
1251 const_rtx fun ATTRIBUTE_UNUSED)
1252 {
1253 return gen_rtx_REG (mode, 0);
1254 }
1255
1256 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1257
1258 ??? What about r1 in DI/DF values. */
1259
1260 static bool
1261 m32r_function_value_regno_p (const unsigned int regno)
1262 {
1263 return (regno == 0);
1264 }
1265
1266 /* Do any needed setup for a variadic function. For the M32R, we must
1267 create a register parameter block, and then copy any anonymous arguments
1268 in registers to memory.
1269
1270 CUM has not been updated for the last named argument which has type TYPE
1271 and mode MODE, and we rely on this fact. */
1272
1273 static void
1274 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1275 tree type, int *pretend_size, int no_rtl)
1276 {
1277 int first_anon_arg;
1278
1279 if (no_rtl)
1280 return;
1281
1282 /* All BLKmode values are passed by reference. */
1283 gcc_assert (mode != BLKmode);
1284
1285 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1286 + ROUND_ADVANCE_ARG (mode, type));
1287
1288 if (first_anon_arg < M32R_MAX_PARM_REGS)
1289 {
1290 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1291 int first_reg_offset = first_anon_arg;
1292 /* Size in words to "pretend" allocate. */
1293 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1294 rtx regblock;
1295
1296 regblock = gen_frame_mem (BLKmode,
1297 plus_constant (Pmode, arg_pointer_rtx,
1298 FIRST_PARM_OFFSET (0)));
1299 set_mem_alias_set (regblock, get_varargs_alias_set ());
1300 move_block_from_reg (first_reg_offset, regblock, size);
1301
1302 *pretend_size = (size * UNITS_PER_WORD);
1303 }
1304 }
1305
1306 \f
1307 /* Return true if INSN is a real instruction (not a note, USE or CLOBBER). */
1308
1309 static int
1310 m32r_is_insn (rtx insn)
1311 {
1312 return (NONDEBUG_INSN_P (insn)
1313 && GET_CODE (PATTERN (insn)) != USE
1314 && GET_CODE (PATTERN (insn)) != CLOBBER);
1315 }
1316
1317 /* Increase the priority of long instructions so that the
1318 short instructions are scheduled ahead of the long ones. */
1319
1320 static int
1321 m32r_adjust_priority (rtx_insn *insn, int priority)
1322 {
1323 if (m32r_is_insn (insn)
1324 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1325 priority <<= 3;
1326
1327 return priority;
1328 }
1329
1330 \f
1331 /* Indicate how many instructions can be issued at the same time.
1332 This is sort of a lie. The m32r can issue only 1 long insn at
1333 once, but it can issue 2 short insns. The default therefore is
1334 set at 2, but this can be overridden by the command line option
1335 -missue-rate=1. */
1336
1337 static int
1338 m32r_issue_rate (void)
1339 {
1340 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1341 }
1342 \f
1343 /* Cost functions. */
1344 /* Memory is 3 times as expensive as registers.
1345 ??? Is that the right way to look at it? */
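/* The values returned below are relative to the (default) register-move
   cost of 2, so a word-sized memory move costs 6 and anything larger 12.  */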
1346
1347 static int
1348 m32r_memory_move_cost (machine_mode mode,
1349 reg_class_t rclass ATTRIBUTE_UNUSED,
1350 bool in ATTRIBUTE_UNUSED)
1351 {
1352 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1353 return 6;
1354 else
1355 return 12;
1356 }
1357
1358 static bool
1359 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1360 int outer_code ATTRIBUTE_UNUSED,
1361 int opno ATTRIBUTE_UNUSED, int *total,
1362 bool speed ATTRIBUTE_UNUSED)
1363 {
1364 int code = GET_CODE (x);
1365
1366 switch (code)
1367 {
1368 /* Small integers are as cheap as registers. 4 byte values can be
1369 fetched as immediate constants - let's give that the cost of an
1370 extra insn. */
1371 case CONST_INT:
1372 if (INT16_P (INTVAL (x)))
1373 {
1374 *total = 0;
1375 return true;
1376 }
1377 /* FALLTHRU */
1378
1379 case CONST:
1380 case LABEL_REF:
1381 case SYMBOL_REF:
1382 *total = COSTS_N_INSNS (1);
1383 return true;
1384
1385 case CONST_DOUBLE:
1386 {
1387 rtx high, low;
1388
1389 split_double (x, &high, &low);
1390 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1391 + !INT16_P (INTVAL (low)));
1392 return true;
1393 }
1394
1395 case MULT:
1396 *total = COSTS_N_INSNS (3);
1397 return true;
1398
1399 case DIV:
1400 case UDIV:
1401 case MOD:
1402 case UMOD:
1403 *total = COSTS_N_INSNS (10);
1404 return true;
1405
1406 default:
1407 return false;
1408 }
1409 }
1410 \f
1411 /* Type of function DECL.
1412
1413 The result is cached. To reset the cache at the end of a function,
1414 call with DECL = NULL_TREE. */
1415
1416 enum m32r_function_type
1417 m32r_compute_function_type (tree decl)
1418 {
1419 /* Cached value. */
1420 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1421 /* Last function we were called for. */
1422 static tree last_fn = NULL_TREE;
1423
1424 /* Resetting the cached value? */
1425 if (decl == NULL_TREE)
1426 {
1427 fn_type = M32R_FUNCTION_UNKNOWN;
1428 last_fn = NULL_TREE;
1429 return fn_type;
1430 }
1431
1432 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1433 return fn_type;
1434
1435 /* Compute function type. */
1436 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1437 ? M32R_FUNCTION_INTERRUPT
1438 : M32R_FUNCTION_NORMAL);
1439
1440 last_fn = decl;
1441 return fn_type;
1442 }
1443 \f/* Function prologue/epilogue handlers. */
1444
1445 /* M32R stack frames look like:
1446
1447 Before call After call
1448 +-----------------------+ +-----------------------+
1449 | | | |
1450 high | local variables, | | local variables, |
1451 mem | reg save area, etc. | | reg save area, etc. |
1452 | | | |
1453 +-----------------------+ +-----------------------+
1454 | | | |
1455 | arguments on stack. | | arguments on stack. |
1456 | | | |
1457 SP+0->+-----------------------+ +-----------------------+
1458 | reg parm save area, |
1459 | only created for |
1460 | variable argument |
1461 | functions |
1462 +-----------------------+
1463 | previous frame ptr |
1464 +-----------------------+
1465 | |
1466 | register save area |
1467 | |
1468 +-----------------------+
1469 | return address |
1470 +-----------------------+
1471 | |
1472 | local variables |
1473 | |
1474 +-----------------------+
1475 | |
1476 | alloca allocations |
1477 | |
1478 +-----------------------+
1479 | |
1480 low | arguments on stack |
1481 memory | |
1482 SP+0->+-----------------------+
1483
1484 Notes:
1485 1) The "reg parm save area" does not exist for non variable argument fns.
1486 2) The "reg parm save area" can be eliminated completely if we saved regs
1487 containing anonymous args separately but that complicates things too
1488 much (so it's not done).
1489 3) The return address is saved after the register save area so as to have as
1490 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1491
1492 /* Structure to be filled in by m32r_compute_frame_size with register
1493 save masks, and offsets for the current function. */
1494 struct m32r_frame_info
1495 {
1496 unsigned int total_size; /* # bytes that the entire frame takes up. */
1497 unsigned int extra_size; /* # bytes of extra stuff. */
1498 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1499 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1500 unsigned int reg_size; /* # bytes needed to store regs. */
1501 unsigned int var_size; /* # bytes that variables take up. */
1502 unsigned int gmask; /* Mask of saved gp registers. */
1503 unsigned int save_fp; /* Nonzero if fp must be saved. */
1504 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1505 int initialized; /* Nonzero if frame size already calculated. */
1506 };
1507
1508 /* Current frame information calculated by m32r_compute_frame_size. */
1509 static struct m32r_frame_info current_frame_info;
1510
1511 /* Zero structure to initialize current_frame_info. */
1512 static struct m32r_frame_info zero_frame_info;
1513
1514 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1515 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1516
1517 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1518 The return address and frame pointer are treated separately.
1519 Don't consider them here. */
1520 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1521 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1522 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1523
1524 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1525 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1526
1527 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1528 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1529
1530 /* Return the bytes needed to compute the frame pointer from the current
1531 stack pointer.
1532
1533 SIZE is the size needed for local variables. */
1534
1535 unsigned int
1536 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1537 {
1538 unsigned int regno;
1539 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1540 unsigned int reg_size;
1541 unsigned int gmask;
1542 enum m32r_function_type fn_type;
1543 int interrupt_p;
1544 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1545 | crtl->profile);
1546
1547 var_size = M32R_STACK_ALIGN (size);
1548 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1549 pretend_size = crtl->args.pretend_args_size;
1550 extra_size = FIRST_PARM_OFFSET (0);
1551 total_size = extra_size + pretend_size + args_size + var_size;
1552 reg_size = 0;
1553 gmask = 0;
1554
1555 /* See if this is an interrupt handler. Call used registers must be saved
1556 for them too. */
1557 fn_type = m32r_compute_function_type (current_function_decl);
1558 interrupt_p = M32R_INTERRUPT_P (fn_type);
1559
1560 /* Calculate space needed for registers. */
1561 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1562 {
1563 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1564 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1565 {
1566 reg_size += UNITS_PER_WORD;
1567 gmask |= 1 << regno;
1568 }
1569 }
1570
1571 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1572 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1573
1574 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1575 * UNITS_PER_WORD);
1576 total_size += reg_size;
1577
1578 /* ??? Not sure this is necessary, and I don't think the epilogue
1579 handler will do the right thing if this changes total_size. */
1580 total_size = M32R_STACK_ALIGN (total_size);
1581
1582 /* frame_size = total_size - (pretend_size + reg_size); */
1583
1584 /* Save computed information. */
1585 current_frame_info.total_size = total_size;
1586 current_frame_info.extra_size = extra_size;
1587 current_frame_info.pretend_size = pretend_size;
1588 current_frame_info.var_size = var_size;
1589 current_frame_info.args_size = args_size;
1590 current_frame_info.reg_size = reg_size;
1591 current_frame_info.gmask = gmask;
1592 current_frame_info.initialized = reload_completed;
1593
1594 /* Ok, we're done. */
1595 return total_size;
1596 }
1597
1598 /* Worker function for TARGET_CAN_ELIMINATE. */
1599
1600 bool
1601 m32r_can_eliminate (const int from, const int to)
1602 {
1603 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1604 ? ! frame_pointer_needed
1605 : true);
1606 }
1607
1608 \f
1609 /* The table we use to reference PIC data. */
1610 static rtx global_offset_table;
1611
1612 static void
1613 m32r_reload_lr (rtx sp, int size)
1614 {
1615 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1616
1617 if (size == 0)
1618 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1619 else if (size < 32768)
1620 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1621 gen_rtx_PLUS (Pmode, sp,
1622 GEN_INT (size)))));
1623 else
1624 {
1625 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1626
1627 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1628 emit_insn (gen_addsi3 (tmp, tmp, sp));
1629 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1630 }
1631
1632 emit_use (lr);
1633 }
1634
1635 void
1636 m32r_load_pic_register (void)
1637 {
1638 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1639 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1640 GEN_INT (TARGET_MODEL_SMALL)));
1641
1642 /* Need to emit this whether or not we obey regdecls,
1643 since setjmp/longjmp can cause life info to screw up. */
1644 emit_use (pic_offset_table_rtx);
1645 }
1646
1647 /* Expand the m32r prologue as a series of insns. */
1648
1649 void
1650 m32r_expand_prologue (void)
1651 {
1652 int regno;
1653 int frame_size;
1654 unsigned int gmask;
1655 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1656 | crtl->profile);
1657
1658 if (! current_frame_info.initialized)
1659 m32r_compute_frame_size (get_frame_size ());
1660
1661 if (flag_stack_usage_info)
1662 current_function_static_stack_size = current_frame_info.total_size;
1663
1664 gmask = current_frame_info.gmask;
1665
1666 /* These cases shouldn't happen. Catch them now. */
1667 gcc_assert (current_frame_info.total_size || !gmask);
1668
1669 /* Allocate space for register arguments if this is a variadic function. */
1670 if (current_frame_info.pretend_size != 0)
1671 {
1672 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1673 the wrong result on a 64-bit host. */
1674 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1675 emit_insn (gen_addsi3 (stack_pointer_rtx,
1676 stack_pointer_rtx,
1677 GEN_INT (-pretend_size)));
1678 }
1679
1680 /* Save any registers we need to and set up fp. */
1681 if (current_frame_info.save_fp)
1682 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1683
1684 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1685
1686 /* Save any needed call-saved regs (and call-used if this is an
1687 interrupt handler). */
1688 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1689 {
1690 if ((gmask & (1 << regno)) != 0)
1691 emit_insn (gen_movsi_push (stack_pointer_rtx,
1692 gen_rtx_REG (Pmode, regno)));
1693 }
1694
1695 if (current_frame_info.save_lr)
1696 emit_insn (gen_movsi_push (stack_pointer_rtx,
1697 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1698
1699 /* Allocate the stack frame. */
1700 frame_size = (current_frame_info.total_size
1701 - (current_frame_info.pretend_size
1702 + current_frame_info.reg_size));
1703
1704 if (frame_size == 0)
1705 ; /* Nothing to do. */
1706 else if (frame_size <= 32768)
1707 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1708 GEN_INT (-frame_size)));
1709 else
1710 {
1711 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1712
1713 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1714 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1715 }
1716
1717 if (frame_pointer_needed)
1718 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1719
1720 if (crtl->profile)
1721 /* Push lr for mcount (form_pc, x). */
1722 emit_insn (gen_movsi_push (stack_pointer_rtx,
1723 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1724
1725 if (pic_reg_used)
1726 {
1727 m32r_load_pic_register ();
1728 m32r_reload_lr (stack_pointer_rtx,
1729 (crtl->profile ? 0 : frame_size));
1730 }
1731
1732 if (crtl->profile && !pic_reg_used)
1733 emit_insn (gen_blockage ());
1734 }
1735
1736 \f
1737 /* Set up the stack and frame pointer (if desired) for the function.
1738 Note, if this is changed, you need to mirror the changes in
1739 m32r_compute_frame_size which calculates the prolog size. */
1740
1741 static void
1742 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1743 {
1744 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1745
1746 /* If this is an interrupt handler, mark it as such. */
1747 if (M32R_INTERRUPT_P (fn_type))
1748 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1749
1750 if (! current_frame_info.initialized)
1751 m32r_compute_frame_size (size);
1752
1753 /* This is only for the human reader. */
1754 fprintf (file,
1755 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1756 ASM_COMMENT_START,
1757 current_frame_info.var_size,
1758 current_frame_info.reg_size / 4,
1759 current_frame_info.args_size,
1760 current_frame_info.extra_size);
1761 }
1762 \f
1763 /* Output RTL to pop register REGNO from the stack. */
1764
1765 static void
1766 pop (int regno)
1767 {
1768 rtx x;
1769
1770 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1771 stack_pointer_rtx));
1772 add_reg_note (x, REG_INC, stack_pointer_rtx);
1773 }
1774
1775 /* Expand the m32r epilogue as a series of insns. */
1776
1777 void
1778 m32r_expand_epilogue (void)
1779 {
1780 int regno;
1781 int noepilogue = FALSE;
1782 int total_size;
1783
1784 gcc_assert (current_frame_info.initialized);
1785 total_size = current_frame_info.total_size;
1786
1787 if (total_size == 0)
1788 {
1789 rtx insn = get_last_insn ();
1790
1791 /* If the last insn was a BARRIER, we don't have to write any code
1792 because a jump (aka return) was put there. */
1793 if (insn && NOTE_P (insn))
1794 insn = prev_nonnote_insn (insn);
1795 if (insn && BARRIER_P (insn))
1796 noepilogue = TRUE;
1797 }
1798
1799 if (!noepilogue)
1800 {
1801 unsigned int var_size = current_frame_info.var_size;
1802 unsigned int args_size = current_frame_info.args_size;
1803 unsigned int gmask = current_frame_info.gmask;
1804 int can_trust_sp_p = !cfun->calls_alloca;
1805
1806 if (flag_exceptions)
1807 emit_insn (gen_blockage ());
1808
1809 /* The first thing to do is point the sp at the bottom of the register
1810 save area. */
1811 if (can_trust_sp_p)
1812 {
1813 unsigned int reg_offset = var_size + args_size;
1814
1815 if (reg_offset == 0)
1816 ; /* Nothing to do. */
1817 else if (reg_offset < 32768)
1818 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1819 GEN_INT (reg_offset)));
1820 else
1821 {
1822 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1823
1824 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1825 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1826 tmp));
1827 }
1828 }
1829 else if (frame_pointer_needed)
1830 {
1831 unsigned int reg_offset = var_size + args_size;
1832
1833 if (reg_offset == 0)
1834 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1835 else if (reg_offset < 32768)
1836 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1837 GEN_INT (reg_offset)));
1838 else
1839 {
1840 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1841
1842 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1843 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1844 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1845 tmp));
1846 }
1847 }
1848 else
1849 gcc_unreachable ();
1850
1851 if (current_frame_info.save_lr)
1852 pop (RETURN_ADDR_REGNUM);
1853
1854 /* Restore any saved registers, in reverse order of course. */
1855 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1856 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1857 {
1858 if ((gmask & (1L << regno)) != 0)
1859 pop (regno);
1860 }
1861
1862 if (current_frame_info.save_fp)
1863 pop (FRAME_POINTER_REGNUM);
1864
1865 /* Remove varargs area if present. */
1866 if (current_frame_info.pretend_size != 0)
1867 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1868 GEN_INT (current_frame_info.pretend_size)));
1869
1870 emit_insn (gen_blockage ());
1871 }
1872 }
1873
1874 /* Do any necessary cleanup after a function to restore stack, frame,
1875 and regs. */
1876
1877 static void
1878 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1879 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1880 {
1881 /* Reset state info for each function. */
1882 current_frame_info = zero_frame_info;
1883 m32r_compute_function_type (NULL_TREE);
1884 }
1885 \f
1886 /* Return nonzero if this function is known to have a null or 1 instruction
1887 epilogue. */
1888
1889 int
1890 direct_return (void)
1891 {
1892 if (!reload_completed)
1893 return FALSE;
1894
1895 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1896 return FALSE;
1897
1898 if (! current_frame_info.initialized)
1899 m32r_compute_frame_size (get_frame_size ());
1900
1901 return current_frame_info.total_size == 0;
1902 }
1903
1904 \f
1905 /* PIC. */
1906
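/* Return nonzero if X can be used directly as an operand when generating
PIC code, i.e. X is not a SYMBOL_REF, LABEL_REF, or a CONST of one of
these plus an integer; such symbolic references must first go through
m32r_legitimize_pic_address below. */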
1907 int
1908 m32r_legitimate_pic_operand_p (rtx x)
1909 {
1910 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1911 return 0;
1912
1913 if (GET_CODE (x) == CONST
1914 && GET_CODE (XEXP (x, 0)) == PLUS
1915 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1916 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1917 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1918 return 0;
1919
1920 return 1;
1921 }
1922
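/* Legitimize a symbolic address for PIC. A sketch of the code below:
labels and local symbols are reached by adding a GOT-relative offset
(gen_gotoff_load_addr) to the PIC register; other symbols load the
address of their GOT slot (gen_pic_load_addr), add the PIC register, and
then fetch the real address from that slot. CONST expressions are split
into base plus offset and each part is legitimized recursively. REG, if
nonnull, is the register to use for the result. */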
1923 rtx
1924 m32r_legitimize_pic_address (rtx orig, rtx reg)
1925 {
1926 #ifdef DEBUG_PIC
1927 printf("m32r_legitimize_pic_address()\n");
1928 #endif
1929
1930 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1931 {
1932 rtx pic_ref, address;
1933 int subregs = 0;
1934
1935 if (reg == 0)
1936 {
1937 gcc_assert (!reload_in_progress && !reload_completed);
1938 reg = gen_reg_rtx (Pmode);
1939
1940 subregs = 1;
1941 }
1942
1943 if (subregs)
1944 address = gen_reg_rtx (Pmode);
1945 else
1946 address = reg;
1947
1948 crtl->uses_pic_offset_table = 1;
1949
1950 if (GET_CODE (orig) == LABEL_REF
1951 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1952 {
1953 emit_insn (gen_gotoff_load_addr (reg, orig));
1954 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1955 return reg;
1956 }
1957
1958 emit_insn (gen_pic_load_addr (address, orig));
1959
1960 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1961 pic_ref = gen_const_mem (Pmode, address);
1962 emit_move_insn (reg, pic_ref);
1963 return reg;
1964 }
1965 else if (GET_CODE (orig) == CONST)
1966 {
1967 rtx base, offset;
1968
1969 if (GET_CODE (XEXP (orig, 0)) == PLUS
1970 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1971 return orig;
1972
1973 if (reg == 0)
1974 {
1975 gcc_assert (!reload_in_progress && !reload_completed);
1976 reg = gen_reg_rtx (Pmode);
1977 }
1978
1979 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1980 {
1981 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1982 if (base == reg)
1983 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1984 else
1985 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1986 }
1987 else
1988 return orig;
1989
1990 if (CONST_INT_P (offset))
1991 {
1992 if (INT16_P (INTVAL (offset)))
1993 return plus_constant (Pmode, base, INTVAL (offset));
1994 else
1995 {
1996 gcc_assert (! reload_in_progress && ! reload_completed);
1997 offset = force_reg (Pmode, offset);
1998 }
1999 }
2000
2001 return gen_rtx_PLUS (Pmode, base, offset);
2002 }
2003
2004 return orig;
2005 }
2006
2007 static rtx
2008 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2009 machine_mode mode ATTRIBUTE_UNUSED)
2010 {
2011 if (flag_pic)
2012 return m32r_legitimize_pic_address (x, NULL_RTX);
2013 else
2014 return x;
2015 }
2016
2017 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2018
2019 static bool
2020 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2021 {
2022 if (GET_CODE (addr) == LO_SUM)
2023 return true;
2024
2025 return false;
2026 }
2027 \f
2028 /* Nested function support. */
2029
2030 /* Emit RTL insns to initialize the variable parts of a trampoline.
2031 FNADDR is an RTX for the address of the function's pure code.
2032 CXT is an RTX for the static chain value for the function. */
2033
2034 void
2035 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2036 rtx fnaddr ATTRIBUTE_UNUSED,
2037 rtx cxt ATTRIBUTE_UNUSED)
2038 {
2039 }
2040 \f
2041 static void
2042 m32r_file_start (void)
2043 {
2044 default_file_start ();
2045
2046 if (flag_verbose_asm)
2047 fprintf (asm_out_file,
2048 "%s M32R/D special options: -G %d\n",
2049 ASM_COMMENT_START, g_switch_value);
2050
2051 if (TARGET_LITTLE_ENDIAN)
2052 fprintf (asm_out_file, "\t.little\n");
2053 }
2054 \f
2055 /* Print operand X (an rtx) in assembler syntax to file FILE.
2056 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2057 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2058
2059 static void
2060 m32r_print_operand (FILE * file, rtx x, int code)
2061 {
2062 rtx addr;
2063
2064 switch (code)
2065 {
2066 /* The 's' and 'p' codes are used by output_block_move() to
2067 indicate pre-increment 's'tores and 'p'ost-increment loads. */
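/* E.g. the fragments "%s0" and "%p1" in m32r_output_block_move() expand
to something like "@+r0" and "@r1+" respectively (register numbers are
illustrative). */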
2068 case 's':
2069 if (REG_P (x))
2070 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2071 else
2072 output_operand_lossage ("invalid operand to %%s code");
2073 return;
2074
2075 case 'p':
2076 if (REG_P (x))
2077 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2078 else
2079 output_operand_lossage ("invalid operand to %%p code");
2080 return;
2081
2082 case 'R' :
2083 /* Write second word of DImode or DFmode reference,
2084 register or memory. */
2085 if (REG_P (x))
2086 fputs (reg_names[REGNO (x)+1], file);
2087 else if (MEM_P (x))
2088 {
2089 fprintf (file, "@(");
2090 /* Handle possible auto-increment. Since it is pre-increment and
2091 we have already done it, we can just use an offset of four. */
2092 /* ??? This is taken from rs6000.c I think. I don't think it is
2093 currently necessary, but keep it around. */
2094 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2095 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2096 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2097 else
2098 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2099 fputc (')', file);
2100 }
2101 else
2102 output_operand_lossage ("invalid operand to %%R code");
2103 return;
2104
2105 case 'H' : /* High word. */
2106 case 'L' : /* Low word. */
2107 if (REG_P (x))
2108 {
2109 /* L = least significant word, H = most significant word. */
2110 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2111 fputs (reg_names[REGNO (x)], file);
2112 else
2113 fputs (reg_names[REGNO (x)+1], file);
2114 }
2115 else if (CONST_INT_P (x)
2116 || GET_CODE (x) == CONST_DOUBLE)
2117 {
2118 rtx first, second;
2119
2120 split_double (x, &first, &second);
2121 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2122 code == 'L' ? INTVAL (first) : INTVAL (second));
2123 }
2124 else
2125 output_operand_lossage ("invalid operand to %%H/%%L code");
2126 return;
2127
2128 case 'A' :
2129 {
2130 char str[30];
2131
2132 if (GET_CODE (x) != CONST_DOUBLE
2133 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2134 fatal_insn ("bad insn for 'A'", x);
2135
2136 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2137 fprintf (file, "%s", str);
2138 return;
2139 }
2140
2141 case 'B' : /* Bottom half. */
2142 case 'T' : /* Top half. */
2143 /* Output the argument to a `seth' insn (sets the Top half-word).
2144 For constants output arguments to a seth/or3 pair to set Top and
2145 Bottom halves. For symbols output arguments to a seth/add3 pair to
2146 set Top and Bottom halves. The difference exists because for
2147 constants seth/or3 is more readable but for symbols we need to use
2148 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
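/* Roughly, the constant 0x12345678 is materialized as "seth rN, #0x1234"
/ "or3 rN, rN, #0x5678", while a symbol foo becomes "seth rN, #shigh(foo)"
/ "add3 rN, rN, #low(foo)" (rN is an illustrative register). */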
2149 switch (GET_CODE (x))
2150 {
2151 case CONST_INT :
2152 case CONST_DOUBLE :
2153 {
2154 rtx first, second;
2155
2156 split_double (x, &first, &second);
2157 x = WORDS_BIG_ENDIAN ? second : first;
2158 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2159 (code == 'B'
2160 ? INTVAL (x) & 0xffff
2161 : (INTVAL (x) >> 16) & 0xffff));
2162 }
2163 return;
2164 case CONST :
2165 case SYMBOL_REF :
2166 if (code == 'B'
2167 && small_data_operand (x, VOIDmode))
2168 {
2169 fputs ("sda(", file);
2170 output_addr_const (file, x);
2171 fputc (')', file);
2172 return;
2173 }
2174 /* fall through */
2175 case LABEL_REF :
2176 fputs (code == 'T' ? "shigh(" : "low(", file);
2177 output_addr_const (file, x);
2178 fputc (')', file);
2179 return;
2180 default :
2181 output_operand_lossage ("invalid operand to %%T/%%B code");
2182 return;
2183 }
2184 break;
2185
2186 case 'U' :
2187 /* ??? wip */
2188 /* Output a load/store with update indicator if appropriate. */
2189 if (MEM_P (x))
2190 {
2191 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2192 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2193 fputs (".a", file);
2194 }
2195 else
2196 output_operand_lossage ("invalid operand to %%U code");
2197 return;
2198
2199 case 'N' :
2200 /* Print a constant value negated. */
2201 if (CONST_INT_P (x))
2202 output_addr_const (file, GEN_INT (- INTVAL (x)));
2203 else
2204 output_operand_lossage ("invalid operand to %%N code");
2205 return;
2206
2207 case 'X' :
2208 /* Print a const_int in hex. Used in comments. */
2209 if (CONST_INT_P (x))
2210 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2211 return;
2212
2213 case '#' :
2214 fputs (IMMEDIATE_PREFIX, file);
2215 return;
2216
2217 case 0 :
2218 /* Do nothing special. */
2219 break;
2220
2221 default :
2222 /* Unknown flag. */
2223 output_operand_lossage ("invalid operand output code");
2224 }
2225
2226 switch (GET_CODE (x))
2227 {
2228 case REG :
2229 fputs (reg_names[REGNO (x)], file);
2230 break;
2231
2232 case MEM :
2233 addr = XEXP (x, 0);
2234 if (GET_CODE (addr) == PRE_INC)
2235 {
2236 if (!REG_P (XEXP (addr, 0)))
2237 fatal_insn ("pre-increment address is not a register", x);
2238
2239 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2240 }
2241 else if (GET_CODE (addr) == PRE_DEC)
2242 {
2243 if (!REG_P (XEXP (addr, 0)))
2244 fatal_insn ("pre-decrement address is not a register", x);
2245
2246 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2247 }
2248 else if (GET_CODE (addr) == POST_INC)
2249 {
2250 if (!REG_P (XEXP (addr, 0)))
2251 fatal_insn ("post-increment address is not a register", x);
2252
2253 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2254 }
2255 else
2256 {
2257 fputs ("@(", file);
2258 output_address (XEXP (x, 0));
2259 fputc (')', file);
2260 }
2261 break;
2262
2263 case CONST_DOUBLE :
2264 /* We handle SFmode constants here as output_addr_const doesn't. */
2265 if (GET_MODE (x) == SFmode)
2266 {
2267 long l;
2268
2269 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2270 fprintf (file, "0x%08lx", l);
2271 break;
2272 }
2273
2274 /* Fall through. Let output_addr_const deal with it. */
2275
2276 default :
2277 output_addr_const (file, x);
2278 break;
2279 }
2280 }
2281
2282 /* Print a memory address as an operand to reference that memory location. */
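/* For example, an address of the form (plus (reg) (const_int 4)) is
printed as "4,r4" and a LO_SUM referencing small data as "sda(sym),r4";
the enclosing "@(...)" is added by the MEM case of m32r_print_operand
above (register numbers are illustrative). */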
2283
2284 static void
2285 m32r_print_operand_address (FILE * file, rtx addr)
2286 {
2287 rtx base;
2288 rtx index = 0;
2289 int offset = 0;
2290
2291 switch (GET_CODE (addr))
2292 {
2293 case REG :
2294 fputs (reg_names[REGNO (addr)], file);
2295 break;
2296
2297 case PLUS :
2298 if (CONST_INT_P (XEXP (addr, 0)))
2299 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2300 else if (CONST_INT_P (XEXP (addr, 1)))
2301 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2302 else
2303 base = XEXP (addr, 0), index = XEXP (addr, 1);
2304 if (REG_P (base))
2305 {
2306 /* Print the offset first (if present) to conform to the manual. */
2307 if (index == 0)
2308 {
2309 if (offset != 0)
2310 fprintf (file, "%d,", offset);
2311 fputs (reg_names[REGNO (base)], file);
2312 }
2313 /* The chip doesn't support this, but left in for generality. */
2314 else if (REG_P (index))
2315 fprintf (file, "%s,%s",
2316 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2317 /* Not sure this can happen, but leave in for now. */
2318 else if (GET_CODE (index) == SYMBOL_REF)
2319 {
2320 output_addr_const (file, index);
2321 fputc (',', file);
2322 fputs (reg_names[REGNO (base)], file);
2323 }
2324 else
2325 fatal_insn ("bad address", addr);
2326 }
2327 else if (GET_CODE (base) == LO_SUM)
2328 {
2329 gcc_assert (!index && REG_P (XEXP (base, 0)));
2330 if (small_data_operand (XEXP (base, 1), VOIDmode))
2331 fputs ("sda(", file);
2332 else
2333 fputs ("low(", file);
2334 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2335 offset));
2336 fputs ("),", file);
2337 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2338 }
2339 else
2340 fatal_insn ("bad address", addr);
2341 break;
2342
2343 case LO_SUM :
2344 if (!REG_P (XEXP (addr, 0)))
2345 fatal_insn ("lo_sum not of register", addr);
2346 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2347 fputs ("sda(", file);
2348 else
2349 fputs ("low(", file);
2350 output_addr_const (file, XEXP (addr, 1));
2351 fputs ("),", file);
2352 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2353 break;
2354
2355 case PRE_INC : /* Assume SImode. */
2356 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2357 break;
2358
2359 case PRE_DEC : /* Assume SImode. */
2360 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2361 break;
2362
2363 case POST_INC : /* Assume SImode. */
2364 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2365 break;
2366
2367 default :
2368 output_addr_const (file, addr);
2369 break;
2370 }
2371 }
2372
2373 static bool
2374 m32r_print_operand_punct_valid_p (unsigned char code)
2375 {
2376 return m32r_punct_chars[code];
2377 }
2378
2379 /* Return true if the operands are the constants 0 and 1. */
2380
2381 int
2382 zero_and_one (rtx operand1, rtx operand2)
2383 {
2384 return
2385 CONST_INT_P (operand1)
2386 && CONST_INT_P (operand2)
2387 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2388 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2389 }
2390
2391 /* Generate the correct assembler code to handle the conditional loading of a
2392 value into a register. It is known that the operands satisfy the
2393 conditional_move_operand() predicate. The destination is operand[0].
2394 The condition is operand [1]. The 'true' value is operand [2] and the
2395 'false' value is operand [3]. */
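/* The generated sequence is simply "mvfc <dest>, cbr", optionally
followed by "xor3 <dest>, <dest>, #1" when the value selected for a true
condition is 0 (see below). */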
2396
2397 char *
2398 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2399 {
2400 static char buffer [100];
2401 const char * dest = reg_names [REGNO (operands [0])];
2402
2403 buffer [0] = 0;
2404
2405 /* Destination must be a register. */
2406 gcc_assert (REG_P (operands [0]));
2407 gcc_assert (conditional_move_operand (operands [2], SImode));
2408 gcc_assert (conditional_move_operand (operands [3], SImode));
2409
2410 /* Check to see if the test is reversed. */
2411 if (GET_CODE (operands [1]) == NE)
2412 {
2413 rtx tmp = operands [2];
2414 operands [2] = operands [3];
2415 operands [3] = tmp;
2416 }
2417
2418 sprintf (buffer, "mvfc %s, cbr", dest);
2419
2420 /* If the true value was '0' then we need to invert the results of the move. */
2421 if (INTVAL (operands [2]) == 0)
2422 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2423 dest, dest);
2424
2425 return buffer;
2426 }
2427
2428 /* Returns true if the registers contained in the two
2429 rtl expressions are different. */
2430
2431 int
2432 m32r_not_same_reg (rtx a, rtx b)
2433 {
2434 int reg_a = -1;
2435 int reg_b = -2;
2436
2437 while (GET_CODE (a) == SUBREG)
2438 a = SUBREG_REG (a);
2439
2440 if (REG_P (a))
2441 reg_a = REGNO (a);
2442
2443 while (GET_CODE (b) == SUBREG)
2444 b = SUBREG_REG (b);
2445
2446 if (REG_P (b))
2447 reg_b = REGNO (b);
2448
2449 return reg_a != reg_b;
2450 }
2451
2452 \f
2453 rtx
2454 m32r_function_symbol (const char *name)
2455 {
2456 int extra_flags = 0;
2457 enum m32r_model model;
2458 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2459
2460 if (TARGET_MODEL_SMALL)
2461 model = M32R_MODEL_SMALL;
2462 else if (TARGET_MODEL_MEDIUM)
2463 model = M32R_MODEL_MEDIUM;
2464 else if (TARGET_MODEL_LARGE)
2465 model = M32R_MODEL_LARGE;
2466 else
2467 gcc_unreachable (); /* Shouldn't happen. */
2468 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2469
2470 if (extra_flags)
2471 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2472
2473 return sym;
2474 }
2475
2476 /* Use a library function to move some bytes. */
2477
2478 static void
2479 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2480 {
2481 /* We want to pass the size as Pmode, which will normally be SImode
2482 but will be DImode if we are using 64-bit longs and pointers. */
2483 if (GET_MODE (bytes_rtx) != VOIDmode
2484 && GET_MODE (bytes_rtx) != Pmode)
2485 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2486
2487 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2488 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2489 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2490 TYPE_UNSIGNED (sizetype)),
2491 TYPE_MODE (sizetype));
2492 }
2493
2494 /* Expand string/block move operations.
2495
2496 operands[0] is the pointer to the destination.
2497 operands[1] is the pointer to the source.
2498 operands[2] is the number of bytes to move.
2499 operands[3] is the alignment.
2500
2501 Returns 1 upon success, 0 otherwise. */
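/* A sketch of the strategy used below: if we are optimizing for size,
the length is not a compile-time constant, or the operands are not word
aligned, the copy is handed to memcpy via block_move_call(). Otherwise
the bulk is copied MAX_MOVE_BYTES at a time with movmemsi_internal
(inside a loop when more than one chunk is needed) and any leftover
bytes are handled by one final movmemsi_internal. */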
2502
2503 int
2504 m32r_expand_block_move (rtx operands[])
2505 {
2506 rtx orig_dst = operands[0];
2507 rtx orig_src = operands[1];
2508 rtx bytes_rtx = operands[2];
2509 rtx align_rtx = operands[3];
2510 int constp = CONST_INT_P (bytes_rtx);
2511 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2512 int align = INTVAL (align_rtx);
2513 int leftover;
2514 rtx src_reg;
2515 rtx dst_reg;
2516
2517 if (constp && bytes <= 0)
2518 return 1;
2519
2520 /* Move the address into scratch registers. */
2521 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2522 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2523
2524 if (align > UNITS_PER_WORD)
2525 align = UNITS_PER_WORD;
2526
2527 /* If we prefer size over speed, always use a function call.
2528 If we do not know the size, use a function call.
2529 If the blocks are not word aligned, use a function call. */
2530 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2531 {
2532 block_move_call (dst_reg, src_reg, bytes_rtx);
2533 return 0;
2534 }
2535
2536 leftover = bytes % MAX_MOVE_BYTES;
2537 bytes -= leftover;
2538
2539 /* If necessary, generate a loop to handle the bulk of the copy. */
2540 if (bytes)
2541 {
2542 rtx_code_label *label = NULL;
2543 rtx final_src = NULL_RTX;
2544 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2545 rtx rounded_total = GEN_INT (bytes);
2546 rtx new_dst_reg = gen_reg_rtx (SImode);
2547 rtx new_src_reg = gen_reg_rtx (SImode);
2548
2549 /* If we are going to have to perform this loop more than
2550 once, then generate a label and compute the address the
2551 source register will contain upon completion of the final
2552 iteration. */
2553 if (bytes > MAX_MOVE_BYTES)
2554 {
2555 final_src = gen_reg_rtx (Pmode);
2556
2557 if (INT16_P(bytes))
2558 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2559 else
2560 {
2561 emit_insn (gen_movsi (final_src, rounded_total));
2562 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2563 }
2564
2565 label = gen_label_rtx ();
2566 emit_label (label);
2567 }
2568
2569 /* It is known that output_block_move() will update src_reg to point
2570 to the word after the end of the source block, and dst_reg to point
2571 to the last word of the destination block, provided that the block
2572 is MAX_MOVE_BYTES long. */
2573 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2574 new_dst_reg, new_src_reg));
2575 emit_move_insn (dst_reg, new_dst_reg);
2576 emit_move_insn (src_reg, new_src_reg);
2577 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2578
2579 if (bytes > MAX_MOVE_BYTES)
2580 {
2581 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2582 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2583 }
2584 }
2585
2586 if (leftover)
2587 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2588 gen_reg_rtx (SImode),
2589 gen_reg_rtx (SImode)));
2590 return 1;
2591 }
2592
2593 \f
2594 /* Emit load/stores for a small constant word aligned block_move.
2595
2596 operands[0] is the memory address of the destination.
2597 operands[1] is the memory address of the source.
2598 operands[2] is the number of bytes to move.
2599 operands[3] and operands[4] receive the updated destination and source addresses.
2600 operands[5] and operands[6] are temp registers. */
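/* Illustrative output for an 8-byte, word-aligned copy (register numbers
are arbitrary): "ld r5,@r1+" / "ld r6,@r1+" / "st r5,@r0" / "st r6,@+r0". */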
2601
2602 void
2603 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2604 {
2605 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2606 int first_time;
2607 int got_extra = 0;
2608
2609 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2610
2611 /* We do not have a post-increment store available, so the first set of
2612 stores is done without any increment; the remaining ones can then use
2613 the pre-increment addressing mode.
2614
2615 Note: expand_block_move() also relies upon this behavior when building
2616 loops to copy large blocks. */
2617 first_time = 1;
2618
2619 while (bytes > 0)
2620 {
2621 if (bytes >= 8)
2622 {
2623 if (first_time)
2624 {
2625 output_asm_insn ("ld\t%5, %p1", operands);
2626 output_asm_insn ("ld\t%6, %p1", operands);
2627 output_asm_insn ("st\t%5, @%0", operands);
2628 output_asm_insn ("st\t%6, %s0", operands);
2629 }
2630 else
2631 {
2632 output_asm_insn ("ld\t%5, %p1", operands);
2633 output_asm_insn ("ld\t%6, %p1", operands);
2634 output_asm_insn ("st\t%5, %s0", operands);
2635 output_asm_insn ("st\t%6, %s0", operands);
2636 }
2637
2638 bytes -= 8;
2639 }
2640 else if (bytes >= 4)
2641 {
2642 if (bytes > 4)
2643 got_extra = 1;
2644
2645 output_asm_insn ("ld\t%5, %p1", operands);
2646
2647 if (got_extra)
2648 output_asm_insn ("ld\t%6, %p1", operands);
2649
2650 if (first_time)
2651 output_asm_insn ("st\t%5, @%0", operands);
2652 else
2653 output_asm_insn ("st\t%5, %s0", operands);
2654
2655 bytes -= 4;
2656 }
2657 else
2658 {
2659 /* Get the entire next word, even though we do not want all of it.
2660 This saves us from doing several smaller loads, and we assume that
2661 we cannot cause a page fault when at least part of the word is in
2662 valid memory [since we don't get called if things aren't properly
2663 aligned]. */
2664 int dst_offset = first_time ? 0 : 4;
2665 /* The amount of increment we have to make to the
2666 destination pointer. */
2667 int dst_inc_amount = dst_offset + bytes - 4;
2668 /* The same for the source pointer. */
2669 int src_inc_amount = bytes;
2670 int last_shift;
2671 rtx my_operands[3];
2672
2673 /* If got_extra is true then we have already loaded
2674 the next word as part of loading and storing the previous word. */
2675 if (! got_extra)
2676 output_asm_insn ("ld\t%6, @%1", operands);
2677
2678 if (bytes >= 2)
2679 {
2680 bytes -= 2;
2681
2682 output_asm_insn ("sra3\t%5, %6, #16", operands);
2683 my_operands[0] = operands[5];
2684 my_operands[1] = GEN_INT (dst_offset);
2685 my_operands[2] = operands[0];
2686 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2687
2688 /* If there is a byte left to store then increment the
2689 destination address and shift the contents of the source
2690 register down by 8 bits. We could not do the address
2691 increment in the store half word instruction, because it does
2692 not have an auto increment mode. */
2693 if (bytes > 0) /* assert (bytes == 1) */
2694 {
2695 dst_offset += 2;
2696 last_shift = 8;
2697 }
2698 }
2699 else
2700 last_shift = 24;
2701
2702 if (bytes > 0)
2703 {
2704 my_operands[0] = operands[6];
2705 my_operands[1] = GEN_INT (last_shift);
2706 output_asm_insn ("srai\t%0, #%1", my_operands);
2707 my_operands[0] = operands[6];
2708 my_operands[1] = GEN_INT (dst_offset);
2709 my_operands[2] = operands[0];
2710 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2711 }
2712
2713 /* Update the destination pointer if needed. We have to do
2714 this so that the pattern matches what we output in this
2715 function. */
2716 if (dst_inc_amount
2717 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2718 {
2719 my_operands[0] = operands[0];
2720 my_operands[1] = GEN_INT (dst_inc_amount);
2721 output_asm_insn ("addi\t%0, #%1", my_operands);
2722 }
2723
2724 /* Update the source pointer if needed. We have to do this
2725 so that the pattern matches what we output in this
2726 function. */
2727 if (src_inc_amount
2728 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2729 {
2730 my_operands[0] = operands[1];
2731 my_operands[1] = GEN_INT (src_inc_amount);
2732 output_asm_insn ("addi\t%0, #%1", my_operands);
2733 }
2734
2735 bytes = 0;
2736 }
2737
2738 first_time = 0;
2739 }
2740 }
2741
2742 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2743
2744 int
2745 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2746 unsigned int new_reg)
2747 {
2748 /* Interrupt routines can't clobber any register that isn't already used. */
2749 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2750 && !df_regs_ever_live_p (new_reg))
2751 return 0;
2752
2753 return 1;
2754 }
2755
2756 rtx
2757 m32r_return_addr (int count)
2758 {
2759 if (count != 0)
2760 return const0_rtx;
2761
2762 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2763 }
2764
2765 static void
2766 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2767 {
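/* A sketch of the layout built below: four fixed instruction words, then
the static chain value at offset 16 and the target function's address at
offset 20, which the stub code is expected to load and jump through.
Finally the instruction cache covering the trampoline is flushed, either
via the configured trap number or by calling m32r_cache_flush_func. */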
2768 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2769 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2770 0x017e8e17 : 0x178e7e01, SImode));
2771 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2772 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2773 0x0c00ae86 : 0x86ae000c, SImode));
2774 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2775 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2776 0xe627871e : 0x1e8727e6, SImode));
2777 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2778 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2779 0xc616c626 : 0x26c616c6, SImode));
2780 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2781 chain_value);
2782 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2783 XEXP (DECL_RTL (fndecl), 0));
2784
2785 if (m32r_cache_flush_trap >= 0)
2786 emit_insn (gen_flush_icache
2787 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2788 gen_int_mode (m32r_cache_flush_trap, SImode)));
2789 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2790 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2791 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2792 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2793 GEN_INT (3), SImode);
2794 }
2795
2796 /* True if X is a reg that can be used as a base reg. */
2797
2798 static bool
2799 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2800 {
2801 if (! REG_P (x))
2802 return false;
2803
2804 if (strict)
2805 {
2806 if (GPR_P (REGNO (x)))
2807 return true;
2808 }
2809 else
2810 {
2811 if (GPR_P (REGNO (x))
2812 || REGNO (x) == ARG_POINTER_REGNUM
2813 || ! HARD_REGISTER_P (x))
2814 return true;
2815 }
2816
2817 return false;
2818 }
2819
2820 static inline bool
2821 m32r_rtx_ok_for_offset_p (const_rtx x)
2822 {
2823 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2824 }
2825
2826 static inline bool
2827 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2828 const_rtx x, bool strict)
2829 {
2830 if (GET_CODE (x) == PLUS
2831 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2832 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2833 return true;
2834
2835 return false;
2836 }
2837
2838 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2839 since more than one instruction will be required. */
2840
2841 static inline bool
2842 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2843 bool strict)
2844 {
2845 if (GET_CODE (x) == LO_SUM
2846 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2847 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2848 && CONSTANT_P (XEXP (x, 1)))
2849 return true;
2850
2851 return false;
2852 }
2853
2854 /* Is this a load and increment operation? */
2855
2856 static inline bool
2857 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2858 {
2859 if ((mode == SImode || mode == SFmode)
2860 && GET_CODE (x) == POST_INC
2861 && REG_P (XEXP (x, 0))
2862 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2863 return true;
2864
2865 return false;
2866 }
2867
2868 /* Is this an increment/decrement and store operation? */
2869
2870 static inline bool
2871 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2872 {
2873 if ((mode == SImode || mode == SFmode)
2874 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2875 && REG_P (XEXP (x, 0))
2876 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2877 return true;
2878
2879 return false;
2880 }
2881
2882 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
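/* The forms accepted below are: a base register; base plus a 16-bit
signed offset; a LO_SUM of a base register and a constant (only for modes
of at most one word); a POST_INC load address; and a PRE_INC/PRE_DEC
store address (the latter two only for SImode and SFmode). */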
2883
2884 static bool
2885 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2886 {
2887 if (m32r_rtx_ok_for_base_p (x, strict)
2888 || m32r_legitimate_offset_addres_p (mode, x, strict)
2889 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2890 || m32r_load_postinc_p (mode, x, strict)
2891 || m32r_store_preinc_predec_p (mode, x, strict))
2892 return true;
2893
2894 return false;
2895 }
2896
2897 static void
2898 m32r_conditional_register_usage (void)
2899 {
2900 if (flag_pic)
2901 {
2902 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2903 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2904 }
2905 }
2906
2907 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2908
2909 We don't allow (plus symbol large-constant) as the relocations can't
2910 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2911 We allow all CONST_DOUBLE's as the md file patterns will force the
2912 constant to memory if they can't handle them. */
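/* For instance, (const (plus (symbol_ref "foo") (const_int 40000))) is
rejected here since 40000 > 32767, while plain symbols, label refs,
CONST_INTs and CONST_DOUBLEs are accepted. */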
2913
2914 static bool
2915 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2916 {
2917 return !(GET_CODE (x) == CONST
2918 && GET_CODE (XEXP (x, 0)) == PLUS
2919 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2920 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2921 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2922 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2923 }