1 /* Copyright (C) 1997-2019 Free Software Foundation, Inc.
2 Contributed by Red Hat, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "cfgrtl.h"
47 #include "langhooks.h"
48 #include "dumpfile.h"
49 #include "builtins.h"
50 #include "ifcvt.h"
51 #include "rtl-iter.h"
52 #include "calls.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
57 #ifndef FRV_INLINE
58 #define FRV_INLINE inline
59 #endif
60
61 /* The maximum number of distinct NOP patterns. There are three:
62 nop, fnop and mnop. */
63 #define NUM_NOP_PATTERNS 3
64
65 /* Classification of instructions and units: integer, floating-point/media,
66 branch and control. */
67 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
68
69 /* The DFA names of the units, in packet order. */
70 static const char *const frv_unit_names[] =
71 {
72 "c",
73 "i0", "f0",
74 "i1", "f1",
75 "i2", "f2",
76 "i3", "f3",
77 "b0", "b1"
78 };
79
80 /* The classification of each unit in frv_unit_names[]. */
81 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
82 {
83 GROUP_C,
84 GROUP_I, GROUP_FM,
85 GROUP_I, GROUP_FM,
86 GROUP_I, GROUP_FM,
87 GROUP_I, GROUP_FM,
88 GROUP_B, GROUP_B
89 };
90
91 /* Return the DFA unit code associated with the Nth unit of integer
92 or floating-point group GROUP. */
93 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
94
95 /* Return the number of integer or floating-point unit UNIT
96 (1 for I1, 2 for F2, etc.). */
97 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
98
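A worked example of the two macros above, using the frv_unit_names[] ordering at the top of this file (illustrative only):

  /* "i1" is element 3 of frv_unit_names[], so
       UNIT_NUMBER (3) == (3 - 1) / 2 == 1;
     "f2" is element 6, so
       UNIT_NUMBER (6) == (6 - 1) / 2 == 2;
     and NTH_UNIT (GROUP_I, 1) reads frv_unit_codes[GROUP_I + 1 * 2 + 1],
     i.e. frv_unit_codes[3], the DFA code recorded for "i1".  */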
99 /* The DFA unit number for each unit in frv_unit_names[]. */
100 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
101
102 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
103 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
104 no instruction of type T has been seen. */
105 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
106
107 /* An array of dummy nop INSNs, one for each type of nop that the
108 target supports. */
109 static GTY(()) rtx_insn *frv_nops[NUM_NOP_PATTERNS];
110
111 /* The number of nop instructions in frv_nops[]. */
112 static unsigned int frv_num_nops;
113
114 /* The type of access. FRV_IO_UNKNOWN means the access can be either
115 a read or a write. */
116 enum frv_io_type { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE };
117
118 /* Information about one __builtin_read or __builtin_write access, or
119 the combination of several such accesses. The most general value
120 is all-zeros (an unknown access to an unknown address). */
121 struct frv_io {
122 enum frv_io_type type;
123
124 /* The constant address being accessed, or zero if not known. */
125 HOST_WIDE_INT const_address;
126
127 /* The run-time address, as used in operand 0 of the membar pattern. */
128 rtx var_address;
129 };
130
131 /* Return true if instruction INSN should be packed with the following
132 instruction. */
133 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
134
135 /* Set the value of PACKING_FLAG_P(INSN). */
136 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
137 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
138
139 /* Loop with REG set to each hard register in rtx X. */
140 #define FOR_EACH_REGNO(REG, X) \
141 for (REG = REGNO (X); REG < END_REGNO (X); REG++)
142
143 /* This structure contains machine specific function data. */
144 struct GTY(()) machine_function
145 {
146 /* True if we have created an rtx that relies on the stack frame. */
147 int frame_needed;
148
149 /* True if this function contains at least one __builtin_{read,write}*. */
150 bool has_membar_p;
151 };
152
153 /* Temporary register allocation support structure. */
154 typedef struct frv_tmp_reg_struct
155 {
156 HARD_REG_SET regs; /* possible registers to allocate */
157 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
158 }
159 frv_tmp_reg_t;
160
161 /* Register state information for VLIW re-packing phase. */
162 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
163 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
164 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
165 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
166
167 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
168
169 typedef unsigned char regstate_t;
170
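A hypothetical sketch of how FOR_EACH_REGNO and the regstate flags above can be combined; the `regstate' array below is invented for illustration and is not a name defined in this excerpt:

  regstate_t regstate[FIRST_PSEUDO_REGISTER];
  unsigned int regno;
  rtx x;   /* some hard REG rtx, possibly spanning several registers */
  ...
  FOR_EACH_REGNO (regno, x)
    regstate[regno] |= REGSTATE_MODIFIED;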
171 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
172 memory move. */
173 enum frv_stack_op
174 {
175 FRV_LOAD,
176 FRV_STORE
177 };
178
179 /* Information required by frv_frame_access. */
180 typedef struct
181 {
182 /* This field is FRV_LOAD if registers are to be loaded from the stack and
183 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
184 the move is being done by the prologue code while FRV_LOAD implies it
185 is being done by the epilogue. */
186 enum frv_stack_op op;
187
188 /* The base register to use when accessing the stack. This may be the
189 frame pointer, stack pointer, or a temporary. The choice of register
190 depends on which part of the frame is being accessed and how big the
191 frame is. */
192 rtx base;
193
194 /* The offset of BASE from the bottom of the current frame, in bytes. */
195 int base_offset;
196 } frv_frame_accessor_t;
197
198 /* Conditional execution support gathered together in one structure. */
199 typedef struct
200 {
201 /* Linked list of insns to add if the conditional execution conversion was
202 successful. Each link points to an EXPR_LIST which points to the pattern
203 of the insn to add, and the insn to be inserted before. */
204 rtx added_insns_list;
205
206 /* Identify which registers are safe to allocate for if conversions to
207 conditional execution. We keep the last allocated register in the
208 register classes between COND_EXEC statements. This will mean we allocate
209 different registers for each different COND_EXEC group if we can. This
210 might allow the scheduler to intermix two different COND_EXEC sections. */
211 frv_tmp_reg_t tmp_reg;
212
213 /* For nested IFs, identify which CC registers are used outside of being set
214 via a compare insn and used via a check insn. This will allow us to
215 know if we can rewrite the register to use a different register that will
216 be paired with the CR register controlling the nested IF-THEN blocks. */
217 HARD_REG_SET nested_cc_ok_rewrite;
218
219 /* Temporary registers allocated to hold constants during conditional
220 execution. */
221 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
222
223 /* Current number of temp registers available. */
224 int cur_scratch_regs;
225
226 /* Number of nested conditional execution blocks. */
227 int num_nested_cond_exec;
228
229 /* Map of insns that set up constants in scratch registers. */
230 bitmap scratch_insns_bitmap;
231
232 /* Conditional execution test register (CC0..CC7). */
233 rtx cr_reg;
234
235 /* Conditional execution compare register that is paired with cr_reg, so that
236 nested compares can be done. The csubcc and caddcc instructions don't
237 have enough bits to specify both a CC register to be set and a CR register
238 to do the test on, so the same bit number is used for both. Needless to
239 say, this is rather inconvenient for GCC. */
240 rtx nested_cc_reg;
241
242 /* Extra CR registers used for &&, ||. */
243 rtx extra_int_cr;
244 rtx extra_fp_cr;
245
246 /* Previous CR used in nested if, to make sure we are dealing with the same
247 nested if as the previous statement. */
248 rtx last_nested_if_cr;
249 }
250 frv_ifcvt_t;
251
252 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
253
254 /* Map register number to smallest register class. */
255 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
256
257 /* Cached value of frv_stack_info. */
258 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
259
260 /* Forward references */
261
262 static void frv_option_override (void);
263 static bool frv_legitimate_address_p (machine_mode, rtx, bool);
264 static int frv_default_flags_for_cpu (void);
265 static int frv_string_begins_with (const char *, const char *);
266 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
267 static void frv_print_operand (FILE *, rtx, int);
268 static void frv_print_operand_address (FILE *, machine_mode, rtx);
269 static bool frv_print_operand_punct_valid_p (unsigned char code);
270 static void frv_print_operand_memory_reference_reg
271 (FILE *, rtx);
272 static void frv_print_operand_memory_reference (FILE *, rtx, int);
273 static int frv_print_operand_jump_hint (rtx_insn *);
274 static const char *comparison_string (enum rtx_code, rtx);
275 static rtx frv_function_value (const_tree, const_tree,
276 bool);
277 static rtx frv_libcall_value (machine_mode,
278 const_rtx);
279 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
280 static rtx single_set_pattern (rtx);
281 static int frv_function_contains_far_jump (void);
282 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
283 enum reg_class,
284 machine_mode,
285 int, int);
286 static rtx frv_frame_offset_rtx (int);
287 static rtx frv_frame_mem (machine_mode, rtx, int);
288 static rtx frv_dwarf_store (rtx, int);
289 static void frv_frame_insn (rtx, rtx);
290 static void frv_frame_access (frv_frame_accessor_t*,
291 rtx, int);
292 static void frv_frame_access_multi (frv_frame_accessor_t*,
293 frv_stack_t *, int);
294 static void frv_frame_access_standard_regs (enum frv_stack_op,
295 frv_stack_t *);
296 static struct machine_function *frv_init_machine_status (void);
297 static rtx frv_int_to_acc (enum insn_code, int, rtx);
298 static machine_mode frv_matching_accg_mode (machine_mode);
299 static rtx frv_read_argument (tree, unsigned int);
300 static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
301 static int frv_check_constant_argument (enum insn_code, int, rtx);
302 static rtx frv_legitimize_target (enum insn_code, rtx);
303 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
304 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
305 static rtx frv_legitimize_address (rtx, rtx, machine_mode);
306 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
307 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
308 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
309 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
310 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
311 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
312 static rtx frv_expand_int_void2arg (enum insn_code, tree);
313 static rtx frv_expand_prefetches (enum insn_code, tree);
314 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
315 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
316 static rtx frv_expand_mclracc_builtin (tree);
317 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
318 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
319 static rtx frv_expand_noargs_builtin (enum insn_code);
320 static void frv_split_iacc_move (rtx, rtx);
321 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
322 static void frv_ifcvt_add_insn (rtx, rtx_insn *, int);
323 static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
324 static rtx frv_ifcvt_load_value (rtx, rtx);
325 static unsigned int frv_insn_unit (rtx_insn *);
326 static bool frv_issues_to_branch_unit_p (rtx_insn *);
327 static int frv_cond_flags (rtx);
328 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
329 static bool frv_registers_conflict_p (rtx);
330 static void frv_registers_update_1 (rtx, const_rtx, void *);
331 static void frv_registers_update (rtx);
332 static void frv_start_packet (void);
333 static void frv_start_packet_block (void);
334 static void frv_finish_packet (void (*) (void));
335 static bool frv_pack_insn_p (rtx_insn *);
336 static void frv_add_insn_to_packet (rtx_insn *);
337 static void frv_insert_nop_in_packet (rtx_insn *);
338 static bool frv_for_each_packet (void (*) (void));
339 static bool frv_sort_insn_group_1 (enum frv_insn_group,
340 unsigned int, unsigned int,
341 unsigned int, unsigned int,
342 state_t);
343 static int frv_compare_insns (const void *, const void *);
344 static void frv_sort_insn_group (enum frv_insn_group);
345 static void frv_reorder_packet (void);
346 static void frv_fill_unused_units (enum frv_insn_group);
347 static void frv_align_label (void);
348 static void frv_reorg_packet (void);
349 static void frv_register_nop (rtx);
350 static void frv_reorg (void);
351 static void frv_pack_insns (void);
352 static void frv_function_prologue (FILE *);
353 static void frv_function_epilogue (FILE *);
354 static bool frv_assemble_integer (rtx, unsigned, int);
355 static void frv_init_builtins (void);
356 static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
357 static void frv_init_libfuncs (void);
358 static bool frv_in_small_data_p (const_tree);
359 static void frv_asm_output_mi_thunk
360 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
361 static void frv_setup_incoming_varargs (cumulative_args_t,
362 const function_arg_info &,
363 int *, int);
364 static rtx frv_expand_builtin_saveregs (void);
365 static void frv_expand_builtin_va_start (tree, rtx);
366 static bool frv_rtx_costs (rtx, machine_mode, int, int,
367 int*, bool);
368 static int frv_register_move_cost (machine_mode,
369 reg_class_t, reg_class_t);
370 static int frv_memory_move_cost (machine_mode,
371 reg_class_t, bool);
372 static void frv_asm_out_constructor (rtx, int);
373 static void frv_asm_out_destructor (rtx, int);
374 static bool frv_function_symbol_referenced_p (rtx);
375 static bool frv_legitimate_constant_p (machine_mode, rtx);
376 static bool frv_cannot_force_const_mem (machine_mode, rtx);
377 static const char *unspec_got_name (int);
378 static void frv_output_const_unspec (FILE *,
379 const struct frv_unspec *);
380 static bool frv_function_ok_for_sibcall (tree, tree);
381 static rtx frv_struct_value_rtx (tree, int);
382 static bool frv_must_pass_in_stack (machine_mode mode, const_tree type);
383 static int frv_arg_partial_bytes (cumulative_args_t,
384 const function_arg_info &);
385 static rtx frv_function_arg (cumulative_args_t, const function_arg_info &);
386 static rtx frv_function_incoming_arg (cumulative_args_t,
387 const function_arg_info &);
388 static void frv_function_arg_advance (cumulative_args_t, machine_mode,
389 const_tree, bool);
390 static unsigned int frv_function_arg_boundary (machine_mode,
391 const_tree);
392 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
393 ATTRIBUTE_UNUSED;
394 static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
395 machine_mode,
396 secondary_reload_info *);
397 static bool frv_frame_pointer_required (void);
398 static bool frv_can_eliminate (const int, const int);
399 static void frv_conditional_register_usage (void);
400 static void frv_trampoline_init (rtx, tree, rtx);
401 static bool frv_class_likely_spilled_p (reg_class_t);
402 static unsigned int frv_hard_regno_nregs (unsigned int, machine_mode);
403 static bool frv_hard_regno_mode_ok (unsigned int, machine_mode);
404 static bool frv_modes_tieable_p (machine_mode, machine_mode);
405 \f
406 /* Initialize the GCC target structure. */
407 #undef TARGET_PRINT_OPERAND
408 #define TARGET_PRINT_OPERAND frv_print_operand
409 #undef TARGET_PRINT_OPERAND_ADDRESS
410 #define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
411 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
412 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
413 #undef TARGET_ASM_FUNCTION_PROLOGUE
414 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
415 #undef TARGET_ASM_FUNCTION_EPILOGUE
416 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
417 #undef TARGET_ASM_INTEGER
418 #define TARGET_ASM_INTEGER frv_assemble_integer
419 #undef TARGET_OPTION_OVERRIDE
420 #define TARGET_OPTION_OVERRIDE frv_option_override
421 #undef TARGET_INIT_BUILTINS
422 #define TARGET_INIT_BUILTINS frv_init_builtins
423 #undef TARGET_EXPAND_BUILTIN
424 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
425 #undef TARGET_INIT_LIBFUNCS
426 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
427 #undef TARGET_IN_SMALL_DATA_P
428 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
429 #undef TARGET_REGISTER_MOVE_COST
430 #define TARGET_REGISTER_MOVE_COST frv_register_move_cost
431 #undef TARGET_MEMORY_MOVE_COST
432 #define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
433 #undef TARGET_RTX_COSTS
434 #define TARGET_RTX_COSTS frv_rtx_costs
435 #undef TARGET_ASM_CONSTRUCTOR
436 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
437 #undef TARGET_ASM_DESTRUCTOR
438 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
439
440 #undef TARGET_ASM_OUTPUT_MI_THUNK
441 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
442 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
443 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
444
445 #undef TARGET_SCHED_ISSUE_RATE
446 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
447
448 #undef TARGET_LEGITIMIZE_ADDRESS
449 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
450
451 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
452 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
453 #undef TARGET_LEGITIMATE_CONSTANT_P
454 #define TARGET_LEGITIMATE_CONSTANT_P frv_legitimate_constant_p
455 #undef TARGET_CANNOT_FORCE_CONST_MEM
456 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
457
458 #undef TARGET_HAVE_TLS
459 #define TARGET_HAVE_TLS HAVE_AS_TLS
460
461 #undef TARGET_STRUCT_VALUE_RTX
462 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
463 #undef TARGET_MUST_PASS_IN_STACK
464 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
465 #undef TARGET_PASS_BY_REFERENCE
466 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
467 #undef TARGET_ARG_PARTIAL_BYTES
468 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
469 #undef TARGET_FUNCTION_ARG
470 #define TARGET_FUNCTION_ARG frv_function_arg
471 #undef TARGET_FUNCTION_INCOMING_ARG
472 #define TARGET_FUNCTION_INCOMING_ARG frv_function_incoming_arg
473 #undef TARGET_FUNCTION_ARG_ADVANCE
474 #define TARGET_FUNCTION_ARG_ADVANCE frv_function_arg_advance
475 #undef TARGET_FUNCTION_ARG_BOUNDARY
476 #define TARGET_FUNCTION_ARG_BOUNDARY frv_function_arg_boundary
477
478 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
479 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
480 #undef TARGET_SETUP_INCOMING_VARARGS
481 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
482 #undef TARGET_MACHINE_DEPENDENT_REORG
483 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
484
485 #undef TARGET_EXPAND_BUILTIN_VA_START
486 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
487
488 #if HAVE_AS_TLS
489 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
490 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
491 #endif
492
493 #undef TARGET_CLASS_LIKELY_SPILLED_P
494 #define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p
495
496 #undef TARGET_SECONDARY_RELOAD
497 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
498
499 #undef TARGET_LRA_P
500 #define TARGET_LRA_P hook_bool_void_false
501
502 #undef TARGET_LEGITIMATE_ADDRESS_P
503 #define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p
504
505 #undef TARGET_FRAME_POINTER_REQUIRED
506 #define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required
507
508 #undef TARGET_CAN_ELIMINATE
509 #define TARGET_CAN_ELIMINATE frv_can_eliminate
510
511 #undef TARGET_CONDITIONAL_REGISTER_USAGE
512 #define TARGET_CONDITIONAL_REGISTER_USAGE frv_conditional_register_usage
513
514 #undef TARGET_TRAMPOLINE_INIT
515 #define TARGET_TRAMPOLINE_INIT frv_trampoline_init
516
517 #undef TARGET_FUNCTION_VALUE
518 #define TARGET_FUNCTION_VALUE frv_function_value
519 #undef TARGET_LIBCALL_VALUE
520 #define TARGET_LIBCALL_VALUE frv_libcall_value
521
522 #undef TARGET_HARD_REGNO_NREGS
523 #define TARGET_HARD_REGNO_NREGS frv_hard_regno_nregs
524 #undef TARGET_HARD_REGNO_MODE_OK
525 #define TARGET_HARD_REGNO_MODE_OK frv_hard_regno_mode_ok
526 #undef TARGET_MODES_TIEABLE_P
527 #define TARGET_MODES_TIEABLE_P frv_modes_tieable_p
528 #undef TARGET_CONSTANT_ALIGNMENT
529 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
530
531 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
532 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
533
534 struct gcc_target targetm = TARGET_INITIALIZER;
535
536 #define FRV_SYMBOL_REF_TLS_P(RTX) \
537 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
538
539 \f
540 /* Any function call that satisfies the machine-independent
541 requirements is eligible on FR-V. */
542
543 static bool
544 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
545 tree exp ATTRIBUTE_UNUSED)
546 {
547 return true;
548 }
549
550 /* Return true if SYMBOL is a small data symbol and relocation RELOC
551 can be used to access it directly in a load or store. */
552
553 static FRV_INLINE bool
554 frv_small_data_reloc_p (rtx symbol, int reloc)
555 {
556 return (GET_CODE (symbol) == SYMBOL_REF
557 && SYMBOL_REF_SMALL_P (symbol)
558 && (!TARGET_FDPIC || flag_pic == 1)
559 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
560 }
561
562 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
563 appropriately. */
564
565 bool
566 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
567 {
568 if (GET_CODE (x) == CONST)
569 {
570 unspec->offset = 0;
571 x = XEXP (x, 0);
572 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
573 {
574 unspec->offset += INTVAL (XEXP (x, 1));
575 x = XEXP (x, 0);
576 }
577 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
578 {
579 unspec->symbol = XVECEXP (x, 0, 0);
580 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
581
582 if (unspec->offset == 0)
583 return true;
584
585 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
586 && unspec->offset > 0
587 && unspec->offset < g_switch_value)
588 return true;
589 }
590 }
591 return false;
592 }
593
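For illustration, the constant forms accepted by frv_const_unspec_p, as derived from the checks above, are:

  (const (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT))
  (const (plus (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT)
               (const_int OFFSET)))

where a nonzero OFFSET is accepted only for small-data relocations and must satisfy 0 < OFFSET < g_switch_value.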
594 /* Decide whether we can force certain constants to memory. If we
595 decide we can't, the caller should be able to cope with it in
596 another way.
597
598 We never allow constants to be forced into memory for TARGET_FDPIC.
599 This is necessary for several reasons:
600
601 1. Since frv_legitimate_constant_p rejects constant pool addresses, the
602 target-independent code will try to force them into the constant
603 pool, thus leading to infinite recursion.
604
605 2. We can never introduce new constant pool references during reload.
606 Any such reference would require use of the pseudo FDPIC register.
607
608 3. We can't represent a constant added to a function pointer (which is
609 not the same as a pointer to a function+constant).
610
611 4. In many cases, it's more efficient to calculate the constant in-line. */
612
613 static bool
614 frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
615 rtx x ATTRIBUTE_UNUSED)
616 {
617 return TARGET_FDPIC;
618 }
619 \f
620 static int
621 frv_default_flags_for_cpu (void)
622 {
623 switch (frv_cpu_type)
624 {
625 case FRV_CPU_GENERIC:
626 return MASK_DEFAULT_FRV;
627
628 case FRV_CPU_FR550:
629 return MASK_DEFAULT_FR550;
630
631 case FRV_CPU_FR500:
632 case FRV_CPU_TOMCAT:
633 return MASK_DEFAULT_FR500;
634
635 case FRV_CPU_FR450:
636 return MASK_DEFAULT_FR450;
637
638 case FRV_CPU_FR405:
639 case FRV_CPU_FR400:
640 return MASK_DEFAULT_FR400;
641
642 case FRV_CPU_FR300:
643 case FRV_CPU_SIMPLE:
644 return MASK_DEFAULT_SIMPLE;
645
646 default:
647 gcc_unreachable ();
648 }
649 }
650
651 /* Implement TARGET_OPTION_OVERRIDE. */
652
653 static void
654 frv_option_override (void)
655 {
656 int regno;
657 unsigned int i;
658
659 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
660
661 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
662 linker about linking pic and non-pic code. */
663 if (TARGET_LIBPIC)
664 {
665 if (!flag_pic) /* -fPIC */
666 flag_pic = 2;
667
668 if (!global_options_set.x_g_switch_value) /* -G0 */
669 {
670 g_switch_value = 0;
671 }
672 }
673
674 /* A C expression whose value is a register class containing hard
675 register REGNO. In general there is more than one such class;
676 choose a class which is "minimal", meaning that no smaller class
677 also contains the register. */
678
679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
680 {
681 enum reg_class rclass;
682
683 if (GPR_P (regno))
684 {
685 int gpr_reg = regno - GPR_FIRST;
686
687 if (gpr_reg == GR8_REG)
688 rclass = GR8_REGS;
689
690 else if (gpr_reg == GR9_REG)
691 rclass = GR9_REGS;
692
693 else if (gpr_reg == GR14_REG)
694 rclass = FDPIC_FPTR_REGS;
695
696 else if (gpr_reg == FDPIC_REGNO)
697 rclass = FDPIC_REGS;
698
699 else if ((gpr_reg & 3) == 0)
700 rclass = QUAD_REGS;
701
702 else if ((gpr_reg & 1) == 0)
703 rclass = EVEN_REGS;
704
705 else
706 rclass = GPR_REGS;
707 }
708
709 else if (FPR_P (regno))
710 {
711 int fpr_reg = regno - GPR_FIRST;
712 if ((fpr_reg & 3) == 0)
713 rclass = QUAD_FPR_REGS;
714
715 else if ((fpr_reg & 1) == 0)
716 rclass = FEVEN_REGS;
717
718 else
719 rclass = FPR_REGS;
720 }
721
722 else if (regno == LR_REGNO)
723 rclass = LR_REG;
724
725 else if (regno == LCR_REGNO)
726 rclass = LCR_REG;
727
728 else if (ICC_P (regno))
729 rclass = ICC_REGS;
730
731 else if (FCC_P (regno))
732 rclass = FCC_REGS;
733
734 else if (ICR_P (regno))
735 rclass = ICR_REGS;
736
737 else if (FCR_P (regno))
738 rclass = FCR_REGS;
739
740 else if (ACC_P (regno))
741 {
742 int r = regno - ACC_FIRST;
743 if ((r & 3) == 0)
744 rclass = QUAD_ACC_REGS;
745 else if ((r & 1) == 0)
746 rclass = EVEN_ACC_REGS;
747 else
748 rclass = ACC_REGS;
749 }
750
751 else if (ACCG_P (regno))
752 rclass = ACCG_REGS;
753
754 else
755 rclass = NO_REGS;
756
757 regno_reg_class[regno] = rclass;
758 }
759
760 /* Check for small data option */
761 if (!global_options_set.x_g_switch_value && !TARGET_LIBPIC)
762 g_switch_value = SDATA_DEFAULT_SIZE;
763
764 /* There is no single unaligned SI op for PIC code. Sometimes we
765 need to use ".4byte" and sometimes we need to use ".picptr".
766 See frv_assemble_integer for details. */
767 if (flag_pic || TARGET_FDPIC)
768 targetm.asm_out.unaligned_op.si = 0;
769
770 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
771 target_flags |= MASK_LINKED_FP;
772
773 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
774 target_flags |= MASK_OPTIMIZE_MEMBAR;
775
776 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
777 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
778
779 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
780 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
781
782 init_machine_status = frv_init_machine_status;
783 }
784
785 \f
786 /* Return true if the string NAME begins with PREFIX. */
787
788 static int
789 frv_string_begins_with (const char *name, const char *prefix)
790 {
791 const int prefix_len = strlen (prefix);
792
793 /* Remember: NAME's length includes the null terminator. */
794 return (strncmp (name, prefix, prefix_len) == 0);
795 }
796 \f
797 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
798
799 static void
800 frv_conditional_register_usage (void)
801 {
802 int i;
803
804 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
805 fixed_regs[i] = call_used_regs[i] = 1;
806
807 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
808 fixed_regs[i] = call_used_regs[i] = 1;
809
810 /* Reserve the registers used for conditional execution. At present, we need
811 1 ICC and 1 ICR register. */
812 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
813 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
814
815 if (TARGET_FIXED_CC)
816 {
817 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
818 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
819 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
820 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
821 }
822
823 if (TARGET_FDPIC)
824 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
825 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
826
827 #if 0
828 /* If -fpic, SDA_BASE_REG is the PIC register. */
829 if (g_switch_value == 0 && !flag_pic)
830 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
831
832 if (!flag_pic)
833 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
834 #endif
835 }
836
837 \f
838 /*
839 * Compute the stack frame layout
840 *
841 * Register setup:
842 * +---------------+-----------------------+-----------------------+
843 * |Register |type |caller-save/callee-save|
844 * +---------------+-----------------------+-----------------------+
845 * |GR0 |Zero register | - |
846 * |GR1 |Stack pointer(SP) | - |
847 * |GR2 |Frame pointer(FP) | - |
848 * |GR3 |Hidden parameter | caller save |
849 * |GR4-GR7 | - | caller save |
850 * |GR8-GR13 |Argument register | caller save |
851 * |GR14-GR15 | - | caller save |
852 * |GR16-GR31 | - | callee save |
853 * |GR32-GR47 | - | caller save |
854 * |GR48-GR63 | - | callee save |
855 * |FR0-FR15 | - | caller save |
856 * |FR16-FR31 | - | callee save |
857 * |FR32-FR47 | - | caller save |
858 * |FR48-FR63 | - | callee save |
859 * +---------------+-----------------------+-----------------------+
860 *
861 * Stack frame setup:
862 * Low
863 * SP-> |-----------------------------------|
864 * | Argument area |
865 * |-----------------------------------|
866 * | Register save area |
867 * |-----------------------------------|
868 * | Local variable save area |
869 * FP-> |-----------------------------------|
870 * | Old FP |
871 * |-----------------------------------|
872 * | Hidden parameter save area |
873 * |-----------------------------------|
874 * | Return address(LR) storage area |
875 * |-----------------------------------|
876 * | Padding for alignment |
877 * |-----------------------------------|
878 * | Register argument area |
879 * OLD SP-> |-----------------------------------|
880 * | Parameter area |
881 * |-----------------------------------|
882 * High
883 *
884 * Argument area/Parameter area:
885 *
886 * When a function is called, this area is used for argument transfer. When
887 * the argument is set up by the caller function, this area is referred to as
888 * the argument area. When the argument is referenced by the callee function,
889 * this area is referred to as the parameter area. The area is allocated when
890 * not all of the arguments can be placed in the argument registers at the
891 * time of argument transfer.
892 *
893 * Register save area:
894 *
895 * This is the register save area for registers whose values must be preserved
896 * for the caller function. This area is not allocated when the register save
897 * operation is not needed.
898 *
899 * Local variable save area:
900 *
901 * This is the area for local variables and temporary variables.
902 *
903 * Old FP:
904 *
905 * This area stores the FP value of the caller function.
906 *
907 * Hidden parameter save area:
908 *
909 * This area stores the start address of the return value storage
910 * area for a struct/union return function.
911 * When a struct/union is used as the return value, the caller
912 * function stores the return value storage area start address in
913 * register GR3 and passes it to the callee function.
914 * The callee function interprets the address stored in the GR3
915 * as the return value storage area start address.
916 * When register GR3 needs to be saved into memory, the callee
917 * function saves it in the hidden parameter save area. This
918 * area is not allocated when the save operation is not needed.
919 *
920 * Return address(LR) storage area:
921 *
922 * This area saves the LR. The LR holds the address used to return to the
923 * caller function after a function call.
924 *
925 * Argument register area:
926 *
927 * This area saves the argument registers. This area is not allocated when
928 * the register save operation is not needed.
929 *
930 * Argument:
931 *
932 * Arguments, up to the number of argument registers (6 words), are placed in
933 * registers GR8 to GR13 and delivered to the callee
934 * function. When a struct/union return function is called, the return value
935 * area address is stored in register GR3. Arguments not placed in the
936 * argument registers will be stored in the stack argument area for transfer
937 * purposes. When an 8-byte type argument is to be delivered using registers,
938 * it is divided into two and placed in two registers for transfer. When
939 * argument registers must be saved to memory, the callee function allocates an
940 * argument register save area on the stack. In this case, a contiguous
941 * argument register save area must be established in the parameter area. The
942 * argument register save area must be large enough to cover the argument
943 * registers to be saved. If the function has a variable count of
944 * arguments, it saves all argument registers in the argument register save
945 * area.
946 *
947 * Argument Extension Format:
948 *
949 * When an argument is to be stored on the stack, its type is converted to the
950 * extended type shown in the table below. The argument is freed by the
951 * caller function after the callee function returns.
953 *
954 * +-----------------------+---------------+------------------------+
955 * | Argument Type |Extended Type |Stack Storage Size(byte)|
956 * +-----------------------+---------------+------------------------+
957 * |char |int | 4 |
958 * |signed char |int | 4 |
959 * |unsigned char |int | 4 |
960 * |[signed] short int |int | 4 |
961 * |unsigned short int |int | 4 |
962 * |[signed] int |No extension | 4 |
963 * |unsigned int |No extension | 4 |
964 * |[signed] long int |No extension | 4 |
965 * |unsigned long int |No extension | 4 |
966 * |[signed] long long int |No extension | 8 |
967 * |unsigned long long int |No extension | 8 |
968 * |float |double | 8 |
969 * |double |No extension | 8 |
970 * |long double |No extension | 8 |
971 * |pointer |No extension | 4 |
972 * |struct/union |- | 4 (*1) |
973 * +-----------------------+---------------+------------------------+
974 *
975 * When a struct/union is to be delivered as an argument, the caller copies it
976 * to the local variable area and delivers the address of that area.
977 *
978 * Return Value:
979 *
980 * +-------------------------------+----------------------+
981 * |Return Value Type |Return Value Interface|
982 * +-------------------------------+----------------------+
983 * |void |None |
984 * |[signed|unsigned] char |GR8 |
985 * |[signed|unsigned] short int |GR8 |
986 * |[signed|unsigned] int |GR8 |
987 * |[signed|unsigned] long int |GR8 |
988 * |pointer |GR8 |
989 * |[signed|unsigned] long long int|GR8 & GR9 |
990 * |float |GR8 |
991 * |double |GR8 & GR9 |
992 * |long double |GR8 & GR9 |
993 * |struct/union |(*1) |
994 * +-------------------------------+----------------------+
995 *
996 * When a struct/union is used as the return value, the caller function stores
997 * the start address of the return value storage area into GR3 and then passes
998 * it to the callee function. The callee function interprets GR3 as the start
999 * address of the return value storage area. When this address needs to be
1000 * saved in memory, the callee function allocates the hidden parameter save area
1001 * and saves the address in that area.
1002 */
1003
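As a brief illustration of the struct/union return convention described above (the type and function names below are invented for the example; the register roles come from the tables in this comment):

  struct point { int x, y, z; };
  struct point get_point (void);

  struct point p = get_point ();  /* caller passes &p in GR3;          */
                                  /* get_point () writes through GR3.  */

A scalar int result would instead come back in GR8, and a long long or double result in GR8 and GR9.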
1004 frv_stack_t *
1005 frv_stack_info (void)
1006 {
1007 static frv_stack_t info, zero_info;
1008 frv_stack_t *info_ptr = &info;
1009 tree fndecl = current_function_decl;
1010 int varargs_p = 0;
1011 tree cur_arg;
1012 tree next_arg;
1013 int range;
1014 int alignment;
1015 int offset;
1016
1017 /* If we've already calculated the values and reload is complete,
1018 just return now. */
1019 if (frv_stack_cache)
1020 return frv_stack_cache;
1021
1022 /* Zero all fields. */
1023 info = zero_info;
1024
1025 /* Set up the register range information. */
1026 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1027 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1028 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1029 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1030
1031 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1032 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1033 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1034 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1035
1036 info_ptr->regs[STACK_REGS_LR].name = "lr";
1037 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1038 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1039 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1040
1041 info_ptr->regs[STACK_REGS_CC].name = "cc";
1042 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1043 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1044 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1045
1046 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1047 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1048 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1049
1050 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1051 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1052 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1053 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1054 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1055
1056 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1057 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1058 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1059 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1060
1061 info_ptr->regs[STACK_REGS_FP].name = "fp";
1062 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1063 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1064 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1065
1066 /* Determine if this is a stdarg function. If so, allocate space to store
1067 the 6 arguments. */
1068 if (cfun->stdarg)
1069 varargs_p = 1;
1070
1071 else
1072 {
1073 /* Find the last argument, and see if it is __builtin_va_alist. */
1074 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1075 {
1076 next_arg = DECL_CHAIN (cur_arg);
1077 if (next_arg == (tree)0)
1078 {
1079 if (DECL_NAME (cur_arg)
1080 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1081 varargs_p = 1;
1082
1083 break;
1084 }
1085 }
1086 }
1087
1088 /* Iterate over all of the register ranges. */
1089 for (range = 0; range < STACK_REGS_MAX; range++)
1090 {
1091 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1092 int first = reg_ptr->first;
1093 int last = reg_ptr->last;
1094 int size_1word = 0;
1095 int size_2words = 0;
1096 int regno;
1097
1098 /* Calculate which registers need to be saved & save area size. */
1099 switch (range)
1100 {
1101 default:
1102 for (regno = first; regno <= last; regno++)
1103 {
1104 if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
1105 || (crtl->calls_eh_return
1106 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1107 || (!TARGET_FDPIC && flag_pic
1108 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1109 {
1110 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1111 size_1word += UNITS_PER_WORD;
1112 }
1113 }
1114 break;
1115
1116 /* Calculate whether we need to create a frame after everything else
1117 has been processed. */
1118 case STACK_REGS_FP:
1119 break;
1120
1121 case STACK_REGS_LR:
1122 if (df_regs_ever_live_p (LR_REGNO)
1123 || profile_flag
1124 /* This is set for __builtin_return_address, etc. */
1125 || cfun->machine->frame_needed
1126 || (TARGET_LINKED_FP && frame_pointer_needed)
1127 || (!TARGET_FDPIC && flag_pic
1128 && crtl->uses_pic_offset_table))
1129 {
1130 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1131 size_1word += UNITS_PER_WORD;
1132 }
1133 break;
1134
1135 case STACK_REGS_STDARG:
1136 if (varargs_p)
1137 {
1138 /* If this is a stdarg function with a non-variadic
1139 argument split between registers and the stack,
1140 adjust the saved registers downward. */
1141 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1142 / UNITS_PER_WORD);
1143
1144 for (regno = first; regno <= last; regno++)
1145 {
1146 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1147 size_1word += UNITS_PER_WORD;
1148 }
1149
1150 info_ptr->stdarg_size = size_1word;
1151 }
1152 break;
1153
1154 case STACK_REGS_STRUCT:
1155 if (cfun->returns_struct)
1156 {
1157 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1158 size_1word += UNITS_PER_WORD;
1159 }
1160 break;
1161 }
1162
1163
1164 if (size_1word)
1165 {
1166 /* If this is a field, it only takes one word. */
1167 if (reg_ptr->field_p)
1168 size_1word = UNITS_PER_WORD;
1169
1170 /* Determine which register pairs can be saved together. */
1171 else if (reg_ptr->dword_p && TARGET_DWORD)
1172 {
1173 for (regno = first; regno < last; regno += 2)
1174 {
1175 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1176 {
1177 size_2words += 2 * UNITS_PER_WORD;
1178 size_1word -= 2 * UNITS_PER_WORD;
1179 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1180 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1181 }
1182 }
1183 }
1184
1185 reg_ptr->size_1word = size_1word;
1186 reg_ptr->size_2words = size_2words;
1187
1188 if (! reg_ptr->special_p)
1189 {
1190 info_ptr->regs_size_1word += size_1word;
1191 info_ptr->regs_size_2words += size_2words;
1192 }
1193 }
1194 }
1195
1196 /* Set up the sizes of each field in the frame body, making each size
1197 divisible by the size of a dword if dword operations might be used,
1198 or by the size of a word otherwise. */
1199 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1200
1201 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1202 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1203 + info_ptr->regs_size_1word,
1204 alignment);
1205 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1206
1207 info_ptr->pretend_size = crtl->args.pretend_args_size;
1208
1209 /* Work out the size of the frame, excluding the header. Both the frame
1210 body and register parameter area will be dword-aligned. */
1211 info_ptr->total_size
1212 = (ADDR_ALIGN (info_ptr->parameter_size
1213 + info_ptr->regs_size
1214 + info_ptr->vars_size,
1215 2 * UNITS_PER_WORD)
1216 + ADDR_ALIGN (info_ptr->pretend_size
1217 + info_ptr->stdarg_size,
1218 2 * UNITS_PER_WORD));
1219
1220 /* See if we need to create a frame at all; if so, add the header area. */
1221 if (info_ptr->total_size > 0
1222 || frame_pointer_needed
1223 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1224 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1225 {
1226 offset = info_ptr->parameter_size;
1227 info_ptr->header_size = 4 * UNITS_PER_WORD;
1228 info_ptr->total_size += 4 * UNITS_PER_WORD;
1229
1230 /* Calculate the offsets to save normal register pairs. */
1231 for (range = 0; range < STACK_REGS_MAX; range++)
1232 {
1233 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1234 if (! reg_ptr->special_p)
1235 {
1236 int first = reg_ptr->first;
1237 int last = reg_ptr->last;
1238 int regno;
1239
1240 for (regno = first; regno <= last; regno++)
1241 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1242 && regno != FRAME_POINTER_REGNUM
1243 && (regno < FIRST_ARG_REGNUM
1244 || regno > LAST_ARG_REGNUM))
1245 {
1246 info_ptr->reg_offset[regno] = offset;
1247 offset += 2 * UNITS_PER_WORD;
1248 }
1249 }
1250 }
1251
1252 /* Calculate the offsets to save normal single registers. */
1253 for (range = 0; range < STACK_REGS_MAX; range++)
1254 {
1255 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1256 if (! reg_ptr->special_p)
1257 {
1258 int first = reg_ptr->first;
1259 int last = reg_ptr->last;
1260 int regno;
1261
1262 for (regno = first; regno <= last; regno++)
1263 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1264 && regno != FRAME_POINTER_REGNUM
1265 && (regno < FIRST_ARG_REGNUM
1266 || regno > LAST_ARG_REGNUM))
1267 {
1268 info_ptr->reg_offset[regno] = offset;
1269 offset += UNITS_PER_WORD;
1270 }
1271 }
1272 }
1273
1274 /* Calculate the offset to save the local variables at. */
1275 offset = ADDR_ALIGN (offset, alignment);
1276 if (info_ptr->vars_size)
1277 {
1278 info_ptr->vars_offset = offset;
1279 offset += info_ptr->vars_size;
1280 }
1281
1282 /* Align header to a dword-boundary. */
1283 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1284
1285 /* Calculate the offsets in the fixed frame. */
1286 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1287 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1288 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1289
1290 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1291 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1292 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1293
1294 if (cfun->returns_struct)
1295 {
1296 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1297 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1298 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1299 }
1300
1301 /* Calculate the offsets to store the arguments passed in registers
1302 for stdarg functions. The register pairs come first, with any single
1303 register last. The register save area starts on a
1304 dword-boundary. */
1305 if (info_ptr->stdarg_size)
1306 {
1307 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1308 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1309 int regno;
1310
1311 /* Skip the header. */
1312 offset += 4 * UNITS_PER_WORD;
1313 for (regno = first; regno <= last; regno++)
1314 {
1315 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1316 {
1317 info_ptr->reg_offset[regno] = offset;
1318 offset += 2 * UNITS_PER_WORD;
1319 }
1320 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1321 {
1322 info_ptr->reg_offset[regno] = offset;
1323 offset += UNITS_PER_WORD;
1324 }
1325 }
1326 }
1327 }
1328
1329 if (reload_completed)
1330 frv_stack_cache = info_ptr;
1331
1332 return info_ptr;
1333 }
1334
1335 \f
1336 /* Print the information about the frv stack offsets, etc. when debugging. */
1337
1338 void
1339 frv_debug_stack (frv_stack_t *info)
1340 {
1341 int range;
1342
1343 if (!info)
1344 info = frv_stack_info ();
1345
1346 fprintf (stderr, "\nStack information for function %s:\n",
1347 ((current_function_decl && DECL_NAME (current_function_decl))
1348 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1349 : "<unknown>"));
1350
1351 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1352 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1353 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1354 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1355 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1356
1357 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1358 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1359 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1360 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1361
1362 for (range = 0; range < STACK_REGS_MAX; range++)
1363 {
1364 frv_stack_regs_t *regs = &(info->regs[range]);
1365 if ((regs->size_1word + regs->size_2words) > 0)
1366 {
1367 int first = regs->first;
1368 int last = regs->last;
1369 int regno;
1370
1371 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1372 regs->name, regs->size_1word + regs->size_2words,
1373 regs->size_1word, regs->size_2words);
1374
1375 for (regno = first; regno <= last; regno++)
1376 {
1377 if (info->save_p[regno] == REG_SAVE_1WORD)
1378 fprintf (stderr, " %s (%d)", reg_names[regno],
1379 info->reg_offset[regno]);
1380
1381 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1382 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1383 reg_names[regno+1], info->reg_offset[regno]);
1384 }
1385
1386 fputc ('\n', stderr);
1387 }
1388 }
1389
1390 fflush (stderr);
1391 }
1392
1393
1394 \f
1395
1396 /* Used during final to control the packing of insns. The value is
1397 1 if the current instruction should be packed with the next one,
1398 0 if it shouldn't or -1 if packing is disabled altogether. */
1399
1400 static int frv_insn_packing_flag;
1401
1402 /* True if the current function contains a far jump. */
1403
1404 static int
1405 frv_function_contains_far_jump (void)
1406 {
1407 rtx_insn *insn = get_insns ();
1408 while (insn != NULL
1409 && !(JUMP_P (insn)
1410 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1411 insn = NEXT_INSN (insn);
1412 return (insn != NULL);
1413 }
1414
1415 /* For the FRV, this function makes sure that a function with far jumps
1416 will return correctly. It also does the VLIW packing. */
1417
1418 static void
1419 frv_function_prologue (FILE *file)
1420 {
1421 /* If no frame was created, check whether the function uses a call
1422 instruction to implement a far jump. If so, save the link in gr3 and
1423 replace all returns to LR with returns to GR3. GR3 is used because it
1424 is call-clobbered, because it is not available to the register allocator,
1425 and because all functions that take a hidden argument pointer will have
1426 a stack frame. */
1427 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1428 {
1429 rtx_insn *insn;
1430
1431 /* Just to check that the above comment is true. */
1432 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1433
1434 /* Generate the instruction that saves the link register. */
1435 fprintf (file, "\tmovsg lr,gr3\n");
1436
1437 /* Replace the LR with GR3 in *return_internal patterns. The insn
1438 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1439 simply emit a different assembly directive because bralr and jmpl
1440 execute in different units. */
1441 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1442 if (JUMP_P (insn))
1443 {
1444 rtx pattern = PATTERN (insn);
1445 if (GET_CODE (pattern) == PARALLEL
1446 && XVECLEN (pattern, 0) >= 2
1447 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1448 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1449 {
1450 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1451 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1452 SET_REGNO (address, GPR_FIRST + 3);
1453 }
1454 }
1455 }
1456
1457 frv_pack_insns ();
1458
1459 /* Allow the garbage collector to free the nops created by frv_reorg. */
1460 memset (frv_nops, 0, sizeof (frv_nops));
1461 }
1462
1463 \f
1464 /* Return the next available temporary register in a given class. */
1465
1466 static rtx
1467 frv_alloc_temp_reg (
1468 frv_tmp_reg_t *info, /* which registers are available */
1469 enum reg_class rclass, /* register class desired */
1470 machine_mode mode, /* mode to allocate register with */
1471 int mark_as_used, /* register not available after allocation */
1472 int no_abort) /* return NULL instead of aborting */
1473 {
1474 int regno = info->next_reg[ (int)rclass ];
1475 int orig_regno = regno;
1476 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1477 int i, nr;
1478
1479 for (;;)
1480 {
1481 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1482 && TEST_HARD_REG_BIT (info->regs, regno))
1483 break;
1484
1485 if (++regno >= FIRST_PSEUDO_REGISTER)
1486 regno = 0;
1487 if (regno == orig_regno)
1488 {
1489 gcc_assert (no_abort);
1490 return NULL_RTX;
1491 }
1492 }
1493
1494 nr = hard_regno_nregs (regno, mode);
1495 info->next_reg[ (int)rclass ] = regno + nr;
1496
1497 if (mark_as_used)
1498 for (i = 0; i < nr; i++)
1499 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1500
1501 return gen_rtx_REG (mode, regno);
1502 }
1503
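A hypothetical usage sketch of frv_alloc_temp_reg (not taken from this file's callers): allocate an SImode scratch GPR from a copy of the if-conversion register pool, asking for NULL_RTX rather than an assertion failure when nothing is free.

  frv_tmp_reg_t tmp = frv_ifcvt.tmp_reg;
  rtx scratch = frv_alloc_temp_reg (&tmp, GPR_REGS, SImode,
                                    TRUE /* mark_as_used */,
                                    TRUE /* no_abort */);
  if (scratch == NULL_RTX)
    ; /* No register of the requested class is currently available.  */

Because mark_as_used is nonzero, a second call on the same TMP returns a different register.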
1504 \f
1505 /* Return an rtx with the value OFFSET, which will either be a register or a
1506 signed 12-bit integer. It can be used as the second operand in an "add"
1507 instruction, or as the index in a load or store.
1508
1509 The function returns a constant rtx if OFFSET is small enough, otherwise
1510 it loads the constant into register OFFSET_REGNO and returns that. */
1511 static rtx
1512 frv_frame_offset_rtx (int offset)
1513 {
1514 rtx offset_rtx = GEN_INT (offset);
1515 if (IN_RANGE (offset, -2048, 2047))
1516 return offset_rtx;
1517 else
1518 {
1519 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1520 if (IN_RANGE (offset, -32768, 32767))
1521 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1522 else
1523 {
1524 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1525 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1526 }
1527 return reg_rtx;
1528 }
1529 }
1530
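Worked examples of the three cases handled above (offset values chosen for illustration only):

  frv_frame_offset_rtx (1024);    /* fits in 12 bits: returned as (const_int 1024) */
  frv_frame_offset_rtx (20000);   /* fits in 16 bits: one movsi into OFFSET_REGNO  */
  frv_frame_offset_rtx (100000);  /* loaded with a movsi_high/movsi_lo_sum pair    */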
1531 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))). The
1532 prologue and epilogue use such expressions to access the stack. */
1533 static rtx
1534 frv_frame_mem (machine_mode mode, rtx base, int offset)
1535 {
1536 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1537 base,
1538 frv_frame_offset_rtx (offset)));
1539 }
1540
1541 /* Generate a frame-related expression:
1542
1543 (set (mem (plus (sp) (const_int OFFSET))) REG).
1544
1545 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1546 instructions. Marking the expressions as frame-related is superfluous if
1547 the note contains just a single set. But if the note contains a PARALLEL
1548 or SEQUENCE that has several sets, each set must be individually marked
1549 as frame-related. */
1550 static rtx
1551 frv_dwarf_store (rtx reg, int offset)
1552 {
1553 rtx set = gen_rtx_SET (gen_rtx_MEM (GET_MODE (reg),
1554 plus_constant (Pmode, stack_pointer_rtx,
1555 offset)),
1556 reg);
1557 RTX_FRAME_RELATED_P (set) = 1;
1558 return set;
1559 }
1560
1561 /* Emit a frame-related instruction whose pattern is PATTERN. The
1562 instruction is the last in a sequence that cumulatively performs the
1563 operation described by DWARF_PATTERN. The instruction is marked as
1564 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1565 DWARF_PATTERN. */
1566 static void
1567 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1568 {
1569 rtx insn = emit_insn (pattern);
1570 RTX_FRAME_RELATED_P (insn) = 1;
1571 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1572 dwarf_pattern,
1573 REG_NOTES (insn));
1574 }
1575
1576 /* Emit instructions that transfer REG to or from the memory location (sp +
1577 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1578 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1579 function to store registers and only the epilogue uses it to load them.
1580
1581 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1582 The generated instruction will use BASE as its base register. BASE may
1583 simply be the stack pointer, but if several accesses are being made to a
1584 region far away from the stack pointer, it may be more efficient to set
1585 up a temporary instead.
1586
1587 Store instructions will be frame-related and will be annotated with the
1588 overall effect of the store. Load instructions will be followed by a
1589 (use) to prevent later optimizations from zapping them.
1590
1591 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1592 as a temporary in such cases. */
1593 static void
1594 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1595 {
1596 machine_mode mode = GET_MODE (reg);
1597 rtx mem = frv_frame_mem (mode,
1598 accessor->base,
1599 stack_offset - accessor->base_offset);
1600
1601 if (accessor->op == FRV_LOAD)
1602 {
1603 if (SPR_P (REGNO (reg)))
1604 {
1605 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1606 emit_insn (gen_rtx_SET (temp, mem));
1607 emit_insn (gen_rtx_SET (reg, temp));
1608 }
1609 else
1610 {
1611 /* We cannot use reg+reg addressing for DImode access. */
1612 if (mode == DImode
1613 && GET_CODE (XEXP (mem, 0)) == PLUS
1614 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1615 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1616 {
1617 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1618
1619 emit_move_insn (temp,
1620 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1621 XEXP (XEXP (mem, 0), 1)));
1622 mem = gen_rtx_MEM (DImode, temp);
1623 }
1624 emit_insn (gen_rtx_SET (reg, mem));
1625 }
1626 emit_use (reg);
1627 }
1628 else
1629 {
1630 if (SPR_P (REGNO (reg)))
1631 {
1632 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1633 emit_insn (gen_rtx_SET (temp, reg));
1634 frv_frame_insn (gen_rtx_SET (mem, temp),
1635 frv_dwarf_store (reg, stack_offset));
1636 }
1637 else if (mode == DImode)
1638 {
1639 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1640 with a separate save for each register. */
1641 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1642 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1643 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1644 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1645
1646 /* Also we cannot use reg+reg addressing. */
1647 if (GET_CODE (XEXP (mem, 0)) == PLUS
1648 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1649 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1650 {
1651 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1652 emit_move_insn (temp,
1653 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1654 XEXP (XEXP (mem, 0), 1)));
1655 mem = gen_rtx_MEM (DImode, temp);
1656 }
1657
1658 frv_frame_insn (gen_rtx_SET (mem, reg),
1659 gen_rtx_PARALLEL (VOIDmode,
1660 gen_rtvec (2, set1, set2)));
1661 }
1662 else
1663 frv_frame_insn (gen_rtx_SET (mem, reg),
1664 frv_dwarf_store (reg, stack_offset));
1665 }
1666 }
1667
1668 /* A function that uses frv_frame_access to transfer a group of registers to
1669 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1670 is the stack information generated by frv_stack_info, and REG_SET is the
1671 number of the register set to transfer. */
1672 static void
1673 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1674 frv_stack_t *info,
1675 int reg_set)
1676 {
1677 frv_stack_regs_t *regs_info;
1678 int regno;
1679
1680 regs_info = &info->regs[reg_set];
1681 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1682 if (info->save_p[regno])
1683 frv_frame_access (accessor,
1684 info->save_p[regno] == REG_SAVE_2WORDS
1685 ? gen_rtx_REG (DImode, regno)
1686 : gen_rtx_REG (SImode, regno),
1687 info->reg_offset[regno]);
1688 }
1689
1690 /* Save or restore callee-saved registers that are kept outside the frame
1691 header. The function saves the registers if OP is FRV_STORE and restores
1692 them if OP is FRV_LOAD. INFO is the stack information generated by
1693 frv_stack_info. */
1694 static void
1695 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1696 {
1697 frv_frame_accessor_t accessor;
1698
1699 accessor.op = op;
1700 accessor.base = stack_pointer_rtx;
1701 accessor.base_offset = 0;
1702 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1703 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1704 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1705 }
1706
1707
1708 /* Called after register allocation to add any instructions needed for the
1709 prologue. Using a prologue insn is preferred to putting all of the
1710 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1711 it allows the scheduler to intermix instructions with the saves of
1712 the callee-saved registers. In some cases, it might be necessary
1713 to emit a barrier instruction as the last insn to prevent such
1714 scheduling.
1715
1716 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1717 so that the debug info generation code can handle them properly. */
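
/* In outline, the sequence emitted below is (a summary of the code, not
   additional behavior):

     1. optionally copy the old sp into OLD_SP_REGNO when the frame
        pointer is needed and the frame is larger than 2048 bytes;
     2. subtract info->total_size from sp;
     3. if the frame pointer is needed, save the old fp and point the
        new fp at its save slot;
     4. save the frame-header registers (struct-return, LR, stdarg);
     5. save the remaining callee-saved registers (GPRs, FPRs, LCR);
     6. emit a blockage for stdarg functions and, for PIC, the PIC
        prologue sequence.  */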
1718 void
1719 frv_expand_prologue (void)
1720 {
1721 frv_stack_t *info = frv_stack_info ();
1722 rtx sp = stack_pointer_rtx;
1723 rtx fp = frame_pointer_rtx;
1724 frv_frame_accessor_t accessor;
1725
1726 if (TARGET_DEBUG_STACK)
1727 frv_debug_stack (info);
1728
1729 if (flag_stack_usage_info)
1730 current_function_static_stack_size = info->total_size;
1731
1732 if (info->total_size == 0)
1733 return;
1734
1735 /* We're interested in three areas of the frame here:
1736
1737 A: the register save area
1738 B: the old FP
1739 C: the header after B
1740
1741 If the frame pointer isn't used, we'll have to set up A, B and C
1742 using the stack pointer. If the frame pointer is used, we'll access
1743 them as follows:
1744
1745 A: set up using sp
1746 B: set up using sp or a temporary (see below)
1747 C: set up using fp
1748
1749 We set up B using the stack pointer if the frame is small enough.
1750 Otherwise, it's more efficient to copy the old stack pointer into a
1751 temporary and use that.
1752
1753 Note that it's important to make sure the prologue and epilogue use the
1754 same registers to access A and C, since doing otherwise will confuse
1755 the aliasing code. */
1756
1757 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1758 isn't used, the same method will serve for C. */
1759 accessor.op = FRV_STORE;
1760 if (frame_pointer_needed && info->total_size > 2048)
1761 {
1762 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1763 accessor.base_offset = info->total_size;
1764 emit_insn (gen_movsi (accessor.base, sp));
1765 }
1766 else
1767 {
1768 accessor.base = stack_pointer_rtx;
1769 accessor.base_offset = 0;
1770 }
1771
1772 /* Allocate the stack space. */
1773 {
1774 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1775 rtx dwarf_offset = GEN_INT (-info->total_size);
1776
1777 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1778 gen_rtx_SET (sp, gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1779 }
1780
1781 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1782 and point the new one to that location. */
1783 if (frame_pointer_needed)
1784 {
1785 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1786
1787 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1788 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1789 pointer. */
1790 rtx asm_src = plus_constant (Pmode, accessor.base,
1791 fp_offset - accessor.base_offset);
1792 rtx dwarf_src = plus_constant (Pmode, sp, fp_offset);
1793
1794 /* Store the old frame pointer at (sp + FP_OFFSET). */
1795 frv_frame_access (&accessor, fp, fp_offset);
1796
1797 /* Set up the new frame pointer. */
1798 frv_frame_insn (gen_rtx_SET (fp, asm_src),
1799 gen_rtx_SET (fp, dwarf_src));
1800
1801 /* Access region C from the frame pointer. */
1802 accessor.base = fp;
1803 accessor.base_offset = fp_offset;
1804 }
1805
1806 /* Set up region C. */
1807 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1808 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1809 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1810
1811 /* Set up region A. */
1812 frv_frame_access_standard_regs (FRV_STORE, info);
1813
1814 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1815 scheduler from moving loads before the stores saving the registers. */
1816 if (info->stdarg_size > 0)
1817 emit_insn (gen_blockage ());
1818
1819 /* Set up pic register/small data register for this function. */
1820 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1821 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1822 gen_rtx_REG (Pmode, LR_REGNO),
1823 gen_rtx_REG (SImode, OFFSET_REGNO)));
1824 }
1825
1826 \f
1827 /* Under frv, all of the work is done via frv_expand_epilogue, but
1828 this function provides a convenient place to do cleanup. */
1829
1830 static void
1831 frv_function_epilogue (FILE *)
1832 {
1833 frv_stack_cache = (frv_stack_t *)0;
1834
1835 /* Zap last used registers for conditional execution. */
1836 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1837
1838 /* Release the bitmap of created insns. */
1839 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1840 }
1841
1842 \f
1843 /* Called after register allocation to add any instructions needed for the
1844 epilogue. Using an epilogue insn is preferred to putting all of the
1845 instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1846 it allows the scheduler to intermix instructions with the restores of
1847 the callee-saved registers. In some cases, it might be necessary
1848 to emit a barrier instruction as the last insn to prevent such
1849 scheduling. */
1850
1851 void
1852 frv_expand_epilogue (bool emit_return)
1853 {
1854 frv_stack_t *info = frv_stack_info ();
1855 rtx fp = frame_pointer_rtx;
1856 rtx sp = stack_pointer_rtx;
1857 rtx return_addr;
1858 int fp_offset;
1859
1860 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1861
1862 /* Restore the stack pointer to its original value if alloca or the like
1863 is used. */
1864 if (! crtl->sp_is_unchanging)
1865 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1866
1867 /* Restore the callee-saved registers that were used in this function. */
1868 frv_frame_access_standard_regs (FRV_LOAD, info);
1869
1870 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1871 no return instruction should be emitted. */
1872 if (info->save_p[LR_REGNO])
1873 {
1874 int lr_offset;
1875 rtx mem;
1876
1877 /* Use the same method to access the link register's slot as we did in
1878 the prologue. In other words, use the frame pointer if available,
1879 otherwise use the stack pointer.
1880
1881 LR_OFFSET is the offset of the link register's slot from the start
1882 of the frame and MEM is a memory rtx for it. */
1883 lr_offset = info->reg_offset[LR_REGNO];
1884 if (frame_pointer_needed)
1885 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1886 else
1887 mem = frv_frame_mem (Pmode, sp, lr_offset);
1888
1889 /* Load the old link register into a GPR. */
1890 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1891 emit_insn (gen_rtx_SET (return_addr, mem));
1892 }
1893 else
1894 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1895
1896 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1897 the load is preserved. */
1898 if (frame_pointer_needed)
1899 {
1900 emit_insn (gen_rtx_SET (fp, gen_rtx_MEM (Pmode, fp)));
1901 emit_use (fp);
1902 }
1903
1904 /* Deallocate the stack frame. */
1905 if (info->total_size != 0)
1906 {
1907 rtx offset = frv_frame_offset_rtx (info->total_size);
1908 emit_insn (gen_stack_adjust (sp, sp, offset));
1909 }
1910
1911 /* If this function uses eh_return, add the final stack adjustment now. */
1912 if (crtl->calls_eh_return)
1913 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1914
1915 if (emit_return)
1916 emit_jump_insn (gen_epilogue_return (return_addr));
1917 else
1918 {
1919 rtx lr = return_addr;
1920
1921 if (REGNO (return_addr) != LR_REGNO)
1922 {
1923 lr = gen_rtx_REG (Pmode, LR_REGNO);
1924 emit_move_insn (lr, return_addr);
1925 }
1926
1927 emit_use (lr);
1928 }
1929 }
1930
1931 \f
1932 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
1933
1934 static void
1935 frv_asm_output_mi_thunk (FILE *file,
1936 tree thunk_fndecl,
1937 HOST_WIDE_INT delta,
1938 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1939 tree function)
1940 {
1941 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1942 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
1943 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
1944 const char *name_jmp = reg_names[JUMP_REGNO];
1945 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
1946
1947 assemble_start_function (thunk_fndecl, fnname);
1948
1949 /* Do the add using an addi if possible. */
1950 if (IN_RANGE (delta, -2048, 2047))
1951 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
1952 else
1953 {
1954 const char *const name_add = reg_names[TEMP_REGNO];
1955 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1956 parallel, delta, name_add);
1957 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1958 delta, name_add);
1959 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
1960 }
1961
1962 if (TARGET_FDPIC)
1963 {
1964 const char *name_pic = reg_names[FDPIC_REGNO];
1965 name_jmp = reg_names[FDPIC_FPTR_REGNO];
1966
1967 if (flag_pic != 1)
1968 {
1969 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
1970 assemble_name (file, name_func);
1971 fprintf (file, "),%s\n", name_jmp);
1972
1973 fprintf (file, "\tsetlo #gotofffuncdesclo(");
1974 assemble_name (file, name_func);
1975 fprintf (file, "),%s\n", name_jmp);
1976
1977 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
1978 }
1979 else
1980 {
1981 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
1982 assemble_name (file, name_func);
1983 fprintf (file, "\t)), %s\n", name_jmp);
1984 }
1985 }
1986 else if (!flag_pic)
1987 {
1988 fprintf (file, "\tsethi%s #hi(", parallel);
1989 assemble_name (file, name_func);
1990 fprintf (file, "),%s\n", name_jmp);
1991
1992 fprintf (file, "\tsetlo #lo(");
1993 assemble_name (file, name_func);
1994 fprintf (file, "),%s\n", name_jmp);
1995 }
1996 else
1997 {
1998 /* Use JUMP_REGNO as a temporary PIC register. */
1999 const char *name_lr = reg_names[LR_REGNO];
2000 const char *name_gppic = name_jmp;
2001 const char *name_tmp = reg_names[TEMP_REGNO];
2002
2003 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2004 fprintf (file, "\tcall 1f\n");
2005 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2006 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2007 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2008 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2009 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2010
2011 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2012 assemble_name (file, name_func);
2013 fprintf (file, "),%s\n", name_tmp);
2014
2015 fprintf (file, "\tsetlo #gprello(");
2016 assemble_name (file, name_func);
2017 fprintf (file, "),%s\n", name_tmp);
2018
2019 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2020 }
2021
2022 /* Jump to the function address. */
2023 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2024 assemble_end_function (thunk_fndecl, fnname);
2025 }
2026
2027 \f
2028
2029 /* On frv, create a frame whenever we need to allocate stack space. */
2030
2031 static bool
2032 frv_frame_pointer_required (void)
2033 {
2034 /* If we are forgoing the usual linkage requirements, we only need
2035 a frame pointer if the stack pointer might change. */
2036 if (!TARGET_LINKED_FP)
2037 return !crtl->sp_is_unchanging;
2038
2039 if (! crtl->is_leaf)
2040 return true;
2041
2042 if (get_frame_size () != 0)
2043 return true;
2044
2045 if (cfun->stdarg)
2046 return true;
2047
2048 if (!crtl->sp_is_unchanging)
2049 return true;
2050
2051 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2052 return true;
2053
2054 if (profile_flag)
2055 return true;
2056
2057 if (cfun->machine->frame_needed)
2058 return true;
2059
2060 return false;
2061 }
2062
2063 \f
2064 /* Worker function for TARGET_CAN_ELIMINATE. */
2065
2066 bool
2067 frv_can_eliminate (const int from, const int to)
2068 {
2069 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2070 ? ! frame_pointer_needed
2071 : true);
2072 }
2073
2074 /* This function returns the initial difference between the specified
2075 pair of registers. */
2076
2077 /* See frv_stack_info for more details on the frv stack frame. */
2078
2079 int
2080 frv_initial_elimination_offset (int from, int to)
2081 {
2082 frv_stack_t *info = frv_stack_info ();
2083 int ret = 0;
2084
2085 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2086 ret = info->total_size - info->pretend_size;
2087
2088 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2089 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2090
2091 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2092 ret = (info->total_size
2093 - info->reg_offset[FRAME_POINTER_REGNUM]
2094 - info->pretend_size);
2095
2096 else
2097 gcc_unreachable ();
2098
2099 if (TARGET_DEBUG_STACK)
2100 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2101 reg_names [from], reg_names[to], ret);
2102
2103 return ret;
2104 }
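
/* Note that, by construction, the three offsets above are consistent:

     (AP -> SP) = total_size - pretend_size
                = (total_size - reg_offset[FP] - pretend_size)
                  + reg_offset[FP]
                = (AP -> FP) + (FP -> SP)

   which is what the elimination machinery expects.  */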
2105
2106 \f
2107 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2108
2109 static void
2110 frv_setup_incoming_varargs (cumulative_args_t cum_v,
2111 const function_arg_info &arg,
2112 int *pretend_size,
2113 int second_time)
2114 {
2115 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2116
2117 if (TARGET_DEBUG_ARG)
2118 fprintf (stderr,
2119 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2120 *cum, GET_MODE_NAME (arg.mode), *pretend_size, second_time);
2121 }
2122
2123 \f
2124 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2125
2126 static rtx
2127 frv_expand_builtin_saveregs (void)
2128 {
2129 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2130
2131 if (TARGET_DEBUG_ARG)
2132 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2133 offset);
2134
2135 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2136 }
2137
2138 \f
2139 /* Expand __builtin_va_start to implement the va_start macro. */
2140
2141 static void
2142 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2143 {
2144 tree t;
2145 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2146
2147 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2148 GEN_INT (UNITS_PER_WORD * num));
2149
2150 if (TARGET_DEBUG_ARG)
2151 {
2152 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2153 crtl->args.info, num);
2154
2155 debug_rtx (nextarg);
2156 }
2157
2158 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2159 fold_convert (TREE_TYPE (valist),
2160 make_tree (sizetype, nextarg)));
2161 TREE_SIDE_EFFECTS (t) = 1;
2162
2163 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2164 }
2165
2166 \f
2167 /* Expand a block move operation, and return 1 if successful. Return 0
2168 if we should let the compiler generate normal code.
2169
2170 operands[0] is the destination
2171 operands[1] is the source
2172 operands[2] is the length
2173 operands[3] is the alignment */
2174
2175 /* Maximum number of loads to do before doing the stores. */
2176 #ifndef MAX_MOVE_REG
2177 #define MAX_MOVE_REG 4
2178 #endif
2179
2180 /* Maximum number of total loads to do. */
2181 #ifndef TOTAL_MOVE_REG
2182 #define TOTAL_MOVE_REG 8
2183 #endif
2184
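/* A rough worked example (not verified against actual output): copying
   12 bytes with 4-byte alignment generates three SImode loads into fresh
   pseudos followed by three SImode stores; since MAX_MOVE_REG is 4, the
   stores are only flushed after the loop.  A 3-byte copy with alignment
   1 degenerates into three QImode load/store pairs.  */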
2185 int
2186 frv_expand_block_move (rtx operands[])
2187 {
2188 rtx orig_dest = operands[0];
2189 rtx orig_src = operands[1];
2190 rtx bytes_rtx = operands[2];
2191 rtx align_rtx = operands[3];
2192 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2193 int align;
2194 int bytes;
2195 int offset;
2196 int num_reg;
2197 int i;
2198 rtx src_reg;
2199 rtx dest_reg;
2200 rtx src_addr;
2201 rtx dest_addr;
2202 rtx src_mem;
2203 rtx dest_mem;
2204 rtx tmp_reg;
2205 rtx stores[MAX_MOVE_REG];
2206 int move_bytes;
2207 machine_mode mode;
2208
2209 /* If this is not a fixed size move, just call memcpy. */
2210 if (! constp)
2211 return FALSE;
2212
2213 /* This should be a fixed size alignment. */
2214 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2215
2216 align = INTVAL (align_rtx);
2217
2218 /* Anything to move? */
2219 bytes = INTVAL (bytes_rtx);
2220 if (bytes <= 0)
2221 return TRUE;
2222
2223 /* Don't support really large moves. */
2224 if (bytes > TOTAL_MOVE_REG*align)
2225 return FALSE;
2226
2227 /* Move the address into scratch registers. */
2228 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2229 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2230
2231 num_reg = offset = 0;
2232 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2233 {
2234 /* Calculate the correct offset for src/dest. */
2235 if (offset == 0)
2236 {
2237 src_addr = src_reg;
2238 dest_addr = dest_reg;
2239 }
2240 else
2241 {
2242 src_addr = plus_constant (Pmode, src_reg, offset);
2243 dest_addr = plus_constant (Pmode, dest_reg, offset);
2244 }
2245
2246 /* Generate the appropriate load and store, saving the stores
2247 for later. */
2248 if (bytes >= 4 && align >= 4)
2249 mode = SImode;
2250 else if (bytes >= 2 && align >= 2)
2251 mode = HImode;
2252 else
2253 mode = QImode;
2254
2255 move_bytes = GET_MODE_SIZE (mode);
2256 tmp_reg = gen_reg_rtx (mode);
2257 src_mem = change_address (orig_src, mode, src_addr);
2258 dest_mem = change_address (orig_dest, mode, dest_addr);
2259 emit_insn (gen_rtx_SET (tmp_reg, src_mem));
2260 stores[num_reg++] = gen_rtx_SET (dest_mem, tmp_reg);
2261
2262 if (num_reg >= MAX_MOVE_REG)
2263 {
2264 for (i = 0; i < num_reg; i++)
2265 emit_insn (stores[i]);
2266 num_reg = 0;
2267 }
2268 }
2269
2270 for (i = 0; i < num_reg; i++)
2271 emit_insn (stores[i]);
2272
2273 return TRUE;
2274 }
2275
2276 \f
2277 /* Expand a block clear operation, and return 1 if successful. Return 0
2278 if we should let the compiler generate normal code.
2279
2280 operands[0] is the destination
2281 operands[1] is the length
2282 operands[3] is the alignment */
2283
2284 int
2285 frv_expand_block_clear (rtx operands[])
2286 {
2287 rtx orig_dest = operands[0];
2288 rtx bytes_rtx = operands[1];
2289 rtx align_rtx = operands[3];
2290 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2291 int align;
2292 int bytes;
2293 int offset;
2294 rtx dest_reg;
2295 rtx dest_addr;
2296 rtx dest_mem;
2297 int clear_bytes;
2298 machine_mode mode;
2299
2300 /* If this is not a fixed size clear, just call memset. */
2301 if (! constp)
2302 return FALSE;
2303
2304 /* This should be a fixed size alignment. */
2305 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2306
2307 align = INTVAL (align_rtx);
2308
2309 /* Anything to clear? */
2310 bytes = INTVAL (bytes_rtx);
2311 if (bytes <= 0)
2312 return TRUE;
2313
2314 /* Don't support really large clears. */
2315 if (bytes > TOTAL_MOVE_REG*align)
2316 return FALSE;
2317
2318 /* Move the address into a scratch register. */
2319 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2320
2321 offset = 0;
2322 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2323 {
2324 /* Calculate the correct offset for src/dest. */
2325 dest_addr = ((offset == 0)
2326 ? dest_reg
2327 : plus_constant (Pmode, dest_reg, offset));
2328
2329 /* Generate the appropriate store of gr0. */
2330 if (bytes >= 4 && align >= 4)
2331 mode = SImode;
2332 else if (bytes >= 2 && align >= 2)
2333 mode = HImode;
2334 else
2335 mode = QImode;
2336
2337 clear_bytes = GET_MODE_SIZE (mode);
2338 dest_mem = change_address (orig_dest, mode, dest_addr);
2339 emit_insn (gen_rtx_SET (dest_mem, const0_rtx));
2340 }
2341
2342 return TRUE;
2343 }
2344
2345 \f
2346 /* The following variable holds the operands of the current output insn;
2347 it is used when outputting modifiers in the assembler code. */
2348
2349 static rtx *frv_insn_operands;
2350
2351 /* The following function adds the assembler insn code suffix .p
2352 when it is necessary. */
2353
2354 const char *
2355 frv_asm_output_opcode (FILE *f, const char *ptr)
2356 {
2357 int c;
2358
2359 if (frv_insn_packing_flag <= 0)
2360 return ptr;
2361
2362 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2363 {
2364 c = *ptr++;
2365 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2366 || (*ptr >= 'A' && *ptr <= 'Z')))
2367 {
2368 int letter = *ptr++;
2369
2370 c = atoi (ptr);
2371 frv_print_operand (f, frv_insn_operands [c], letter);
2372 while ((c = *ptr) >= '0' && c <= '9')
2373 ptr++;
2374 }
2375 else
2376 fputc (c, f);
2377 }
2378
2379 fprintf (f, ".p");
2380
2381 return ptr;
2382 }
2383
2384 /* Set up the packing bit for the current output insn. Note that this
2385 function is not called for asm insns. */
2386
2387 void
2388 frv_final_prescan_insn (rtx_insn *insn, rtx *opvec,
2389 int noperands ATTRIBUTE_UNUSED)
2390 {
2391 if (INSN_P (insn))
2392 {
2393 if (frv_insn_packing_flag >= 0)
2394 {
2395 frv_insn_operands = opvec;
2396 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2397 }
2398 else if (recog_memoized (insn) >= 0
2399 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2400 /* Packing optimizations have been disabled, but INSN can only
2401 be issued in M1. Insert an mnop in M0. */
2402 fprintf (asm_out_file, "\tmnop.p\n");
2403 }
2404 }
2405
2406
2407 \f
2408 /* A C expression whose value is RTL representing the address in a stack frame
2409 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2410 an RTL expression for the address of the stack frame itself.
2411
2412 If you don't define this macro, the default is to return the value of
2413 FRAMEADDR--that is, the stack frame address is also the address of the stack
2414 word that points to the previous frame. */
2415
2416 /* The default is correct, but we need to make sure the frame gets created. */
2417 rtx
2418 frv_dynamic_chain_address (rtx frame)
2419 {
2420 cfun->machine->frame_needed = 1;
2421 return frame;
2422 }
2423
2424
2425 /* A C expression whose value is RTL representing the value of the return
2426 address for the frame COUNT steps up from the current frame, after the
2427 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2428 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2429 defined.
2430
2431 The value of the expression must always be the correct address when COUNT is
2432 zero, but may be `NULL_RTX' if there is no way to determine the return
2433 address of other frames. */
2434
2435 rtx
2436 frv_return_addr_rtx (int count, rtx frame)
2437 {
2438 if (count != 0)
2439 return const0_rtx;
2440 cfun->machine->frame_needed = 1;
2441 return gen_rtx_MEM (Pmode, plus_constant (Pmode, frame, 8));
2442 }
2443
2444 /* Given a memory reference MEMREF, interpret the referenced memory as
2445 an array of MODE values, and return a reference to the element
2446 specified by INDEX. Assume that any pre-modification implicit in
2447 MEMREF has already happened.
2448
2449 MEMREF must be a legitimate operand for modes larger than SImode.
2450 frv_legitimate_address_p forbids register+register addresses, which
2451 this function cannot handle. */
2452 rtx
2453 frv_index_memory (rtx memref, machine_mode mode, int index)
2454 {
2455 rtx base = XEXP (memref, 0);
2456 if (GET_CODE (base) == PRE_MODIFY)
2457 base = XEXP (base, 0);
2458 return change_address (memref, mode,
2459 plus_constant (Pmode, base,
2460 index * GET_MODE_SIZE (mode)));
2461 }
2462
2463 \f
2464 /* Print a memory address as an operand to reference that memory location. */
2465 static void
2466 frv_print_operand_address (FILE * stream, machine_mode /* mode */, rtx x)
2467 {
2468 if (GET_CODE (x) == MEM)
2469 x = XEXP (x, 0);
2470
2471 switch (GET_CODE (x))
2472 {
2473 case REG:
2474 fputs (reg_names [ REGNO (x)], stream);
2475 return;
2476
2477 case CONST_INT:
2478 fprintf (stream, "%ld", (long) INTVAL (x));
2479 return;
2480
2481 case SYMBOL_REF:
2482 assemble_name (stream, XSTR (x, 0));
2483 return;
2484
2485 case LABEL_REF:
2486 case CONST:
2487 output_addr_const (stream, x);
2488 return;
2489
2490 case PLUS:
2491 /* Poorly constructed asm statements can trigger this alternative.
2492 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2493 frv_print_operand_memory_reference (stream, x, 0);
2494 return;
2495
2496 default:
2497 break;
2498 }
2499
2500 fatal_insn ("bad insn to frv_print_operand_address:", x);
2501 }
2502
2503 \f
2504 static void
2505 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2506 {
2507 int regno = true_regnum (x);
2508 if (GPR_P (regno))
2509 fputs (reg_names[regno], stream);
2510 else
2511 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2512 }
2513
2514 /* Print a memory reference suitable for the ld/st instructions. */
2515
2516 static void
2517 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2518 {
2519 struct frv_unspec unspec;
2520 rtx x0 = NULL_RTX;
2521 rtx x1 = NULL_RTX;
2522
2523 switch (GET_CODE (x))
2524 {
2525 case SUBREG:
2526 case REG:
2527 x0 = x;
2528 break;
2529
2530 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2531 x0 = XEXP (x, 0);
2532 x1 = XEXP (XEXP (x, 1), 1);
2533 break;
2534
2535 case CONST_INT:
2536 x1 = x;
2537 break;
2538
2539 case PLUS:
2540 x0 = XEXP (x, 0);
2541 x1 = XEXP (x, 1);
2542 if (GET_CODE (x0) == CONST_INT)
2543 {
2544 x0 = XEXP (x, 1);
2545 x1 = XEXP (x, 0);
2546 }
2547 break;
2548
2549 default:
2550 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2551 break;
2552
2553 }
2554
2555 if (addr_offset)
2556 {
2557 if (!x1)
2558 x1 = const0_rtx;
2559 else if (GET_CODE (x1) != CONST_INT)
2560 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2561 }
2562
2563 fputs ("@(", stream);
2564 if (!x0)
2565 fputs (reg_names[GPR_R0], stream);
2566 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2567 frv_print_operand_memory_reference_reg (stream, x0);
2568 else
2569 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2570
2571 fputs (",", stream);
2572 if (!x1)
2573 fputs (reg_names [GPR_R0], stream);
2574
2575 else
2576 {
2577 switch (GET_CODE (x1))
2578 {
2579 case SUBREG:
2580 case REG:
2581 frv_print_operand_memory_reference_reg (stream, x1);
2582 break;
2583
2584 case CONST_INT:
2585 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2586 break;
2587
2588 case CONST:
2589 if (!frv_const_unspec_p (x1, &unspec))
2590 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2591 frv_output_const_unspec (stream, &unspec);
2592 break;
2593
2594 default:
2595 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2596 }
2597 }
2598
2599 fputs (")", stream);
2600 }
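
/* For example (assuming the usual "grN" register names): the address
   (plus (reg gr4) (const_int 8)) prints as "@(gr4,8)", a lone register
   prints as "@(gr4,gr0)", and a lone 12-bit constant prints as
   "@(gr0,8)".  */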
2601
2602 \f
2603 /* Return 2 for likely branches and 0 for non-likely branches. */
2604
2605 #define FRV_JUMP_LIKELY 2
2606 #define FRV_JUMP_NOT_LIKELY 0
2607
2608 static int
2609 frv_print_operand_jump_hint (rtx_insn *insn)
2610 {
2611 rtx note;
2612 rtx labelref;
2613 int ret;
2614 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2615
2616 gcc_assert (JUMP_P (insn));
2617
2618 /* Assume any non-conditional jump is likely. */
2619 if (! any_condjump_p (insn))
2620 ret = FRV_JUMP_LIKELY;
2621
2622 else
2623 {
2624 labelref = condjump_label (insn);
2625 if (labelref)
2626 {
2627 rtx label = XEXP (labelref, 0);
2628 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2629 ? BACKWARD
2630 : FORWARD);
2631 }
2632
2633 note = find_reg_note (insn, REG_BR_PROB, 0);
2634 if (!note)
2635 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2636
2637 else
2638 {
2639 ret = ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
2640 >= profile_probability::even ())
2641 ? FRV_JUMP_LIKELY
2642 : FRV_JUMP_NOT_LIKELY);
2643 }
2644 }
2645
2646 #if 0
2647 if (TARGET_DEBUG)
2648 {
2649 char *direction;
2650
2651 switch (jump_type)
2652 {
2653 default:
2654 case UNKNOWN: direction = "unknown jump direction"; break;
2655 case BACKWARD: direction = "jump backward"; break;
2656 case FORWARD: direction = "jump forward"; break;
2657 }
2658
2659 fprintf (stderr,
2660 "%s: uid %ld, %s, probability = %d, max prob. = %d, hint = %d\n",
2661 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2662 (long)INSN_UID (insn), direction, prob,
2663 REG_BR_PROB_BASE, ret);
2664 }
2665 #endif
2666
2667 return ret;
2668 }
2669
2670 \f
2671 /* Return the comparison operator to use for CODE given that the ICC
2672 register is OP0. */
2673
2674 static const char *
2675 comparison_string (enum rtx_code code, rtx op0)
2676 {
2677 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2678 switch (code)
2679 {
2680 default: output_operand_lossage ("bad condition code"); return "";
2681 case EQ: return "eq";
2682 case NE: return "ne";
2683 case LT: return is_nz_p ? "n" : "lt";
2684 case LE: return "le";
2685 case GT: return "gt";
2686 case GE: return is_nz_p ? "p" : "ge";
2687 case LTU: return is_nz_p ? "no" : "c";
2688 case LEU: return is_nz_p ? "eq" : "ls";
2689 case GTU: return is_nz_p ? "ne" : "hi";
2690 case GEU: return is_nz_p ? "ra" : "nc";
2691 }
2692 }
2693
2694 /* Print an operand to an assembler instruction.
2695
2696 `%' followed by a letter and a digit says to output an operand in an
2697 alternate fashion. Four letters have standard, built-in meanings
2698 described below. The hook `TARGET_PRINT_OPERAND' can define
2699 additional letters with nonstandard meanings.
2700
2701 `%cDIGIT' can be used to substitute an operand that is a constant value
2702 without the syntax that normally indicates an immediate operand.
2703
2704 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2705 before printing.
2706
2707 `%aDIGIT' can be used to substitute an operand as if it were a memory
2708 reference, with the actual operand treated as the address. This may be
2709 useful when outputting a "load address" instruction, because often the
2710 assembler syntax for such an instruction requires you to write the operand
2711 as if it were a memory reference.
2712
2713 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2714
2715 `%=' outputs a number which is unique to each instruction in the entire
2716 compilation. This is useful for making local labels to be referred to more
2717 than once in a single template that generates multiple assembler
2718 instructions.
2719
2720 `%' followed by a punctuation character specifies a substitution that
2721 does not use an operand. Only one case is standard: `%%' outputs a
2722 `%' into the assembler code. Other nonstandard cases can be defined
2723 in the `TARGET_PRINT_OPERAND' hook. You must also define which
2724 punctuation characters are valid with the
2725 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */
2726
2727 static void
2728 frv_print_operand (FILE * file, rtx x, int code)
2729 {
2730 struct frv_unspec unspec;
2731 HOST_WIDE_INT value;
2732 int offset;
2733
2734 if (code != 0 && !ISALPHA (code))
2735 value = 0;
2736
2737 else if (GET_CODE (x) == CONST_INT)
2738 value = INTVAL (x);
2739
2740 else if (GET_CODE (x) == CONST_DOUBLE)
2741 {
2742 if (GET_MODE (x) == SFmode)
2743 {
2744 long l;
2745
2746 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2747 value = l;
2748 }
2749
2750 else if (GET_MODE (x) == VOIDmode)
2751 value = CONST_DOUBLE_LOW (x);
2752
2753 else
2754 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2755 }
2756
2757 else
2758 value = 0;
2759
2760 switch (code)
2761 {
2762
2763 case '.':
2764 /* Output r0. */
2765 fputs (reg_names[GPR_R0], file);
2766 break;
2767
2768 case '#':
2769 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2770 break;
2771
2772 case '@':
2773 /* Output small data area base register (gr16). */
2774 fputs (reg_names[SDA_BASE_REG], file);
2775 break;
2776
2777 case '~':
2778 /* Output pic register (gr17). */
2779 fputs (reg_names[PIC_REGNO], file);
2780 break;
2781
2782 case '*':
2783 /* Output the temporary integer CCR register. */
2784 fputs (reg_names[ICR_TEMP], file);
2785 break;
2786
2787 case '&':
2788 /* Output the temporary integer CC register. */
2789 fputs (reg_names[ICC_TEMP], file);
2790 break;
2791
2792 /* case 'a': print an address. */
2793
2794 case 'C':
2795 /* Print appropriate test for integer branch false operation. */
2796 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2797 XEXP (x, 0)), file);
2798 break;
2799
2800 case 'c':
2801 /* Print appropriate test for integer branch true operation. */
2802 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2803 break;
2804
2805 case 'e':
2806 /* Print 1 for a NE and 0 for an EQ to give the final argument
2807 for a conditional instruction. */
2808 if (GET_CODE (x) == NE)
2809 fputs ("1", file);
2810
2811 else if (GET_CODE (x) == EQ)
2812 fputs ("0", file);
2813
2814 else
2815 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2816 break;
2817
2818 case 'F':
2819 /* Print appropriate test for floating point branch false operation. */
2820 switch (GET_CODE (x))
2821 {
2822 default:
2823 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2824
2825 case EQ: fputs ("ne", file); break;
2826 case NE: fputs ("eq", file); break;
2827 case LT: fputs ("uge", file); break;
2828 case LE: fputs ("ug", file); break;
2829 case GT: fputs ("ule", file); break;
2830 case GE: fputs ("ul", file); break;
2831 }
2832 break;
2833
2834 case 'f':
2835 /* Print appropriate test for floating point branch true operation. */
2836 switch (GET_CODE (x))
2837 {
2838 default:
2839 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2840
2841 case EQ: fputs ("eq", file); break;
2842 case NE: fputs ("ne", file); break;
2843 case LT: fputs ("lt", file); break;
2844 case LE: fputs ("le", file); break;
2845 case GT: fputs ("gt", file); break;
2846 case GE: fputs ("ge", file); break;
2847 }
2848 break;
2849
2850 case 'g':
2851 /* Print appropriate GOT function. */
2852 if (GET_CODE (x) != CONST_INT)
2853 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2854 fputs (unspec_got_name (INTVAL (x)), file);
2855 break;
2856
2857 case 'I':
2858 /* Print 'i' if the operand is a constant, or is a memory reference that
2859 adds a constant. */
2860 if (GET_CODE (x) == MEM)
2861 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2862 ? XEXP (XEXP (x, 0), 1)
2863 : XEXP (x, 0));
2864 else if (GET_CODE (x) == PLUS)
2865 x = XEXP (x, 1);
2866
2867 switch (GET_CODE (x))
2868 {
2869 default:
2870 break;
2871
2872 case CONST_INT:
2873 case SYMBOL_REF:
2874 case CONST:
2875 fputs ("i", file);
2876 break;
2877 }
2878 break;
2879
2880 case 'i':
2881 /* For jump instructions, print 'i' if the operand is a constant or
2882 is an expression that adds a constant. */
2883 if (GET_CODE (x) == CONST_INT)
2884 fputs ("i", file);
2885
2886 else
2887 {
2888 if (GET_CODE (x) == CONST_INT
2889 || (GET_CODE (x) == PLUS
2890 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2891 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2892 fputs ("i", file);
2893 }
2894 break;
2895
2896 case 'L':
2897 /* Print the lower register of a double word register pair. */
2898 if (GET_CODE (x) == REG)
2899 fputs (reg_names[ REGNO (x)+1 ], file);
2900 else
2901 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2902 break;
2903
2904 /* case 'l': print a LABEL_REF. */
2905
2906 case 'M':
2907 case 'N':
2908 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
2909 for the second word of double memory operations. */
2910 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
2911 switch (GET_CODE (x))
2912 {
2913 default:
2914 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
2915
2916 case MEM:
2917 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
2918 break;
2919
2920 case REG:
2921 case SUBREG:
2922 case CONST_INT:
2923 case PLUS:
2924 case SYMBOL_REF:
2925 frv_print_operand_memory_reference (file, x, offset);
2926 break;
2927 }
2928 break;
2929
2930 case 'O':
2931 /* Print the opcode of an arithmetic or logical operation. */
2932 switch (GET_CODE (x))
2933 {
2934 default:
2935 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
2936
2937 case PLUS: fputs ("add", file); break;
2938 case MINUS: fputs ("sub", file); break;
2939 case AND: fputs ("and", file); break;
2940 case IOR: fputs ("or", file); break;
2941 case XOR: fputs ("xor", file); break;
2942 case ASHIFT: fputs ("sll", file); break;
2943 case ASHIFTRT: fputs ("sra", file); break;
2944 case LSHIFTRT: fputs ("srl", file); break;
2945 }
2946 break;
2947
2948 /* case 'n': negate and print a constant int. */
2949
2950 case 'P':
2951 /* Print PIC label using operand as the number. */
2952 if (GET_CODE (x) != CONST_INT)
2953 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
2954
2955 fprintf (file, ".LCF%ld", (long)INTVAL (x));
2956 break;
2957
2958 case 'U':
2959 /* Print 'u' if the operand is an update load/store. */
2960 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
2961 fputs ("u", file);
2962 break;
2963
2964 case 'z':
2965 /* If value is 0, print gr0, otherwise it must be a register. */
2966 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
2967 fputs (reg_names[GPR_R0], file);
2968
2969 else if (GET_CODE (x) == REG)
2970 fputs (reg_names [REGNO (x)], file);
2971
2972 else
2973 fatal_insn ("bad insn in frv_print_operand, z case", x);
2974 break;
2975
2976 case 'x':
2977 /* Print constant in hex. */
2978 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2979 {
2980 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
2981 break;
2982 }
2983
2984 /* Fall through. */
2985
2986 case '\0':
2987 if (GET_CODE (x) == REG)
2988 fputs (reg_names [REGNO (x)], file);
2989
2990 else if (GET_CODE (x) == CONST_INT
2991 || GET_CODE (x) == CONST_DOUBLE)
2992 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
2993
2994 else if (frv_const_unspec_p (x, &unspec))
2995 frv_output_const_unspec (file, &unspec);
2996
2997 else if (GET_CODE (x) == MEM)
2998 frv_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
2999
3000 else if (CONSTANT_ADDRESS_P (x))
3001 frv_print_operand_address (file, VOIDmode, x);
3002
3003 else
3004 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3005
3006 break;
3007
3008 default:
3009 fatal_insn ("frv_print_operand: unknown code", x);
3010 break;
3011 }
3012
3013 return;
3014 }
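
/* As a small illustration of the modifiers above (hypothetical, not taken
   from frv.md): given an operand that may be either a register or
   (const_int 0), writing it as "%z2" in an output template prints the
   register name, or "gr0" when the operand is the constant zero, so no
   separate zero-register alternative is needed.  */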
3015
3016 static bool
3017 frv_print_operand_punct_valid_p (unsigned char code)
3018 {
3019 return (code == '.' || code == '#' || code == '@' || code == '~'
3020 || code == '*' || code == '&');
3021 }
3022
3023 \f
3024 /* A C statement (sans semicolon) for initializing the variable CUM for the
3025 state at the beginning of the argument list. The variable has type
3026 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3027 of the function which will receive the args, or 0 if the args are to a
3028 compiler support library function. The value of INDIRECT is nonzero when
3029 processing an indirect call, for example a call through a function pointer.
3030 The value of INDIRECT is zero for a call to an explicitly named function, a
3031 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3032 arguments for the function being compiled.
3033
3034 When processing a call to a compiler support library function, LIBNAME
3035 identifies which one. It is a `symbol_ref' rtx which contains the name of
3036 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3037 being processed. Thus, each time this macro is called, either LIBNAME or
3038 FNTYPE is nonzero, but never both of them at once. */
3039
3040 void
3041 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3042 tree fntype,
3043 rtx libname,
3044 tree fndecl,
3045 int incoming)
3046 {
3047 *cum = FIRST_ARG_REGNUM;
3048
3049 if (TARGET_DEBUG_ARG)
3050 {
3051 fprintf (stderr, "\ninit_cumulative_args:");
3052 if (!fndecl && fntype)
3053 fputs (" indirect", stderr);
3054
3055 if (incoming)
3056 fputs (" incoming", stderr);
3057
3058 if (fntype)
3059 {
3060 tree ret_type = TREE_TYPE (fntype);
3061 fprintf (stderr, " return=%s,",
3062 get_tree_code_name (TREE_CODE (ret_type)));
3063 }
3064
3065 if (libname && GET_CODE (libname) == SYMBOL_REF)
3066 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3067
3068 if (cfun->returns_struct)
3069 fprintf (stderr, " return-struct");
3070
3071 putc ('\n', stderr);
3072 }
3073 }
3074
3075 \f
3076 /* Return true if we should pass an argument on the stack rather than
3077 in registers. */
3078
3079 static bool
3080 frv_must_pass_in_stack (machine_mode mode, const_tree type)
3081 {
3082 if (mode == BLKmode)
3083 return true;
3084 if (type == NULL)
3085 return false;
3086 return AGGREGATE_TYPE_P (type);
3087 }
3088
3089 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3090 argument with the specified mode and type. If it is not defined,
3091 `PARM_BOUNDARY' is used for all arguments. */
3092
3093 static unsigned int
3094 frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
3095 const_tree type ATTRIBUTE_UNUSED)
3096 {
3097 return BITS_PER_WORD;
3098 }
3099
3100 static rtx
3101 frv_function_arg_1 (cumulative_args_t cum_v, const function_arg_info &arg,
3102 bool incoming ATTRIBUTE_UNUSED)
3103 {
3104 const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3105
3106 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3107 int arg_num = *cum;
3108 rtx ret;
3109 const char *debstr;
3110
3111 /* Return a marker for use in the call instruction. */
3112 if (xmode == VOIDmode)
3113 {
3114 ret = const0_rtx;
3115 debstr = "<0>";
3116 }
3117
3118 else if (arg_num <= LAST_ARG_REGNUM)
3119 {
3120 ret = gen_rtx_REG (xmode, arg_num);
3121 debstr = reg_names[arg_num];
3122 }
3123
3124 else
3125 {
3126 ret = NULL_RTX;
3127 debstr = "memory";
3128 }
3129
3130 if (TARGET_DEBUG_ARG)
3131 fprintf (stderr,
3132 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3133 arg_num, GET_MODE_NAME (arg.mode), arg.named,
3134 GET_MODE_SIZE (arg.mode), debstr);
3135
3136 return ret;
3137 }
3138
3139 static rtx
3140 frv_function_arg (cumulative_args_t cum, const function_arg_info &arg)
3141 {
3142 return frv_function_arg_1 (cum, arg, false);
3143 }
3144
3145 static rtx
3146 frv_function_incoming_arg (cumulative_args_t cum, const function_arg_info &arg)
3147 {
3148 return frv_function_arg_1 (cum, arg, true);
3149 }
3150
3151 \f
3152 /* A C statement (sans semicolon) to update the summarizer variable CUM to
3153 advance past an argument in the argument list. The values MODE, TYPE and
3154 NAMED describe that argument. Once this is done, the variable CUM is
3155 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
3156
3157 This macro need not do anything if the argument in question was passed on
3158 the stack. The compiler knows how to track the amount of stack space used
3159 for arguments without any special help. */
3160
3161 static void
3162 frv_function_arg_advance (cumulative_args_t cum_v,
3163 machine_mode mode,
3164 const_tree type ATTRIBUTE_UNUSED,
3165 bool named)
3166 {
3167 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3168
3169 machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3170 int bytes = GET_MODE_SIZE (xmode);
3171 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3172 int arg_num = *cum;
3173
3174 *cum = arg_num + words;
3175
3176 if (TARGET_DEBUG_ARG)
3177 fprintf (stderr,
3178 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3179 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
3180 }
3181
3182 \f
3183 /* Implement TARGET_ARG_PARTIAL_BYTES. */
3184
3185 static int
3186 frv_arg_partial_bytes (cumulative_args_t cum, const function_arg_info &arg)
3187 {
3188 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3189 int bytes = GET_MODE_SIZE (xmode);
3190 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3191 int arg_num = *get_cumulative_args (cum);
3192 int ret;
3193
3194 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3195 ? LAST_ARG_REGNUM - arg_num + 1
3196 : 0);
3197 ret *= UNITS_PER_WORD;
3198
3199 if (TARGET_DEBUG_ARG && ret)
3200 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3201
3202 return ret;
3203 }
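
/* For example (a sketch): if a two-word (e.g. DImode) argument starts in
   the last argument register, only its first word fits in registers, so
   UNITS_PER_WORD bytes are reported as passed partially in registers and
   the rest of the argument goes on the stack.  */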
3204
3205 \f
3206 /* Implements TARGET_FUNCTION_VALUE. */
3207
3208 static rtx
3209 frv_function_value (const_tree valtype,
3210 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3211 bool outgoing ATTRIBUTE_UNUSED)
3212 {
3213 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM);
3214 }
3215
3216 \f
3217 /* Implements TARGET_LIBCALL_VALUE. */
3218
3219 static rtx
3220 frv_libcall_value (machine_mode mode,
3221 const_rtx fun ATTRIBUTE_UNUSED)
3222 {
3223 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3224 }
3225
3226 \f
3227 /* Implements FUNCTION_VALUE_REGNO_P. */
3228
3229 bool
3230 frv_function_value_regno_p (const unsigned int regno)
3231 {
3232 return (regno == RETURN_VALUE_REGNUM);
3233 }
3234 \f
3235 /* Return true if a register is ok to use as a base or index register. */
3236
3237 static FRV_INLINE int
3238 frv_regno_ok_for_base_p (int regno, int strict_p)
3239 {
3240 if (GPR_P (regno))
3241 return TRUE;
3242
3243 if (strict_p)
3244 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3245
3246 if (regno == ARG_POINTER_REGNUM)
3247 return TRUE;
3248
3249 return (regno >= FIRST_PSEUDO_REGISTER);
3250 }
3251
3252 \f
3253 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3254 RTX) is a legitimate memory address on the target machine for a memory
3255 operand of mode MODE.
3256
3257 It usually pays to define several simpler macros to serve as subroutines for
3258 this one. Otherwise it may be too complicated to understand.
3259
3260 This macro must exist in two variants: a strict variant and a non-strict
3261 one. The strict variant is used in the reload pass. It must be defined so
3262 that any pseudo-register that has not been allocated a hard register is
3263 considered a memory reference. In contexts where some kind of register is
3264 required, a pseudo-register with no hard register must be rejected.
3265
3266 The non-strict variant is used in other passes. It must be defined to
3267 accept all pseudo-registers in every context where some kind of register is
3268 required.
3269
3270 Compiler source files that want to use the strict variant of this macro
3271 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3272 conditional to define the strict variant in that case and the non-strict
3273 variant otherwise.
3274
3275 Normally, constant addresses which are the sum of a `symbol_ref' and an
3276 integer are stored inside a `const' RTX to mark them as constant.
3277 Therefore, there is no need to recognize such sums specifically as
3278 legitimate addresses. Normally you would simply recognize any `const' as
3279 legitimate.
3280
3281 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle
3282 constant sums that are not marked with `const'. It assumes that a
3283 naked `plus' indicates indexing. If so, then you *must* reject such
3284 naked constant sums as illegitimate addresses, so that none of them
3285 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
3286
3287 int
3288 frv_legitimate_address_p_1 (machine_mode mode,
3289 rtx x,
3290 int strict_p,
3291 int condexec_p,
3292 int allow_double_reg_p)
3293 {
3294 rtx x0, x1;
3295 int ret = 0;
3296 HOST_WIDE_INT value;
3297 unsigned regno0;
3298
3299 if (FRV_SYMBOL_REF_TLS_P (x))
3300 return 0;
3301
3302 switch (GET_CODE (x))
3303 {
3304 default:
3305 break;
3306
3307 case SUBREG:
3308 x = SUBREG_REG (x);
3309 if (GET_CODE (x) != REG)
3310 break;
3311
3312 /* Fall through. */
3313
3314 case REG:
3315 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3316 break;
3317
3318 case PRE_MODIFY:
3319 x0 = XEXP (x, 0);
3320 x1 = XEXP (x, 1);
3321 if (GET_CODE (x0) != REG
3322 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3323 || GET_CODE (x1) != PLUS
3324 || ! rtx_equal_p (x0, XEXP (x1, 0))
3325 || GET_CODE (XEXP (x1, 1)) != REG
3326 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3327 break;
3328
3329 ret = 1;
3330 break;
3331
3332 case CONST_INT:
3333 /* 12-bit immediate */
3334 if (condexec_p)
3335 ret = FALSE;
3336 else
3337 {
3338 ret = IN_RANGE (INTVAL (x), -2048, 2047);
3339
3340 /* If we can't use load/store double operations, make sure we can
3341 address the second word. */
3342 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3343 ret = IN_RANGE (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3344 -2048, 2047);
3345 }
3346 break;
3347
3348 case PLUS:
3349 x0 = XEXP (x, 0);
3350 x1 = XEXP (x, 1);
3351
3352 if (GET_CODE (x0) == SUBREG)
3353 x0 = SUBREG_REG (x0);
3354
3355 if (GET_CODE (x0) != REG)
3356 break;
3357
3358 regno0 = REGNO (x0);
3359 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3360 break;
3361
3362 switch (GET_CODE (x1))
3363 {
3364 default:
3365 break;
3366
3367 case SUBREG:
3368 x1 = SUBREG_REG (x1);
3369 if (GET_CODE (x1) != REG)
3370 break;
3371
3372 /* Fall through. */
3373
3374 case REG:
3375 /* Do not allow reg+reg addressing for modes > 1 word if we
3376 can't depend on having move double instructions. */
3377 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3378 ret = FALSE;
3379 else
3380 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3381 break;
3382
3383 case CONST_INT:
3384 /* 12-bit immediate */
3385 if (condexec_p)
3386 ret = FALSE;
3387 else
3388 {
3389 value = INTVAL (x1);
3390 ret = IN_RANGE (value, -2048, 2047);
3391
3392 /* If we can't use load/store double operations, make sure we can
3393 address the second word. */
3394 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3395 ret = IN_RANGE (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3396 }
3397 break;
3398
3399 case CONST:
3400 if (!condexec_p && got12_operand (x1, VOIDmode))
3401 ret = TRUE;
3402 break;
3403
3404 }
3405 break;
3406 }
3407
3408 if (TARGET_DEBUG_ADDR)
3409 {
3410 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n",
3411 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3412 (condexec_p) ? ", inside conditional code" : "");
3413 debug_rtx (x);
3414 }
3415
3416 return ret;
3417 }
3418
3419 bool
3420 frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
3421 {
3422 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
3423 }
3424
3425 /* Given an ADDR, generate code to inline the PLT. */
3426 static rtx
3427 gen_inlined_tls_plt (rtx addr)
3428 {
3429 rtx retval, dest;
3430 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3431
3432
3433 dest = gen_reg_rtx (DImode);
3434
3435 if (flag_pic == 1)
3436 {
3437 /*
3438 -fpic version:
3439
3440 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3441 calll #gettlsoff(ADDR)@(gr8, gr0)
3442 */
3443 emit_insn (gen_tls_lddi (dest, addr, picreg));
3444 }
3445 else
3446 {
3447 /*
3448 -fPIC version:
3449
3450 sethi.p #gottlsdeschi(ADDR), gr8
3451 setlo #gottlsdesclo(ADDR), gr8
3452 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3453 calll #gettlsoff(ADDR)@(gr8, gr0)
3454 */
3455 rtx reguse = gen_reg_rtx (Pmode);
3456 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3457 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3458 }
3459
3460 retval = gen_reg_rtx (Pmode);
3461 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3462 return retval;
3463 }
3464
3465 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3466 the destination address. */
3467 static rtx
3468 gen_tlsmoff (rtx addr, rtx reg)
3469 {
3470 rtx dest = gen_reg_rtx (Pmode);
3471
3472 if (TARGET_BIG_TLS)
3473 {
3474 /* sethi.p #tlsmoffhi(x), grA
3475 setlo #tlsmofflo(x), grA
3476 */
3477 dest = gen_reg_rtx (Pmode);
3478 emit_insn (gen_tlsoff_hilo (dest, addr,
3479 GEN_INT (R_FRV_TLSMOFFHI)));
3480 dest = gen_rtx_PLUS (Pmode, dest, reg);
3481 }
3482 else
3483 {
3484 /* addi grB, #tlsmoff12(x), grC
3485 -or-
3486 ld/st @(grB, #tlsmoff12(x)), grC
3487 */
3488 dest = gen_reg_rtx (Pmode);
3489 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3490 GEN_INT (R_FRV_TLSMOFF12)));
3491 }
3492 return dest;
3493 }
3494
3495 /* Generate code for a TLS address. */
3496 static rtx
3497 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3498 {
3499 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3500 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3501
3502 switch (model)
3503 {
3504 case TLS_MODEL_INITIAL_EXEC:
3505 if (flag_pic == 1)
3506 {
3507 /* -fpic version.
3508 ldi @(gr15, #gottlsoff12(x)), gr5
3509 */
3510 dest = gen_reg_rtx (Pmode);
3511 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3512 dest = gen_rtx_PLUS (Pmode, tp, dest);
3513 }
3514 else
3515 {
3516 /* -fPIC or anything else.
3517
3518 sethi.p #gottlsoffhi(x), gr14
3519 setlo #gottlsofflo(x), gr14
3520 ld #tlsoff(x)@(gr15, gr14), gr9
3521 */
3522 rtx tmp = gen_reg_rtx (Pmode);
3523 dest = gen_reg_rtx (Pmode);
3524 emit_insn (gen_tlsoff_hilo (tmp, addr,
3525 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3526
3527 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3528 dest = gen_rtx_PLUS (Pmode, tp, dest);
3529 }
3530 break;
3531 case TLS_MODEL_LOCAL_DYNAMIC:
3532 {
3533 rtx reg, retval;
3534
3535 if (TARGET_INLINE_PLT)
3536 retval = gen_inlined_tls_plt (GEN_INT (0));
3537 else
3538 {
3539 /* call #gettlsoff(0) */
3540 retval = gen_reg_rtx (Pmode);
3541 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3542 }
3543
3544 reg = gen_reg_rtx (Pmode);
3545 emit_insn (gen_rtx_SET (reg, gen_rtx_PLUS (Pmode, retval, tp)));
3546
3547 dest = gen_tlsmoff (addr, reg);
3548
3549 /*
3550 dest = gen_reg_rtx (Pmode);
3551 emit_insn (gen_tlsoff_hilo (dest, addr,
3552 GEN_INT (R_FRV_TLSMOFFHI)));
3553 dest = gen_rtx_PLUS (Pmode, dest, reg);
3554 */
3555 break;
3556 }
3557 case TLS_MODEL_LOCAL_EXEC:
3558 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3559 break;
3560 case TLS_MODEL_GLOBAL_DYNAMIC:
3561 {
3562 rtx retval;
3563
3564 if (TARGET_INLINE_PLT)
3565 retval = gen_inlined_tls_plt (addr);
3566 else
3567 {
3568 /* call #gettlsoff(x) */
3569 retval = gen_reg_rtx (Pmode);
3570 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3571 }
3572 dest = gen_rtx_PLUS (Pmode, retval, tp);
3573 break;
3574 }
3575 default:
3576 gcc_unreachable ();
3577 }
3578
3579 return dest;
3580 }
3581
3582 rtx
3583 frv_legitimize_address (rtx x,
3584 rtx oldx ATTRIBUTE_UNUSED,
3585 machine_mode mode ATTRIBUTE_UNUSED)
3586 {
3587 if (GET_CODE (x) == SYMBOL_REF)
3588 {
3589 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3590 if (model != 0)
3591 return frv_legitimize_tls_address (x, model);
3592 }
3593
3594 return x;
3595 }
3596 \f
3597 /* Test whether a local function descriptor is canonical, i.e.,
3598 whether we can use FUNCDESC_GOTOFF to compute the address of the
3599 function. */
3600
3601 static bool
3602 frv_local_funcdesc_p (rtx fnx)
3603 {
3604 tree fn;
3605 enum symbol_visibility vis;
3606 bool ret;
3607
3608 if (! SYMBOL_REF_LOCAL_P (fnx))
3609 return FALSE;
3610
3611 fn = SYMBOL_REF_DECL (fnx);
3612
3613 if (! fn)
3614 return FALSE;
3615
3616 vis = DECL_VISIBILITY (fn);
3617
3618 if (vis == VISIBILITY_PROTECTED)
3619 /* Private function descriptors for protected functions are not
3620 canonical. Temporarily change the visibility to global. */
3621 vis = VISIBILITY_DEFAULT;
3622 else if (flag_shlib)
3623 /* If we're already compiling for a shared library (that, unlike
3624 executables, can't assume that the existence of a definition
3625 implies local binding), we can skip the re-testing. */
3626 return TRUE;
3627
3628 ret = default_binds_local_p_1 (fn, flag_pic);
3629
3630 DECL_VISIBILITY (fn) = vis;
3631
3632 return ret;
3633 }
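
/* Illustrative note (added for clarity, not in the original source): for a
   file-static function compiled with -fPIC, frv_local_funcdesc_p typically
   returns true, so frv_emit_movsi below can address its descriptor with a
   #gotofffuncdesc12 relocation instead of allocating a GOT entry via
   #gotfuncdesc12.  */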
3634
3635 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3636 register. */
3637
3638 rtx
3639 frv_gen_GPsym2reg (rtx dest, rtx src)
3640 {
3641 tree gp = get_identifier ("_gp");
3642 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3643
3644 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3645 }
3646
3647 static const char *
3648 unspec_got_name (int i)
3649 {
3650 switch (i)
3651 {
3652 case R_FRV_GOT12: return "got12";
3653 case R_FRV_GOTHI: return "gothi";
3654 case R_FRV_GOTLO: return "gotlo";
3655 case R_FRV_FUNCDESC: return "funcdesc";
3656 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3657 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3658 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3659 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3660 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3661 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3662 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3663 case R_FRV_GOTOFF12: return "gotoff12";
3664 case R_FRV_GOTOFFHI: return "gotoffhi";
3665 case R_FRV_GOTOFFLO: return "gotofflo";
3666 case R_FRV_GPREL12: return "gprel12";
3667 case R_FRV_GPRELHI: return "gprelhi";
3668 case R_FRV_GPRELLO: return "gprello";
3669 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3670 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3671 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3672 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3673 case R_FRV_TLSMOFF12: return "tlsmoff12";
3674 case R_FRV_TLSDESCHI: return "tlsdeschi";
3675 case R_FRV_TLSDESCLO: return "tlsdesclo";
3676 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3677 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3678 default: gcc_unreachable ();
3679 }
3680 }
3681
3682 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3683 is added inside the relocation operator. */
3684
3685 static void
3686 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3687 {
3688 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3689 output_addr_const (stream, plus_constant (Pmode, unspec->symbol,
3690 unspec->offset));
3691 fputs (")", stream);
3692 }
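
/* Illustrative example (added for clarity, not in the original source):
   for an unspec with reloc == R_FRV_GOTOFF12, symbol `foo' and offset 4,
   the routine above prints the operand as "#gotoff12(foo+4)".  */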
3693
3694 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3695 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3696 otherwise return ORIG_X. */
3697
3698 rtx
3699 frv_find_base_term (rtx x)
3700 {
3701 struct frv_unspec unspec;
3702
3703 if (frv_const_unspec_p (x, &unspec)
3704 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3705 return plus_constant (Pmode, unspec.symbol, unspec.offset);
3706
3707 return x;
3708 }
3709
3710 /* Return 1 if operand is a valid FRV address. CONDEXEC_P is true if
3711 the operand is used by a predicated instruction. */
3712
3713 int
3714 frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p)
3715 {
3716 return ((GET_MODE (op) == mode || mode == VOIDmode)
3717 && GET_CODE (op) == MEM
3718 && frv_legitimate_address_p_1 (mode, XEXP (op, 0),
3719 reload_completed, condexec_p, FALSE));
3720 }
3721
3722 void
3723 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3724 {
3725 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3726 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3727 rtx c, rvrtx=0;
3728 rtx addr;
3729
3730 if (ret_value)
3731 {
3732 rvrtx = operands[0];
3733 operands ++;
3734 }
3735
3736 addr = XEXP (operands[0], 0);
3737
3738 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3739 any calls that would involve a PLT, but can't tell, since we
3740 don't know whether an extern function is going to be provided by
3741 a separate translation unit or imported from a separate module.
3742 When compiling for shared libraries, if the function has default
3743 visibility, we assume it's overridable, so we inline the PLT, but
3744 for executables, we don't really have a way to make a good
3745 decision: a function is as likely to be imported from a shared
3746 library as it is to be defined in the executable itself. We
3747 assume executables will get global functions defined locally,
3748 whereas shared libraries will have them potentially overridden,
3749 so we only inline PLTs when compiling for shared libraries.
3750
3751 In order to mark a function as local to a shared library, any
3752 non-default visibility attribute suffices. Unfortunately,
3753 there's no simple way to tag a function declaration as ``in a
3754 different module'', which we could then use to trigger PLT
3755 inlining on executables. There's -minline-plt, but it affects
3756 all external functions, so one would have to also mark function
3757 declarations available in the same module with non-default
3758 visibility, which is advantageous in itself. */
3759 if (GET_CODE (addr) == SYMBOL_REF
3760 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3761 || sibcall))
3762 {
3763 rtx x, dest;
3764 dest = gen_reg_rtx (SImode);
3765 if (flag_pic != 1)
3766 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3767 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3768 else
3769 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3770 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3771 emit_insn (x);
3772 crtl->uses_pic_offset_table = TRUE;
3773 addr = dest;
3774 }
3775 else if (GET_CODE (addr) == SYMBOL_REF)
3776 {
3777 /* These are always either local, or handled through a local
3778 PLT. */
3779 if (ret_value)
3780 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3781 operands[2], picreg, lr);
3782 else
3783 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3784 emit_call_insn (c);
3785 return;
3786 }
3787 else if (! ldd_address_operand (addr, Pmode))
3788 addr = force_reg (Pmode, addr);
3789
3790 picreg = gen_reg_rtx (DImode);
3791 emit_insn (gen_movdi_ldd (picreg, addr));
3792
3793 if (sibcall && ret_value)
3794 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3795 else if (sibcall)
3796 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3797 else if (ret_value)
3798 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3799 else
3800 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3801 emit_call_insn (c);
3802 }
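
/* Summary (added for clarity, not in the original source).  The expander
   above chooses between three strategies:
     1. inline the PLT: load the function-descriptor address through a
        #gotofffuncdesc12 relocation (or its hi/lo pair), then fall
        through to the indirect-call path;
     2. a direct call on a SYMBOL_REF that is local or handled through a
        local PLT (gen_call_fdpicsi);
     3. an indirect call: ldd the two-word function descriptor into a
        register pair and call through it (gen_call_fdpicdi).  */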
3803 \f
3804 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3805 process these separately from any offsets, such that we add any
3806 offsets to the function descriptor (the actual pointer), not to the
3807 function address. */
3808
3809 static bool
3810 frv_function_symbol_referenced_p (rtx x)
3811 {
3812 const char *format;
3813 int length;
3814 int j;
3815
3816 if (GET_CODE (x) == SYMBOL_REF)
3817 return SYMBOL_REF_FUNCTION_P (x);
3818
3819 length = GET_RTX_LENGTH (GET_CODE (x));
3820 format = GET_RTX_FORMAT (GET_CODE (x));
3821
3822 for (j = 0; j < length; ++j)
3823 {
3824 switch (format[j])
3825 {
3826 case 'e':
3827 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3828 return TRUE;
3829 break;
3830
3831 case 'V':
3832 case 'E':
3833 if (XVEC (x, j) != 0)
3834 {
3835 int k;
3836 for (k = 0; k < XVECLEN (x, j); ++k)
3837 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3838 return TRUE;
3839 }
3840 break;
3841
3842 default:
3843 /* Nothing to do. */
3844 break;
3845 }
3846 }
3847
3848 return FALSE;
3849 }
3850
3851 /* Return true if the memory operand is one that can be conditionally
3852 executed. */
3853
3854 int
3855 condexec_memory_operand (rtx op, machine_mode mode)
3856 {
3857 machine_mode op_mode = GET_MODE (op);
3858 rtx addr;
3859
3860 if (mode != VOIDmode && op_mode != mode)
3861 return FALSE;
3862
3863 switch (op_mode)
3864 {
3865 default:
3866 return FALSE;
3867
3868 case E_QImode:
3869 case E_HImode:
3870 case E_SImode:
3871 case E_SFmode:
3872 break;
3873 }
3874
3875 if (GET_CODE (op) != MEM)
3876 return FALSE;
3877
3878 addr = XEXP (op, 0);
3879 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE);
3880 }
3881 \f
3882 /* Return true if the bare return instruction can be used outside of the
3883 epilogue code. For frv, we only do it if there was no stack allocation. */
3884
3885 int
3886 direct_return_p (void)
3887 {
3888 frv_stack_t *info;
3889
3890 if (!reload_completed)
3891 return FALSE;
3892
3893 info = frv_stack_info ();
3894 return (info->total_size == 0);
3895 }
3896
3897 \f
3898 void
3899 frv_emit_move (machine_mode mode, rtx dest, rtx src)
3900 {
3901 if (GET_CODE (src) == SYMBOL_REF)
3902 {
3903 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3904 if (model != 0)
3905 src = frv_legitimize_tls_address (src, model);
3906 }
3907
3908 switch (mode)
3909 {
3910 case E_SImode:
3911 if (frv_emit_movsi (dest, src))
3912 return;
3913 break;
3914
3915 case E_QImode:
3916 case E_HImode:
3917 case E_DImode:
3918 case E_SFmode:
3919 case E_DFmode:
3920 if (!reload_in_progress
3921 && !reload_completed
3922 && !register_operand (dest, mode)
3923 && !reg_or_0_operand (src, mode))
3924 src = copy_to_mode_reg (mode, src);
3925 break;
3926
3927 default:
3928 gcc_unreachable ();
3929 }
3930
3931 emit_insn (gen_rtx_SET (dest, src));
3932 }
3933
3934 /* Emit code to handle a MOVSI, adding in the small data register or pic
3935 register if needed to load up addresses. Return TRUE if the appropriate
3936 instructions are emitted. */
3937
3938 int
3939 frv_emit_movsi (rtx dest, rtx src)
3940 {
3941 int base_regno = -1;
3942 int unspec = 0;
3943 rtx sym = src;
3944 struct frv_unspec old_unspec;
3945
3946 if (!reload_in_progress
3947 && !reload_completed
3948 && !register_operand (dest, SImode)
3949 && (!reg_or_0_operand (src, SImode)
3950 /* Virtual registers will almost always be replaced by an
3951 add instruction, so expose this to CSE by copying to
3952 an intermediate register. */
3953 || (GET_CODE (src) == REG
3954 && IN_RANGE (REGNO (src),
3955 FIRST_VIRTUAL_REGISTER,
3956 LAST_VIRTUAL_POINTER_REGISTER))))
3957 {
3958 emit_insn (gen_rtx_SET (dest, copy_to_mode_reg (SImode, src)));
3959 return TRUE;
3960 }
3961
3962 /* Explicitly add in the PIC or small data register if needed. */
3963 switch (GET_CODE (src))
3964 {
3965 default:
3966 break;
3967
3968 case LABEL_REF:
3969 handle_label:
3970 if (TARGET_FDPIC)
3971 {
3972 /* Using GPREL12, we use a single GOT entry for all symbols
3973 in read-only sections, but trade sequences such as:
3974
3975 sethi #gothi(label), gr#
3976 setlo #gotlo(label), gr#
3977 ld @(gr15,gr#), gr#
3978
3979 for
3980
3981 ld @(gr15,#got12(_gp)), gr#
3982 sethi #gprelhi(label), gr##
3983 setlo #gprello(label), gr##
3984 add gr#, gr##, gr##
3985
3986 We may often be able to share gr# for multiple
3987 computations of GPREL addresses, and we may often fold
3988 the final add into the pair of registers of a load or
3989 store instruction, so it's often profitable. Even when
3990 optimizing for size, we're trading a GOT entry for an
3991 additional instruction, which trades GOT space
3992 (read-write) for code size (read-only, shareable), as
3993 long as the symbol is not used in more than two different
3994 locations.
3995
3996 With -fpie/-fpic, we'd be trading a single load for a
3997 sequence of 4 instructions, because the offset of the
3998 label can't be assumed to be addressable with 12 bits, so
3999 we don't do this. */
4000 if (TARGET_GPREL_RO)
4001 unspec = R_FRV_GPREL12;
4002 else
4003 unspec = R_FRV_GOT12;
4004 }
4005 else if (flag_pic)
4006 base_regno = PIC_REGNO;
4007
4008 break;
4009
4010 case CONST:
4011 if (frv_const_unspec_p (src, &old_unspec))
4012 break;
4013
4014 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4015 {
4016 handle_whatever:
4017 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4018 emit_move_insn (dest, src);
4019 return TRUE;
4020 }
4021 else
4022 {
4023 sym = XEXP (sym, 0);
4024 if (GET_CODE (sym) == PLUS
4025 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4026 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4027 sym = XEXP (sym, 0);
4028 if (GET_CODE (sym) == SYMBOL_REF)
4029 goto handle_sym;
4030 else if (GET_CODE (sym) == LABEL_REF)
4031 goto handle_label;
4032 else
4033 goto handle_whatever;
4034 }
4035 break;
4036
4037 case SYMBOL_REF:
4038 handle_sym:
4039 if (TARGET_FDPIC)
4040 {
4041 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4042
4043 if (model != 0)
4044 {
4045 src = frv_legitimize_tls_address (src, model);
4046 emit_move_insn (dest, src);
4047 return TRUE;
4048 }
4049
4050 if (SYMBOL_REF_FUNCTION_P (sym))
4051 {
4052 if (frv_local_funcdesc_p (sym))
4053 unspec = R_FRV_FUNCDESC_GOTOFF12;
4054 else
4055 unspec = R_FRV_FUNCDESC_GOT12;
4056 }
4057 else
4058 {
4059 if (CONSTANT_POOL_ADDRESS_P (sym))
4060 switch (GET_CODE (get_pool_constant (sym)))
4061 {
4062 case CONST:
4063 case SYMBOL_REF:
4064 case LABEL_REF:
4065 if (flag_pic)
4066 {
4067 unspec = R_FRV_GOTOFF12;
4068 break;
4069 }
4070 /* Fall through. */
4071 default:
4072 if (TARGET_GPREL_RO)
4073 unspec = R_FRV_GPREL12;
4074 else
4075 unspec = R_FRV_GOT12;
4076 break;
4077 }
4078 else if (SYMBOL_REF_LOCAL_P (sym)
4079 && !SYMBOL_REF_EXTERNAL_P (sym)
4080 && SYMBOL_REF_DECL (sym)
4081 && (!DECL_P (SYMBOL_REF_DECL (sym))
4082 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4083 {
4084 tree decl = SYMBOL_REF_DECL (sym);
4085 tree init = TREE_CODE (decl) == VAR_DECL
4086 ? DECL_INITIAL (decl)
4087 : TREE_CODE (decl) == CONSTRUCTOR
4088 ? decl : 0;
4089 int reloc = 0;
4090 bool named_section, readonly;
4091
4092 if (init && init != error_mark_node)
4093 reloc = compute_reloc_for_constant (init);
4094
4095 named_section = TREE_CODE (decl) == VAR_DECL
4096 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4097 readonly = decl_readonly_section (decl, reloc);
4098
4099 if (named_section)
4100 unspec = R_FRV_GOT12;
4101 else if (!readonly)
4102 unspec = R_FRV_GOTOFF12;
4103 else if (readonly && TARGET_GPREL_RO)
4104 unspec = R_FRV_GPREL12;
4105 else
4106 unspec = R_FRV_GOT12;
4107 }
4108 else
4109 unspec = R_FRV_GOT12;
4110 }
4111 }
4112
4113 else if (SYMBOL_REF_SMALL_P (sym))
4114 base_regno = SDA_BASE_REG;
4115
4116 else if (flag_pic)
4117 base_regno = PIC_REGNO;
4118
4119 break;
4120 }
4121
4122 if (base_regno >= 0)
4123 {
4124 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4125 emit_insn (gen_symGOTOFF2reg (dest, src,
4126 gen_rtx_REG (Pmode, base_regno),
4127 GEN_INT (R_FRV_GPREL12)));
4128 else
4129 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4130 gen_rtx_REG (Pmode, base_regno),
4131 GEN_INT (R_FRV_GPREL12)));
4132 if (base_regno == PIC_REGNO)
4133 crtl->uses_pic_offset_table = TRUE;
4134 return TRUE;
4135 }
4136
4137 if (unspec)
4138 {
4139 rtx x;
4140
4141 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4142 new uses of it once reload has begun. */
4143 gcc_assert (!reload_in_progress && !reload_completed);
4144
4145 switch (unspec)
4146 {
4147 case R_FRV_GOTOFF12:
4148 if (!frv_small_data_reloc_p (sym, unspec))
4149 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4150 GEN_INT (unspec));
4151 else
4152 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4153 break;
4154 case R_FRV_GPREL12:
4155 if (!frv_small_data_reloc_p (sym, unspec))
4156 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4157 GEN_INT (unspec));
4158 else
4159 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4160 break;
4161 case R_FRV_FUNCDESC_GOTOFF12:
4162 if (flag_pic != 1)
4163 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4164 GEN_INT (unspec));
4165 else
4166 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4167 break;
4168 default:
4169 if (flag_pic != 1)
4170 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4171 GEN_INT (unspec));
4172 else
4173 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4174 break;
4175 }
4176 emit_insn (x);
4177 crtl->uses_pic_offset_table = TRUE;
4178 return TRUE;
4179 }
4180
4181
4182 return FALSE;
4183 }
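
/* Summary (added for clarity, not in the original source).  For FDPIC, the
   relocation chosen above is roughly:
     local function descriptor            -> R_FRV_FUNCDESC_GOTOFF12
     other function symbol                -> R_FRV_FUNCDESC_GOT12
     local, writable data                 -> R_FRV_GOTOFF12
     read-only data with -mgprel-ro       -> R_FRV_GPREL12
     anything else (named sections, etc.) -> R_FRV_GOT12
   with the hi/lo relocation pair used instead of the 12-bit form when
   flag_pic != 1 or the symbol cannot use a small-data relocation.  */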
4184
4185 \f
4186 /* Return a string to output a single word move. */
4187
4188 const char *
4189 output_move_single (rtx operands[], rtx insn)
4190 {
4191 rtx dest = operands[0];
4192 rtx src = operands[1];
4193
4194 if (GET_CODE (dest) == REG)
4195 {
4196 int dest_regno = REGNO (dest);
4197 machine_mode mode = GET_MODE (dest);
4198
4199 if (GPR_P (dest_regno))
4200 {
4201 if (GET_CODE (src) == REG)
4202 {
4203 /* gpr <- some sort of register */
4204 int src_regno = REGNO (src);
4205
4206 if (GPR_P (src_regno))
4207 return "mov %1, %0";
4208
4209 else if (FPR_P (src_regno))
4210 return "movfg %1, %0";
4211
4212 else if (SPR_P (src_regno))
4213 return "movsg %1, %0";
4214 }
4215
4216 else if (GET_CODE (src) == MEM)
4217 {
4218 /* gpr <- memory */
4219 switch (mode)
4220 {
4221 default:
4222 break;
4223
4224 case E_QImode:
4225 return "ldsb%I1%U1 %M1,%0";
4226
4227 case E_HImode:
4228 return "ldsh%I1%U1 %M1,%0";
4229
4230 case E_SImode:
4231 case E_SFmode:
4232 return "ld%I1%U1 %M1, %0";
4233 }
4234 }
4235
4236 else if (GET_CODE (src) == CONST_INT
4237 || GET_CODE (src) == CONST_DOUBLE)
4238 {
4239 /* gpr <- integer/floating constant */
4240 HOST_WIDE_INT value;
4241
4242 if (GET_CODE (src) == CONST_INT)
4243 value = INTVAL (src);
4244
4245 else if (mode == SFmode)
4246 {
4247 long l;
4248
4249 REAL_VALUE_TO_TARGET_SINGLE
4250 (*CONST_DOUBLE_REAL_VALUE (src), l);
4251 value = l;
4252 }
4253
4254 else
4255 value = CONST_DOUBLE_LOW (src);
4256
4257 if (IN_RANGE (value, -32768, 32767))
4258 return "setlos %1, %0";
4259
4260 return "#";
4261 }
4262
4263 else if (GET_CODE (src) == SYMBOL_REF
4264 || GET_CODE (src) == LABEL_REF
4265 || GET_CODE (src) == CONST)
4266 {
4267 return "#";
4268 }
4269 }
4270
4271 else if (FPR_P (dest_regno))
4272 {
4273 if (GET_CODE (src) == REG)
4274 {
4275 /* fpr <- some sort of register */
4276 int src_regno = REGNO (src);
4277
4278 if (GPR_P (src_regno))
4279 return "movgf %1, %0";
4280
4281 else if (FPR_P (src_regno))
4282 {
4283 if (TARGET_HARD_FLOAT)
4284 return "fmovs %1, %0";
4285 else
4286 return "mor %1, %1, %0";
4287 }
4288 }
4289
4290 else if (GET_CODE (src) == MEM)
4291 {
4292 /* fpr <- memory */
4293 switch (mode)
4294 {
4295 default:
4296 break;
4297
4298 case E_QImode:
4299 return "ldbf%I1%U1 %M1,%0";
4300
4301 case E_HImode:
4302 return "ldhf%I1%U1 %M1,%0";
4303
4304 case E_SImode:
4305 case E_SFmode:
4306 return "ldf%I1%U1 %M1, %0";
4307 }
4308 }
4309
4310 else if (ZERO_P (src))
4311 return "movgf %., %0";
4312 }
4313
4314 else if (SPR_P (dest_regno))
4315 {
4316 if (GET_CODE (src) == REG)
4317 {
4318 /* spr <- some sort of register */
4319 int src_regno = REGNO (src);
4320
4321 if (GPR_P (src_regno))
4322 return "movgs %1, %0";
4323 }
4324 else if (ZERO_P (src))
4325 return "movgs %., %0";
4326 }
4327 }
4328
4329 else if (GET_CODE (dest) == MEM)
4330 {
4331 if (GET_CODE (src) == REG)
4332 {
4333 int src_regno = REGNO (src);
4334 machine_mode mode = GET_MODE (dest);
4335
4336 if (GPR_P (src_regno))
4337 {
4338 switch (mode)
4339 {
4340 default:
4341 break;
4342
4343 case E_QImode:
4344 return "stb%I0%U0 %1, %M0";
4345
4346 case E_HImode:
4347 return "sth%I0%U0 %1, %M0";
4348
4349 case E_SImode:
4350 case E_SFmode:
4351 return "st%I0%U0 %1, %M0";
4352 }
4353 }
4354
4355 else if (FPR_P (src_regno))
4356 {
4357 switch (mode)
4358 {
4359 default:
4360 break;
4361
4362 case E_QImode:
4363 return "stbf%I0%U0 %1, %M0";
4364
4365 case E_HImode:
4366 return "sthf%I0%U0 %1, %M0";
4367
4368 case E_SImode:
4369 case E_SFmode:
4370 return "stf%I0%U0 %1, %M0";
4371 }
4372 }
4373 }
4374
4375 else if (ZERO_P (src))
4376 {
4377 switch (GET_MODE (dest))
4378 {
4379 default:
4380 break;
4381
4382 case E_QImode:
4383 return "stb%I0%U0 %., %M0";
4384
4385 case E_HImode:
4386 return "sth%I0%U0 %., %M0";
4387
4388 case E_SImode:
4389 case E_SFmode:
4390 return "st%I0%U0 %., %M0";
4391 }
4392 }
4393 }
4394
4395 fatal_insn ("bad output_move_single operand", insn);
4396 return "";
4397 }
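
/* Illustrative examples (added for clarity, not in the original source) of
   the template selection above:
     (set (reg:SI gr5) (reg:SI gr6))    -> "mov %1, %0"
     (set (reg:SI gr5) (const_int 100)) -> "setlos %1, %0"
     (set (reg:SI gr5) (mem:SI ...))    -> "ld%I1%U1 %M1, %0"
     (set (mem:QI ...) (reg:QI gr5))    -> "stb%I0%U0 %1, %M0"
   Constants outside the signed 16-bit setlos range return "#" and are
   split later.  */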
4398
4399 \f
4400 /* Return a string to output a double word move. */
4401
4402 const char *
4403 output_move_double (rtx operands[], rtx insn)
4404 {
4405 rtx dest = operands[0];
4406 rtx src = operands[1];
4407 machine_mode mode = GET_MODE (dest);
4408
4409 if (GET_CODE (dest) == REG)
4410 {
4411 int dest_regno = REGNO (dest);
4412
4413 if (GPR_P (dest_regno))
4414 {
4415 if (GET_CODE (src) == REG)
4416 {
4417 /* gpr <- some sort of register */
4418 int src_regno = REGNO (src);
4419
4420 if (GPR_P (src_regno))
4421 return "#";
4422
4423 else if (FPR_P (src_regno))
4424 {
4425 if (((dest_regno - GPR_FIRST) & 1) == 0
4426 && ((src_regno - FPR_FIRST) & 1) == 0)
4427 return "movfgd %1, %0";
4428
4429 return "#";
4430 }
4431 }
4432
4433 else if (GET_CODE (src) == MEM)
4434 {
4435 /* gpr <- memory */
4436 if (dbl_memory_one_insn_operand (src, mode))
4437 return "ldd%I1%U1 %M1, %0";
4438
4439 return "#";
4440 }
4441
4442 else if (GET_CODE (src) == CONST_INT
4443 || GET_CODE (src) == CONST_DOUBLE)
4444 return "#";
4445 }
4446
4447 else if (FPR_P (dest_regno))
4448 {
4449 if (GET_CODE (src) == REG)
4450 {
4451 /* fpr <- some sort of register */
4452 int src_regno = REGNO (src);
4453
4454 if (GPR_P (src_regno))
4455 {
4456 if (((dest_regno - FPR_FIRST) & 1) == 0
4457 && ((src_regno - GPR_FIRST) & 1) == 0)
4458 return "movgfd %1, %0";
4459
4460 return "#";
4461 }
4462
4463 else if (FPR_P (src_regno))
4464 {
4465 if (TARGET_DOUBLE
4466 && ((dest_regno - FPR_FIRST) & 1) == 0
4467 && ((src_regno - FPR_FIRST) & 1) == 0)
4468 return "fmovd %1, %0";
4469
4470 return "#";
4471 }
4472 }
4473
4474 else if (GET_CODE (src) == MEM)
4475 {
4476 /* fpr <- memory */
4477 if (dbl_memory_one_insn_operand (src, mode))
4478 return "lddf%I1%U1 %M1, %0";
4479
4480 return "#";
4481 }
4482
4483 else if (ZERO_P (src))
4484 return "#";
4485 }
4486 }
4487
4488 else if (GET_CODE (dest) == MEM)
4489 {
4490 if (GET_CODE (src) == REG)
4491 {
4492 int src_regno = REGNO (src);
4493
4494 if (GPR_P (src_regno))
4495 {
4496 if (((src_regno - GPR_FIRST) & 1) == 0
4497 && dbl_memory_one_insn_operand (dest, mode))
4498 return "std%I0%U0 %1, %M0";
4499
4500 return "#";
4501 }
4502
4503 if (FPR_P (src_regno))
4504 {
4505 if (((src_regno - FPR_FIRST) & 1) == 0
4506 && dbl_memory_one_insn_operand (dest, mode))
4507 return "stdf%I0%U0 %1, %M0";
4508
4509 return "#";
4510 }
4511 }
4512
4513 else if (ZERO_P (src))
4514 {
4515 if (dbl_memory_one_insn_operand (dest, mode))
4516 return "std%I0%U0 %., %M0";
4517
4518 return "#";
4519 }
4520 }
4521
4522 fatal_insn ("bad output_move_double operand", insn);
4523 return "";
4524 }
4525
4526 \f
4527 /* Return a string to output a single word conditional move.
4528 Operand0 -- EQ/NE of ccr register and 0
4529 Operand1 -- CCR register
4530 Operand2 -- destination
4531 Operand3 -- source */
4532
4533 const char *
4534 output_condmove_single (rtx operands[], rtx insn)
4535 {
4536 rtx dest = operands[2];
4537 rtx src = operands[3];
4538
4539 if (GET_CODE (dest) == REG)
4540 {
4541 int dest_regno = REGNO (dest);
4542 machine_mode mode = GET_MODE (dest);
4543
4544 if (GPR_P (dest_regno))
4545 {
4546 if (GET_CODE (src) == REG)
4547 {
4548 /* gpr <- some sort of register */
4549 int src_regno = REGNO (src);
4550
4551 if (GPR_P (src_regno))
4552 return "cmov %z3, %2, %1, %e0";
4553
4554 else if (FPR_P (src_regno))
4555 return "cmovfg %3, %2, %1, %e0";
4556 }
4557
4558 else if (GET_CODE (src) == MEM)
4559 {
4560 /* gpr <- memory */
4561 switch (mode)
4562 {
4563 default:
4564 break;
4565
4566 case E_QImode:
4567 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4568
4569 case E_HImode:
4570 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4571
4572 case E_SImode:
4573 case E_SFmode:
4574 return "cld%I3%U3 %M3, %2, %1, %e0";
4575 }
4576 }
4577
4578 else if (ZERO_P (src))
4579 return "cmov %., %2, %1, %e0";
4580 }
4581
4582 else if (FPR_P (dest_regno))
4583 {
4584 if (GET_CODE (src) == REG)
4585 {
4586 /* fpr <- some sort of register */
4587 int src_regno = REGNO (src);
4588
4589 if (GPR_P (src_regno))
4590 return "cmovgf %3, %2, %1, %e0";
4591
4592 else if (FPR_P (src_regno))
4593 {
4594 if (TARGET_HARD_FLOAT)
4595 return "cfmovs %3,%2,%1,%e0";
4596 else
4597 return "cmor %3, %3, %2, %1, %e0";
4598 }
4599 }
4600
4601 else if (GET_CODE (src) == MEM)
4602 {
4603 /* fpr <- memory */
4604 if (mode == SImode || mode == SFmode)
4605 return "cldf%I3%U3 %M3, %2, %1, %e0";
4606 }
4607
4608 else if (ZERO_P (src))
4609 return "cmovgf %., %2, %1, %e0";
4610 }
4611 }
4612
4613 else if (GET_CODE (dest) == MEM)
4614 {
4615 if (GET_CODE (src) == REG)
4616 {
4617 int src_regno = REGNO (src);
4618 machine_mode mode = GET_MODE (dest);
4619
4620 if (GPR_P (src_regno))
4621 {
4622 switch (mode)
4623 {
4624 default:
4625 break;
4626
4627 case E_QImode:
4628 return "cstb%I2%U2 %3, %M2, %1, %e0";
4629
4630 case E_HImode:
4631 return "csth%I2%U2 %3, %M2, %1, %e0";
4632
4633 case E_SImode:
4634 case E_SFmode:
4635 return "cst%I2%U2 %3, %M2, %1, %e0";
4636 }
4637 }
4638
4639 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4640 return "cstf%I2%U2 %3, %M2, %1, %e0";
4641 }
4642
4643 else if (ZERO_P (src))
4644 {
4645 machine_mode mode = GET_MODE (dest);
4646 switch (mode)
4647 {
4648 default:
4649 break;
4650
4651 case E_QImode:
4652 return "cstb%I2%U2 %., %M2, %1, %e0";
4653
4654 case E_HImode:
4655 return "csth%I2%U2 %., %M2, %1, %e0";
4656
4657 case E_SImode:
4658 case E_SFmode:
4659 return "cst%I2%U2 %., %M2, %1, %e0";
4660 }
4661 }
4662 }
4663
4664 fatal_insn ("bad output_condmove_single operand", insn);
4665 return "";
4666 }
4667
4668 \f
4669 /* Emit the appropriate code to do a comparison, returning the register the
4670 comparison was done in. */
4671
4672 static rtx
4673 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4674 {
4675 machine_mode cc_mode;
4676 rtx cc_reg;
4677
4678 /* Floating point doesn't have comparison against a constant. */
4679 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4680 op1 = force_reg (GET_MODE (op0), op1);
4681
4682 /* Possibly disable using anything but a fixed register in order to work
4683 around cse moving comparisons past function calls. */
4684 cc_mode = SELECT_CC_MODE (test, op0, op1);
4685 cc_reg = ((TARGET_ALLOC_CC)
4686 ? gen_reg_rtx (cc_mode)
4687 : gen_rtx_REG (cc_mode,
4688 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4689
4690 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (cc_mode, op0, op1)));
4691
4692 return cc_reg;
4693 }
4694
4695 \f
4696 /* Emit code for a conditional branch.
4697 XXX: I originally wanted to add a clobber of a CCR register to use in
4698 conditional execution, but that confuses the rest of the compiler. */
4699
4700 int
4701 frv_emit_cond_branch (rtx operands[])
4702 {
4703 rtx test_rtx;
4704 rtx label_ref;
4705 rtx if_else;
4706 enum rtx_code test = GET_CODE (operands[0]);
4707 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
4708 machine_mode cc_mode = GET_MODE (cc_reg);
4709
4710 /* Branches generate:
4711 (set (pc)
4712 (if_then_else (<test>, <cc_reg>, (const_int 0))
4713 (label_ref <branch_label>)
4714 (pc))) */
4715 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
4716 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4717 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4718 emit_jump_insn (gen_rtx_SET (pc_rtx, if_else));
4719 return TRUE;
4720 }
4721
4722 \f
4723 /* Emit code to set a gpr to 1/0 based on a comparison. */
4724
4725 int
4726 frv_emit_scc (rtx operands[])
4727 {
4728 rtx set;
4729 rtx test_rtx;
4730 rtx clobber;
4731 rtx cr_reg;
4732 enum rtx_code test = GET_CODE (operands[1]);
4733 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]);
4734
4735 /* SCC instructions generate:
4736 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
4737 (clobber (<ccr_reg>))]) */
4738 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4739 set = gen_rtx_SET (operands[0], test_rtx);
4740
4741 cr_reg = ((TARGET_ALLOC_CC)
4742 ? gen_reg_rtx (CC_CCRmode)
4743 : gen_rtx_REG (CC_CCRmode,
4744 ((GET_MODE (cc_reg) == CC_FPmode)
4745 ? FCR_FIRST
4746 : ICR_FIRST)));
4747
4748 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4749 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4750 return TRUE;
4751 }
4752
4753 \f
4754 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4755 the separate insns. */
4756
4757 rtx
4758 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4759 {
4760 rtx ret;
4761
4762 start_sequence ();
4763
4764 /* Set the appropriate CCR bit. */
4765 emit_insn (gen_rtx_SET (cr_reg,
4766 gen_rtx_fmt_ee (GET_CODE (test),
4767 GET_MODE (cr_reg),
4768 cc_reg,
4769 const0_rtx)));
4770
4771 /* Move the value into the destination. */
4772 emit_move_insn (dest, GEN_INT (value));
4773
4774 /* Move 0 into the destination if the test failed. */
4775 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4776 gen_rtx_EQ (GET_MODE (cr_reg),
4777 cr_reg,
4778 const0_rtx),
4779 gen_rtx_SET (dest, const0_rtx)));
4780
4781 /* Finish up, return sequence. */
4782 ret = get_insns ();
4783 end_sequence ();
4784 return ret;
4785 }
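
/* Illustrative expansion (added for clarity, not in the original source):
   splitting (set gr4 (eq icc0 0)) with VALUE == 1 yields roughly
        cr  <- (eq icc0 0)
        gr4 <- 1
        if (cr == 0) gr4 <- 0
   i.e. a CCR set, an unconditional load of VALUE, and a conditional clear
   when the test fails.  */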
4786
4787 \f
4788 /* Emit the code for a conditional move, return TRUE if we could do the
4789 move. */
4790
4791 int
4792 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4793 {
4794 rtx set;
4795 rtx clobber_cc;
4796 rtx test2;
4797 rtx cr_reg;
4798 rtx if_rtx;
4799 enum rtx_code test = GET_CODE (test_rtx);
4800 rtx cc_reg = frv_emit_comparison (test,
4801 XEXP (test_rtx, 0), XEXP (test_rtx, 1));
4802 machine_mode cc_mode = GET_MODE (cc_reg);
4803
4804 /* Conditional move instructions generate:
4805 (parallel [(set <target>
4806 (if_then_else (<test> <cc_reg> (const_int 0))
4807 <src1>
4808 <src2>))
4809 (clobber (<ccr_reg>))]) */
4810
4811 /* Handle various cases of conditional move involving two constants. */
4812 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4813 {
4814 HOST_WIDE_INT value1 = INTVAL (src1);
4815 HOST_WIDE_INT value2 = INTVAL (src2);
4816
4817 /* Having 0 as one of the constants can be done by loading the other
4818 constant, and optionally moving in gr0. */
4819 if (value1 == 0 || value2 == 0)
4820 ;
4821
4822 /* If the first value is within an addi range and also the difference
4823 between the two fits in an addi's range, load up the difference, then
4824 conditionally move in 0, and then unconditionally add the first
4825 value. */
4826 else if (IN_RANGE (value1, -2048, 2047)
4827 && IN_RANGE (value2 - value1, -2048, 2047))
4828 ;
4829
4830 /* If neither condition holds, just force the constant into a
4831 register. */
4832 else
4833 {
4834 src1 = force_reg (GET_MODE (dest), src1);
4835 src2 = force_reg (GET_MODE (dest), src2);
4836 }
4837 }
4838
4839 /* If one value is a register, ensure the other value is either 0 or a
4840 register. */
4841 else
4842 {
4843 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4844 src1 = force_reg (GET_MODE (dest), src1);
4845
4846 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4847 src2 = force_reg (GET_MODE (dest), src2);
4848 }
4849
4850 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4851 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4852
4853 set = gen_rtx_SET (dest, if_rtx);
4854
4855 cr_reg = ((TARGET_ALLOC_CC)
4856 ? gen_reg_rtx (CC_CCRmode)
4857 : gen_rtx_REG (CC_CCRmode,
4858 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4859
4860 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4861 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4862 return TRUE;
4863 }
4864
4865 \f
4866 /* Split a conditional move into constituent parts, returning a SEQUENCE
4867 containing all of the insns. */
4868
4869 rtx
4870 frv_split_cond_move (rtx operands[])
4871 {
4872 rtx dest = operands[0];
4873 rtx test = operands[1];
4874 rtx cc_reg = operands[2];
4875 rtx src1 = operands[3];
4876 rtx src2 = operands[4];
4877 rtx cr_reg = operands[5];
4878 rtx ret;
4879 machine_mode cr_mode = GET_MODE (cr_reg);
4880
4881 start_sequence ();
4882
4883 /* Set the appropriate CCR bit. */
4884 emit_insn (gen_rtx_SET (cr_reg,
4885 gen_rtx_fmt_ee (GET_CODE (test),
4886 GET_MODE (cr_reg),
4887 cc_reg,
4888 const0_rtx)));
4889
4890 /* Handle various cases of conditional move involving two constants. */
4891 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4892 {
4893 HOST_WIDE_INT value1 = INTVAL (src1);
4894 HOST_WIDE_INT value2 = INTVAL (src2);
4895
4896 /* Having 0 as one of the constants can be done by loading the other
4897 constant, and optionally moving in gr0. */
4898 if (value1 == 0)
4899 {
4900 emit_move_insn (dest, src2);
4901 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4902 gen_rtx_NE (cr_mode, cr_reg,
4903 const0_rtx),
4904 gen_rtx_SET (dest, src1)));
4905 }
4906
4907 else if (value2 == 0)
4908 {
4909 emit_move_insn (dest, src1);
4910 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4911 gen_rtx_EQ (cr_mode, cr_reg,
4912 const0_rtx),
4913 gen_rtx_SET (dest, src2)));
4914 }
4915
4916 /* If the first value is within an addi range and also the difference
4917 between the two fits in an addi's range, load up the difference, then
4918 conditionally move in 0, and then unconditionally add the first
4919 value. */
4920 else if (IN_RANGE (value1, -2048, 2047)
4921 && IN_RANGE (value2 - value1, -2048, 2047))
4922 {
4923 rtx dest_si = ((GET_MODE (dest) == SImode)
4924 ? dest
4925 : gen_rtx_SUBREG (SImode, dest, 0));
4926
4927 emit_move_insn (dest_si, GEN_INT (value2 - value1));
4928 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4929 gen_rtx_NE (cr_mode, cr_reg,
4930 const0_rtx),
4931 gen_rtx_SET (dest_si, const0_rtx)));
4932 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
4933 }
4934
4935 else
4936 gcc_unreachable ();
4937 }
4938 else
4939 {
4940 /* Emit the conditional move for the test being true if needed. */
4941 if (! rtx_equal_p (dest, src1))
4942 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4943 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
4944 gen_rtx_SET (dest, src1)));
4945
4946 /* Emit the conditional move for the test being false if needed. */
4947 if (! rtx_equal_p (dest, src2))
4948 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4949 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
4950 gen_rtx_SET (dest, src2)));
4951 }
4952
4953 /* Finish up, return sequence. */
4954 ret = get_insns ();
4955 end_sequence ();
4956 return ret;
4957 }
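
/* Worked example (added for clarity, not in the original source) of the
   two-constant case above: for src1 == 5 and src2 == 9 the split emits
        dest <- 4            ; value2 - value1
        if (test) dest <- 0
        dest <- dest + 5     ; unconditional add of value1
   giving 5 when the test is true and 9 otherwise, using only addi-range
   immediates.  */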
4958
4959 \f
4960 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
4961 memory location that is not known to be dword-aligned. */
4962 void
4963 frv_split_double_load (rtx dest, rtx source)
4964 {
4965 int regno = REGNO (dest);
4966 rtx dest1 = gen_highpart (SImode, dest);
4967 rtx dest2 = gen_lowpart (SImode, dest);
4968 rtx address = XEXP (source, 0);
4969
4970 /* If the address is pre-modified, load the lower-numbered register
4971 first, then load the other register using an integer offset from
4972 the modified base register. This order should always be safe,
4973 since the pre-modification cannot affect the same registers as the
4974 load does.
4975
4976 The situation for other loads is more complicated. Loading one
4977 of the registers could affect the value of ADDRESS, so we must
4978 be careful which order we do them in. */
4979 if (GET_CODE (address) == PRE_MODIFY
4980 || ! refers_to_regno_p (regno, address))
4981 {
4982 /* It is safe to load the lower-numbered register first. */
4983 emit_move_insn (dest1, change_address (source, SImode, NULL));
4984 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4985 }
4986 else
4987 {
4988 /* ADDRESS is not pre-modified and the address depends on the
4989 lower-numbered register. Load the higher-numbered register
4990 first. */
4991 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4992 emit_move_insn (dest1, change_address (source, SImode, NULL));
4993 }
4994 }
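
/* Illustrative note (added for clarity, not in the original source): the
   ordering matters for a load such as
        (set (reg:DI gr4) (mem:DI (reg:SI gr4)))
   Loading gr4 first would clobber the base address, so the code above
   loads gr5 (the low word, at offset 4) first and gr4 last.  */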
4995
4996 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
4997 and SOURCE is either a double register or the constant zero. */
4998 void
4999 frv_split_double_store (rtx dest, rtx source)
5000 {
5001 rtx dest1 = change_address (dest, SImode, NULL);
5002 rtx dest2 = frv_index_memory (dest, SImode, 1);
5003 if (ZERO_P (source))
5004 {
5005 emit_move_insn (dest1, CONST0_RTX (SImode));
5006 emit_move_insn (dest2, CONST0_RTX (SImode));
5007 }
5008 else
5009 {
5010 emit_move_insn (dest1, gen_highpart (SImode, source));
5011 emit_move_insn (dest2, gen_lowpart (SImode, source));
5012 }
5013 }
5014
5015 \f
5016 /* Split a min/max operation returning a SEQUENCE containing all of the
5017 insns. */
5018
5019 rtx
5020 frv_split_minmax (rtx operands[])
5021 {
5022 rtx dest = operands[0];
5023 rtx minmax = operands[1];
5024 rtx src1 = operands[2];
5025 rtx src2 = operands[3];
5026 rtx cc_reg = operands[4];
5027 rtx cr_reg = operands[5];
5028 rtx ret;
5029 enum rtx_code test_code;
5030 machine_mode cr_mode = GET_MODE (cr_reg);
5031
5032 start_sequence ();
5033
5034 /* Figure out which test to use. */
5035 switch (GET_CODE (minmax))
5036 {
5037 default:
5038 gcc_unreachable ();
5039
5040 case SMIN: test_code = LT; break;
5041 case SMAX: test_code = GT; break;
5042 case UMIN: test_code = LTU; break;
5043 case UMAX: test_code = GTU; break;
5044 }
5045
5046 /* Issue the compare instruction. */
5047 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (GET_MODE (cc_reg),
5048 src1, src2)));
5049
5050 /* Set the appropriate CCR bit. */
5051 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (test_code,
5052 GET_MODE (cr_reg),
5053 cc_reg,
5054 const0_rtx)));
5055
5056 /* If we are taking the min/max of a nonzero constant, load that first, and
5057 then do a conditional move of the other value. */
5058 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5059 {
5060 gcc_assert (!rtx_equal_p (dest, src1));
5061
5062 emit_move_insn (dest, src2);
5063 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5064 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5065 gen_rtx_SET (dest, src1)));
5066 }
5067
5068 /* Otherwise, do each half of the move. */
5069 else
5070 {
5071 /* Emit the conditional move for the test being true if needed. */
5072 if (! rtx_equal_p (dest, src1))
5073 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5074 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5075 gen_rtx_SET (dest, src1)));
5076
5077 /* Emit the conditional move for the test being false if needed. */
5078 if (! rtx_equal_p (dest, src2))
5079 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5080 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5081 gen_rtx_SET (dest, src2)));
5082 }
5083
5084 /* Finish up, return sequence. */
5085 ret = get_insns ();
5086 end_sequence ();
5087 return ret;
5088 }
5089
5090 \f
5091 /* Split an integer abs operation returning a SEQUENCE containing all of the
5092 insns. */
5093
5094 rtx
5095 frv_split_abs (rtx operands[])
5096 {
5097 rtx dest = operands[0];
5098 rtx src = operands[1];
5099 rtx cc_reg = operands[2];
5100 rtx cr_reg = operands[3];
5101 rtx ret;
5102
5103 start_sequence ();
5104
5105 /* Issue the compare < 0 instruction. */
5106 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5107
5108 /* Set the appropriate CCR bit. */
5109 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (LT, CC_CCRmode,
5110 cc_reg, const0_rtx)));
5111
5112 /* Emit the conditional negate if the value is negative. */
5113 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5114 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5115 gen_negsi2 (dest, src)));
5116
5117 /* Emit the conditional move for the test being false if needed. */
5118 if (! rtx_equal_p (dest, src))
5119 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5120 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5121 gen_rtx_SET (dest, src)));
5122
5123 /* Finish up, return sequence. */
5124 ret = get_insns ();
5125 end_sequence ();
5126 return ret;
5127 }
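
/* Illustrative expansion (added for clarity, not in the original source):
   for (set gr4 (abs:SI gr5)) the split above emits roughly
        icc <- compare (gr5, 0)
        cr  <- (icc < 0)
        if (cr != 0) gr4 <- -gr5
        if (cr == 0) gr4 <- gr5  */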
5128
5129 \f
5130 /* Initialize machine-specific if-conversion data.
5131 On the FR-V, we don't have any extra fields per se, but it is a useful hook to
5132 initialize the static storage. */
5133 void
5134 frv_ifcvt_machdep_init (void *ce_info ATTRIBUTE_UNUSED)
5135 {
5136 frv_ifcvt.added_insns_list = NULL_RTX;
5137 frv_ifcvt.cur_scratch_regs = 0;
5138 frv_ifcvt.num_nested_cond_exec = 0;
5139 frv_ifcvt.cr_reg = NULL_RTX;
5140 frv_ifcvt.nested_cc_reg = NULL_RTX;
5141 frv_ifcvt.extra_int_cr = NULL_RTX;
5142 frv_ifcvt.extra_fp_cr = NULL_RTX;
5143 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5144 }
5145
5146 \f
5147 /* Internal function to add a potential insn to the list of insns to be inserted
5148 if the conditional execution conversion is successful. */
5149
5150 static void
5151 frv_ifcvt_add_insn (rtx pattern, rtx_insn *insn, int before_p)
5152 {
5153 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5154
5155 link->jump = before_p; /* Mark to add this before or after insn. */
5156 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5157 frv_ifcvt.added_insns_list);
5158
5159 if (TARGET_DEBUG_COND_EXEC)
5160 {
5161 fprintf (stderr,
5162 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5163 (before_p) ? "before" : "after",
5164 (int)INSN_UID (insn));
5165
5166 debug_rtx (pattern);
5167 }
5168 }
5169
5170 \f
5171 /* A C expression to modify the code described by the conditional if
5172 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5173 FALSE_EXPR for converting if-then and if-then-else code to conditional
5174 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5175 tests cannot be converted. */
5176
5177 void
5178 frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false)
5179 {
5180 basic_block test_bb = ce_info->test_bb; /* test basic block */
5181 basic_block then_bb = ce_info->then_bb; /* THEN */
5182 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5183 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5184 rtx true_expr = *p_true;
5185 rtx cr;
5186 rtx cc;
5187 rtx nested_cc;
5188 machine_mode mode = GET_MODE (true_expr);
5189 int j;
5190 basic_block *bb;
5191 int num_bb;
5192 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5193 rtx check_insn;
5194 rtx sub_cond_exec_reg;
5195 enum rtx_code code;
5196 enum rtx_code code_true;
5197 enum rtx_code code_false;
5198 enum reg_class cc_class;
5199 enum reg_class cr_class;
5200 int cc_first;
5201 int cc_last;
5202 reg_set_iterator rsi;
5203
5204 /* Make sure we are only dealing with hard registers. Also honor the
5205 -mno-cond-exec switch, and -mno-nested-cond-exec switches if
5206 applicable. */
5207 if (!reload_completed || !TARGET_COND_EXEC
5208 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5209 goto fail;
5210
5211 /* Figure out which registers we can allocate for our own purposes. Only
5212 consider registers that are not preserved across function calls and are
5213 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5214 if we did not need to use them in reloading other registers. */
5215 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5216 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
5217 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
5218 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5219 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5220
5221 /* If this is a nested IF, we need to discover whether the CC registers that
5222 are set/used inside of the block are used anywhere else. If not, we can
5223 change them to be the CC register that is paired with the CR register that
5224 controls the outermost IF block. */
5225 if (ce_info->pass > 1)
5226 {
5227 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5228 for (j = CC_FIRST; j <= CC_LAST; j++)
5229 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5230 {
5231 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5232 continue;
5233
5234 if (else_bb
5235 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5236 continue;
5237
5238 if (join_bb
5239 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5240 continue;
5241
5242 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5243 }
5244 }
5245
5246 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5247 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5248
5249 frv_ifcvt.added_insns_list = NULL_RTX;
5250 frv_ifcvt.cur_scratch_regs = 0;
5251
5252 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5253 * sizeof (basic_block));
5254
5255 if (join_bb)
5256 {
5257 unsigned int regno;
5258
5259 /* Remove anything live at the beginning of the join block from being
5260 available for allocation. */
5261 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5262 {
5263 if (regno < FIRST_PSEUDO_REGISTER)
5264 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5265 }
5266 }
5267
5268 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5269 num_bb = 0;
5270 if (ce_info->num_multiple_test_blocks)
5271 {
5272 basic_block multiple_test_bb = ce_info->last_test_bb;
5273
5274 while (multiple_test_bb != test_bb)
5275 {
5276 bb[num_bb++] = multiple_test_bb;
5277 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5278 }
5279 }
5280
5281 /* Add in the THEN and ELSE blocks to be scanned. */
5282 bb[num_bb++] = then_bb;
5283 if (else_bb)
5284 bb[num_bb++] = else_bb;
5285
5286 sub_cond_exec_reg = NULL_RTX;
5287 frv_ifcvt.num_nested_cond_exec = 0;
5288
5289 /* Scan all of the blocks for registers that must not be allocated. */
5290 for (j = 0; j < num_bb; j++)
5291 {
5292 rtx_insn *last_insn = BB_END (bb[j]);
5293 rtx_insn *insn = BB_HEAD (bb[j]);
5294 unsigned int regno;
5295
5296 if (dump_file)
5297 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5298 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5299 (int) bb[j]->index,
5300 (int) INSN_UID (BB_HEAD (bb[j])),
5301 (int) INSN_UID (BB_END (bb[j])));
5302
5303 /* Anything live at the beginning of the block is obviously unavailable
5304 for allocation. */
5305 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5306 {
5307 if (regno < FIRST_PSEUDO_REGISTER)
5308 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5309 }
5310
5311 /* Loop through the insns in the block. */
5312 for (;;)
5313 {
5314 /* Mark any new registers that are created as being unavailable for
5315 allocation. Also see if the CC register used in nested IFs can be
5316 reallocated. */
5317 if (INSN_P (insn))
5318 {
5319 rtx pattern;
5320 rtx set;
5321 int skip_nested_if = FALSE;
5322 HARD_REG_SET mentioned_regs;
5323
5324 CLEAR_HARD_REG_SET (mentioned_regs);
5325 find_all_hard_regs (PATTERN (insn), &mentioned_regs);
5326 AND_COMPL_HARD_REG_SET (tmp_reg->regs, mentioned_regs);
5327
5328 pattern = PATTERN (insn);
5329 if (GET_CODE (pattern) == COND_EXEC)
5330 {
5331 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5332
5333 if (reg != sub_cond_exec_reg)
5334 {
5335 sub_cond_exec_reg = reg;
5336 frv_ifcvt.num_nested_cond_exec++;
5337 }
5338 }
5339
5340 set = single_set_pattern (pattern);
5341 if (set)
5342 {
5343 rtx dest = SET_DEST (set);
5344 rtx src = SET_SRC (set);
5345
5346 if (GET_CODE (dest) == REG)
5347 {
5348 int regno = REGNO (dest);
5349 enum rtx_code src_code = GET_CODE (src);
5350
5351 if (CC_P (regno) && src_code == COMPARE)
5352 skip_nested_if = TRUE;
5353
5354 else if (CR_P (regno)
5355 && (src_code == IF_THEN_ELSE
5356 || COMPARISON_P (src)))
5357 skip_nested_if = TRUE;
5358 }
5359 }
5360
5361 if (! skip_nested_if)
5362 AND_COMPL_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite,
5363 mentioned_regs);
5364 }
5365
5366 if (insn == last_insn)
5367 break;
5368
5369 insn = NEXT_INSN (insn);
5370 }
5371 }
5372
5373 /* If this is a nested if, rewrite the CC registers that are available to
5374 include the ones that can be rewritten, to increase the chance of being
5375 able to allocate a paired CC/CR register combination. */
5376 if (ce_info->pass > 1)
5377 {
5378 for (j = CC_FIRST; j <= CC_LAST; j++)
5379 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5380 SET_HARD_REG_BIT (tmp_reg->regs, j);
5381 else
5382 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5383 }
5384
5385 if (dump_file)
5386 {
5387 int num_gprs = 0;
5388 fprintf (dump_file, "Available GPRs: ");
5389
5390 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5391 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5392 {
5393 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5394 if (++num_gprs > GPR_TEMP_NUM+2)
5395 break;
5396 }
5397
5398 fprintf (dump_file, "%s\nAvailable CRs: ",
5399 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5400
5401 for (j = CR_FIRST; j <= CR_LAST; j++)
5402 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5403 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5404
5405 fputs ("\n", dump_file);
5406
5407 if (ce_info->pass > 1)
5408 {
5409 fprintf (dump_file, "Modifiable CCs: ");
5410 for (j = CC_FIRST; j <= CC_LAST; j++)
5411 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5412 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5413
5414 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5415 frv_ifcvt.num_nested_cond_exec);
5416 }
5417 }
5418
5419 /* Allocate the appropriate temporary condition code register. Try to
5420 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5421 that conditional cmp's can be done. */
5422 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5423 {
5424 cr_class = ICR_REGS;
5425 cc_class = ICC_REGS;
5426 cc_first = ICC_FIRST;
5427 cc_last = ICC_LAST;
5428 }
5429 else if (mode == CC_FPmode)
5430 {
5431 cr_class = FCR_REGS;
5432 cc_class = FCC_REGS;
5433 cc_first = FCC_FIRST;
5434 cc_last = FCC_LAST;
5435 }
5436 else
5437 {
5438 cc_first = cc_last = 0;
5439 cr_class = cc_class = NO_REGS;
5440 }
5441
5442 cc = XEXP (true_expr, 0);
5443 nested_cc = cr = NULL_RTX;
5444 if (cc_class != NO_REGS)
5445 {
5446 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5447 so we can execute a csubcc/caddcc/cfcmps instruction. */
5448 int cc_regno;
5449
5450 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5451 {
5452 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5453
5454 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5455 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5456 {
5457 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5458 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5459 TRUE);
5460
5461 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5462 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5463 TRUE, TRUE);
5464 break;
5465 }
5466 }
5467 }
5468
5469 if (! cr)
5470 {
5471 if (dump_file)
5472 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5473
5474 goto fail;
5475 }
5476
5477 if (dump_file)
5478 fprintf (dump_file,
5479 "Will use %s for conditional execution, %s for nested comparisons\n",
5480 reg_names[ REGNO (cr)],
5481 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5482
5483 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5484 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5485 bit being true. We don't do this for floating point, because of NaNs. */
5486 code = GET_CODE (true_expr);
5487 if (GET_MODE (cc) != CC_FPmode)
5488 {
5489 code = reverse_condition (code);
5490 code_true = EQ;
5491 code_false = NE;
5492 }
5493 else
5494 {
5495 code_true = NE;
5496 code_false = EQ;
5497 }
5498
5499 check_insn = gen_rtx_SET (cr, gen_rtx_fmt_ee (code, CC_CCRmode,
5500 cc, const0_rtx));
5501
5502 /* Record the check insn to be inserted later. */
5503 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5504
5505 /* Update the tests. */
5506 frv_ifcvt.cr_reg = cr;
5507 frv_ifcvt.nested_cc_reg = nested_cc;
5508 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5509 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5510 return;
5511
5512 /* Fail, don't do this conditional execution. */
5513 fail:
5514 *p_true = NULL_RTX;
5515 *p_false = NULL_RTX;
5516 if (dump_file)
5517 fprintf (dump_file, "Disabling this conditional execution.\n");
5518
5519 return;
5520 }
5521
5522 \f
5523 /* A C expression to modify the code described by the conditional if
5524 information CE_INFO, for the basic block BB, possibly updating the tests in
5525 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5526 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5527 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5528
5529 /* p_true and p_false are given expressions of the form:
5530
5531 (and (eq:CC_CCR (reg:CC_CCR)
5532 (const_int 0))
5533 (eq:CC (reg:CC)
5534 (const_int 0))) */
5535
5536 void
5537 frv_ifcvt_modify_multiple_tests (ce_if_block *ce_info,
5538 basic_block bb,
5539 rtx *p_true,
5540 rtx *p_false)
5541 {
5542 rtx old_true = XEXP (*p_true, 0);
5543 rtx old_false = XEXP (*p_false, 0);
5544 rtx true_expr = XEXP (*p_true, 1);
5545 rtx false_expr = XEXP (*p_false, 1);
5546 rtx test_expr;
5547 rtx old_test;
5548 rtx cr = XEXP (old_true, 0);
5549 rtx check_insn;
5550 rtx new_cr = NULL_RTX;
5551 rtx *p_new_cr = (rtx *)0;
5552 rtx if_else;
5553 rtx compare;
5554 rtx cc;
5555 enum reg_class cr_class;
5556 machine_mode mode = GET_MODE (true_expr);
5557 rtx (*logical_func)(rtx, rtx, rtx);
5558
5559 if (TARGET_DEBUG_COND_EXEC)
5560 {
5561 fprintf (stderr,
5562 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5563 ce_info->and_and_p ? "&&" : "||");
5564
5565 debug_rtx (*p_true);
5566
5567 fputs ("\nfalse insn:\n", stderr);
5568 debug_rtx (*p_false);
5569 }
5570
5571 if (!TARGET_MULTI_CE)
5572 goto fail;
5573
5574 if (GET_CODE (cr) != REG)
5575 goto fail;
5576
5577 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5578 {
5579 cr_class = ICR_REGS;
5580 p_new_cr = &frv_ifcvt.extra_int_cr;
5581 }
5582 else if (mode == CC_FPmode)
5583 {
5584 cr_class = FCR_REGS;
5585 p_new_cr = &frv_ifcvt.extra_fp_cr;
5586 }
5587 else
5588 goto fail;
5589
5590 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5591 more &&/|| tests. */
5592 new_cr = *p_new_cr;
5593 if (! new_cr)
5594 {
5595 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5596 CC_CCRmode, TRUE, TRUE);
5597 if (! new_cr)
5598 goto fail;
5599 }
5600
5601 if (ce_info->and_and_p)
5602 {
5603 old_test = old_false;
5604 test_expr = true_expr;
5605 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5606 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5607 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5608 }
5609 else
5610 {
5611 old_test = old_false;
5612 test_expr = false_expr;
5613 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5614 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5615 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5616 }
5617
5618 /* First add the andcr/andncr/orcr/orncr, which will be added after the
5619 conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
5620 stack. */
5621 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5622
5623 /* Now add the conditional check insn. */
5624 cc = XEXP (test_expr, 0);
5625 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5626 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5627
5628 check_insn = gen_rtx_SET (new_cr, if_else);
5629
5630 /* Add the new check insn to the list of check insns that need to be
5631 inserted. */
5632 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5633
5634 if (TARGET_DEBUG_COND_EXEC)
5635 {
5636 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5637 stderr);
5638
5639 debug_rtx (*p_true);
5640
5641 fputs ("\nfalse insn:\n", stderr);
5642 debug_rtx (*p_false);
5643 }
5644
5645 return;
5646
5647 fail:
5648 *p_true = *p_false = NULL_RTX;
5649
5650 /* If we allocated a CR register, release it. */
5651 if (new_cr)
5652 {
5653 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5654 *p_new_cr = NULL_RTX;
5655 }
5656
5657 if (TARGET_DEBUG_COND_EXEC)
5658 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5659
5660 return;
5661 }
5662
5663 \f
5664 /* Return a register which will be loaded with a value if an IF block is
5665 converted to conditional execution. This is used to rewrite instructions
5666 that use constants to ones that just use registers. */
5667
5668 static rtx
5669 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5670 {
5671 int num_alloc = frv_ifcvt.cur_scratch_regs;
5672 int i;
5673 rtx reg;
5674
5675 /* We know gr0 == 0, so replace any errant uses. */
5676 if (value == const0_rtx)
5677 return gen_rtx_REG (SImode, GPR_FIRST);
5678
5679 /* First search all registers currently loaded to see if we have an
5680 applicable constant. */
5681 if (CONSTANT_P (value)
5682 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5683 {
5684 for (i = 0; i < num_alloc; i++)
5685 {
5686 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5687 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5688 }
5689 }
5690
5691 /* Have we exhausted the number of registers available? */
5692 if (num_alloc >= GPR_TEMP_NUM)
5693 {
5694 if (dump_file)
5695 fprintf (dump_file, "Too many temporary registers allocated\n");
5696
5697 return NULL_RTX;
5698 }
5699
5700 /* Allocate the new register. */
5701 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5702 if (! reg)
5703 {
5704 if (dump_file)
5705 fputs ("Could not find a scratch register\n", dump_file);
5706
5707 return NULL_RTX;
5708 }
5709
5710 frv_ifcvt.cur_scratch_regs++;
5711 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (reg, value);
5712
5713 if (dump_file)
5714 {
5715 if (GET_CODE (value) == CONST_INT)
5716 fprintf (dump_file, "Register %s will hold %ld\n",
5717 reg_names[ REGNO (reg)], (long)INTVAL (value));
5718
5719 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5720 fprintf (dump_file, "Register %s will hold LR\n",
5721 reg_names[ REGNO (reg)]);
5722
5723 else
5724 fprintf (dump_file, "Register %s will hold a saved value\n",
5725 reg_names[ REGNO (reg)]);
5726 }
5727
5728 return reg;
5729 }
5730
5731 \f
5732 /* Update a MEM used in conditional code that might contain an offset to put
5733 the offset into a scratch register, so that the conditional load/store
5734 operations can be used. This function returns the original pointer if the
5735 MEM is valid to use in conditional code, NULL if we can't load up the offset
5736 into a temporary register, or the new MEM if we were successful. */
5737
5738 static rtx
5739 frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn)
5740 {
5741 rtx addr = XEXP (mem, 0);
5742
5743 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE))
5744 {
5745 if (GET_CODE (addr) == PLUS)
5746 {
5747 rtx addr_op0 = XEXP (addr, 0);
5748 rtx addr_op1 = XEXP (addr, 1);
5749
5750 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5751 {
5752 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5753 if (!reg)
5754 return NULL_RTX;
5755
5756 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5757 }
5758
5759 else
5760 return NULL_RTX;
5761 }
5762
5763 else if (CONSTANT_P (addr))
5764 addr = frv_ifcvt_load_value (addr, insn);
5765
5766 else
5767 return NULL_RTX;
5768
5769 if (addr == NULL_RTX)
5770 return NULL_RTX;
5771
5772 else if (XEXP (mem, 0) != addr)
5773 return change_address (mem, mode, addr);
5774 }
5775
5776 return mem;
5777 }
5778
5779 \f
5780 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5781 SET, possibly conditionally executed. It may also have CLOBBERs and USEs. */
5782
5783 static rtx
5784 single_set_pattern (rtx pattern)
5785 {
5786 rtx set;
5787 int i;
5788
5789 if (GET_CODE (pattern) == COND_EXEC)
5790 pattern = COND_EXEC_CODE (pattern);
5791
5792 if (GET_CODE (pattern) == SET)
5793 return pattern;
5794
5795 else if (GET_CODE (pattern) == PARALLEL)
5796 {
5797 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5798 {
5799 rtx sub = XVECEXP (pattern, 0, i);
5800
5801 switch (GET_CODE (sub))
5802 {
5803 case USE:
5804 case CLOBBER:
5805 break;
5806
5807 case SET:
5808 if (set)
5809 return 0;
5810 else
5811 set = sub;
5812 break;
5813
5814 default:
5815 return 0;
5816 }
5817 }
5818 return set;
5819 }
5820
5821 return 0;
5822 }
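/* For illustration, a sketch of the kind of pattern single_set_pattern
   accepts (the register numbers below are placeholders, not real FRV
   hard registers):

       (cond_exec (ne (reg:CC_CCR 170) (const_int 0))
         (parallel [(set (reg:SI 10)
                         (plus:SI (reg:SI 11) (reg:SI 12)))
                    (clobber (reg:CC 168))]))

   The inner SET is returned.  A PARALLEL containing two SETs, or any
   element other than SET/USE/CLOBBER, makes the function return 0.  */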
5823
5824 \f
5825 /* A C expression to modify the code described by the conditional if
5826 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5827 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5828 insn cannot be converted to be executed conditionally. */
5829
5830 rtx
5831 frv_ifcvt_modify_insn (ce_if_block *ce_info,
5832 rtx pattern,
5833 rtx_insn *insn)
5834 {
5835 rtx orig_ce_pattern = pattern;
5836 rtx set;
5837 rtx op0;
5838 rtx op1;
5839 rtx test;
5840
5841 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5842
5843 test = COND_EXEC_TEST (pattern);
5844 if (GET_CODE (test) == AND)
5845 {
5846 rtx cr = frv_ifcvt.cr_reg;
5847 rtx test_reg;
5848
5849 op0 = XEXP (test, 0);
5850 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5851 goto fail;
5852
5853 op1 = XEXP (test, 1);
5854 test_reg = XEXP (op1, 0);
5855 if (GET_CODE (test_reg) != REG)
5856 goto fail;
5857
5858 /* Is this the first nested if block in this sequence? If so, generate
5859 an andcr or andncr. */
5860 if (! frv_ifcvt.last_nested_if_cr)
5861 {
5862 rtx and_op;
5863
5864 frv_ifcvt.last_nested_if_cr = test_reg;
5865 if (GET_CODE (op0) == NE)
5866 and_op = gen_andcr (test_reg, cr, test_reg);
5867 else
5868 and_op = gen_andncr (test_reg, cr, test_reg);
5869
5870 frv_ifcvt_add_insn (and_op, insn, TRUE);
5871 }
5872
5873 /* If this isn't the first statement in the nested if sequence, see if we
5874 are dealing with the same register. */
5875 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5876 goto fail;
5877
5878 COND_EXEC_TEST (pattern) = test = op1;
5879 }
5880
5881 /* If this isn't a nested if, reset state variables. */
5882 else
5883 {
5884 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5885 }
5886
5887 set = single_set_pattern (pattern);
5888 if (set)
5889 {
5890 rtx dest = SET_DEST (set);
5891 rtx src = SET_SRC (set);
5892 machine_mode mode = GET_MODE (dest);
5893
5894 /* Check for normal binary operators. */
5895 if (mode == SImode && ARITHMETIC_P (src))
5896 {
5897 op0 = XEXP (src, 0);
5898 op1 = XEXP (src, 1);
5899
5900 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
5901 {
5902 op1 = frv_ifcvt_load_value (op1, insn);
5903 if (op1)
5904 COND_EXEC_CODE (pattern)
5905 = gen_rtx_SET (dest, gen_rtx_fmt_ee (GET_CODE (src),
5906 GET_MODE (src),
5907 op0, op1));
5908 else
5909 goto fail;
5910 }
5911 }
5912
5913 /* For a multiply by a constant, we need to handle the sign extension
5914 correctly. Add a USE of the value after the multiply to prevent flow
5915 from cratering because only one register out of the two was used. */
5916 else if (mode == DImode && GET_CODE (src) == MULT)
5917 {
5918 op0 = XEXP (src, 0);
5919 op1 = XEXP (src, 1);
5920 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
5921 {
5922 op1 = frv_ifcvt_load_value (op1, insn);
5923 if (op1)
5924 {
5925 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
5926 COND_EXEC_CODE (pattern)
5927 = gen_rtx_SET (dest, gen_rtx_MULT (DImode, op0, op1));
5928 }
5929 else
5930 goto fail;
5931 }
5932
5933 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
5934 }
5935
5936 /* If we are merely loading a constant created for a nested conditional
5937 execution statement, load the constant without any conditional
5938 execution, since we know that the constant will not interfere with any
5939 other registers. */
5940 else if (frv_ifcvt.scratch_insns_bitmap
5941 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
5942 INSN_UID (insn))
5943 && REG_P (SET_DEST (set))
5944 /* We must not unconditionally set a scratch reg chosen
5945 for a nested if-converted block if its incoming
5946 value from the TEST block (or the result of the THEN
5947 branch) could/should propagate to the JOIN block.
5948 It suffices to test whether the register is live at
5949 the JOIN point: if it's live there, we can infer
5950 that we set it in the former JOIN block of the
5951 nested if-converted block (otherwise it wouldn't
5952 have been available as a scratch register), and it
5953 is either propagated through or set in the other
5954 conditional block. It's probably not worth trying
5955 to catch the latter case, and it could actually
5956 limit scheduling of the combined block quite
5957 severely. */
5958 && ce_info->join_bb
5959 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
5960 REGNO (SET_DEST (set))))
5961 /* Similarly, we must not unconditionally set a reg
5962 used as scratch in the THEN branch if the same reg
5963 is live in the ELSE branch. */
5964 && (! ce_info->else_bb
5965 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
5966 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
5967 REGNO (SET_DEST (set))))))
5968 pattern = set;
5969
5970 else if (mode == QImode || mode == HImode || mode == SImode
5971 || mode == SFmode)
5972 {
5973 int changed_p = FALSE;
5974
5975 /* Check for just loading up a constant */
5976 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
5977 {
5978 src = frv_ifcvt_load_value (src, insn);
5979 if (!src)
5980 goto fail;
5981
5982 changed_p = TRUE;
5983 }
5984
5985 /* See if we need to fix up stores */
5986 if (GET_CODE (dest) == MEM)
5987 {
5988 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
5989
5990 if (!new_mem)
5991 goto fail;
5992
5993 else if (new_mem != dest)
5994 {
5995 changed_p = TRUE;
5996 dest = new_mem;
5997 }
5998 }
5999
6000 /* See if we need to fix up loads */
6001 if (GET_CODE (src) == MEM)
6002 {
6003 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
6004
6005 if (!new_mem)
6006 goto fail;
6007
6008 else if (new_mem != src)
6009 {
6010 changed_p = TRUE;
6011 src = new_mem;
6012 }
6013 }
6014
6015 /* If either src or destination changed, redo SET. */
6016 if (changed_p)
6017 COND_EXEC_CODE (pattern) = gen_rtx_SET (dest, src);
6018 }
6019
6020 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
6021 rewriting the CC register to be the same as the paired CC/CR register
6022 for nested ifs. */
6023 else if (mode == CC_CCRmode && COMPARISON_P (src))
6024 {
6025 int regno = REGNO (XEXP (src, 0));
6026 rtx if_else;
6027
6028 if (ce_info->pass > 1
6029 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6030 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6031 {
6032 src = gen_rtx_fmt_ee (GET_CODE (src),
6033 CC_CCRmode,
6034 frv_ifcvt.nested_cc_reg,
6035 XEXP (src, 1));
6036 }
6037
6038 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6039 pattern = gen_rtx_SET (dest, if_else);
6040 }
6041
6042 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6043 else if (ce_info->pass > 1
6044 && GET_CODE (dest) == REG
6045 && CC_P (REGNO (dest))
6046 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6047 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6048 REGNO (dest))
6049 && GET_CODE (src) == COMPARE)
6050 {
6051 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6052 COND_EXEC_CODE (pattern)
6053 = gen_rtx_SET (frv_ifcvt.nested_cc_reg, copy_rtx (src));
6054 }
6055 }
6056
6057 if (TARGET_DEBUG_COND_EXEC)
6058 {
6059 rtx orig_pattern = PATTERN (insn);
6060
6061 PATTERN (insn) = pattern;
6062 fprintf (stderr,
6063 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6064 ce_info->pass);
6065
6066 debug_rtx (insn);
6067 PATTERN (insn) = orig_pattern;
6068 }
6069
6070 return pattern;
6071
6072 fail:
6073 if (TARGET_DEBUG_COND_EXEC)
6074 {
6075 rtx orig_pattern = PATTERN (insn);
6076
6077 PATTERN (insn) = orig_ce_pattern;
6078 fprintf (stderr,
6079 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6080 ce_info->pass);
6081
6082 debug_rtx (insn);
6083 PATTERN (insn) = orig_pattern;
6084 }
6085
6086 return NULL_RTX;
6087 }
6088
6089 \f
6090 /* A C expression to perform any final machine dependent modifications in
6091 converting code to conditional execution in the code described by the
6092 conditional if information CE_INFO. */
6093
6094 void
6095 frv_ifcvt_modify_final (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6096 {
6097 rtx_insn *existing_insn;
6098 rtx check_insn;
6099 rtx p = frv_ifcvt.added_insns_list;
6100 int i;
6101
6102 /* Loop inserting the check insns. The last check insn is the first test,
6103 and is the appropriate place to insert constants. */
6104 gcc_assert (p);
6105
6106 do
6107 {
6108 rtx check_and_insert_insns = XEXP (p, 0);
6109 rtx old_p = p;
6110
6111 check_insn = XEXP (check_and_insert_insns, 0);
6112 existing_insn = as_a <rtx_insn *> (XEXP (check_and_insert_insns, 1));
6113 p = XEXP (p, 1);
6114
6115 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6116 the existing insn, otherwise it is to be inserted AFTER. */
6117 if (check_and_insert_insns->jump)
6118 {
6119 emit_insn_before (check_insn, existing_insn);
6120 check_and_insert_insns->jump = 0;
6121 }
6122 else
6123 emit_insn_after (check_insn, existing_insn);
6124
6125 free_EXPR_LIST_node (check_and_insert_insns);
6126 free_EXPR_LIST_node (old_p);
6127 }
6128 while (p != NULL_RTX);
6129
6130 /* Load up any constants needed into temp gprs */
6131 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6132 {
6133 rtx_insn *insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6134 if (! frv_ifcvt.scratch_insns_bitmap)
6135 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6136 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6137 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6138 }
6139
6140 frv_ifcvt.added_insns_list = NULL_RTX;
6141 frv_ifcvt.cur_scratch_regs = 0;
6142 }
6143
6144 \f
6145 /* A C expression to cancel any machine dependent modifications in converting
6146 code to conditional execution in the code described by the conditional if
6147 information CE_INFO. */
6148
6149 void
6150 frv_ifcvt_modify_cancel (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6151 {
6152 int i;
6153 rtx p = frv_ifcvt.added_insns_list;
6154
6155 /* Loop freeing up the EXPR_LIST's allocated. */
6156 while (p != NULL_RTX)
6157 {
6158 rtx check_and_jump = XEXP (p, 0);
6159 rtx old_p = p;
6160
6161 p = XEXP (p, 1);
6162 free_EXPR_LIST_node (check_and_jump);
6163 free_EXPR_LIST_node (old_p);
6164 }
6165
6166 /* Release any temporary gprs allocated. */
6167 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6168 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6169
6170 frv_ifcvt.added_insns_list = NULL_RTX;
6171 frv_ifcvt.cur_scratch_regs = 0;
6172 return;
6173 }
6174 \f
6175 /* A C expression for the size in bytes of the trampoline, as an integer.
6176 The template is:
6177
6178 setlo #0, <jmp_reg>
6179 setlo #0, <static_chain>
6180 sethi #0, <jmp_reg>
6181 sethi #0, <static_chain>
6182 jmpl @(gr0,<jmp_reg>) */
6183
6184 int
6185 frv_trampoline_size (void)
6186 {
6187 if (TARGET_FDPIC)
6188 /* Allocate room for the function descriptor and the lddi
6189 instruction. */
6190 return 8 + 6 * 4;
6191 return 5 /* instructions */ * 4 /* instruction size. */;
6192 }
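/* Worked out, on the assumption of 4-byte instructions: the non-FDPIC
   trampoline is 5 * 4 = 20 bytes, while the FDPIC variant reserves
   8 bytes for the function descriptor plus 6 * 4 = 24 bytes of code,
   giving 32 bytes in total.  */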
6193
6194 \f
6195 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6196 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6197 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6198 should be passed to the function when it is called.
6199
6200 The template is:
6201
6202 setlo #0, <jmp_reg>
6203 setlo #0, <static_chain>
6204 sethi #0, <jmp_reg>
6205 sethi #0, <static_chain>
6206 jmpl @(gr0,<jmp_reg>) */
6207
6208 static void
6209 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
6210 {
6211 rtx addr = XEXP (m_tramp, 0);
6212 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6213 rtx sc_reg = force_reg (Pmode, static_chain);
6214
6215 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6216 LCT_NORMAL, VOIDmode,
6217 addr, Pmode,
6218 GEN_INT (frv_trampoline_size ()), SImode,
6219 fnaddr, Pmode,
6220 sc_reg, Pmode);
6221 }
6222
6223 \f
6224 /* Many machines have some registers that cannot be copied directly to or from
6225 memory or even from other types of registers. An example is the `MQ'
6226 register, which on most machines, can only be copied to or from general
6227 registers, but not memory. Some machines allow copying all registers to and
6228 from memory, but require a scratch register for stores to some memory
6229 locations (e.g., those with symbolic address on the RT, and those with
6230 certain symbolic address on the SPARC when compiling PIC). In some cases,
6231 both an intermediate and a scratch register are required.
6232
6233 You should define these macros to indicate to the reload phase that it may
6234 need to allocate at least one register for a reload in addition to the
6235 register to contain the data. Specifically, if copying X to a register
6236 RCLASS in MODE requires an intermediate register, you should define
6237 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6238 whose registers can be used as intermediate registers or scratch registers.
6239
6240 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6241 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6242 largest register class required. If the requirements for input and output
6243 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6244 instead of defining both macros identically.
6245
6246 The values returned by these macros are often `GENERAL_REGS'. Return
6247 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6248 to or from a register of RCLASS in MODE without requiring a scratch register.
6249 Do not define this macro if it would always return `NO_REGS'.
6250
6251 If a scratch register is required (either with or without an intermediate
6252 register), you should define patterns for `reload_inM' or `reload_outM', as
6253 required. These patterns, which will normally be implemented with a
6254 `define_expand', should be similar to the `movM' patterns, except that
6255 operand 2 is the scratch register.
6256
6257 Define constraints for the reload register and scratch register that contain
6258 a single register class. If the original reload register (whose class is
6259 RCLASS) can meet the constraint given in the pattern, the value returned by
6260 these macros is used for the class of the scratch register. Otherwise, two
6261 additional reload registers are required. Their classes are obtained from
6262 the constraints in the insn pattern.
6263
6264 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6265 either be in a hard register or in memory. Use `true_regnum' to find out;
6266 it will return -1 if the pseudo is in memory and the hard register number if
6267 it is in a register.
6268
6269 These macros should not be used in the case where a particular class of
6270 registers can only be copied to memory and not to another class of
6271 registers. In that case, secondary reload registers are not needed and
6272 would not be helpful. Instead, a stack location must be used to perform the
6273 copy and the `movM' pattern should use memory as an intermediate storage.
6274 This case often occurs between floating-point and general registers. */
6275
6276 enum reg_class
6277 frv_secondary_reload_class (enum reg_class rclass,
6278 machine_mode mode ATTRIBUTE_UNUSED,
6279 rtx x)
6280 {
6281 enum reg_class ret;
6282
6283 switch (rclass)
6284 {
6285 default:
6286 ret = NO_REGS;
6287 break;
6288
6289 /* Accumulators/Accumulator guard registers need to go through floating
6290 point registers. */
6291 case QUAD_REGS:
6292 case GPR_REGS:
6293 ret = NO_REGS;
6294 if (x && GET_CODE (x) == REG)
6295 {
6296 int regno = REGNO (x);
6297
6298 if (ACC_P (regno) || ACCG_P (regno))
6299 ret = FPR_REGS;
6300 }
6301 break;
6302
6303 /* Nonzero constants should be loaded into an FPR through a GPR. */
6304 case QUAD_FPR_REGS:
6305 if (x && CONSTANT_P (x) && !ZERO_P (x))
6306 ret = GPR_REGS;
6307 else
6308 ret = NO_REGS;
6309 break;
6310
6311 /* All of these types need gpr registers. */
6312 case ICC_REGS:
6313 case FCC_REGS:
6314 case CC_REGS:
6315 case ICR_REGS:
6316 case FCR_REGS:
6317 case CR_REGS:
6318 case LCR_REG:
6319 case LR_REG:
6320 ret = GPR_REGS;
6321 break;
6322
6323 /* The accumulators need fpr registers. */
6324 case QUAD_ACC_REGS:
6325 case ACCG_REGS:
6326 ret = FPR_REGS;
6327 break;
6328 }
6329
6330 return ret;
6331 }
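/* Putting the cases above together, by way of example: copying an
   accumulator into a GPR needs an FPR as intermediary, loading a
   nonzero constant into an FPR goes through a GPR, and moving any of
   the condition/link registers to or from memory also goes through a
   GPR.  Plain GPR<->GPR or FPR<->memory copies need no secondary
   reload (NO_REGS).  */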
6332
6333 /* This hook exists to catch the case where secondary_reload_class() is
6334 called from init_reg_autoinc() in regclass.c - before the reload optabs
6335 have been initialised. */
6336
6337 static reg_class_t
6338 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
6339 machine_mode reload_mode,
6340 secondary_reload_info * sri)
6341 {
6342 enum reg_class rclass = NO_REGS;
6343 enum reg_class reload_class = (enum reg_class) reload_class_i;
6344
6345 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6346 {
6347 sri->icode = sri->prev_sri->t_icode;
6348 return NO_REGS;
6349 }
6350
6351 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6352
6353 if (rclass != NO_REGS)
6354 {
6355 enum insn_code icode
6356 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
6357 reload_mode);
6358 if (icode == 0)
6359 {
6360 /* This happens when the reload_[in|out]_optabs have
6361 not been initialised. */
6362 sri->t_icode = CODE_FOR_nothing;
6363 return rclass;
6364 }
6365 }
6366
6367 /* Fall back to the default secondary reload handler. */
6368 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6369
6370 }
6371 \f
6372 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */
6373
6374 static bool
6375 frv_class_likely_spilled_p (reg_class_t rclass)
6376 {
6377 switch (rclass)
6378 {
6379 default:
6380 break;
6381
6382 case GR8_REGS:
6383 case GR9_REGS:
6384 case GR89_REGS:
6385 case FDPIC_FPTR_REGS:
6386 case FDPIC_REGS:
6387 case ICC_REGS:
6388 case FCC_REGS:
6389 case CC_REGS:
6390 case ICR_REGS:
6391 case FCR_REGS:
6392 case CR_REGS:
6393 case LCR_REG:
6394 case LR_REG:
6395 case SPR_REGS:
6396 case QUAD_ACC_REGS:
6397 case ACCG_REGS:
6398 return true;
6399 }
6400
6401 return false;
6402 }
6403
6404 \f
6405 /* An expression for the alignment of a structure field FIELD if the
6406 alignment computed in the usual way is COMPUTED. GCC uses this
6407 value instead of the value in `BIGGEST_ALIGNMENT' or
6408 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6409
6410 /* A bit field is declared with type char, short, long or long long, and
6411 its maximum width is the number of bits in that type.
6412 
6413 A bit field is assigned to the lowest-addressed storage unit that is
6414 large enough to hold it.
6415 
6416 Consecutive bit fields are packed, with regard to their type, into
6417 consecutive bits of the same storage unit, beginning with the MSB and
6418 continuing toward the LSB.
6419 
6420 If a field would straddle a boundary of its bit field type, its
6421 assignment is instead aligned to the next boundary suitable for that
6422 type.
6423
6424 When a bit field having a bit length of 0 is declared, it is forcibly
6425 assigned to the next storage unit.
6426
6427 e.g.:
6428 struct {
6429 int a:2;
6430 int b:6;
6431 char c:4;
6432 int d:10;
6433 int :0;
6434 int f:2;
6435 } x;
6436
6437 +0 +1 +2 +3
6438 &x 00000000 00000000 00000000 00000000
6439 MLM----L
6440 a b
6441 &x+4 00000000 00000000 00000000 00000000
6442 M--L
6443 c
6444 &x+8 00000000 00000000 00000000 00000000
6445 M----------L
6446 d
6447 &x+12 00000000 00000000 00000000 00000000
6448 ML
6449 f
6450 */
6451
6452 int
6453 frv_adjust_field_align (tree field, int computed)
6454 {
6455 /* Make sure that the bitfield is not wider than the type. */
6456 if (field
6457 && DECL_BIT_FIELD (field)
6458 && !DECL_ARTIFICIAL (field))
6459 {
6460 tree parent = DECL_CONTEXT (field);
6461 tree prev = NULL_TREE;
6462 tree cur;
6463
6464 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur))
6465 {
6466 if (TREE_CODE (cur) != FIELD_DECL)
6467 continue;
6468
6469 prev = cur;
6470 }
6471
6472 gcc_assert (cur);
6473
6474 /* If this isn't a :0 field and if the previous element is a bitfield
6475 also, see if the type is different, if so, we will need to align the
6476 bit-field to the next boundary. */
6477 if (prev
6478 && ! DECL_PACKED (field)
6479 && ! integer_zerop (DECL_SIZE (field))
6480 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6481 {
6482 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6483 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6484 computed = (prev_align > cur_align) ? prev_align : cur_align;
6485 }
6486 }
6487
6488 return computed;
6489 }
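/* Tying this back to the example above: when `char c:4;' is followed by
   `int d:10;', the two bit-field types differ, so d's field alignment
   is raised to max (TYPE_ALIGN (char), TYPE_ALIGN (int)), which is why
   d starts in a fresh word at &x+8 in the layout sketched earlier.  */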
6490
6491 \f
6492 /* Implement TARGET_HARD_REGNO_MODE_OK. */
6493
6494 static bool
6495 frv_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
6496 {
6497 int base;
6498 int mask;
6499
6500 switch (mode)
6501 {
6502 case E_CCmode:
6503 case E_CC_UNSmode:
6504 case E_CC_NZmode:
6505 return ICC_P (regno) || GPR_P (regno);
6506
6507 case E_CC_CCRmode:
6508 return CR_P (regno) || GPR_P (regno);
6509
6510 case E_CC_FPmode:
6511 return FCC_P (regno) || GPR_P (regno);
6512
6513 default:
6514 break;
6515 }
6516
6517 /* Set BASE to the first register in REGNO's class. Set MASK to the
6518 bits that must be clear in (REGNO - BASE) for the register to be
6519 well-aligned. */
6520 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6521 {
6522 if (ACCG_P (regno))
6523 {
6524 /* ACCGs store one byte. Two-byte quantities must start in
6525 even-numbered registers, four-byte ones in registers whose
6526 numbers are divisible by four, and so on. */
6527 base = ACCG_FIRST;
6528 mask = GET_MODE_SIZE (mode) - 1;
6529 }
6530 else
6531 {
6532 /* The other registers store one word. */
6533 if (GPR_P (regno) || regno == AP_FIRST)
6534 base = GPR_FIRST;
6535
6536 else if (FPR_P (regno))
6537 base = FPR_FIRST;
6538
6539 else if (ACC_P (regno))
6540 base = ACC_FIRST;
6541
6542 else if (SPR_P (regno))
6543 return mode == SImode;
6544
6545 /* Other registers cannot hold values of these modes. */
6546 else
6547 return false;
6548
6549 /* Anything smaller than an SI is OK in any word-sized register. */
6550 if (GET_MODE_SIZE (mode) < 4)
6551 return true;
6552
6553 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6554 }
6555 return (((regno - base) & mask) == 0);
6556 }
6557
6558 return false;
6559 }
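/* A few examples of the alignment check above, as a rough sketch:

     DImode in a GPR:   mask = 8/4 - 1 = 1, so only even offsets from
                        GPR_FIRST are acceptable.
     SImode in an FPR:  size < 4 is false, mask = 4/4 - 1 = 0, so any
                        FPR will do.
     SImode in an ACCG: mask = 4 - 1 = 3, so the guard's offset from
                        ACCG_FIRST must be a multiple of four.  */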
6560
6561 /* Implement TARGET_MODES_TIEABLE_P. */
6562
6563 static bool
6564 frv_modes_tieable_p (machine_mode mode1, machine_mode mode2)
6565 {
6566 return mode1 == mode2;
6567 }
6568
6569 \f
6570 /* Implement TARGET_HARD_REGNO_NREGS.
6571
6572 On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6573 that we can build the appropriate instructions to properly reload the
6574 values. Also, make the byte-sized accumulator guards use one guard
6575 for each byte. */
6576
6577 static unsigned int
6578 frv_hard_regno_nregs (unsigned int regno, machine_mode mode)
6579 {
6580 if (ACCG_P (regno))
6581 return GET_MODE_SIZE (mode);
6582 else
6583 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6584 }
6585
6586 \f
6587 /* Implement CLASS_MAX_NREGS. */
6588
6589 int
6590 frv_class_max_nregs (enum reg_class rclass, machine_mode mode)
6591 {
6592 if (rclass == ACCG_REGS)
6593 /* An N-byte value requires N accumulator guards. */
6594 return GET_MODE_SIZE (mode);
6595 else
6596 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6597 }
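/* For example: a DImode value needs (8 + 3) / 4 = 2 registers in
   GPR_REGS or FPR_REGS, but 8 accumulator guards in ACCG_REGS, since
   each guard holds a single byte.  */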
6598
6599 \f
6600 /* A C expression that is nonzero if X is a legitimate constant for an
6601 immediate operand on the target machine. You can assume that X satisfies
6602 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6603 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6604
6605 static bool
6606 frv_legitimate_constant_p (machine_mode mode, rtx x)
6607 {
6608 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6609 means that the move expanders will be expected to deal with most
6610 kinds of constant, regardless of what we return here.
6611
6612 However, among its other duties, frv_legitimate_constant_p decides whether
6613 a constant can be entered into reg_equiv_constant[]. If we return true,
6614 reload can create new instances of the constant whenever it likes.
6615
6616 The idea is therefore to accept as many constants as possible (to give
6617 reload more freedom) while rejecting constants that can only be created
6618 at certain times. In particular, anything with a symbolic component will
6619 require use of the pseudo FDPIC register, which is only available before
6620 reload. */
6621 if (TARGET_FDPIC)
6622 return LEGITIMATE_PIC_OPERAND_P (x);
6623
6624 /* All of the integer constants are ok. */
6625 if (GET_CODE (x) != CONST_DOUBLE)
6626 return TRUE;
6627
6628 /* Double-word integer constants are ok. */
6629 if (GET_MODE (x) == VOIDmode || mode == DImode)
6630 return TRUE;
6631
6632 /* 0 is always ok. */
6633 if (x == CONST0_RTX (mode))
6634 return TRUE;
6635
6636 /* If floating point is just emulated, allow any constant, since it will be
6637 constructed in the GPRs. */
6638 if (!TARGET_HAS_FPRS)
6639 return TRUE;
6640
6641 if (mode == DFmode && !TARGET_DOUBLE)
6642 return TRUE;
6643
6644 /* Otherwise store the constant away and do a load. */
6645 return FALSE;
6646 }
6647
6648 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6649 CC_NZ for comparisons against zero in which a single Z or N flag test
6650 is enough, CC_UNS for other unsigned comparisons, and CC for other
6651 signed comparisons. */
6652
6653 machine_mode
6654 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6655 {
6656 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6657 return CC_FPmode;
6658
6659 switch (code)
6660 {
6661 case EQ:
6662 case NE:
6663 case LT:
6664 case GE:
6665 return y == const0_rtx ? CC_NZmode : CCmode;
6666
6667 case GTU:
6668 case GEU:
6669 case LTU:
6670 case LEU:
6671 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6672
6673 default:
6674 return CCmode;
6675 }
6676 }
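/* Illustrative mappings (a sketch, not an exhaustive table):

     (lt:SI x (const_int 0))    -> CC_NZmode
     (lt:SI x y)                -> CCmode
     (ltu:SI x (const_int 0))   -> CC_NZmode
     (ltu:SI x y)               -> CC_UNSmode
     any SFmode/DFmode compare  -> CC_FPmode  */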
6677 \f
6678
6679 /* Worker function for TARGET_REGISTER_MOVE_COST. */
6680
6681 #define HIGH_COST 40
6682 #define MEDIUM_COST 3
6683 #define LOW_COST 1
6684
6685 static int
6686 frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6687 reg_class_t from, reg_class_t to)
6688 {
6689 switch (from)
6690 {
6691 default:
6692 break;
6693
6694 case QUAD_REGS:
6695 case GPR_REGS:
6696 case GR8_REGS:
6697 case GR9_REGS:
6698 case GR89_REGS:
6699 case FDPIC_REGS:
6700 case FDPIC_FPTR_REGS:
6701 case FDPIC_CALL_REGS:
6702
6703 switch (to)
6704 {
6705 default:
6706 break;
6707
6708 case QUAD_REGS:
6709 case GPR_REGS:
6710 case GR8_REGS:
6711 case GR9_REGS:
6712 case GR89_REGS:
6713 case FDPIC_REGS:
6714 case FDPIC_FPTR_REGS:
6715 case FDPIC_CALL_REGS:
6716
6717 return LOW_COST;
6718
6719 case FPR_REGS:
6720 return LOW_COST;
6721
6722 case LCR_REG:
6723 case LR_REG:
6724 case SPR_REGS:
6725 return LOW_COST;
6726 }
6727
6728 case QUAD_FPR_REGS:
6729 switch (to)
6730 {
6731 default:
6732 break;
6733
6734 case QUAD_REGS:
6735 case GPR_REGS:
6736 case GR8_REGS:
6737 case GR9_REGS:
6738 case GR89_REGS:
6739 case FDPIC_REGS:
6740 case FDPIC_FPTR_REGS:
6741 case FDPIC_CALL_REGS:
6742
6743 case QUAD_ACC_REGS:
6744 case ACCG_REGS:
6745 return MEDIUM_COST;
6746
6747 case QUAD_FPR_REGS:
6748 return LOW_COST;
6749 }
6750
6751 case LCR_REG:
6752 case LR_REG:
6753 case SPR_REGS:
6754 switch (to)
6755 {
6756 default:
6757 break;
6758
6759 case QUAD_REGS:
6760 case GPR_REGS:
6761 case GR8_REGS:
6762 case GR9_REGS:
6763 case GR89_REGS:
6764 case FDPIC_REGS:
6765 case FDPIC_FPTR_REGS:
6766 case FDPIC_CALL_REGS:
6767
6768 return MEDIUM_COST;
6769 }
6770
6771 case QUAD_ACC_REGS:
6772 case ACCG_REGS:
6773 switch (to)
6774 {
6775 default:
6776 break;
6777
6778 case QUAD_FPR_REGS:
6779 return MEDIUM_COST;
6780
6781 }
6782 }
6783
6784 return HIGH_COST;
6785 }
6786
6787 /* Worker function for TARGET_MEMORY_MOVE_COST. */
6788
6789 static int
6790 frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6791 reg_class_t rclass ATTRIBUTE_UNUSED,
6792 bool in ATTRIBUTE_UNUSED)
6793 {
6794 return 4;
6795 }
6796
6797 \f
6798 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6799 use ".picptr" to generate safe relocations for PIC code. We also
6800 need a fixup entry for aligned (non-debugging) code. */
6801
6802 static bool
6803 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6804 {
6805 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6806 {
6807 if (GET_CODE (value) == CONST
6808 || GET_CODE (value) == SYMBOL_REF
6809 || GET_CODE (value) == LABEL_REF)
6810 {
6811 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6812 && SYMBOL_REF_FUNCTION_P (value))
6813 {
6814 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6815 output_addr_const (asm_out_file, value);
6816 fputs (")\n", asm_out_file);
6817 return true;
6818 }
6819 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6820 && frv_function_symbol_referenced_p (value))
6821 return false;
6822 if (aligned_p && !TARGET_FDPIC)
6823 {
6824 static int label_num = 0;
6825 char buf[256];
6826 const char *p;
6827
6828 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
6829 p = (* targetm.strip_name_encoding) (buf);
6830
6831 fprintf (asm_out_file, "%s:\n", p);
6832 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
6833 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
6834 fprintf (asm_out_file, "\t.previous\n");
6835 }
6836 assemble_integer_with_op ("\t.picptr\t", value);
6837 return true;
6838 }
6839 if (!aligned_p)
6840 {
6841 /* We've set the unaligned SI op to NULL, so we always have to
6842 handle the unaligned case here. */
6843 assemble_integer_with_op ("\t.4byte\t", value);
6844 return true;
6845 }
6846 }
6847 return default_assemble_integer (value, size, aligned_p);
6848 }
6849
6850 /* Function to set up the backend function structure. */
6851
6852 static struct machine_function *
6853 frv_init_machine_status (void)
6854 {
6855 return ggc_cleared_alloc<machine_function> ();
6856 }
6857 \f
6858 /* Implement TARGET_SCHED_ISSUE_RATE. */
6859
6860 int
6861 frv_issue_rate (void)
6862 {
6863 if (!TARGET_PACK)
6864 return 1;
6865
6866 switch (frv_cpu_type)
6867 {
6868 default:
6869 case FRV_CPU_FR300:
6870 case FRV_CPU_SIMPLE:
6871 return 1;
6872
6873 case FRV_CPU_FR400:
6874 case FRV_CPU_FR405:
6875 case FRV_CPU_FR450:
6876 return 2;
6877
6878 case FRV_CPU_GENERIC:
6879 case FRV_CPU_FR500:
6880 case FRV_CPU_TOMCAT:
6881 return 4;
6882
6883 case FRV_CPU_FR550:
6884 return 8;
6885 }
6886 }
6887 \f
6888 /* Return the value of INSN's acc_group attribute. */
6889
6890 int
6891 frv_acc_group (rtx insn)
6892 {
6893 /* This distinction only applies to the FR550 packing constraints. */
6894 if (frv_cpu_type == FRV_CPU_FR550)
6895 {
6896 subrtx_iterator::array_type array;
6897 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
6898 if (REG_P (*iter))
6899 {
6900 unsigned int regno = REGNO (*iter);
6901 /* If REGNO refers to an accumulator, return ACC_GROUP_ODD if
6902 bit 2 of the register number is set and ACC_GROUP_EVEN if
6903 it is clear. */
6904 if (ACC_P (regno))
6905 return (regno - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6906 if (ACCG_P (regno))
6907 return (regno - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6908 }
6909 }
6910 return ACC_GROUP_NONE;
6911 }
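/* So, roughly: an instruction using acc0-acc3 or accg0-accg3 is placed
   in ACC_GROUP_EVEN, one using acc4-acc7 or accg4-accg7 in
   ACC_GROUP_ODD, and the pattern repeats every eight accumulators,
   since only bit 2 of (regno - ACC_FIRST) is tested.  */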
6912
6913 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
6914 INSN will try to claim first. Since this value depends only on the
6915 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
6916
6917 static unsigned int
6918 frv_insn_unit (rtx_insn *insn)
6919 {
6920 enum attr_type type;
6921
6922 type = get_attr_type (insn);
6923 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
6924 {
6925 /* We haven't seen this type of instruction before. */
6926 state_t state;
6927 unsigned int unit;
6928
6929 /* Issue the instruction on its own to see which unit it prefers. */
6930 state = alloca (state_size ());
6931 state_reset (state);
6932 state_transition (state, insn);
6933
6934 /* Find out which unit was taken. */
6935 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
6936 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
6937 break;
6938
6939 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
6940
6941 frv_type_to_unit[type] = unit;
6942 }
6943 return frv_type_to_unit[type];
6944 }
6945
6946 /* Return true if INSN issues to a branch unit. */
6947
6948 static bool
6949 frv_issues_to_branch_unit_p (rtx_insn *insn)
6950 {
6951 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
6952 }
6953 \f
6954 /* The instructions in the packet, partitioned into groups. */
6955 struct frv_packet_group {
6956 /* How many instructions in the packet belong to this group. */
6957 unsigned int num_insns;
6958
6959 /* A list of the instructions that belong to this group, in the order
6960 they appear in the rtl stream. */
6961 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
6962
6963 /* The contents of INSNS after they have been sorted into the correct
6964 assembly-language order. Element X issues to unit X. The list may
6965 contain extra nops. */
6966 rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)];
6967
6968 /* The member of frv_nops[] to use in sorted[]. */
6969 rtx_insn *nop;
6970 };
6971
6972 /* The current state of the packing pass, implemented by frv_pack_insns. */
6973 static struct {
6974 /* The state of the pipeline DFA. */
6975 state_t dfa_state;
6976
6977 /* Which hardware registers are set within the current packet,
6978 and the conditions under which they are set. */
6979 regstate_t regstate[FIRST_PSEUDO_REGISTER];
6980
6981 /* The memory locations that have been modified so far in this
6982 packet. MEM is the memref and COND is the regstate_t condition
6983 under which it is set. */
6984 struct {
6985 rtx mem;
6986 regstate_t cond;
6987 } mems[2];
6988
6989 /* The number of valid entries in MEMS. The value is larger than
6990 ARRAY_SIZE (mems) if there were too many mems to record. */
6991 unsigned int num_mems;
6992
6993 /* The maximum number of instructions that can be packed together. */
6994 unsigned int issue_rate;
6995
6996 /* The instructions in the packet, partitioned into groups. */
6997 struct frv_packet_group groups[NUM_GROUPS];
6998
6999 /* The instructions that make up the current packet. */
7000 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
7001 unsigned int num_insns;
7002 } frv_packet;
7003
7004 /* Return the regstate_t flags for the given COND_EXEC condition.
7005 Abort if the condition isn't in the right form. */
7006
7007 static int
7008 frv_cond_flags (rtx cond)
7009 {
7010 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7011 && GET_CODE (XEXP (cond, 0)) == REG
7012 && CR_P (REGNO (XEXP (cond, 0)))
7013 && XEXP (cond, 1) == const0_rtx);
7014 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7015 | (GET_CODE (cond) == NE
7016 ? REGSTATE_IF_TRUE
7017 : REGSTATE_IF_FALSE));
7018 }
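/* As a hedged example: if cr3 denotes the register CR_FIRST + 3, the
   condition (ne (reg:CC_CCR cr3) (const_int 0)) yields
   3 | REGSTATE_IF_TRUE, while (eq (reg:CC_CCR cr3) (const_int 0))
   yields 3 | REGSTATE_IF_FALSE.  */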
7019
7020
7021 /* Return true if something accessed under condition COND2 can
7022 conflict with something written under condition COND1. */
7023
7024 static bool
7025 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7026 {
7027 /* If either reference was unconditional, we have a conflict. */
7028 if ((cond1 & REGSTATE_IF_EITHER) == 0
7029 || (cond2 & REGSTATE_IF_EITHER) == 0)
7030 return true;
7031
7032 /* The references might conflict if they were controlled by
7033 different CRs. */
7034 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7035 return true;
7036
7037 /* They definitely conflict if they are controlled by the
7038 same condition. */
7039 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7040 return true;
7041
7042 return false;
7043 }
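/* For instance: a write guarded by (ne cr3 0) conflicts with an
   unconditional access, and with any access guarded by a different CR,
   but not with an access guarded by the opposite condition
   (eq cr3 0) on the same CR -- the two can never execute together.
   (cr3 here is just a placeholder name.)  */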
7044
7045
7046 /* Return true if an instruction with pattern PAT depends on an
7047 instruction in the current packet. COND describes the condition
7048 under which PAT might be set or used. */
7049
7050 static bool
7051 frv_registers_conflict_p_1 (rtx pat, regstate_t cond)
7052 {
7053 subrtx_var_iterator::array_type array;
7054 FOR_EACH_SUBRTX_VAR (iter, array, pat, NONCONST)
7055 {
7056 rtx x = *iter;
7057 if (GET_CODE (x) == REG)
7058 {
7059 unsigned int regno;
7060 FOR_EACH_REGNO (regno, x)
7061 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7062 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7063 return true;
7064 }
7065 else if (GET_CODE (x) == MEM)
7066 {
7067 /* If we ran out of memory slots, assume a conflict. */
7068 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7069 return true;
7070
7071 /* Check for output or true dependencies with earlier MEMs. */
7072 for (unsigned int i = 0; i < frv_packet.num_mems; i++)
7073 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7074 {
7075 if (true_dependence (frv_packet.mems[i].mem, VOIDmode, x))
7076 return true;
7077
7078 if (output_dependence (frv_packet.mems[i].mem, x))
7079 return true;
7080 }
7081 }
7082
7083 /* The return values of calls aren't significant: they describe
7084 the effect of the call as a whole, not of the insn itself. */
7085 else if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
7086 iter.substitute (SET_SRC (x));
7087 }
7088 return false;
7089 }
7090
7091
7092 /* Return true if something in X might depend on an instruction
7093 in the current packet. */
7094
7095 static bool
7096 frv_registers_conflict_p (rtx x)
7097 {
7098 regstate_t flags;
7099
7100 flags = 0;
7101 if (GET_CODE (x) == COND_EXEC)
7102 {
7103 if (frv_registers_conflict_p_1 (XEXP (x, 0), flags))
7104 return true;
7105
7106 flags |= frv_cond_flags (XEXP (x, 0));
7107 x = XEXP (x, 1);
7108 }
7109 return frv_registers_conflict_p_1 (x, flags);
7110 }
7111
7112
7113 /* A note_stores callback. DATA points to the regstate_t condition
7114 under which X is modified. Update FRV_PACKET accordingly. */
7115
7116 static void
7117 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7118 {
7119 unsigned int regno;
7120
7121 if (GET_CODE (x) == REG)
7122 FOR_EACH_REGNO (regno, x)
7123 frv_packet.regstate[regno] |= *(regstate_t *) data;
7124
7125 if (GET_CODE (x) == MEM)
7126 {
7127 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7128 {
7129 frv_packet.mems[frv_packet.num_mems].mem = x;
7130 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7131 }
7132 frv_packet.num_mems++;
7133 }
7134 }
7135
7136
7137 /* Update the register state information for an instruction whose
7138 body is X. */
7139
7140 static void
7141 frv_registers_update (rtx x)
7142 {
7143 regstate_t flags;
7144
7145 flags = REGSTATE_MODIFIED;
7146 if (GET_CODE (x) == COND_EXEC)
7147 {
7148 flags |= frv_cond_flags (XEXP (x, 0));
7149 x = XEXP (x, 1);
7150 }
7151 note_stores (x, frv_registers_update_1, &flags);
7152 }
7153
7154
7155 /* Initialize frv_packet for the start of a new packet. */
7156
7157 static void
7158 frv_start_packet (void)
7159 {
7160 enum frv_insn_group group;
7161
7162 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7163 frv_packet.num_mems = 0;
7164 frv_packet.num_insns = 0;
7165 for (group = GROUP_I; group < NUM_GROUPS;
7166 group = (enum frv_insn_group) (group + 1))
7167 frv_packet.groups[group].num_insns = 0;
7168 }
7169
7170
7171 /* Likewise for the start of a new basic block. */
7172
7173 static void
7174 frv_start_packet_block (void)
7175 {
7176 state_reset (frv_packet.dfa_state);
7177 frv_start_packet ();
7178 }
7179
7180
7181 /* Finish the current packet, if any, and start a new one. Call
7182 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7183
7184 static void
7185 frv_finish_packet (void (*handle_packet) (void))
7186 {
7187 if (frv_packet.num_insns > 0)
7188 {
7189 handle_packet ();
7190 state_transition (frv_packet.dfa_state, 0);
7191 frv_start_packet ();
7192 }
7193 }
7194
7195
7196 /* Return true if INSN can be added to the current packet. Update
7197 the DFA state on success. */
7198
7199 static bool
7200 frv_pack_insn_p (rtx_insn *insn)
7201 {
7202 /* See if the packet is already as long as it can be. */
7203 if (frv_packet.num_insns == frv_packet.issue_rate)
7204 return false;
7205
7206 /* If the scheduler thought that an instruction should start a packet,
7207 it's usually a good idea to believe it. It knows much more about
7208 the latencies than we do.
7209
7210 There are some exceptions though:
7211
7212 - Conditional instructions are scheduled on the assumption that
7213 they will be executed. This is usually a good thing, since it
7214 tends to avoid unnecessary stalls in the conditional code.
7215 But we want to pack conditional instructions as tightly as
7216 possible, in order to optimize the case where they aren't
7217 executed.
7218
7219 - The scheduler will always put branches on their own, even
7220 if there's no real dependency.
7221
7222 - There's no point putting a call in its own packet unless
7223 we have to. */
7224 if (frv_packet.num_insns > 0
7225 && NONJUMP_INSN_P (insn)
7226 && GET_MODE (insn) == TImode
7227 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7228 return false;
7229
7230 /* Check for register conflicts. Don't do this for setlo since any
7231 conflict will be with the partnering sethi, with which it can
7232 be packed. */
7233 if (get_attr_type (insn) != TYPE_SETLO)
7234 if (frv_registers_conflict_p (PATTERN (insn)))
7235 return false;
7236
7237 return state_transition (frv_packet.dfa_state, insn) < 0;
7238 }
7239
7240
7241 /* Add instruction INSN to the current packet. */
7242
7243 static void
7244 frv_add_insn_to_packet (rtx_insn *insn)
7245 {
7246 struct frv_packet_group *packet_group;
7247
7248 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7249 packet_group->insns[packet_group->num_insns++] = insn;
7250 frv_packet.insns[frv_packet.num_insns++] = insn;
7251
7252 frv_registers_update (PATTERN (insn));
7253 }
7254
7255
7256 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7257 packet ends in a branch or call, insert the nop before it, otherwise
7258 add to the end. */
7259
7260 static void
7261 frv_insert_nop_in_packet (rtx_insn *insn)
7262 {
7263 struct frv_packet_group *packet_group;
7264 rtx_insn *last;
7265
7266 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7267 last = frv_packet.insns[frv_packet.num_insns - 1];
7268 if (! NONJUMP_INSN_P (last))
7269 {
7270 insn = emit_insn_before (PATTERN (insn), last);
7271 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7272 frv_packet.insns[frv_packet.num_insns++] = last;
7273 }
7274 else
7275 {
7276 insn = emit_insn_after (PATTERN (insn), last);
7277 frv_packet.insns[frv_packet.num_insns++] = insn;
7278 }
7279 packet_group->insns[packet_group->num_insns++] = insn;
7280 }
7281
7282
7283 /* If packing is enabled, divide the instructions into packets and
7284 return true. Call HANDLE_PACKET for each complete packet. */
7285
7286 static bool
7287 frv_for_each_packet (void (*handle_packet) (void))
7288 {
7289 rtx_insn *insn, *next_insn;
7290
7291 frv_packet.issue_rate = frv_issue_rate ();
7292
7293 /* Early exit if we don't want to pack insns. */
7294 if (!optimize
7295 || !flag_schedule_insns_after_reload
7296 || !TARGET_VLIW_BRANCH
7297 || frv_packet.issue_rate == 1)
7298 return false;
7299
7300 /* Set up the initial packing state. */
7301 dfa_start ();
7302 frv_packet.dfa_state = alloca (state_size ());
7303
7304 frv_start_packet_block ();
7305 for (insn = get_insns (); insn != 0; insn = next_insn)
7306 {
7307 enum rtx_code code;
7308 bool eh_insn_p;
7309
7310 code = GET_CODE (insn);
7311 next_insn = NEXT_INSN (insn);
7312
7313 if (code == CODE_LABEL)
7314 {
7315 frv_finish_packet (handle_packet);
7316 frv_start_packet_block ();
7317 }
7318
7319 if (INSN_P (insn))
7320 switch (GET_CODE (PATTERN (insn)))
7321 {
7322 case USE:
7323 case CLOBBER:
7324 break;
7325
7326 default:
7327 /* Calls mustn't be packed on a TOMCAT. */
7328 if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
7329 frv_finish_packet (handle_packet);
7330
7331 /* Since the last instruction in a packet determines the EH
7332 region, any exception-throwing instruction must come at
7333 the end of the reordered packet. Insns that issue to a
7334 branch unit are bound to come last; for others it's
7335 too hard to predict. */
7336 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7337 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7338 frv_finish_packet (handle_packet);
7339
7340 /* Finish the current packet if we can't add INSN to it.
7341 Simulate cycles until INSN is ready to issue. */
7342 if (!frv_pack_insn_p (insn))
7343 {
7344 frv_finish_packet (handle_packet);
7345 while (!frv_pack_insn_p (insn))
7346 state_transition (frv_packet.dfa_state, 0);
7347 }
7348
7349 /* Add the instruction to the packet. */
7350 frv_add_insn_to_packet (insn);
7351
7352 /* Calls and jumps end a packet, as do insns that throw
7353 an exception. */
7354 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7355 frv_finish_packet (handle_packet);
7356 break;
7357 }
7358 }
7359 frv_finish_packet (handle_packet);
7360 dfa_finish ();
7361 return true;
7362 }
7363 \f
7364 /* Subroutine of frv_sort_insn_group. We are trying to sort
7365 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7366 language order. We have already picked a new position for
7367 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7368 These instructions will occupy elements [0, LOWER_SLOT) and
7369 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7370 the DFA state after issuing these instructions.
7371
7372 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7373 of the unused instructions. Return true if one such permutation gives
7374 a valid ordering, leaving the successful permutation in sorted[].
7375 Do not modify sorted[] until a valid permutation is found. */
7376
7377 static bool
7378 frv_sort_insn_group_1 (enum frv_insn_group group,
7379 unsigned int lower_slot, unsigned int upper_slot,
7380 unsigned int issued, unsigned int num_insns,
7381 state_t state)
7382 {
7383 struct frv_packet_group *packet_group;
7384 unsigned int i;
7385 state_t test_state;
7386 size_t dfa_size;
7387 rtx_insn *insn;
7388
7389 /* Early success if we've filled all the slots. */
7390 if (lower_slot == upper_slot)
7391 return true;
7392
7393 packet_group = &frv_packet.groups[group];
7394 dfa_size = state_size ();
7395 test_state = alloca (dfa_size);
7396
7397 /* Try issuing each unused instruction. */
7398 for (i = num_insns - 1; i + 1 != 0; i--)
7399 if (~issued & (1 << i))
7400 {
7401 insn = packet_group->sorted[i];
7402 memcpy (test_state, state, dfa_size);
7403 if (state_transition (test_state, insn) < 0
7404 && cpu_unit_reservation_p (test_state,
7405 NTH_UNIT (group, upper_slot - 1))
7406 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7407 issued | (1 << i), num_insns,
7408 test_state))
7409 {
7410 packet_group->sorted[upper_slot - 1] = insn;
7411 return true;
7412 }
7413 }
7414
7415 return false;
7416 }
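/* The recursion above is a straightforward backtracking search.  As a
   self-contained sketch of the same idea -- with the DFA state
   threading omitted and the acceptance test replaced by an assumed
   fits() predicate, so this is only an illustration:

       static bool
       fill (int *slot, unsigned int lo, unsigned int hi,
             unsigned int used, unsigned int n)
       {
         if (lo == hi)
           return true;
         for (unsigned int i = 0; i < n; i++)
           if (!(used & (1u << i))
               && fits (i, hi - 1)
               && fill (slot, lo, hi - 1, used | (1u << i), n))
             {
               slot[hi - 1] = i;
               return true;
             }
         return false;
       }

   Slots are filled from the top downwards and slot[] is only written
   once a complete, valid permutation is known, just as sorted[] is.  */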
7417
7418 /* Compare two instructions by their frv_insn_unit. */
7419
7420 static int
7421 frv_compare_insns (const void *first, const void *second)
7422 {
7423 rtx_insn * const *insn1 = (rtx_insn * const *) first;
7424 rtx_insn * const *insn2 = (rtx_insn * const *) second;
7425 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7426 }
7427
7428 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7429 and sort it into assembly language order. See frv.md for a description of
7430 the algorithm. */
7431
7432 static void
7433 frv_sort_insn_group (enum frv_insn_group group)
7434 {
7435 struct frv_packet_group *packet_group;
7436 unsigned int first, i, nop, max_unit, num_slots;
7437 state_t state, test_state;
7438 size_t dfa_size;
7439
7440 packet_group = &frv_packet.groups[group];
7441
7442 /* Assume no nop is needed. */
7443 packet_group->nop = 0;
7444
7445 if (packet_group->num_insns == 0)
7446 return;
7447
7448 /* Copy insns[] to sorted[]. */
7449 memcpy (packet_group->sorted, packet_group->insns,
7450 sizeof (rtx) * packet_group->num_insns);
7451
7452 /* Sort sorted[] by the unit that each insn tries to take first. */
7453 if (packet_group->num_insns > 1)
7454 qsort (packet_group->sorted, packet_group->num_insns,
7455 sizeof (rtx), frv_compare_insns);
7456
7457 /* That's always enough for branch and control insns. */
7458 if (group == GROUP_B || group == GROUP_C)
7459 return;
7460
7461 dfa_size = state_size ();
7462 state = alloca (dfa_size);
7463 test_state = alloca (dfa_size);
7464
7465 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7466 consecutively and such that the DFA takes unit X when sorted[X]
7467 is added. Set STATE to the new DFA state. */
7468 state_reset (test_state);
7469 for (first = 0; first < packet_group->num_insns; first++)
7470 {
7471 memcpy (state, test_state, dfa_size);
7472 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7473 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7474 break;
7475 }
7476
7477 /* If all the instructions issued in ascending order, we're done. */
7478 if (first == packet_group->num_insns)
7479 return;
7480
7481 /* Add nops to the end of sorted[] and try each permutation until
7482 we find one that works. */
7483 for (nop = 0; nop < frv_num_nops; nop++)
7484 {
7485 max_unit = frv_insn_unit (frv_nops[nop]);
7486 if (frv_unit_groups[max_unit] == group)
7487 {
7488 packet_group->nop = frv_nops[nop];
7489 num_slots = UNIT_NUMBER (max_unit) + 1;
7490 for (i = packet_group->num_insns; i < num_slots; i++)
7491 packet_group->sorted[i] = frv_nops[nop];
7492 if (frv_sort_insn_group_1 (group, first, num_slots,
7493 (1 << first) - 1, num_slots, state))
7494 return;
7495 }
7496 }
7497 gcc_unreachable ();
7498 }
7499 \f
7500 /* Sort the current packet into assembly-language order. Set packing
7501 flags as appropriate. */
7502
7503 static void
7504 frv_reorder_packet (void)
7505 {
7506 unsigned int cursor[NUM_GROUPS];
7507 rtx_insn *insns[ARRAY_SIZE (frv_unit_groups)];
7508 unsigned int unit, to, from;
7509 enum frv_insn_group group;
7510 struct frv_packet_group *packet_group;
7511
7512 /* First sort each group individually. */
7513 for (group = GROUP_I; group < NUM_GROUPS;
7514 group = (enum frv_insn_group) (group + 1))
7515 {
7516 cursor[group] = 0;
7517 frv_sort_insn_group (group);
7518 }
7519
7520 /* Go through the unit template and try to add an instruction from
7521 that unit's group. */
7522 to = 0;
7523 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7524 {
7525 group = frv_unit_groups[unit];
7526 packet_group = &frv_packet.groups[group];
7527 if (cursor[group] < packet_group->num_insns)
7528 {
7529 /* frv_reorg should have added nops for us. */
7530 gcc_assert (packet_group->sorted[cursor[group]]
7531 != packet_group->nop);
7532 insns[to++] = packet_group->sorted[cursor[group]++];
7533 }
7534 }
7535
7536 gcc_assert (to == frv_packet.num_insns);
7537
7538 /* Clear the last instruction's packing flag, thus marking the end of
7539 a packet. Reorder the other instructions relative to it. */
7540 CLEAR_PACKING_FLAG (insns[to - 1]);
7541 for (from = 0; from < to - 1; from++)
7542 {
7543 remove_insn (insns[from]);
7544 add_insn_before (insns[from], insns[to - 1], NULL);
7545 SET_PACKING_FLAG (insns[from]);
7546 }
7547 }
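
/* An illustrative note on the flags set above (a sketch, using only the
   nop mnemonics that appear later in this file): if a packet ends up
   holding three instructions I1, I2 and I3 in assembly order, the loop
   leaves the packing flag set on I1 and I2 and clear on I3.  That matches
   the "nop.p ... fnop" sequences built by frv_align_label below, where
   every instruction of a packet except the last carries the packing
   marker.  */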
7548
7549
7550 /* Divide instructions into packets. Reorder the contents of each
7551 packet so that they are in the correct assembly-language order.
7552
7553 Since this pass can change the raw meaning of the rtl stream, it must
7554 only be called at the last minute, just before the instructions are
7555 written out. */
7556
7557 static void
7558 frv_pack_insns (void)
7559 {
7560 if (frv_for_each_packet (frv_reorder_packet))
7561 frv_insn_packing_flag = 0;
7562 else
7563 frv_insn_packing_flag = -1;
7564 }
7565 \f
7566 /* See whether we need to add nops to group GROUP in order to
7567 make a valid packet. */
7568
7569 static void
7570 frv_fill_unused_units (enum frv_insn_group group)
7571 {
7572 unsigned int non_nops, nops, i;
7573 struct frv_packet_group *packet_group;
7574
7575 packet_group = &frv_packet.groups[group];
7576
7577 /* Sort the instructions into assembly-language order.
7578 Use nops to fill slots that are otherwise unused. */
7579 frv_sort_insn_group (group);
7580
7581 /* See how many nops are needed before the final useful instruction. */
7582 i = nops = 0;
7583 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7584 while (packet_group->sorted[i++] == packet_group->nop)
7585 nops++;
7586
7587 /* Insert that many nops into the instruction stream. */
7588 while (nops-- > 0)
7589 frv_insert_nop_in_packet (packet_group->nop);
7590 }
7591
7592 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7593
7594 static bool
7595 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7596 {
7597 if (io1->const_address != 0 && io2->const_address != 0)
7598 return io1->const_address == io2->const_address;
7599
7600 if (io1->var_address != 0 && io2->var_address != 0)
7601 return rtx_equal_p (io1->var_address, io2->var_address);
7602
7603 return false;
7604 }
7605
7606 /* Return true if operations IO1 and IO2 are guaranteed to complete
7607 in order. */
7608
7609 static bool
7610 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7611 {
7612 /* The order of writes is always preserved. */
7613 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7614 return true;
7615
7616 /* The order of reads isn't preserved. */
7617 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7618 return false;
7619
7620 /* One operation is a write and the other is (or could be) a read.
7621 The order is only guaranteed if the accesses are to the same
7622 doubleword. */
7623 return frv_same_doubleword_p (io1, io2);
7624 }
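
/* A worked example of the rules above, assuming constant addresses so that
   the doubleword test can be decided statically:

       write 0x1000 then write 0x2000   ordered (writes always are)
       read  0x1000 then read  0x1004   not ordered (reads never are)
       write 0x1000 then read  0x1004   ordered, same doubleword [0x1000,0x1008)
       write 0x1000 then read  0x1008   not ordered, different doublewords

   Only the combinations reported as ordered let the membar between the two
   accesses be deleted by the optimizations below.  */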
7625
7626 /* Generalize I/O operation X so that it covers both X and Y. */
7627
7628 static void
7629 frv_io_union (struct frv_io *x, const struct frv_io *y)
7630 {
7631 if (x->type != y->type)
7632 x->type = FRV_IO_UNKNOWN;
7633 if (!frv_same_doubleword_p (x, y))
7634 {
7635 x->const_address = 0;
7636 x->var_address = 0;
7637 }
7638 }
7639
7640 /* Fill IO with information about the load or store associated with
7641 membar instruction INSN. */
7642
7643 static void
7644 frv_extract_membar (struct frv_io *io, rtx_insn *insn)
7645 {
7646 extract_insn (insn);
7647 io->type = (enum frv_io_type) INTVAL (recog_data.operand[2]);
7648 io->const_address = INTVAL (recog_data.operand[1]);
7649 io->var_address = XEXP (recog_data.operand[0], 0);
7650 }
7651
7652 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7653 if X is a register and *DATA depends on X. */
7654
7655 static void
7656 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7657 {
7658 rtx *other = (rtx *) data;
7659
7660 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7661 *other = 0;
7662 }
7663
7664 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7665 Remove every modified register from the set. */
7666
7667 static void
7668 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7669 {
7670 HARD_REG_SET *set = (HARD_REG_SET *) data;
7671 unsigned int regno;
7672
7673 if (REG_P (x))
7674 FOR_EACH_REGNO (regno, x)
7675 CLEAR_HARD_REG_BIT (*set, regno);
7676 }
7677
7678 /* A note_uses callback that adds all registers in *X to hard register
7679 set *DATA. */
7680
7681 static void
7682 frv_io_handle_use (rtx *x, void *data)
7683 {
7684 find_all_hard_regs (*x, (HARD_REG_SET *) data);
7685 }
7686
7687 /* Go through block BB looking for membars to remove. There are two
7688 cases where intra-block analysis is enough:
7689
7690 - a membar is redundant if it occurs between two consecutive I/O
7691 operations and if those operations are guaranteed to complete
7692 in order.
7693
7694 - a membar for a __builtin_read is redundant if the result is
7695 used before the next I/O operation is issued.
7696
7697 If the last membar in the block could not be removed, and there
7698 are guaranteed to be no I/O operations between that membar and
7699 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7700 store null.
7701
7702 Describe the block's first I/O operation in *NEXT_IO. Describe
7703 an unknown operation if the block doesn't do any I/O. */
7704
7705 static void
7706 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7707 rtx_insn **last_membar)
7708 {
7709 HARD_REG_SET used_regs;
7710 rtx set;
7711 rtx_insn *insn, *next_membar;
7712 bool next_is_end_p;
7713
7714 /* NEXT_IO is the next I/O operation to be performed after the current
7715 instruction. It starts off as being an unknown operation. */
7716 memset (next_io, 0, sizeof (*next_io));
7717
7718 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7719 next_is_end_p = true;
7720
7721 /* If the current instruction is a __builtin_read or __builtin_write,
7722 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7723 is null if the membar has already been deleted.
7724
7725 Note that the initialization here should only be needed to
7726 suppress warnings. */
7727 next_membar = 0;
7728
7729 /* USED_REGS is the set of registers that are used before the
7730 next I/O instruction. */
7731 CLEAR_HARD_REG_SET (used_regs);
7732
7733 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7734 if (CALL_P (insn))
7735 {
7736 /* We can't predict what a call will do to volatile memory. */
7737 memset (next_io, 0, sizeof (struct frv_io));
7738 next_is_end_p = false;
7739 CLEAR_HARD_REG_SET (used_regs);
7740 }
7741 else if (INSN_P (insn))
7742 switch (recog_memoized (insn))
7743 {
7744 case CODE_FOR_optional_membar_qi:
7745 case CODE_FOR_optional_membar_hi:
7746 case CODE_FOR_optional_membar_si:
7747 case CODE_FOR_optional_membar_di:
7748 next_membar = insn;
7749 if (next_is_end_p)
7750 {
7751 /* Local information isn't enough to decide whether this
7752 membar is needed. Stash it away for later. */
7753 *last_membar = insn;
7754 frv_extract_membar (next_io, insn);
7755 next_is_end_p = false;
7756 }
7757 else
7758 {
7759 /* Check whether the I/O operation before INSN could be
7760 reordered with one described by NEXT_IO. If it can't,
7761 INSN will not be needed. */
7762 struct frv_io prev_io;
7763
7764 frv_extract_membar (&prev_io, insn);
7765 if (frv_io_fixed_order_p (&prev_io, next_io))
7766 {
7767 if (dump_file)
7768 fprintf (dump_file,
7769 ";; [Local] Removing membar %d since order"
7770 " of accesses is guaranteed\n",
7771 INSN_UID (next_membar));
7772
7773 insn = NEXT_INSN (insn);
7774 delete_insn (next_membar);
7775 next_membar = 0;
7776 }
7777 *next_io = prev_io;
7778 }
7779 break;
7780
7781 default:
7782 /* Invalidate NEXT_IO's address if it depends on something that
7783 is clobbered by INSN. */
7784 if (next_io->var_address)
7785 note_stores (PATTERN (insn), frv_io_check_address,
7786 &next_io->var_address);
7787
7788 /* If the next membar is associated with a __builtin_read,
7789 see if INSN reads from that address. If it does, and if
7790 the destination register is used before the next I/O access,
7791 there is no need for the membar. */
7792 set = PATTERN (insn);
7793 if (next_io->type == FRV_IO_READ
7794 && next_io->var_address != 0
7795 && next_membar != 0
7796 && GET_CODE (set) == SET
7797 && GET_CODE (SET_DEST (set)) == REG
7798 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7799 {
7800 rtx src;
7801
7802 src = SET_SRC (set);
7803 if (GET_CODE (src) == ZERO_EXTEND)
7804 src = XEXP (src, 0);
7805
7806 if (GET_CODE (src) == MEM
7807 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
7808 {
7809 if (dump_file)
7810 fprintf (dump_file,
7811 ";; [Local] Removing membar %d since the target"
7812 " of %d is used before the I/O operation\n",
7813 INSN_UID (next_membar), INSN_UID (insn));
7814
7815 if (next_membar == *last_membar)
7816 *last_membar = 0;
7817
7818 delete_insn (next_membar);
7819 next_membar = 0;
7820 }
7821 }
7822
7823 /* If INSN has volatile references, forget about any registers
7824 that are used after it. Otherwise forget about uses that
7825 are (or might be) defined by INSN. */
7826 if (volatile_refs_p (PATTERN (insn)))
7827 CLEAR_HARD_REG_SET (used_regs);
7828 else
7829 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
7830
7831 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
7832 break;
7833 }
7834 }
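
/* A sketch of the second intra-block case handled above, written in terms
   of the FRV I/O builtins (variable names are illustrative):

       x = __builtin_read32 (p);      load from *p, then a membar
       y = x + 1;                     result consumed before any other I/O
       __builtin_write32 (q, y);

   Walking backwards from the write, the register holding X is recorded in
   USED_REGS before the read's membar is reached, so when the load from *p
   is seen the "target is used before the I/O operation" test fires and the
   read's membar is deleted.  */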
7835
7836 /* See if MEMBAR, the last membar instruction in BB, can be removed.
7837 FIRST_IO[X] describes the first operation performed by basic block X. */
7838
7839 static void
7840 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
7841 rtx_insn *membar)
7842 {
7843 struct frv_io this_io, next_io;
7844 edge succ;
7845 edge_iterator ei;
7846
7847 /* We need to keep the membar if there is an edge to the exit block. */
7848 FOR_EACH_EDGE (succ, ei, bb->succs)
7849 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
7850 if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7851 return;
7852
7853 /* Work out the union of all successor blocks. */
7854 ei = ei_start (bb->succs);
7855 ei_cond (ei, &succ);
7856 /* next_io = first_io[bb->succ->dest->index]; */
7857 next_io = first_io[succ->dest->index];
7858 ei = ei_start (bb->succs);
7859 if (ei_cond (ei, &succ))
7860 {
7861 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
7862 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
7863 frv_io_union (&next_io, &first_io[succ->dest->index]);
7864 }
7865 else
7866 gcc_unreachable ();
7867
7868 frv_extract_membar (&this_io, membar);
7869 if (frv_io_fixed_order_p (&this_io, &next_io))
7870 {
7871 if (dump_file)
7872 fprintf (dump_file,
7873 ";; [Global] Removing membar %d since order of accesses"
7874 " is guaranteed\n", INSN_UID (membar));
7875
7876 delete_insn (membar);
7877 }
7878 }
7879
7880 /* Remove redundant membars from the current function. */
7881
7882 static void
7883 frv_optimize_membar (void)
7884 {
7885 basic_block bb;
7886 struct frv_io *first_io;
7887 rtx_insn **last_membar;
7888
7889 compute_bb_for_insn ();
7890 first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
7891 last_membar = XCNEWVEC (rtx_insn *, last_basic_block_for_fn (cfun));
7892
7893 FOR_EACH_BB_FN (bb, cfun)
7894 frv_optimize_membar_local (bb, &first_io[bb->index],
7895 &last_membar[bb->index]);
7896
7897 FOR_EACH_BB_FN (bb, cfun)
7898 if (last_membar[bb->index] != 0)
7899 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
7900
7901 free (first_io);
7902 free (last_membar);
7903 }
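
/* A small worked example of the whole pass (CFG shape assumed):

       A:  ... __builtin_write32 (p, v);        trailing membar
            |                        \
       B:  __builtin_write32 (q, w);   C:  __builtin_write32 (r, u);

   The local pass records A's trailing membar in LAST_MEMBAR[A] and the
   leading write of B and C in FIRST_IO[].  The global pass unions the
   successor descriptions; because a write followed by a write always
   completes in order, the membar at the end of A is deleted.  Had either
   successor been the exit block, the membar would have been kept.  */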
7904 \f
7905 /* Used by frv_reorg to keep track of the current packet's address. */
7906 static unsigned int frv_packet_address;
7907
7908 /* If the current packet falls through to a label, try to pad the packet
7909 with nops in order to fit the label's alignment requirements. */
7910
7911 static void
7912 frv_align_label (void)
7913 {
7914 unsigned int alignment, target, nop;
7915 rtx_insn *x, *last, *barrier, *label;
7916
7917 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
7918 maximum alignment of that packet, LABEL to the last label between
7919 the packets, and BARRIER to the last barrier. */
7920 last = frv_packet.insns[frv_packet.num_insns - 1];
7921 label = barrier = 0;
7922 alignment = 4;
7923 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
7924 {
7925 if (LABEL_P (x))
7926 {
7927 unsigned int subalign = 1 << label_to_alignment (x).levels[0].log;
7928 alignment = MAX (alignment, subalign);
7929 label = x;
7930 }
7931 if (BARRIER_P (x))
7932 barrier = x;
7933 }
7934
7935 /* If -malign-labels, and the packet falls through to an unaligned
7936 label, try introducing a nop to align that label to 8 bytes. */
7937 if (TARGET_ALIGN_LABELS
7938 && label != 0
7939 && barrier == 0
7940 && frv_packet.num_insns < frv_packet.issue_rate)
7941 alignment = MAX (alignment, 8);
7942
7943 /* Advance the address to the end of the current packet. */
7944 frv_packet_address += frv_packet.num_insns * 4;
7945
7946 /* Work out the target address, after alignment. */
7947 target = (frv_packet_address + alignment - 1) & -alignment;
7948
7949 /* If the packet falls through to the label, try to find an efficient
7950 padding sequence. */
7951 if (barrier == 0)
7952 {
7953 /* First try adding nops to the current packet. */
7954 for (nop = 0; nop < frv_num_nops; nop++)
7955 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
7956 {
7957 frv_insert_nop_in_packet (frv_nops[nop]);
7958 frv_packet_address += 4;
7959 }
7960
7961 /* If we still haven't reached the target, add some new packets that
7962 contain only nops. If there are two types of nop, insert an
7963 alternating sequence of frv_nops[0] and frv_nops[1], which will
7964 lead to packets like:
7965
7966 nop.p
7967 mnop.p/fnop.p
7968 nop.p
7969 mnop/fnop
7970
7971 etc. Just emit frv_nops[0] if that's the only nop we have. */
7972 last = frv_packet.insns[frv_packet.num_insns - 1];
7973 nop = 0;
7974 while (frv_packet_address < target)
7975 {
7976 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
7977 frv_packet_address += 4;
7978 if (frv_num_nops > 1)
7979 nop ^= 1;
7980 }
7981 }
7982
7983 frv_packet_address = target;
7984 }
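
/* A numeric sketch of the padding decision above (addresses assumed):
   if the packet just processed ends at FRV_PACKET_ADDRESS == 0x24 and the
   fall-through label needs 8-byte alignment, then

       target = (0x24 + 8 - 1) & -8 == 0x28

   so a single 4-byte nop suffices; it is packed into the current packet if
   a slot is still free, and otherwise emitted as a separate nop-only
   packet.  */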
7985
7986 /* Subroutine of frv_reorg, called after each packet has been constructed
7987 in frv_packet. */
7988
7989 static void
7990 frv_reorg_packet (void)
7991 {
7992 frv_fill_unused_units (GROUP_I);
7993 frv_fill_unused_units (GROUP_FM);
7994 frv_align_label ();
7995 }
7996
7997 /* Add an instruction with pattern NOP to frv_nops[]. */
7998
7999 static void
8000 frv_register_nop (rtx nop)
8001 {
8002 rtx_insn *nop_insn = make_insn_raw (nop);
8003 SET_NEXT_INSN (nop_insn) = 0;
8004 SET_PREV_INSN (nop_insn) = 0;
8005 frv_nops[frv_num_nops++] = nop_insn;
8006 }
8007
8008 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
8009 into packets and check whether we need to insert nops in order to
8010 fulfill the processor's issue requirements. Also, if the user has
8011 requested a certain alignment for a label, try to meet that alignment
8012 by inserting nops in the previous packet. */
8013
8014 static void
8015 frv_reorg (void)
8016 {
8017 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8018 frv_optimize_membar ();
8019
8020 frv_num_nops = 0;
8021 frv_register_nop (gen_nop ());
8022 if (TARGET_MEDIA)
8023 frv_register_nop (gen_mnop ());
8024 if (TARGET_HARD_FLOAT)
8025 frv_register_nop (gen_fnop ());
8026
8027 /* Estimate the length of each branch. Although this may change after
8028 we've inserted nops, it will only do so in big functions. */
8029 shorten_branches (get_insns ());
8030
8031 frv_packet_address = 0;
8032 frv_for_each_packet (frv_reorg_packet);
8033 }
8034 \f
8035 #define def_builtin(name, type, code) \
8036 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
8037
8038 struct builtin_description
8039 {
8040 enum insn_code icode;
8041 const char *name;
8042 enum frv_builtins code;
8043 enum rtx_code comparison;
8044 unsigned int flag;
8045 };
8046
8047 /* Media intrinsics that take a single, constant argument. */
8048
8049 static struct builtin_description bdesc_set[] =
8050 {
8051 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, UNKNOWN, 0 }
8052 };
8053
8054 /* Media intrinsics that take just one argument. */
8055
8056 static struct builtin_description bdesc_1arg[] =
8057 {
8058 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, UNKNOWN, 0 },
8059 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, UNKNOWN, 0 },
8060 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, UNKNOWN, 0 },
8061 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, UNKNOWN, 0},
8062 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, UNKNOWN, 0 },
8063 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, UNKNOWN, 0 }
8064 };
8065
8066 /* Media intrinsics that take two arguments. */
8067
8068 static struct builtin_description bdesc_2arg[] =
8069 {
8070 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, UNKNOWN, 0},
8071 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, UNKNOWN, 0},
8072 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, UNKNOWN, 0},
8073 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, UNKNOWN, 0},
8074 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, UNKNOWN, 0},
8075 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, UNKNOWN, 0},
8076 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, UNKNOWN, 0},
8077 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, UNKNOWN, 0},
8078 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, UNKNOWN, 0},
8079 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, UNKNOWN, 0},
8080 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, UNKNOWN, 0},
8081 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, UNKNOWN, 0},
8082 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, UNKNOWN, 0},
8083 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, UNKNOWN, 0},
8084 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, UNKNOWN, 0},
8085 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, UNKNOWN, 0},
8086 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, UNKNOWN, 0},
8087 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, UNKNOWN, 0},
8088 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, UNKNOWN, 0},
8089 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, UNKNOWN, 0},
8090 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, UNKNOWN, 0},
8091 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, UNKNOWN, 0},
8092 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, UNKNOWN, 0},
8093 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, UNKNOWN, 0},
8094 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, UNKNOWN, 0},
8095 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, UNKNOWN, 0},
8096 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, UNKNOWN, 0}
8097 };
8098
8099 /* Integer intrinsics that take two arguments and have no return value. */
8100
8101 static struct builtin_description bdesc_int_void2arg[] =
8102 {
8103 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, UNKNOWN, 0},
8104 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, UNKNOWN, 0},
8105 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, UNKNOWN, 0}
8106 };
8107
8108 static struct builtin_description bdesc_prefetches[] =
8109 {
8110 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, UNKNOWN,
8111 0},
8112 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, UNKNOWN, 0}
8113 };
8114
8115 /* Media intrinsics that take two arguments, the first being an ACC number. */
8116
8117 static struct builtin_description bdesc_cut[] =
8118 {
8119 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, UNKNOWN, 0},
8120 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, UNKNOWN, 0},
8121 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, UNKNOWN, 0}
8122 };
8123
8124 /* Two-argument media intrinsics with an immediate second argument. */
8125
8126 static struct builtin_description bdesc_2argimm[] =
8127 {
8128 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, UNKNOWN, 0},
8129 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, UNKNOWN, 0},
8130 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, UNKNOWN, 0},
8131 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, UNKNOWN, 0},
8132 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, UNKNOWN, 0},
8133 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, UNKNOWN, 0},
8134 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, UNKNOWN, 0},
8135 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, UNKNOWN, 0},
8136 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, UNKNOWN, 0},
8137 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, UNKNOWN, 0},
8138 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, UNKNOWN, 0},
8139 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, UNKNOWN, 0},
8140 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, UNKNOWN, 0},
8141 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, UNKNOWN, 0},
8142 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, UNKNOWN, 0},
8143 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, UNKNOWN, 0},
8144 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, UNKNOWN, 0}
8145 };
8146
8147 /* Media intrinsics that take two arguments and return void, the first argument
8148 being a pointer to 4 words in memory. */
8149
8150 static struct builtin_description bdesc_void2arg[] =
8151 {
8152 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, UNKNOWN, 0},
8153 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, UNKNOWN, 0},
8154 };
8155
8156 /* Media intrinsics that take three arguments, the first being a const_int that
8157 denotes an accumulator, and that return void. */
8158
8159 static struct builtin_description bdesc_void3arg[] =
8160 {
8161 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, UNKNOWN, 0},
8162 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, UNKNOWN, 0},
8163 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, UNKNOWN, 0},
8164 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, UNKNOWN, 0},
8165 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, UNKNOWN, 0},
8166 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, UNKNOWN, 0},
8167 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, UNKNOWN, 0},
8168 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, UNKNOWN, 0},
8169 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, UNKNOWN, 0},
8170 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, UNKNOWN, 0},
8171 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, UNKNOWN, 0},
8172 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, UNKNOWN, 0},
8173 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, UNKNOWN, 0},
8174 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, UNKNOWN, 0},
8175 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, UNKNOWN, 0},
8176 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, UNKNOWN, 0},
8177 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, UNKNOWN, 0},
8178 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, UNKNOWN, 0},
8179 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, UNKNOWN, 0},
8180 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, UNKNOWN, 0},
8181 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, UNKNOWN, 0},
8182 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, UNKNOWN, 0},
8183 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, UNKNOWN, 0},
8184 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, UNKNOWN, 0},
8185 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, UNKNOWN, 0}
8186 };
8187
8188 /* Media intrinsics that take two accumulator numbers as argument and
8189 return void. */
8190
8191 static struct builtin_description bdesc_voidacc[] =
8192 {
8193 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, UNKNOWN, 0},
8194 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, UNKNOWN, 0},
8195 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, UNKNOWN, 0},
8196 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, UNKNOWN, 0},
8197 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, UNKNOWN, 0},
8198 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, UNKNOWN, 0}
8199 };
8200
8201 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8202 a normal move and the ICODE is for the membar. */
8203
8204 static struct builtin_description bdesc_loads[] =
8205 {
8206 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8207 FRV_BUILTIN_READ8, UNKNOWN, 0},
8208 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8209 FRV_BUILTIN_READ16, UNKNOWN, 0},
8210 { CODE_FOR_optional_membar_si, "__builtin_read32",
8211 FRV_BUILTIN_READ32, UNKNOWN, 0},
8212 { CODE_FOR_optional_membar_di, "__builtin_read64",
8213 FRV_BUILTIN_READ64, UNKNOWN, 0}
8214 };
8215
8216 /* Likewise stores. */
8217
8218 static struct builtin_description bdesc_stores[] =
8219 {
8220 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8221 FRV_BUILTIN_WRITE8, UNKNOWN, 0},
8222 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8223 FRV_BUILTIN_WRITE16, UNKNOWN, 0},
8224 { CODE_FOR_optional_membar_si, "__builtin_write32",
8225 FRV_BUILTIN_WRITE32, UNKNOWN, 0},
8226 { CODE_FOR_optional_membar_di, "__builtin_write64",
8227 FRV_BUILTIN_WRITE64, UNKNOWN, 0},
8228 };
8229
8230 /* Initialize media builtins. */
8231
8232 static void
8233 frv_init_builtins (void)
8234 {
8235 tree accumulator = integer_type_node;
8236 tree integer = integer_type_node;
8237 tree voidt = void_type_node;
8238 tree uhalf = short_unsigned_type_node;
8239 tree sword1 = long_integer_type_node;
8240 tree uword1 = long_unsigned_type_node;
8241 tree sword2 = long_long_integer_type_node;
8242 tree uword2 = long_long_unsigned_type_node;
8243 tree uword4 = build_pointer_type (uword1);
8244 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8245 tree ubyte = unsigned_char_type_node;
8246 tree iacc = integer_type_node;
8247
8248 #define UNARY(RET, T1) \
8249 build_function_type_list (RET, T1, NULL_TREE)
8250
8251 #define BINARY(RET, T1, T2) \
8252 build_function_type_list (RET, T1, T2, NULL_TREE)
8253
8254 #define TRINARY(RET, T1, T2, T3) \
8255 build_function_type_list (RET, T1, T2, T3, NULL_TREE)
8256
8257 #define QUAD(RET, T1, T2, T3, T4) \
8258 build_function_type_list (RET, T1, T2, T3, T4, NULL_TREE)
8259
8260 tree void_ftype_void = build_function_type_list (voidt, NULL_TREE);
8261
8262 tree void_ftype_acc = UNARY (voidt, accumulator);
8263 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8264 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8265 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8266 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8267 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8268 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8269 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8270 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8271
8272 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8273 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8274 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8275 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8276 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8277 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8278 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8279 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8280 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8281 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8282 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8283
8284 tree sw1_ftype_int = UNARY (sword1, integer);
8285 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8286 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8287
8288 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8289 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8290 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8291 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8292 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8293 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8294
8295 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8296 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8297 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8298 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8299 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8300 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8301 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8302 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8303 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8304 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8305 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8306 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8307 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8308 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8309 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8310 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8311 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8312
8313 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8314 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8315 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8316 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8317 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8318 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8319 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8320 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8321 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8322 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8323 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8324 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8325 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8326 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8327 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8328 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8329 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8330 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8331 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8332 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8333 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8334 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8335 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8336 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8337 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8338 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8339 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8340 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8341 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8342 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8343 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8344 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8345 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8346 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8347 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8348 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8349 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8350 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8351 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8352 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8353 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8354 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8355 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8356 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8357 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8358 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8359 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8360 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8361 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8362 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8363 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8364 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8365 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8366 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8367 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8368 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8369 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8370 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8371 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8372 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8373 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8374 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8375 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8376 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8377 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8378 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8379 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8380 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8381 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8382 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8383 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8384 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8385 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8386 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8387 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8388 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8389 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8390 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8391 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8392 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8393 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8394 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8395 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8396 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8397 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8398 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8399 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8400 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8401 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8402 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8403 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8404 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8405 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8406 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8407 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8408 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8409 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8410 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8411 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8412 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8413 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8414 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8415 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8416 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8417 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8418 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8419 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8420 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8421
8422 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8423 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8424 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8425 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8426
8427 #undef UNARY
8428 #undef BINARY
8429 #undef TRINARY
8430 #undef QUAD
8431 }
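
/* A sketch of how the declarations above appear to FRV C code compiled
   with -mmedia (variable names are illustrative; the prototypes follow the
   _ftype_ trees used above):

       unsigned long a, b, c;
       long x, y;

       c = __MAND (a, b);        unsigned long (unsigned long, unsigned long)
       x = __MADDHSS (x, y);     long (long, long)
       __MMACHS (2, x, y);       void (int, long, long), acc number 2

   The accumulator argument of __MMACHS must be a compile-time constant;
   frv_int_to_acc below converts it into a hard ACC register reference.  */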
8432
8433 /* Set the names for various arithmetic operations according to the
8434 FRV ABI. */
8435 static void
8436 frv_init_libfuncs (void)
8437 {
8438 set_optab_libfunc (smod_optab, SImode, "__modi");
8439 set_optab_libfunc (umod_optab, SImode, "__umodi");
8440
8441 set_optab_libfunc (add_optab, DImode, "__addll");
8442 set_optab_libfunc (sub_optab, DImode, "__subll");
8443 set_optab_libfunc (smul_optab, DImode, "__mulll");
8444 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8445 set_optab_libfunc (smod_optab, DImode, "__modll");
8446 set_optab_libfunc (umod_optab, DImode, "__umodll");
8447 set_optab_libfunc (and_optab, DImode, "__andll");
8448 set_optab_libfunc (ior_optab, DImode, "__orll");
8449 set_optab_libfunc (xor_optab, DImode, "__xorll");
8450 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8451
8452 set_optab_libfunc (add_optab, SFmode, "__addf");
8453 set_optab_libfunc (sub_optab, SFmode, "__subf");
8454 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8455 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8456
8457 set_optab_libfunc (add_optab, DFmode, "__addd");
8458 set_optab_libfunc (sub_optab, DFmode, "__subd");
8459 set_optab_libfunc (smul_optab, DFmode, "__muld");
8460 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8461
8462 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8463 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8464
8465 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8466 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8467 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8468 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8469
8470 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8471 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8472 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8473 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8474
8475 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8476 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8477 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8478 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8479 }
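
/* One illustrative consequence of the renaming above: a 64-bit signed
   division such as

       long long f (long long a, long long b) { return a / b; }

   becomes a call to __divll rather than libgcc's default __divdi3, and a
   float-to-signed-int conversion calls __ftoi instead of __fixsfsi, as
   required by the FRV ABI's runtime naming.  */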
8480
8481 /* Convert an integer constant to an accumulator register. ICODE is the
8482 code of the target instruction, OPNUM is the number of the
8483 accumulator operand and OPVAL is the constant integer. Try both
8484 ACC and ACCG registers; only report an error if neither fit the
8485 instruction. */
8486
8487 static rtx
8488 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8489 {
8490 rtx reg;
8491 int i;
8492
8493 /* ACCs and ACCGs are implicit global registers if media intrinsics
8494 are being used. We set up this lazily to avoid creating lots of
8495 unnecessary call_insn rtl in non-media code. */
8496 for (i = 0; i <= ACC_MASK; i++)
8497 if ((i & ACC_MASK) == i)
8498 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8499
8500 if (GET_CODE (opval) != CONST_INT)
8501 {
8502 error ("accumulator is not a constant integer");
8503 return NULL_RTX;
8504 }
8505 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8506 {
8507 error ("accumulator number is out of bounds");
8508 return NULL_RTX;
8509 }
8510
8511 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8512 ACC_FIRST + INTVAL (opval));
8513 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8514 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8515
8516 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8517 {
8518 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8519 return NULL_RTX;
8520 }
8521 return reg;
8522 }
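
/* A short sketch of the mapping above: for __MCLRACC (3), OPVAL is
   (const_int 3) and the function first builds a reference to hard register
   ACC_FIRST + 3.  If the pattern's operand predicate rejects an ACC
   register (presumably the case for guard-register operands such as those
   of __MRDACCG / __MWTACCG), the same number is retried as ACCG_FIRST + 3
   before an error is reported.  */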
8523
8524 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8525 should have. */
8526
8527 static machine_mode
8528 frv_matching_accg_mode (machine_mode mode)
8529 {
8530 switch (mode)
8531 {
8532 case E_V4SImode:
8533 return V4QImode;
8534
8535 case E_DImode:
8536 return HImode;
8537
8538 case E_SImode:
8539 return QImode;
8540
8541 default:
8542 gcc_unreachable ();
8543 }
8544 }
8545
8546 /* Given that a __builtin_read or __builtin_write function is accessing
8547 address ADDRESS, return the value that should be used as operand 1
8548 of the membar. */
8549
8550 static rtx
8551 frv_io_address_cookie (rtx address)
8552 {
8553 return (GET_CODE (address) == CONST_INT
8554 ? GEN_INT (INTVAL (address) / 8 * 8)
8555 : const0_rtx);
8556 }
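
/* Worked example: for a constant access such as
   __builtin_read32 ((void *) 0x100c) the cookie is

       GEN_INT (0x100c / 8 * 8) == GEN_INT (0x1008)

   i.e. the start of the containing doubleword, which frv_extract_membar
   later reads back as IO->const_address.  A non-constant address simply
   yields const0_rtx, meaning "doubleword unknown".  */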
8557
8558 /* Return the accumulator guard that should be paired with accumulator
8559 register ACC. The mode of the returned register is in the same
8560 class as ACC, but is four times smaller. */
8561
8562 rtx
8563 frv_matching_accg_for_acc (rtx acc)
8564 {
8565 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8566 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8567 }
8568
8569 /* Read the argument numbered INDEX from the call EXP.
8570 Return its value as an rtx. */
8571
8572 static rtx
8573 frv_read_argument (tree exp, unsigned int index)
8574 {
8575 return expand_normal (CALL_EXPR_ARG (exp, index));
8576 }
8577
8578 /* Like frv_read_argument, but interpret the argument as the number
8579 of an IACC register and return a (reg:MODE ...) rtx for it. */
8580
8581 static rtx
8582 frv_read_iacc_argument (machine_mode mode, tree call,
8583 unsigned int index)
8584 {
8585 int i, regno;
8586 rtx op;
8587
8588 op = frv_read_argument (call, index);
8589 if (GET_CODE (op) != CONST_INT
8590 || INTVAL (op) < 0
8591 || INTVAL (op) > IACC_LAST - IACC_FIRST
8592 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8593 {
8594 error ("invalid IACC argument");
8595 op = const0_rtx;
8596 }
8597
8598 /* IACCs are implicit global registers. We set up this lazily to
8599 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8600 being used. */
8601 regno = INTVAL (op) + IACC_FIRST;
8602 for (i = 0; i < hard_regno_nregs (regno, mode); i++)
8603 global_regs[regno + i] = 1;
8604
8605 return gen_rtx_REG (mode, regno);
8606 }
8607
8608 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8609 The instruction should require a constant operand of some sort. The
8610 function prints an error if OPVAL is not valid. */
8611
8612 static int
8613 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8614 {
8615 if (GET_CODE (opval) != CONST_INT)
8616 {
8617 error ("%qs expects a constant argument", insn_data[icode].name);
8618 return FALSE;
8619 }
8620 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8621 {
8622 error ("constant argument out of range for %qs", insn_data[icode].name);
8623 return FALSE;
8624 }
8625 return TRUE;
8626 }
8627
8628 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8629 if it's not null, has the right mode, and satisfies operand 0's
8630 predicate. */
8631
8632 static rtx
8633 frv_legitimize_target (enum insn_code icode, rtx target)
8634 {
8635 machine_mode mode = insn_data[icode].operand[0].mode;
8636
8637 if (! target
8638 || GET_MODE (target) != mode
8639 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8640 return gen_reg_rtx (mode);
8641 else
8642 return target;
8643 }
8644
8645 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8646 check whether ARG satisfies the operand's constraints. If it doesn't,
8647 copy ARG to a temporary register and return that. Otherwise return ARG
8648 itself. */
8649
8650 static rtx
8651 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8652 {
8653 machine_mode mode = insn_data[icode].operand[opnum].mode;
8654
8655 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8656 return arg;
8657 else
8658 return copy_to_mode_reg (mode, arg);
8659 }
8660
8661 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8662
8663 static rtx
8664 frv_volatile_memref (machine_mode mode, rtx arg)
8665 {
8666 rtx mem;
8667
8668 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8669 MEM_VOLATILE_P (mem) = 1;
8670 return mem;
8671 }
8672
8673 /* Expand builtins that take a single, constant argument. At the moment,
8674 only MHDSETS falls into this category. */
8675
8676 static rtx
8677 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8678 {
8679 rtx pat;
8680 rtx op0 = frv_read_argument (call, 0);
8681
8682 if (! frv_check_constant_argument (icode, 1, op0))
8683 return NULL_RTX;
8684
8685 target = frv_legitimize_target (icode, target);
8686 pat = GEN_FCN (icode) (target, op0);
8687 if (! pat)
8688 return NULL_RTX;
8689
8690 emit_insn (pat);
8691 return target;
8692 }
8693
8694 /* Expand builtins that take one operand. */
8695
8696 static rtx
8697 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8698 {
8699 rtx pat;
8700 rtx op0 = frv_read_argument (call, 0);
8701
8702 target = frv_legitimize_target (icode, target);
8703 op0 = frv_legitimize_argument (icode, 1, op0);
8704 pat = GEN_FCN (icode) (target, op0);
8705 if (! pat)
8706 return NULL_RTX;
8707
8708 emit_insn (pat);
8709 return target;
8710 }
8711
8712 /* Expand builtins that take two operands. */
8713
8714 static rtx
8715 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8716 {
8717 rtx pat;
8718 rtx op0 = frv_read_argument (call, 0);
8719 rtx op1 = frv_read_argument (call, 1);
8720
8721 target = frv_legitimize_target (icode, target);
8722 op0 = frv_legitimize_argument (icode, 1, op0);
8723 op1 = frv_legitimize_argument (icode, 2, op1);
8724 pat = GEN_FCN (icode) (target, op0, op1);
8725 if (! pat)
8726 return NULL_RTX;
8727
8728 emit_insn (pat);
8729 return target;
8730 }
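
/* A sketch of the expansion flow shared by the one- and two-operand
   expanders above, using __MAND (a, b) as the example: the call arguments
   are expanded to rtl, anything that fails the mand pattern's operand
   predicate is copied into a fresh register, a result register of the
   pattern's mode is chosen (reusing TARGET when it fits), and the end
   result is equivalent to

       emit_insn (gen_mand (target, op0, op1));

   via GEN_FCN (CODE_FOR_mand).  */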
8731
8732 /* Expand cut-style builtins, which take two explicit operands plus an
8733 implicit ACCG operand. */
8734
8735 static rtx
8736 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8737 {
8738 rtx pat;
8739 rtx op0 = frv_read_argument (call, 0);
8740 rtx op1 = frv_read_argument (call, 1);
8741 rtx op2;
8742
8743 target = frv_legitimize_target (icode, target);
8744 op0 = frv_int_to_acc (icode, 1, op0);
8745 if (! op0)
8746 return NULL_RTX;
8747
8748 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8749 {
8750 if (! frv_check_constant_argument (icode, 2, op1))
8751 return NULL_RTX;
8752 }
8753 else
8754 op1 = frv_legitimize_argument (icode, 2, op1);
8755
8756 op2 = frv_matching_accg_for_acc (op0);
8757 pat = GEN_FCN (icode) (target, op0, op1, op2);
8758 if (! pat)
8759 return NULL_RTX;
8760
8761 emit_insn (pat);
8762 return target;
8763 }
8764
8765 /* Expand builtins that take two operands, the second being an immediate. */
8766
8767 static rtx
8768 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8769 {
8770 rtx pat;
8771 rtx op0 = frv_read_argument (call, 0);
8772 rtx op1 = frv_read_argument (call, 1);
8773
8774 if (! frv_check_constant_argument (icode, 2, op1))
8775 return NULL_RTX;
8776
8777 target = frv_legitimize_target (icode, target);
8778 op0 = frv_legitimize_argument (icode, 1, op0);
8779 pat = GEN_FCN (icode) (target, op0, op1);
8780 if (! pat)
8781 return NULL_RTX;
8782
8783 emit_insn (pat);
8784 return target;
8785 }
8786
8787 /* Expand builtins that take two operands, the first operand being a pointer
8788 to ints, and that return void. */
8789
8790 static rtx
8791 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8792 {
8793 rtx pat;
8794 rtx op0 = frv_read_argument (call, 0);
8795 rtx op1 = frv_read_argument (call, 1);
8796 machine_mode mode0 = insn_data[icode].operand[0].mode;
8797 rtx addr;
8798
8799 if (GET_CODE (op0) != MEM)
8800 {
8801 rtx reg = op0;
8802
8803 if (! offsettable_address_p (0, mode0, op0))
8804 {
8805 reg = gen_reg_rtx (Pmode);
8806 emit_insn (gen_rtx_SET (reg, op0));
8807 }
8808
8809 op0 = gen_rtx_MEM (SImode, reg);
8810 }
8811
8812 addr = XEXP (op0, 0);
8813 if (! offsettable_address_p (0, mode0, addr))
8814 addr = copy_to_mode_reg (Pmode, op0);
8815
8816 op0 = change_address (op0, V4SImode, addr);
8817 op1 = frv_legitimize_argument (icode, 1, op1);
8818 pat = GEN_FCN (icode) (op0, op1);
8819 if (! pat)
8820 return 0;
8821
8822 emit_insn (pat);
8823 return 0;
8824 }
8825
8826 /* Expand builtins that take two long operands and return void. */
8827
8828 static rtx
8829 frv_expand_int_void2arg (enum insn_code icode, tree call)
8830 {
8831 rtx pat;
8832 rtx op0 = frv_read_argument (call, 0);
8833 rtx op1 = frv_read_argument (call, 1);
8834
8835 op0 = frv_legitimize_argument (icode, 1, op0);
8836 op1 = frv_legitimize_argument (icode, 1, op1);
8837 pat = GEN_FCN (icode) (op0, op1);
8838 if (! pat)
8839 return NULL_RTX;
8840
8841 emit_insn (pat);
8842 return NULL_RTX;
8843 }
8844
8845 /* Expand prefetch builtins. These take a single address as argument. */
8846
8847 static rtx
8848 frv_expand_prefetches (enum insn_code icode, tree call)
8849 {
8850 rtx pat;
8851 rtx op0 = frv_read_argument (call, 0);
8852
8853 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
8854 if (! pat)
8855 return 0;
8856
8857 emit_insn (pat);
8858 return 0;
8859 }
8860
8861 /* Expand builtins that take three operands and return void. The first
8862 argument must be a constant that describes a pair or quad of accumulators. A
8863 fourth argument is created that is the accumulator guard register that
8864 corresponds to the accumulator. */
8865
8866 static rtx
8867 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
8868 {
8869 rtx pat;
8870 rtx op0 = frv_read_argument (call, 0);
8871 rtx op1 = frv_read_argument (call, 1);
8872 rtx op2 = frv_read_argument (call, 2);
8873 rtx op3;
8874
8875 op0 = frv_int_to_acc (icode, 0, op0);
8876 if (! op0)
8877 return NULL_RTX;
8878
8879 op1 = frv_legitimize_argument (icode, 1, op1);
8880 op2 = frv_legitimize_argument (icode, 2, op2);
8881 op3 = frv_matching_accg_for_acc (op0);
8882 pat = GEN_FCN (icode) (op0, op1, op2, op3);
8883 if (! pat)
8884 return NULL_RTX;
8885
8886 emit_insn (pat);
8887 return NULL_RTX;
8888 }
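
/* For instance, __MMACHS (1, x, y) is expanded roughly as

       op0 = ACC register number ACC_FIRST + 1      (frv_int_to_acc)
       op3 = ACCG register number ACCG_FIRST + 1    (frv_matching_accg_for_acc)
       emit_insn (gen_mmachs (op0, x', y', op3));

   where x' and y' are X and Y forced into registers if necessary, so the
   accumulator guard is carried along even though the source-level builtin
   never mentions it.  */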
8889
8890 /* Expand builtins that perform accumulator-to-accumulator operations.
8891 These builtins take two accumulator numbers as argument and return
8892 void. */
8893
8894 static rtx
8895 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
8896 {
8897 rtx pat;
8898 rtx op0 = frv_read_argument (call, 0);
8899 rtx op1 = frv_read_argument (call, 1);
8900 rtx op2;
8901 rtx op3;
8902
8903 op0 = frv_int_to_acc (icode, 0, op0);
8904 if (! op0)
8905 return NULL_RTX;
8906
8907 op1 = frv_int_to_acc (icode, 1, op1);
8908 if (! op1)
8909 return NULL_RTX;
8910
8911 op2 = frv_matching_accg_for_acc (op0);
8912 op3 = frv_matching_accg_for_acc (op1);
8913 pat = GEN_FCN (icode) (op0, op1, op2, op3);
8914 if (! pat)
8915 return NULL_RTX;
8916
8917 emit_insn (pat);
8918 return NULL_RTX;
8919 }
8920
8921 /* Expand a __builtin_read* function. ICODE is the instruction code for the
8922 membar and TARGET_MODE is the mode that the loaded value should have. */
8923
8924 static rtx
8925 frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode,
8926 tree call, rtx target)
8927 {
8928 rtx op0 = frv_read_argument (call, 0);
8929 rtx cookie = frv_io_address_cookie (op0);
8930
8931 if (target == 0 || !REG_P (target))
8932 target = gen_reg_rtx (target_mode);
8933 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
8934 convert_move (target, op0, 1);
8935 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
8936 cfun->machine->has_membar_p = 1;
8937 return target;
8938 }
8939
8940 /* Likewise __builtin_write* functions. */
8941
8942 static rtx
8943 frv_expand_store_builtin (enum insn_code icode, tree call)
8944 {
8945 rtx op0 = frv_read_argument (call, 0);
8946 rtx op1 = frv_read_argument (call, 1);
8947 rtx cookie = frv_io_address_cookie (op0);
8948
8949 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
8950 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
8951 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
8952 cfun->machine->has_membar_p = 1;
8953 return NULL_RTX;
8954 }
8955
8956 /* Expand the MDPACKH builtin. It takes four unsigned short arguments and
8957 each argument forms one word of the two double-word input registers.
8958 CALL is the tree for the call and TARGET, if nonnull, suggests a good place
8959 to put the return value. */
8960
8961 static rtx
8962 frv_expand_mdpackh_builtin (tree call, rtx target)
8963 {
8964 enum insn_code icode = CODE_FOR_mdpackh;
8965 rtx pat, op0, op1;
8966 rtx arg1 = frv_read_argument (call, 0);
8967 rtx arg2 = frv_read_argument (call, 1);
8968 rtx arg3 = frv_read_argument (call, 2);
8969 rtx arg4 = frv_read_argument (call, 3);
8970
8971 target = frv_legitimize_target (icode, target);
8972 op0 = gen_reg_rtx (DImode);
8973 op1 = gen_reg_rtx (DImode);
8974
8975 /* The high half of each word is not explicitly initialized, so indicate
8976 that the input operands are not live before this point. */
8977 emit_clobber (op0);
8978 emit_clobber (op1);
8979
8980 /* Move each argument into the low half of its associated input word. */
8981 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
8982 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
8983 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
8984 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
8985
8986 pat = GEN_FCN (icode) (target, op0, op1);
8987 if (! pat)
8988 return NULL_RTX;
8989
8990 emit_insn (pat);
8991 return target;
8992 }
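
/* A sketch of the register layout built above for __MDPACKH (a, b, c, d),
   using the byte offsets passed to simplify_gen_subreg (FRV is big-endian,
   so offset 2 is the low half of the first 32-bit word):

       op0 halfwords at offsets 0 2 4 6:   [ ?? | a | ?? | b ]
       op1 halfwords at offsets 0 2 4 6:   [ ?? | c | ?? | d ]

   The clobbers mark the "??" halves as never initialized, and gen_mdpackh
   then packs the four halfwords into the DImode result.  */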
8993
8994 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
8995 number as argument. */
8996
8997 static rtx
8998 frv_expand_mclracc_builtin (tree call)
8999 {
9000 enum insn_code icode = CODE_FOR_mclracc;
9001 rtx pat;
9002 rtx op0 = frv_read_argument (call, 0);
9003
9004 op0 = frv_int_to_acc (icode, 0, op0);
9005 if (! op0)
9006 return NULL_RTX;
9007
9008 pat = GEN_FCN (icode) (op0);
9009 if (pat)
9010 emit_insn (pat);
9011
9012 return NULL_RTX;
9013 }
9014
9015 /* Expand builtins that take no arguments. */
9016
9017 static rtx
9018 frv_expand_noargs_builtin (enum insn_code icode)
9019 {
9020 rtx pat = GEN_FCN (icode) (const0_rtx);
9021 if (pat)
9022 emit_insn (pat);
9023
9024 return NULL_RTX;
9025 }
9026
9027 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9028 number or accumulator guard number as argument and return an SI integer. */
9029
9030 static rtx
9031 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9032 {
9033 rtx pat;
9034 rtx target = gen_reg_rtx (SImode);
9035 rtx op0 = frv_read_argument (call, 0);
9036
9037 op0 = frv_int_to_acc (icode, 1, op0);
9038 if (! op0)
9039 return NULL_RTX;
9040
9041 pat = GEN_FCN (icode) (target, op0);
9042 if (! pat)
9043 return NULL_RTX;
9044
9045 emit_insn (pat);
9046 return target;
9047 }
9048
9049 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9050 accumulator guard as their first argument and an SImode value as their
9051 second. */
9052
9053 static rtx
9054 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9055 {
9056 rtx pat;
9057 rtx op0 = frv_read_argument (call, 0);
9058 rtx op1 = frv_read_argument (call, 1);
9059
9060 op0 = frv_int_to_acc (icode, 0, op0);
9061 if (! op0)
9062 return NULL_RTX;
9063
9064 op1 = frv_legitimize_argument (icode, 1, op1);
9065 pat = GEN_FCN (icode) (op0, op1);
9066 if (pat)
9067 emit_insn (pat);
9068
9069 return NULL_RTX;
9070 }
9071
9072 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9073 to move DImode values into and out of IACC0. */

static void
frv_split_iacc_move (rtx dest, rtx src)
{
  machine_mode inner;
  int i;

  inner = GET_MODE (dest);
  for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
    emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
                    simplify_gen_subreg (SImode, src, inner, i));
}

/* Expand a call EXP to a machine-specific built-in function; worker for
   TARGET_EXPAND_BUILTIN.  TARGET, if nonnull, suggests a good place to put
   the result.  */

static rtx
frv_expand_builtin (tree exp,
                    rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned fcode = DECL_MD_FUNCTION_CODE (fndecl);
  unsigned i;
  struct builtin_description *d;

  if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
    {
      error ("media functions are not available unless %<-mmedia%> is used");
      return NULL_RTX;
    }

  switch (fcode)
    {
    case FRV_BUILTIN_MCOP1:
    case FRV_BUILTIN_MCOP2:
    case FRV_BUILTIN_MDUNPACKH:
    case FRV_BUILTIN_MBTOHE:
      if (! TARGET_MEDIA_REV1)
        {
          error ("this media function is only available on the fr500");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_MQXMACHS:
    case FRV_BUILTIN_MQXMACXHS:
    case FRV_BUILTIN_MQMACXHS:
    case FRV_BUILTIN_MADDACCS:
    case FRV_BUILTIN_MSUBACCS:
    case FRV_BUILTIN_MASACCS:
    case FRV_BUILTIN_MDADDACCS:
    case FRV_BUILTIN_MDSUBACCS:
    case FRV_BUILTIN_MDASACCS:
    case FRV_BUILTIN_MABSHS:
    case FRV_BUILTIN_MDROTLI:
    case FRV_BUILTIN_MCPLHI:
    case FRV_BUILTIN_MCPLI:
    case FRV_BUILTIN_MDCUTSSI:
    case FRV_BUILTIN_MQSATHS:
    case FRV_BUILTIN_MHSETLOS:
    case FRV_BUILTIN_MHSETLOH:
    case FRV_BUILTIN_MHSETHIS:
    case FRV_BUILTIN_MHSETHIH:
    case FRV_BUILTIN_MHDSETS:
    case FRV_BUILTIN_MHDSETH:
      if (! TARGET_MEDIA_REV2)
        {
          error ("this media function is only available on the fr400"
                 " and fr550");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_SMASS:
    case FRV_BUILTIN_SMSSS:
    case FRV_BUILTIN_SMU:
    case FRV_BUILTIN_ADDSS:
    case FRV_BUILTIN_SUBSS:
    case FRV_BUILTIN_SLASS:
    case FRV_BUILTIN_SCUTSS:
    case FRV_BUILTIN_IACCreadll:
    case FRV_BUILTIN_IACCreadl:
    case FRV_BUILTIN_IACCsetll:
    case FRV_BUILTIN_IACCsetl:
      if (!TARGET_FR405_BUILTINS)
        {
          error ("this builtin function is only available"
                 " on the fr405 and fr450");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_PREFETCH:
      if (!TARGET_FR500_FR550_BUILTINS)
        {
          error ("this builtin function is only available on the fr500"
                 " and fr550");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_MQLCLRHS:
    case FRV_BUILTIN_MQLMTHS:
    case FRV_BUILTIN_MQSLLHI:
    case FRV_BUILTIN_MQSRAHI:
      if (!TARGET_MEDIA_FR450)
        {
          error ("this builtin function is only available on the fr450");
          return NULL_RTX;
        }
      break;

    default:
      break;
    }

  /* Expand unique builtins.  */

  switch (fcode)
    {
    case FRV_BUILTIN_MTRAP:
      return frv_expand_noargs_builtin (CODE_FOR_mtrap);

    case FRV_BUILTIN_MCLRACC:
      return frv_expand_mclracc_builtin (exp);

    case FRV_BUILTIN_MCLRACCA:
      if (TARGET_ACC_8)
        return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
      else
        return frv_expand_noargs_builtin (CODE_FOR_mclracca4);

    case FRV_BUILTIN_MRDACC:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);

    case FRV_BUILTIN_MRDACCG:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);

    case FRV_BUILTIN_MWTACC:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);

    case FRV_BUILTIN_MWTACCG:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);

    case FRV_BUILTIN_MDPACKH:
      return frv_expand_mdpackh_builtin (exp, target);

    case FRV_BUILTIN_IACCreadll:
      {
        rtx src = frv_read_iacc_argument (DImode, exp, 0);
        if (target == 0 || !REG_P (target))
          target = gen_reg_rtx (DImode);
        frv_split_iacc_move (target, src);
        return target;
      }

    case FRV_BUILTIN_IACCreadl:
      return frv_read_iacc_argument (SImode, exp, 0);

    case FRV_BUILTIN_IACCsetll:
      {
        rtx dest = frv_read_iacc_argument (DImode, exp, 0);
        rtx src = frv_read_argument (exp, 1);
        frv_split_iacc_move (dest, force_reg (DImode, src));
        return 0;
      }

    case FRV_BUILTIN_IACCsetl:
      {
        rtx dest = frv_read_iacc_argument (SImode, exp, 0);
        rtx src = frv_read_argument (exp, 1);
        emit_move_insn (dest, force_reg (SImode, src));
        return 0;
      }

    default:
      break;
    }

  /* Expand groups of builtins.  */

  for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
    if (d->code == fcode)
      return frv_expand_set_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_unop_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_binop_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
    if (d->code == fcode)
      return frv_expand_cut_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
    if (d->code == fcode)
      return frv_expand_binopimm_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidbinop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidtriop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidaccop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_int_void2arg;
       i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_int_void2arg (d->icode, exp);

  for (i = 0, d = bdesc_prefetches;
       i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
    if (d->code == fcode)
      return frv_expand_prefetches (d->icode, exp);

  for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
    if (d->code == fcode)
      return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
                                      exp, target);

  for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
    if (d->code == fcode)
      return frv_expand_store_builtin (d->icode, exp);

  return 0;
}

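/* Worker function for TARGET_IN_SMALL_DATA_P.  Return true if DECL should
   be placed in one of the small data sections.  */
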
static bool
frv_in_small_data_p (const_tree decl)
{
  HOST_WIDE_INT size;
  const char *section_name;

  /* Don't apply the -G flag to internal compiler structures.  We
     should leave such structures in the main data section, partly
     for efficiency and partly because the size of some of them
     (such as C++ typeinfos) is not known until later.  */
  if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
    return false;

  /* If we already know which section the decl should be in, see if
     it's a small data section.  */
  section_name = DECL_SECTION_NAME (decl);
  if (section_name)
    {
      if (frv_string_begins_with (section_name, ".sdata"))
        return true;
      if (frv_string_begins_with (section_name, ".sbss"))
        return true;
      return false;
    }

  size = int_size_in_bytes (TREE_TYPE (decl));
  if (size > 0 && size <= g_switch_value)
    return true;

  return false;
}
\f
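/* Worker function for TARGET_RTX_COSTS.  */
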
static bool
frv_rtx_costs (rtx x,
               machine_mode mode,
               int outer_code,
               int opno ATTRIBUTE_UNUSED,
               int *total,
               bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  if (outer_code == MEM)
    {
      /* Don't differentiate between memory addresses.  All the ones
         we accept have equal cost.  */
      *total = COSTS_N_INSNS (0);
      return true;
    }

  switch (code)
    {
    case CONST_INT:
      /* Make 12-bit integers really cheap.  */
      if (IN_RANGE (INTVAL (x), -2048, 2047))
        {
          *total = 0;
          return true;
        }
      /* Fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case NOT:
    case NEG:
    case COMPARE:
      if (mode == SImode)
        *total = COSTS_N_INSNS (1);
      else if (mode == DImode)
        *total = COSTS_N_INSNS (2);
      else
        *total = COSTS_N_INSNS (3);
      return true;

    case MULT:
      if (mode == SImode)
        *total = COSTS_N_INSNS (2);
      else
        *total = COSTS_N_INSNS (6);   /* guess */
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (18);
      return true;

    case MEM:
      *total = COSTS_N_INSNS (3);
      return true;

    default:
      return false;
    }
}
\f
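/* Worker function for TARGET_ASM_CONSTRUCTOR.  */
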
static void
frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (ctors_section);
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

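/* Worker function for TARGET_ASM_DESTRUCTOR.  */
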
static void
frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (dtors_section);
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
}

#define TLS_BIAS (2048 - 16)
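
/* TLS offsets emitted with the tlsmoff operator are implicitly biased by
   TLS_BIAS (2048 - 16 = 2032 bytes); frv_output_dwarf_dtprel below adds
   the same constant so that the bias cancels out.  */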

/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void
frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  gcc_assert (size == 4);
  fputs ("\t.picptr\ttlsmoff(", file);
  /* We want the unbiased TLS offset, so add the bias to the
     expression, such that the implicit biasing cancels out.  */
  output_addr_const (file, plus_constant (Pmode, x, TLS_BIAS));
  fputs (")", file);
}

#include "gt-frv.h"