1 /* Copyright (C) 1997-2014 Free Software Foundation, Inc.
2 Contributed by Red Hat, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "varasm.h"
27 #include "stor-layout.h"
28 #include "stringpool.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "reload.h"
39 #include "expr.h"
40 #include "obstack.h"
41 #include "except.h"
42 #include "hashtab.h"
43 #include "hash-set.h"
44 #include "vec.h"
45 #include "machmode.h"
46 #include "input.h"
47 #include "function.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "diagnostic-core.h"
51 #include "predict.h"
52 #include "dominance.h"
53 #include "cfg.h"
54 #include "cfgrtl.h"
55 #include "cfganal.h"
56 #include "lcm.h"
57 #include "cfgbuild.h"
58 #include "cfgcleanup.h"
59 #include "basic-block.h"
60 #include "tm_p.h"
61 #include "ggc.h"
62 #include "target.h"
63 #include "target-def.h"
64 #include "targhooks.h"
65 #include "langhooks.h"
66 #include "df.h"
67 #include "dumpfile.h"
68 #include "builtins.h"
69 #include "ifcvt.h"
70
71 #ifndef FRV_INLINE
72 #define FRV_INLINE inline
73 #endif
74
75 /* The maximum number of distinct NOP patterns. There are three:
76 nop, fnop and mnop. */
77 #define NUM_NOP_PATTERNS 3
78
79 /* Classification of instructions and units: integer, floating-point/media,
80 branch and control. */
81 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
82
83 /* The DFA names of the units, in packet order. */
84 static const char *const frv_unit_names[] =
85 {
86 "c",
87 "i0", "f0",
88 "i1", "f1",
89 "i2", "f2",
90 "i3", "f3",
91 "b0", "b1"
92 };
93
94 /* The classification of each unit in frv_unit_names[]. */
95 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
96 {
97 GROUP_C,
98 GROUP_I, GROUP_FM,
99 GROUP_I, GROUP_FM,
100 GROUP_I, GROUP_FM,
101 GROUP_I, GROUP_FM,
102 GROUP_B, GROUP_B
103 };
104
105 /* Return the DFA unit code associated with the Nth unit of integer
106    or floating-point group GROUP.  */
107 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
108
109 /* Return the number of integer or floating-point unit UNIT
110 (1 for I1, 2 for F2, etc.). */
111 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
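/* Worked example (added for illustration, not part of the original source):
   with GROUP_I == 0 and GROUP_FM == 1, the index (GROUP) + (N) * 2 + 1
   skips the control unit at position 0 and then walks the interleaved
   integer and float/media units, so

     NTH_UNIT (GROUP_I, 0)   selects frv_unit_codes[1]   ("i0")
     NTH_UNIT (GROUP_FM, 2)  selects frv_unit_codes[6]   ("f2")

   and UNIT_NUMBER maps a frv_unit_names[] index back to its number within
   the group:

     UNIT_NUMBER (1) == 0   ("i0")
     UNIT_NUMBER (6) == 2   ("f2")  */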
112
113 /* The DFA unit number for each unit in frv_unit_names[]. */
114 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
115
116 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
117 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
118 no instruction of type T has been seen. */
119 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
120
121 /* An array of dummy nop INSNs, one for each type of nop that the
122 target supports. */
123 static GTY(()) rtx_insn *frv_nops[NUM_NOP_PATTERNS];
124
125 /* The number of nop instructions in frv_nops[]. */
126 static unsigned int frv_num_nops;
127
128 /* The type of access. FRV_IO_UNKNOWN means the access can be either
129 a read or a write. */
130 enum frv_io_type { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE };
131
132 /* Information about one __builtin_read or __builtin_write access, or
133 the combination of several such accesses. The most general value
134 is all-zeros (an unknown access to an unknown address). */
135 struct frv_io {
136 enum frv_io_type type;
137
138 /* The constant address being accessed, or zero if not known. */
139 HOST_WIDE_INT const_address;
140
141 /* The run-time address, as used in operand 0 of the membar pattern. */
142 rtx var_address;
143 };
144
145 /* Return true if instruction INSN should be packed with the following
146 instruction. */
147 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
148
149 /* Set the value of PACKING_FLAG_P(INSN). */
150 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
151 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
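/* A minimal usage sketch (added for illustration; dump_packing is a
   hypothetical helper, not part of this file).  Because the flag is just
   the insn's machine mode, it survives until final and can be queried
   anywhere the insn stream is visible:

     static void
     dump_packing (void)
     {
       rtx_insn *insn;
       for (insn = get_insns (); insn != NULL; insn = NEXT_INSN (insn))
         if (INSN_P (insn) && PACKING_FLAG_P (insn))
           fprintf (stderr, "insn %d packs with its successor\n",
                    INSN_UID (insn));
     }  */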
152
153 /* Loop with REG set to each hard register in rtx X. */
154 #define FOR_EACH_REGNO(REG, X) \
155 for (REG = REGNO (X); \
156 REG < REGNO (X) + HARD_REGNO_NREGS (REGNO (X), GET_MODE (X)); \
157 REG++)
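/* Usage sketch (added for illustration; LIVE is a hypothetical
   HARD_REG_SET, not part of this file).  For a DImode REG rtx X starting
   at hard register 8, HARD_REGNO_NREGS is 2, so the loop visits hard
   registers 8 and 9:

     int regno;
     FOR_EACH_REGNO (regno, x)
       SET_HARD_REG_BIT (live, regno);  */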
158
159 /* This structure contains machine specific function data. */
160 struct GTY(()) machine_function
161 {
162 /* True if we have created an rtx that relies on the stack frame. */
163 int frame_needed;
164
165 /* True if this function contains at least one __builtin_{read,write}*. */
166 bool has_membar_p;
167 };
168
169 /* Temporary register allocation support structure. */
170 typedef struct frv_tmp_reg_struct
171 {
172 HARD_REG_SET regs; /* possible registers to allocate */
173 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
174 }
175 frv_tmp_reg_t;
176
177 /* Register state information for VLIW re-packing phase. */
178 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
179 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
180 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
181 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
182
183 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
184
185 typedef unsigned char regstate_t;
186
187 /* Used in frv_frame_accessor_t to indicate whether registers are being
188    loaded from or stored to the stack.  */
189 enum frv_stack_op
190 {
191 FRV_LOAD,
192 FRV_STORE
193 };
194
195 /* Information required by frv_frame_access. */
196 typedef struct
197 {
198 /* This field is FRV_LOAD if registers are to be loaded from the stack and
199 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
200 the move is being done by the prologue code while FRV_LOAD implies it
201 is being done by the epilogue. */
202 enum frv_stack_op op;
203
204 /* The base register to use when accessing the stack. This may be the
205 frame pointer, stack pointer, or a temporary. The choice of register
206 depends on which part of the frame is being accessed and how big the
207 frame is. */
208 rtx base;
209
210 /* The offset of BASE from the bottom of the current frame, in bytes. */
211 int base_offset;
212 } frv_frame_accessor_t;
213
214 /* Conditional execution support gathered together in one structure. */
215 typedef struct
216 {
217 /* Linked list of insns to add if the conditional execution conversion was
218 successful. Each link points to an EXPR_LIST which points to the pattern
219 of the insn to add, and the insn to be inserted before. */
220 rtx added_insns_list;
221
222 /* Identify which registers are safe to allocate for if conversions to
223 conditional execution. We keep the last allocated register in the
224 register classes between COND_EXEC statements. This will mean we allocate
225 different registers for each different COND_EXEC group if we can. This
226 might allow the scheduler to intermix two different COND_EXEC sections. */
227 frv_tmp_reg_t tmp_reg;
228
229 /* For nested IFs, identify which CC registers are used outside of the
230    compare insn that sets them and the check insn that uses them.  This will
231    allow us to know if we can rewrite the register to use a different register
232    that will be paired with the CR register controlling the nested IF-THEN blocks. */
233 HARD_REG_SET nested_cc_ok_rewrite;
234
235 /* Temporary registers allocated to hold constants during conditional
236 execution. */
237 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
238
239 /* Current number of temp registers available. */
240 int cur_scratch_regs;
241
242 /* Number of nested conditional execution blocks. */
243 int num_nested_cond_exec;
244
245 /* Map of insns that set up constants in scratch registers. */
246 bitmap scratch_insns_bitmap;
247
248 /* Conditional execution test register (CC0..CC7). */
249 rtx cr_reg;
250
251 /* Conditional execution compare register that is paired with cr_reg, so that
252 nested compares can be done. The csubcc and caddcc instructions don't
253 have enough bits to specify both a CC register to be set and a CR register
254 to do the test on, so the same bit number is used for both. Needless to
255 say, this is rather inconvenient for GCC. */
256 rtx nested_cc_reg;
257
258 /* Extra CR registers used for &&, ||. */
259 rtx extra_int_cr;
260 rtx extra_fp_cr;
261
262 /* Previous CR used in nested if, to make sure we are dealing with the same
263 nested if as the previous statement. */
264 rtx last_nested_if_cr;
265 }
266 frv_ifcvt_t;
267
268 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
269
270 /* Map register number to smallest register class. */
271 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
272
273 /* Cached value of frv_stack_info. */
274 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
275
276 /* Forward references */
277
278 static void frv_option_override (void);
279 static bool frv_legitimate_address_p (machine_mode, rtx, bool);
280 static int frv_default_flags_for_cpu (void);
281 static int frv_string_begins_with (const char *, const char *);
282 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
283 static void frv_print_operand (FILE *, rtx, int);
284 static void frv_print_operand_address (FILE *, rtx);
285 static bool frv_print_operand_punct_valid_p (unsigned char code);
286 static void frv_print_operand_memory_reference_reg
287 (FILE *, rtx);
288 static void frv_print_operand_memory_reference (FILE *, rtx, int);
289 static int frv_print_operand_jump_hint (rtx_insn *);
290 static const char *comparison_string (enum rtx_code, rtx);
291 static rtx frv_function_value (const_tree, const_tree,
292 bool);
293 static rtx frv_libcall_value (machine_mode,
294 const_rtx);
295 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
296 static rtx single_set_pattern (rtx);
297 static int frv_function_contains_far_jump (void);
298 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
299 enum reg_class,
300 machine_mode,
301 int, int);
302 static rtx frv_frame_offset_rtx (int);
303 static rtx frv_frame_mem (machine_mode, rtx, int);
304 static rtx frv_dwarf_store (rtx, int);
305 static void frv_frame_insn (rtx, rtx);
306 static void frv_frame_access (frv_frame_accessor_t*,
307 rtx, int);
308 static void frv_frame_access_multi (frv_frame_accessor_t*,
309 frv_stack_t *, int);
310 static void frv_frame_access_standard_regs (enum frv_stack_op,
311 frv_stack_t *);
312 static struct machine_function *frv_init_machine_status (void);
313 static rtx frv_int_to_acc (enum insn_code, int, rtx);
314 static machine_mode frv_matching_accg_mode (machine_mode);
315 static rtx frv_read_argument (tree, unsigned int);
316 static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
317 static int frv_check_constant_argument (enum insn_code, int, rtx);
318 static rtx frv_legitimize_target (enum insn_code, rtx);
319 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
320 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
321 static rtx frv_legitimize_address (rtx, rtx, machine_mode);
322 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
323 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
324 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
325 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
326 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
327 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
328 static rtx frv_expand_int_void2arg (enum insn_code, tree);
329 static rtx frv_expand_prefetches (enum insn_code, tree);
330 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
331 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
332 static rtx frv_expand_mclracc_builtin (tree);
333 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
334 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
335 static rtx frv_expand_noargs_builtin (enum insn_code);
336 static void frv_split_iacc_move (rtx, rtx);
337 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
338 static int frv_clear_registers_used (rtx *, void *);
339 static void frv_ifcvt_add_insn (rtx, rtx, int);
340 static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
341 static rtx frv_ifcvt_load_value (rtx, rtx);
342 static int frv_acc_group_1 (rtx *, void *);
343 static unsigned int frv_insn_unit (rtx_insn *);
344 static bool frv_issues_to_branch_unit_p (rtx_insn *);
345 static int frv_cond_flags (rtx);
346 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
347 static int frv_registers_conflict_p_1 (rtx *, void *);
348 static bool frv_registers_conflict_p (rtx);
349 static void frv_registers_update_1 (rtx, const_rtx, void *);
350 static void frv_registers_update (rtx);
351 static void frv_start_packet (void);
352 static void frv_start_packet_block (void);
353 static void frv_finish_packet (void (*) (void));
354 static bool frv_pack_insn_p (rtx_insn *);
355 static void frv_add_insn_to_packet (rtx_insn *);
356 static void frv_insert_nop_in_packet (rtx_insn *);
357 static bool frv_for_each_packet (void (*) (void));
358 static bool frv_sort_insn_group_1 (enum frv_insn_group,
359 unsigned int, unsigned int,
360 unsigned int, unsigned int,
361 state_t);
362 static int frv_compare_insns (const void *, const void *);
363 static void frv_sort_insn_group (enum frv_insn_group);
364 static void frv_reorder_packet (void);
365 static void frv_fill_unused_units (enum frv_insn_group);
366 static void frv_align_label (void);
367 static void frv_reorg_packet (void);
368 static void frv_register_nop (rtx);
369 static void frv_reorg (void);
370 static void frv_pack_insns (void);
371 static void frv_function_prologue (FILE *, HOST_WIDE_INT);
372 static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
373 static bool frv_assemble_integer (rtx, unsigned, int);
374 static void frv_init_builtins (void);
375 static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
376 static void frv_init_libfuncs (void);
377 static bool frv_in_small_data_p (const_tree);
378 static void frv_asm_output_mi_thunk
379 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
380 static void frv_setup_incoming_varargs (cumulative_args_t,
381 machine_mode,
382 tree, int *, int);
383 static rtx frv_expand_builtin_saveregs (void);
384 static void frv_expand_builtin_va_start (tree, rtx);
385 static bool frv_rtx_costs (rtx, int, int, int, int*,
386 bool);
387 static int frv_register_move_cost (machine_mode,
388 reg_class_t, reg_class_t);
389 static int frv_memory_move_cost (machine_mode,
390 reg_class_t, bool);
391 static void frv_asm_out_constructor (rtx, int);
392 static void frv_asm_out_destructor (rtx, int);
393 static bool frv_function_symbol_referenced_p (rtx);
394 static bool frv_legitimate_constant_p (machine_mode, rtx);
395 static bool frv_cannot_force_const_mem (machine_mode, rtx);
396 static const char *unspec_got_name (int);
397 static void frv_output_const_unspec (FILE *,
398 const struct frv_unspec *);
399 static bool frv_function_ok_for_sibcall (tree, tree);
400 static rtx frv_struct_value_rtx (tree, int);
401 static bool frv_must_pass_in_stack (machine_mode mode, const_tree type);
402 static int frv_arg_partial_bytes (cumulative_args_t, machine_mode,
403 tree, bool);
404 static rtx frv_function_arg (cumulative_args_t, machine_mode,
405 const_tree, bool);
406 static rtx frv_function_incoming_arg (cumulative_args_t, machine_mode,
407 const_tree, bool);
408 static void frv_function_arg_advance (cumulative_args_t, machine_mode,
409 const_tree, bool);
410 static unsigned int frv_function_arg_boundary (machine_mode,
411 const_tree);
412 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
413 ATTRIBUTE_UNUSED;
414 static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
415 machine_mode,
416 secondary_reload_info *);
417 static bool frv_frame_pointer_required (void);
418 static bool frv_can_eliminate (const int, const int);
419 static void frv_conditional_register_usage (void);
420 static void frv_trampoline_init (rtx, tree, rtx);
421 static bool frv_class_likely_spilled_p (reg_class_t);
422 \f
423 /* Initialize the GCC target structure. */
424 #undef TARGET_PRINT_OPERAND
425 #define TARGET_PRINT_OPERAND frv_print_operand
426 #undef TARGET_PRINT_OPERAND_ADDRESS
427 #define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
428 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
429 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
430 #undef TARGET_ASM_FUNCTION_PROLOGUE
431 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
432 #undef TARGET_ASM_FUNCTION_EPILOGUE
433 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
434 #undef TARGET_ASM_INTEGER
435 #define TARGET_ASM_INTEGER frv_assemble_integer
436 #undef TARGET_OPTION_OVERRIDE
437 #define TARGET_OPTION_OVERRIDE frv_option_override
438 #undef TARGET_INIT_BUILTINS
439 #define TARGET_INIT_BUILTINS frv_init_builtins
440 #undef TARGET_EXPAND_BUILTIN
441 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
442 #undef TARGET_INIT_LIBFUNCS
443 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
444 #undef TARGET_IN_SMALL_DATA_P
445 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
446 #undef TARGET_REGISTER_MOVE_COST
447 #define TARGET_REGISTER_MOVE_COST frv_register_move_cost
448 #undef TARGET_MEMORY_MOVE_COST
449 #define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
450 #undef TARGET_RTX_COSTS
451 #define TARGET_RTX_COSTS frv_rtx_costs
452 #undef TARGET_ASM_CONSTRUCTOR
453 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
454 #undef TARGET_ASM_DESTRUCTOR
455 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
456
457 #undef TARGET_ASM_OUTPUT_MI_THUNK
458 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
459 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
460 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
461
462 #undef TARGET_SCHED_ISSUE_RATE
463 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
464
465 #undef TARGET_LEGITIMIZE_ADDRESS
466 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
467
468 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
469 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
470 #undef TARGET_LEGITIMATE_CONSTANT_P
471 #define TARGET_LEGITIMATE_CONSTANT_P frv_legitimate_constant_p
472 #undef TARGET_CANNOT_FORCE_CONST_MEM
473 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
474
475 #undef TARGET_HAVE_TLS
476 #define TARGET_HAVE_TLS HAVE_AS_TLS
477
478 #undef TARGET_STRUCT_VALUE_RTX
479 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
480 #undef TARGET_MUST_PASS_IN_STACK
481 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
482 #undef TARGET_PASS_BY_REFERENCE
483 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
484 #undef TARGET_ARG_PARTIAL_BYTES
485 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
486 #undef TARGET_FUNCTION_ARG
487 #define TARGET_FUNCTION_ARG frv_function_arg
488 #undef TARGET_FUNCTION_INCOMING_ARG
489 #define TARGET_FUNCTION_INCOMING_ARG frv_function_incoming_arg
490 #undef TARGET_FUNCTION_ARG_ADVANCE
491 #define TARGET_FUNCTION_ARG_ADVANCE frv_function_arg_advance
492 #undef TARGET_FUNCTION_ARG_BOUNDARY
493 #define TARGET_FUNCTION_ARG_BOUNDARY frv_function_arg_boundary
494
495 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
496 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
497 #undef TARGET_SETUP_INCOMING_VARARGS
498 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
499 #undef TARGET_MACHINE_DEPENDENT_REORG
500 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
501
502 #undef TARGET_EXPAND_BUILTIN_VA_START
503 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
504
505 #if HAVE_AS_TLS
506 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
507 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
508 #endif
509
510 #undef TARGET_CLASS_LIKELY_SPILLED_P
511 #define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p
512
513 #undef TARGET_SECONDARY_RELOAD
514 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
515
516 #undef TARGET_LEGITIMATE_ADDRESS_P
517 #define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p
518
519 #undef TARGET_FRAME_POINTER_REQUIRED
520 #define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required
521
522 #undef TARGET_CAN_ELIMINATE
523 #define TARGET_CAN_ELIMINATE frv_can_eliminate
524
525 #undef TARGET_CONDITIONAL_REGISTER_USAGE
526 #define TARGET_CONDITIONAL_REGISTER_USAGE frv_conditional_register_usage
527
528 #undef TARGET_TRAMPOLINE_INIT
529 #define TARGET_TRAMPOLINE_INIT frv_trampoline_init
530
531 #undef TARGET_FUNCTION_VALUE
532 #define TARGET_FUNCTION_VALUE frv_function_value
533 #undef TARGET_LIBCALL_VALUE
534 #define TARGET_LIBCALL_VALUE frv_libcall_value
535
536 struct gcc_target targetm = TARGET_INITIALIZER;
537
538 #define FRV_SYMBOL_REF_TLS_P(RTX) \
539 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
540
541 \f
542 /* Any function call that satisfies the machine-independent
543 requirements is eligible on FR-V. */
544
545 static bool
546 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
547 tree exp ATTRIBUTE_UNUSED)
548 {
549 return true;
550 }
551
552 /* Return true if SYMBOL is a small data symbol and relocation RELOC
553 can be used to access it directly in a load or store. */
554
555 static FRV_INLINE bool
556 frv_small_data_reloc_p (rtx symbol, int reloc)
557 {
558 return (GET_CODE (symbol) == SYMBOL_REF
559 && SYMBOL_REF_SMALL_P (symbol)
560 && (!TARGET_FDPIC || flag_pic == 1)
561 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
562 }
563
564 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
565 appropriately. */
566
567 bool
568 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
569 {
570 if (GET_CODE (x) == CONST)
571 {
572 unspec->offset = 0;
573 x = XEXP (x, 0);
574 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
575 {
576 unspec->offset += INTVAL (XEXP (x, 1));
577 x = XEXP (x, 0);
578 }
579 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
580 {
581 unspec->symbol = XVECEXP (x, 0, 0);
582 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
583
584 if (unspec->offset == 0)
585 return true;
586
587 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
588 && unspec->offset > 0
589 && unspec->offset < g_switch_value)
590 return true;
591 }
592 }
593 return false;
594 }
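/* For illustration (added, not part of the original source): the kind of
   address accepted above has the shape

     (const (plus (unspec [(symbol_ref "x")
                           (const_int R_FRV_GPREL12)] UNSPEC_GOT)
                  (const_int 4)))

   which fills UNSPEC in with symbol "x", reloc R_FRV_GPREL12 and offset 4.
   A nonzero offset is only accepted for small-data relocations, and only
   if it is positive and below g_switch_value.  */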
595
596 /* Decide whether we can force certain constants to memory. If we
597 decide we can't, the caller should be able to cope with it in
598 another way.
599
600 We never allow constants to be forced into memory for TARGET_FDPIC.
601 This is necessary for several reasons:
602
603 1. Since frv_legitimate_constant_p rejects constant pool addresses, the
604 target-independent code will try to force them into the constant
605 pool, thus leading to infinite recursion.
606
607 2. We can never introduce new constant pool references during reload.
608 Any such reference would require use of the pseudo FDPIC register.
609
610 3. We can't represent a constant added to a function pointer (which is
611 not the same as a pointer to a function+constant).
612
613 4. In many cases, it's more efficient to calculate the constant in-line. */
614
615 static bool
616 frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
617 rtx x ATTRIBUTE_UNUSED)
618 {
619 return TARGET_FDPIC;
620 }
621 \f
622 static int
623 frv_default_flags_for_cpu (void)
624 {
625 switch (frv_cpu_type)
626 {
627 case FRV_CPU_GENERIC:
628 return MASK_DEFAULT_FRV;
629
630 case FRV_CPU_FR550:
631 return MASK_DEFAULT_FR550;
632
633 case FRV_CPU_FR500:
634 case FRV_CPU_TOMCAT:
635 return MASK_DEFAULT_FR500;
636
637 case FRV_CPU_FR450:
638 return MASK_DEFAULT_FR450;
639
640 case FRV_CPU_FR405:
641 case FRV_CPU_FR400:
642 return MASK_DEFAULT_FR400;
643
644 case FRV_CPU_FR300:
645 case FRV_CPU_SIMPLE:
646 return MASK_DEFAULT_SIMPLE;
647
648 default:
649 gcc_unreachable ();
650 }
651 }
652
653 /* Implement TARGET_OPTION_OVERRIDE. */
654
655 static void
656 frv_option_override (void)
657 {
658 int regno;
659 unsigned int i;
660
661 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
662
663 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
664 linker about linking pic and non-pic code. */
665 if (TARGET_LIBPIC)
666 {
667 if (!flag_pic) /* -fPIC */
668 flag_pic = 2;
669
670 if (!global_options_set.x_g_switch_value) /* -G0 */
671 {
672 g_switch_value = 0;
673 }
674 }
675
676 /* A C expression whose value is a register class containing hard
677 register REGNO. In general there is more than one such class;
678 choose a class which is "minimal", meaning that no smaller class
679 also contains the register. */
680
681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
682 {
683 enum reg_class rclass;
684
685 if (GPR_P (regno))
686 {
687 int gpr_reg = regno - GPR_FIRST;
688
689 if (gpr_reg == GR8_REG)
690 rclass = GR8_REGS;
691
692 else if (gpr_reg == GR9_REG)
693 rclass = GR9_REGS;
694
695 else if (gpr_reg == GR14_REG)
696 rclass = FDPIC_FPTR_REGS;
697
698 else if (gpr_reg == FDPIC_REGNO)
699 rclass = FDPIC_REGS;
700
701 else if ((gpr_reg & 3) == 0)
702 rclass = QUAD_REGS;
703
704 else if ((gpr_reg & 1) == 0)
705 rclass = EVEN_REGS;
706
707 else
708 rclass = GPR_REGS;
709 }
710
711 else if (FPR_P (regno))
712 {
713           int fpr_reg = regno - FPR_FIRST;
714 if ((fpr_reg & 3) == 0)
715 rclass = QUAD_FPR_REGS;
716
717 else if ((fpr_reg & 1) == 0)
718 rclass = FEVEN_REGS;
719
720 else
721 rclass = FPR_REGS;
722 }
723
724 else if (regno == LR_REGNO)
725 rclass = LR_REG;
726
727 else if (regno == LCR_REGNO)
728 rclass = LCR_REG;
729
730 else if (ICC_P (regno))
731 rclass = ICC_REGS;
732
733 else if (FCC_P (regno))
734 rclass = FCC_REGS;
735
736 else if (ICR_P (regno))
737 rclass = ICR_REGS;
738
739 else if (FCR_P (regno))
740 rclass = FCR_REGS;
741
742 else if (ACC_P (regno))
743 {
744 int r = regno - ACC_FIRST;
745 if ((r & 3) == 0)
746 rclass = QUAD_ACC_REGS;
747 else if ((r & 1) == 0)
748 rclass = EVEN_ACC_REGS;
749 else
750 rclass = ACC_REGS;
751 }
752
753 else if (ACCG_P (regno))
754 rclass = ACCG_REGS;
755
756 else
757 rclass = NO_REGS;
758
759 regno_reg_class[regno] = rclass;
760 }
761
762 /* Check for small data option */
763 if (!global_options_set.x_g_switch_value && !TARGET_LIBPIC)
764 g_switch_value = SDATA_DEFAULT_SIZE;
765
766 /* There is no single unaligned SI op for PIC code. Sometimes we
767 need to use ".4byte" and sometimes we need to use ".picptr".
768 See frv_assemble_integer for details. */
769 if (flag_pic || TARGET_FDPIC)
770 targetm.asm_out.unaligned_op.si = 0;
771
772 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
773 target_flags |= MASK_LINKED_FP;
774
775 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
776 target_flags |= MASK_OPTIMIZE_MEMBAR;
777
778 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
779 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
780
781 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
782 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
783
784 init_machine_status = frv_init_machine_status;
785 }
786
787 \f
788 /* Return true if the string NAME begins with PREFIX.  */
789
790 static int
791 frv_string_begins_with (const char *name, const char *prefix)
792 {
793 const int prefix_len = strlen (prefix);
794
795 /* Remember: NAME's length includes the null terminator. */
796 return (strncmp (name, prefix, prefix_len) == 0);
797 }
798 \f
799 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
800
801 static void
802 frv_conditional_register_usage (void)
803 {
804 int i;
805
806 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
807 fixed_regs[i] = call_used_regs[i] = 1;
808
809 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
810 fixed_regs[i] = call_used_regs[i] = 1;
811
812 /* Reserve the registers used for conditional execution. At present, we need
813 1 ICC and 1 ICR register. */
814 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
815 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
816
817 if (TARGET_FIXED_CC)
818 {
819 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
820 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
821 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
822 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
823 }
824
825 if (TARGET_FDPIC)
826 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
827 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
828
829 #if 0
830 /* If -fpic, SDA_BASE_REG is the PIC register. */
831 if (g_switch_value == 0 && !flag_pic)
832 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
833
834 if (!flag_pic)
835 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
836 #endif
837 }
838
839 \f
840 /*
841 * Compute the stack frame layout
842 *
843 * Register setup:
844 * +---------------+-----------------------+-----------------------+
845 * |Register |type |caller-save/callee-save|
846 * +---------------+-----------------------+-----------------------+
847 * |GR0 |Zero register | - |
848 * |GR1 |Stack pointer(SP) | - |
849 * |GR2 |Frame pointer(FP) | - |
850 * |GR3 |Hidden parameter | caller save |
851 * |GR4-GR7 | - | caller save |
852 * |GR8-GR13 |Argument register | caller save |
853 * |GR14-GR15 | - | caller save |
854 * |GR16-GR31 | - | callee save |
855 * |GR32-GR47 | - | caller save |
856 * |GR48-GR63 | - | callee save |
857 * |FR0-FR15 | - | caller save |
858 * |FR16-FR31 | - | callee save |
859 * |FR32-FR47 | - | caller save |
860 * |FR48-FR63 | - | callee save |
861 * +---------------+-----------------------+-----------------------+
862 *
863 * Stack frame setup:
864 * Low
865 * SP-> |-----------------------------------|
866 * | Argument area |
867 * |-----------------------------------|
868 * | Register save area |
869 * |-----------------------------------|
870 * | Local variable save area |
871 * FP-> |-----------------------------------|
872 * | Old FP |
873 * |-----------------------------------|
874 * | Hidden parameter save area |
875 * |-----------------------------------|
876 * | Return address(LR) storage area |
877 * |-----------------------------------|
878 * | Padding for alignment |
879 * |-----------------------------------|
880 * | Register argument area |
881 * OLD SP-> |-----------------------------------|
882 * | Parameter area |
883 * |-----------------------------------|
884 * High
885 *
886 * Argument area/Parameter area:
887 *
888 * When a function is called, this area is used for argument transfer. When
889 * the argument is set up by the caller function, this area is referred to as
890 * the argument area. When the argument is referenced by the callee function,
891 * this area is referred to as the parameter area. The area is allocated when
892  * not all arguments can be placed in the argument registers at the time of
893 * argument transfer.
894 *
895 * Register save area:
896 *
897  * This is the save area for registers that must be preserved for the caller
898  * function.  This area is not allocated when no register saves are
899  * needed.
900 *
901 * Local variable save area:
902 *
903 * This is the area for local variables and temporary variables.
904 *
905 * Old FP:
906 *
907 * This area stores the FP value of the caller function.
908 *
909 * Hidden parameter save area:
910 *
911 * This area stores the start address of the return value storage
912 * area for a struct/union return function.
913 * When a struct/union is used as the return value, the caller
914 * function stores the return value storage area start address in
915  * register GR3 and passes it to the callee function.
916  * The callee function interprets the address stored in GR3
917 * as the return value storage area start address.
918 * When register GR3 needs to be saved into memory, the callee
919 * function saves it in the hidden parameter save area. This
920  * area is not allocated when the save operation is not needed.
921 *
922 * Return address(LR) storage area:
923 *
924 * This area saves the LR. The LR stores the address of a return to the caller
925 * function for the purpose of function calling.
926 *
927 * Argument register area:
928 *
929  * This area saves the argument registers.  This area is not allocated when the
930 * save operation is not needed.
931 *
932 * Argument:
933 *
934 * Arguments, the count of which equals the count of argument registers (6
935 * words), are positioned in registers GR8 to GR13 and delivered to the callee
936 * function. When a struct/union return function is called, the return value
937 * area address is stored in register GR3. Arguments not placed in the
938 * argument registers will be stored in the stack argument area for transfer
939 * purposes. When an 8-byte type argument is to be delivered using registers,
940 * it is divided into two and placed in two registers for transfer. When
941 * argument registers must be saved to memory, the callee function secures an
942 * argument register save area in the stack. In this case, a continuous
943 * argument register save area must be established in the parameter area. The
944 * argument register save area must be allocated as needed to cover the size of
945 * the argument register to be saved. If the function has a variable count of
946 * arguments, it saves all argument registers in the argument register save
947 * area.
948 *
949 * Argument Extension Format:
950 *
951 * When an argument is to be stored in the stack, its type is converted to an
952 * extended type in accordance with the individual argument type. The argument
953 * is freed by the caller function after the return from the callee function is
954 * made.
955 *
956 * +-----------------------+---------------+------------------------+
957 * | Argument Type |Extended Type |Stack Storage Size(byte)|
958 * +-----------------------+---------------+------------------------+
959 * |char |int | 4 |
960 * |signed char |int | 4 |
961 * |unsigned char |int | 4 |
962 * |[signed] short int |int | 4 |
963 * |unsigned short int |int | 4 |
964 * |[signed] int |No extension | 4 |
965 * |unsigned int |No extension | 4 |
966 * |[signed] long int |No extension | 4 |
967 * |unsigned long int |No extension | 4 |
968 * |[signed] long long int |No extension | 8 |
969 * |unsigned long long int |No extension | 8 |
970 * |float |double | 8 |
971 * |double |No extension | 8 |
972 * |long double |No extension | 8 |
973 * |pointer |No extension | 4 |
974 * |struct/union |- | 4 (*1) |
975 * +-----------------------+---------------+------------------------+
976 *
977 * When a struct/union is to be delivered as an argument, the caller copies it
978 * to the local variable area and delivers the address of that area.
979 *
980 * Return Value:
981 *
982 * +-------------------------------+----------------------+
983 * |Return Value Type |Return Value Interface|
984 * +-------------------------------+----------------------+
985 * |void |None |
986 * |[signed|unsigned] char |GR8 |
987 * |[signed|unsigned] short int |GR8 |
988 * |[signed|unsigned] int |GR8 |
989 * |[signed|unsigned] long int |GR8 |
990 * |pointer |GR8 |
991 * |[signed|unsigned] long long int|GR8 & GR9 |
992 * |float |GR8 |
993 * |double |GR8 & GR9 |
994 * |long double |GR8 & GR9 |
995 * |struct/union |(*1) |
996 * +-------------------------------+----------------------+
997 *
998 * When a struct/union is used as the return value, the caller function stores
999 * the start address of the return value storage area into GR3 and then passes
1000 * it to the callee function. The callee function interprets GR3 as the start
1001 * address of the return value storage area. When this address needs to be
1002 * saved in memory, the callee function secures the hidden parameter save area
1003 * and saves the address in that area.
1004 */
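/* A small C-level example of the conventions above (added for
   illustration; the function name is made up):

     long long
     callee (int a, int b, long long c, int d, int e, int f)
     {
       return c + a + f;
     }

   On a straightforward reading of the tables, A and B occupy GR8 and GR9,
   C is split across the next register pair, D and E take GR12 and GR13,
   and F, the seventh argument word, is passed in the stack parameter
   area.  The long long result comes back in GR8 and GR9.  If CALLEE
   instead returned a struct, the caller would place the address of the
   return-value area in GR3 before the call.  */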
1005
1006 frv_stack_t *
1007 frv_stack_info (void)
1008 {
1009 static frv_stack_t info, zero_info;
1010 frv_stack_t *info_ptr = &info;
1011 tree fndecl = current_function_decl;
1012 int varargs_p = 0;
1013 tree cur_arg;
1014 tree next_arg;
1015 int range;
1016 int alignment;
1017 int offset;
1018
1019 /* If we've already calculated the values and reload is complete,
1020 just return now. */
1021 if (frv_stack_cache)
1022 return frv_stack_cache;
1023
1024 /* Zero all fields. */
1025 info = zero_info;
1026
1027 /* Set up the register range information. */
1028 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1029 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1030 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1031 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1032
1033 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1034 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1035 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1036 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1037
1038 info_ptr->regs[STACK_REGS_LR].name = "lr";
1039 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1040 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1041 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1042
1043 info_ptr->regs[STACK_REGS_CC].name = "cc";
1044 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1045 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1046 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1047
1048 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1049 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1050 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1051
1052 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1053 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1054 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1055 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1056 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1057
1058 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1059 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1060 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1061 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1062
1063 info_ptr->regs[STACK_REGS_FP].name = "fp";
1064 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1065 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1066 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1067
1068 /* Determine if this is a stdarg function. If so, allocate space to store
1069 the 6 arguments. */
1070 if (cfun->stdarg)
1071 varargs_p = 1;
1072
1073 else
1074 {
1075 /* Find the last argument, and see if it is __builtin_va_alist. */
1076 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1077 {
1078 next_arg = DECL_CHAIN (cur_arg);
1079 if (next_arg == (tree)0)
1080 {
1081 if (DECL_NAME (cur_arg)
1082 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1083 varargs_p = 1;
1084
1085 break;
1086 }
1087 }
1088 }
1089
1090 /* Iterate over all of the register ranges. */
1091 for (range = 0; range < STACK_REGS_MAX; range++)
1092 {
1093 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1094 int first = reg_ptr->first;
1095 int last = reg_ptr->last;
1096 int size_1word = 0;
1097 int size_2words = 0;
1098 int regno;
1099
1100 /* Calculate which registers need to be saved & save area size. */
1101 switch (range)
1102 {
1103 default:
1104 for (regno = first; regno <= last; regno++)
1105 {
1106 if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
1107 || (crtl->calls_eh_return
1108 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1109 || (!TARGET_FDPIC && flag_pic
1110 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1111 {
1112 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1113 size_1word += UNITS_PER_WORD;
1114 }
1115 }
1116 break;
1117
1118 /* Calculate whether we need to create a frame after everything else
1119 has been processed. */
1120 case STACK_REGS_FP:
1121 break;
1122
1123 case STACK_REGS_LR:
1124 if (df_regs_ever_live_p (LR_REGNO)
1125 || profile_flag
1126 /* This is set for __builtin_return_address, etc. */
1127 || cfun->machine->frame_needed
1128 || (TARGET_LINKED_FP && frame_pointer_needed)
1129 || (!TARGET_FDPIC && flag_pic
1130 && crtl->uses_pic_offset_table))
1131 {
1132 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1133 size_1word += UNITS_PER_WORD;
1134 }
1135 break;
1136
1137 case STACK_REGS_STDARG:
1138 if (varargs_p)
1139 {
1140          /* If this is a stdarg function with a non-variadic
1141 argument split between registers and the stack,
1142 adjust the saved registers downward. */
1143 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1144 / UNITS_PER_WORD);
1145
1146 for (regno = first; regno <= last; regno++)
1147 {
1148 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1149 size_1word += UNITS_PER_WORD;
1150 }
1151
1152 info_ptr->stdarg_size = size_1word;
1153 }
1154 break;
1155
1156 case STACK_REGS_STRUCT:
1157 if (cfun->returns_struct)
1158 {
1159 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1160 size_1word += UNITS_PER_WORD;
1161 }
1162 break;
1163 }
1164
1165
1166 if (size_1word)
1167 {
1168 /* If this is a field, it only takes one word. */
1169 if (reg_ptr->field_p)
1170 size_1word = UNITS_PER_WORD;
1171
1172 /* Determine which register pairs can be saved together. */
1173 else if (reg_ptr->dword_p && TARGET_DWORD)
1174 {
1175 for (regno = first; regno < last; regno += 2)
1176 {
1177 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1178 {
1179 size_2words += 2 * UNITS_PER_WORD;
1180 size_1word -= 2 * UNITS_PER_WORD;
1181 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1182 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1183 }
1184 }
1185 }
1186
1187 reg_ptr->size_1word = size_1word;
1188 reg_ptr->size_2words = size_2words;
1189
1190 if (! reg_ptr->special_p)
1191 {
1192 info_ptr->regs_size_1word += size_1word;
1193 info_ptr->regs_size_2words += size_2words;
1194 }
1195 }
1196 }
1197
1198   /* Set up the sizes of each field in the frame body, making the sizes
1199 of each be divisible by the size of a dword if dword operations might
1200 be used, or the size of a word otherwise. */
1201 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1202
1203 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1204 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1205 + info_ptr->regs_size_1word,
1206 alignment);
1207 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1208
1209 info_ptr->pretend_size = crtl->args.pretend_args_size;
1210
1211 /* Work out the size of the frame, excluding the header. Both the frame
1212 body and register parameter area will be dword-aligned. */
1213 info_ptr->total_size
1214 = (ADDR_ALIGN (info_ptr->parameter_size
1215 + info_ptr->regs_size
1216 + info_ptr->vars_size,
1217 2 * UNITS_PER_WORD)
1218 + ADDR_ALIGN (info_ptr->pretend_size
1219 + info_ptr->stdarg_size,
1220 2 * UNITS_PER_WORD));
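  /* Worked example (added for illustration, assuming UNITS_PER_WORD == 4 and
     dword alignment of 8): with parameter_size == 8, regs_size == 16 and
     vars_size == 20, the first term is ADDR_ALIGN (44, 8) == 48; with no
     pretend or stdarg bytes the second term is 0, so total_size is 48 here.
     The 4-word header is added below only if a frame turns out to be
     needed.  */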
1221
1222 /* See if we need to create a frame at all, if so add header area. */
1223 if (info_ptr->total_size > 0
1224 || frame_pointer_needed
1225 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1226 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1227 {
1228 offset = info_ptr->parameter_size;
1229 info_ptr->header_size = 4 * UNITS_PER_WORD;
1230 info_ptr->total_size += 4 * UNITS_PER_WORD;
1231
1232 /* Calculate the offsets to save normal register pairs. */
1233 for (range = 0; range < STACK_REGS_MAX; range++)
1234 {
1235 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1236 if (! reg_ptr->special_p)
1237 {
1238 int first = reg_ptr->first;
1239 int last = reg_ptr->last;
1240 int regno;
1241
1242 for (regno = first; regno <= last; regno++)
1243 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1244 && regno != FRAME_POINTER_REGNUM
1245 && (regno < FIRST_ARG_REGNUM
1246 || regno > LAST_ARG_REGNUM))
1247 {
1248 info_ptr->reg_offset[regno] = offset;
1249 offset += 2 * UNITS_PER_WORD;
1250 }
1251 }
1252 }
1253
1254 /* Calculate the offsets to save normal single registers. */
1255 for (range = 0; range < STACK_REGS_MAX; range++)
1256 {
1257 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1258 if (! reg_ptr->special_p)
1259 {
1260 int first = reg_ptr->first;
1261 int last = reg_ptr->last;
1262 int regno;
1263
1264 for (regno = first; regno <= last; regno++)
1265 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1266 && regno != FRAME_POINTER_REGNUM
1267 && (regno < FIRST_ARG_REGNUM
1268 || regno > LAST_ARG_REGNUM))
1269 {
1270 info_ptr->reg_offset[regno] = offset;
1271 offset += UNITS_PER_WORD;
1272 }
1273 }
1274 }
1275
1276 /* Calculate the offset to save the local variables at. */
1277 offset = ADDR_ALIGN (offset, alignment);
1278 if (info_ptr->vars_size)
1279 {
1280 info_ptr->vars_offset = offset;
1281 offset += info_ptr->vars_size;
1282 }
1283
1284 /* Align header to a dword-boundary. */
1285 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1286
1287 /* Calculate the offsets in the fixed frame. */
1288 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1289 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1290 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1291
1292 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1293 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1294 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1295
1296 if (cfun->returns_struct)
1297 {
1298 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1299 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1300 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1301 }
1302
1303 /* Calculate the offsets to store the arguments passed in registers
1304 for stdarg functions. The register pairs are first and the single
1305 register if any is last. The register save area starts on a
1306 dword-boundary. */
1307 if (info_ptr->stdarg_size)
1308 {
1309 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1310 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1311 int regno;
1312
1313 /* Skip the header. */
1314 offset += 4 * UNITS_PER_WORD;
1315 for (regno = first; regno <= last; regno++)
1316 {
1317 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1318 {
1319 info_ptr->reg_offset[regno] = offset;
1320 offset += 2 * UNITS_PER_WORD;
1321 }
1322 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1323 {
1324 info_ptr->reg_offset[regno] = offset;
1325 offset += UNITS_PER_WORD;
1326 }
1327 }
1328 }
1329 }
1330
1331 if (reload_completed)
1332 frv_stack_cache = info_ptr;
1333
1334 return info_ptr;
1335 }
1336
1337 \f
1338 /* Print the information about the frv stack offsets, etc. when debugging. */
1339
1340 void
1341 frv_debug_stack (frv_stack_t *info)
1342 {
1343 int range;
1344
1345 if (!info)
1346 info = frv_stack_info ();
1347
1348 fprintf (stderr, "\nStack information for function %s:\n",
1349 ((current_function_decl && DECL_NAME (current_function_decl))
1350 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1351 : "<unknown>"));
1352
1353 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1354 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1355 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1356 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1357 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1358
1359 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1360 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1361 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1362 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1363
1364 for (range = 0; range < STACK_REGS_MAX; range++)
1365 {
1366 frv_stack_regs_t *regs = &(info->regs[range]);
1367 if ((regs->size_1word + regs->size_2words) > 0)
1368 {
1369 int first = regs->first;
1370 int last = regs->last;
1371 int regno;
1372
1373 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1374 regs->name, regs->size_1word + regs->size_2words,
1375 regs->size_1word, regs->size_2words);
1376
1377 for (regno = first; regno <= last; regno++)
1378 {
1379 if (info->save_p[regno] == REG_SAVE_1WORD)
1380 fprintf (stderr, " %s (%d)", reg_names[regno],
1381 info->reg_offset[regno]);
1382
1383 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1384 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1385 reg_names[regno+1], info->reg_offset[regno]);
1386 }
1387
1388 fputc ('\n', stderr);
1389 }
1390 }
1391
1392 fflush (stderr);
1393 }
1394
1395
1396 \f
1397
1398 /* Used during final to control the packing of insns. The value is
1399 1 if the current instruction should be packed with the next one,
1400 0 if it shouldn't or -1 if packing is disabled altogether. */
1401
1402 static int frv_insn_packing_flag;
1403
1404 /* True if the current function contains a far jump. */
1405
1406 static int
1407 frv_function_contains_far_jump (void)
1408 {
1409 rtx_insn *insn = get_insns ();
1410 while (insn != NULL
1411 && !(JUMP_P (insn)
1412 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1413 insn = NEXT_INSN (insn);
1414 return (insn != NULL);
1415 }
1416
1417 /* For the FRV, this function makes sure that a function with far jumps
1418 will return correctly. It also does the VLIW packing. */
1419
1420 static void
1421 frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1422 {
1423 rtx_insn *insn, *next, *last_call;
1424
1425 /* If no frame was created, check whether the function uses a call
1426 instruction to implement a far jump. If so, save the link in gr3 and
1427 replace all returns to LR with returns to GR3. GR3 is used because it
1428      is call-clobbered, because it is not available to the register allocator,
1429 and because all functions that take a hidden argument pointer will have
1430 a stack frame. */
1431 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1432 {
1433 rtx_insn *insn;
1434
1435 /* Just to check that the above comment is true. */
1436 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1437
1438 /* Generate the instruction that saves the link register. */
1439 fprintf (file, "\tmovsg lr,gr3\n");
1440
1441 /* Replace the LR with GR3 in *return_internal patterns. The insn
1442 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1443 simply emit a different assembly directive because bralr and jmpl
1444 execute in different units. */
1445 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1446 if (JUMP_P (insn))
1447 {
1448 rtx pattern = PATTERN (insn);
1449 if (GET_CODE (pattern) == PARALLEL
1450 && XVECLEN (pattern, 0) >= 2
1451 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1452 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1453 {
1454 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1455 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1456 SET_REGNO (address, GPR_FIRST + 3);
1457 }
1458 }
1459 }
1460
1461 frv_pack_insns ();
1462
1463 /* Allow the garbage collector to free the nops created by frv_reorg. */
1464 memset (frv_nops, 0, sizeof (frv_nops));
1465
1466 /* Locate CALL_ARG_LOCATION notes that have been misplaced
1467 and move them back to where they should be located. */
1468 last_call = NULL;
1469 for (insn = get_insns (); insn; insn = next)
1470 {
1471 next = NEXT_INSN (insn);
1472 if (CALL_P (insn)
1473 || (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE
1474 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
1475 last_call = insn;
1476
1477 if (!NOTE_P (insn) || NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION)
1478 continue;
1479
1480 if (NEXT_INSN (last_call) == insn)
1481 continue;
1482
1483 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1484 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1485 SET_PREV_INSN (insn) = last_call;
1486 SET_NEXT_INSN (insn) = NEXT_INSN (last_call);
1487 SET_PREV_INSN (NEXT_INSN (insn)) = insn;
1488 SET_NEXT_INSN (PREV_INSN (insn)) = insn;
1489 last_call = insn;
1490 }
1491 }
1492
1493 \f
1494 /* Return the next available temporary register in a given class. */
1495
1496 static rtx
1497 frv_alloc_temp_reg (
1498 frv_tmp_reg_t *info, /* which registers are available */
1499 enum reg_class rclass, /* register class desired */
1500 machine_mode mode, /* mode to allocate register with */
1501 int mark_as_used, /* register not available after allocation */
1502 int no_abort) /* return NULL instead of aborting */
1503 {
1504 int regno = info->next_reg[ (int)rclass ];
1505 int orig_regno = regno;
1506 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1507 int i, nr;
1508
1509 for (;;)
1510 {
1511 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1512 && TEST_HARD_REG_BIT (info->regs, regno))
1513 break;
1514
1515 if (++regno >= FIRST_PSEUDO_REGISTER)
1516 regno = 0;
1517 if (regno == orig_regno)
1518 {
1519 gcc_assert (no_abort);
1520 return NULL_RTX;
1521 }
1522 }
1523
1524 nr = HARD_REGNO_NREGS (regno, mode);
1525 info->next_reg[ (int)rclass ] = regno + nr;
1526
1527 if (mark_as_used)
1528 for (i = 0; i < nr; i++)
1529 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1530
1531 return gen_rtx_REG (mode, regno);
1532 }
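/* Usage sketch (added for illustration; the set-up of INFO is abbreviated).
   A caller seeds the pool of candidate registers and then draws scratch
   registers from it one class at a time:

     frv_tmp_reg_t info;
     rtx tmp;

     CLEAR_HARD_REG_SET (info.regs);
     ... set the bits in info.regs for registers the caller may clobber ...
     memset (info.next_reg, 0, sizeof (info.next_reg));

     tmp = frv_alloc_temp_reg (&info, GPR_REGS, SImode, TRUE, FALSE);

   With mark_as_used nonzero the register is removed from the pool, and
   with no_abort zero the call asserts rather than returning NULL_RTX when
   the pool is exhausted.  */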
1533
1534 \f
1535 /* Return an rtx with the value OFFSET, which will either be a register or a
1536 signed 12-bit integer. It can be used as the second operand in an "add"
1537 instruction, or as the index in a load or store.
1538
1539 The function returns a constant rtx if OFFSET is small enough, otherwise
1540 it loads the constant into register OFFSET_REGNO and returns that. */
1541 static rtx
1542 frv_frame_offset_rtx (int offset)
1543 {
1544 rtx offset_rtx = GEN_INT (offset);
1545 if (IN_RANGE (offset, -2048, 2047))
1546 return offset_rtx;
1547 else
1548 {
1549 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1550 if (IN_RANGE (offset, -32768, 32767))
1551 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1552 else
1553 {
1554 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1555 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1556 }
1557 return reg_rtx;
1558 }
1559 }
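/* Worked example (added for illustration): an offset of 2000 is within the
   signed 12-bit range [-2048, 2047] and is returned directly as a
   const_int.  4000 is not, but fits in 16 bits, so it is loaded into
   OFFSET_REGNO with a single gen_movsi.  40000 needs the two-instruction
   gen_movsi_high / gen_movsi_lo_sum sequence; in both of the latter cases
   the function returns the OFFSET_REGNO register instead.  */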
1560
1561 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
1562    prologue and epilogue use such expressions to access the stack.  */
1563 static rtx
1564 frv_frame_mem (machine_mode mode, rtx base, int offset)
1565 {
1566 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1567 base,
1568 frv_frame_offset_rtx (offset)));
1569 }
1570
1571 /* Generate a frame-related expression:
1572
1573         (set (mem (plus (sp) (const_int OFFSET))) REG).
1574
1575 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1576 instructions. Marking the expressions as frame-related is superfluous if
1577 the note contains just a single set. But if the note contains a PARALLEL
1578 or SEQUENCE that has several sets, each set must be individually marked
1579 as frame-related. */
1580 static rtx
1581 frv_dwarf_store (rtx reg, int offset)
1582 {
1583 rtx set = gen_rtx_SET (VOIDmode,
1584 gen_rtx_MEM (GET_MODE (reg),
1585 plus_constant (Pmode, stack_pointer_rtx,
1586 offset)),
1587 reg);
1588 RTX_FRAME_RELATED_P (set) = 1;
1589 return set;
1590 }
1591
1592 /* Emit a frame-related instruction whose pattern is PATTERN. The
1593 instruction is the last in a sequence that cumulatively performs the
1594 operation described by DWARF_PATTERN. The instruction is marked as
1595 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1596 DWARF_PATTERN. */
1597 static void
1598 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1599 {
1600 rtx insn = emit_insn (pattern);
1601 RTX_FRAME_RELATED_P (insn) = 1;
1602 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1603 dwarf_pattern,
1604 REG_NOTES (insn));
1605 }
1606
1607 /* Emit instructions that transfer REG to or from the memory location (sp +
1608 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1609 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1610 function to store registers and only the epilogue uses it to load them.
1611
1612 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1613 The generated instruction will use BASE as its base register. BASE may
1614 simply be the stack pointer, but if several accesses are being made to a
1615 region far away from the stack pointer, it may be more efficient to set
1616 up a temporary instead.
1617
1618 Store instructions will be frame-related and will be annotated with the
1619 overall effect of the store. Load instructions will be followed by a
1620 (use) to prevent later optimizations from zapping them.
1621
1622 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1623 as a temporary in such cases. */
1624 static void
1625 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1626 {
1627 machine_mode mode = GET_MODE (reg);
1628 rtx mem = frv_frame_mem (mode,
1629 accessor->base,
1630 stack_offset - accessor->base_offset);
1631
1632 if (accessor->op == FRV_LOAD)
1633 {
1634 if (SPR_P (REGNO (reg)))
1635 {
1636 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1637 emit_insn (gen_rtx_SET (VOIDmode, temp, mem));
1638 emit_insn (gen_rtx_SET (VOIDmode, reg, temp));
1639 }
1640 else
1641 {
1642 /* We cannot use reg+reg addressing for DImode access. */
1643 if (mode == DImode
1644 && GET_CODE (XEXP (mem, 0)) == PLUS
1645 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1646 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1647 {
1648 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1649
1650 emit_move_insn (temp,
1651 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1652 XEXP (XEXP (mem, 0), 1)));
1653 mem = gen_rtx_MEM (DImode, temp);
1654 }
1655 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1656 }
1657 emit_use (reg);
1658 }
1659 else
1660 {
1661 if (SPR_P (REGNO (reg)))
1662 {
1663 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1664 emit_insn (gen_rtx_SET (VOIDmode, temp, reg));
1665 frv_frame_insn (gen_rtx_SET (Pmode, mem, temp),
1666 frv_dwarf_store (reg, stack_offset));
1667 }
1668 else if (mode == DImode)
1669 {
1670 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1671 with a separate save for each register. */
1672 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1673 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1674 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1675 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1676
1677 /* Also we cannot use reg+reg addressing. */
1678 if (GET_CODE (XEXP (mem, 0)) == PLUS
1679 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1680 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1681 {
1682 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1683 emit_move_insn (temp,
1684 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1685 XEXP (XEXP (mem, 0), 1)));
1686 mem = gen_rtx_MEM (DImode, temp);
1687 }
1688
1689 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1690 gen_rtx_PARALLEL (VOIDmode,
1691 gen_rtvec (2, set1, set2)));
1692 }
1693 else
1694 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1695 frv_dwarf_store (reg, stack_offset));
1696 }
1697 }
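/* One concrete instance of the SPR handling above (assuming LR is marked
   for saving): when the prologue saves LR, the SPR is first copied into
   TEMP_REGNO and the temporary GPR is then stored to the stack slot, since
   SPR values are routed through a GPR rather than accessed from memory
   directly.  Loads work the same way in reverse.  */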
1698
1699 /* A function that uses frv_frame_access to transfer a group of registers to
1700 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1701 is the stack information generated by frv_stack_info, and REG_SET is the
1702 number of the register set to transfer. */
1703 static void
1704 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1705 frv_stack_t *info,
1706 int reg_set)
1707 {
1708 frv_stack_regs_t *regs_info;
1709 int regno;
1710
1711 regs_info = &info->regs[reg_set];
1712 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1713 if (info->save_p[regno])
1714 frv_frame_access (accessor,
1715 info->save_p[regno] == REG_SAVE_2WORDS
1716 ? gen_rtx_REG (DImode, regno)
1717 : gen_rtx_REG (SImode, regno),
1718 info->reg_offset[regno]);
1719 }
1720
1721 /* Save or restore callee-saved registers that are kept outside the frame
1722 header. The function saves the registers if OP is FRV_STORE and restores
1723 them if OP is FRV_LOAD. INFO is the stack information generated by
1724 frv_stack_info. */
1725 static void
1726 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1727 {
1728 frv_frame_accessor_t accessor;
1729
1730 accessor.op = op;
1731 accessor.base = stack_pointer_rtx;
1732 accessor.base_offset = 0;
1733 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1734 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1735 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1736 }
1737
1738
1739 /* Called after register allocation to add any instructions needed for the
1740 prologue. Using a prologue insn is favored compared to putting all of the
1741 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1742 it allows the scheduler to intermix instructions with the saves of
1743 the call-saved registers. In some cases, it might be necessary
1744 to emit a barrier instruction as the last insn to prevent such
1745 scheduling.
1746
1747 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1748 so that the debug info generation code can handle them properly. */
1749 void
1750 frv_expand_prologue (void)
1751 {
1752 frv_stack_t *info = frv_stack_info ();
1753 rtx sp = stack_pointer_rtx;
1754 rtx fp = frame_pointer_rtx;
1755 frv_frame_accessor_t accessor;
1756
1757 if (TARGET_DEBUG_STACK)
1758 frv_debug_stack (info);
1759
1760 if (flag_stack_usage_info)
1761 current_function_static_stack_size = info->total_size;
1762
1763 if (info->total_size == 0)
1764 return;
1765
1766 /* We're interested in three areas of the frame here:
1767
1768 A: the register save area
1769 B: the old FP
1770 C: the header after B
1771
1772 If the frame pointer isn't used, we'll have to set up A, B and C
1773 using the stack pointer. If the frame pointer is used, we'll access
1774 them as follows:
1775
1776 A: set up using sp
1777 B: set up using sp or a temporary (see below)
1778 C: set up using fp
1779
1780 We set up B using the stack pointer if the frame is small enough.
1781 Otherwise, it's more efficient to copy the old stack pointer into a
1782 temporary and use that.
1783
1784 Note that it's important to make sure the prologue and epilogue use the
1785 same registers to access A and C, since doing otherwise will confuse
1786 the aliasing code. */
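/* A purely illustrative example of the scheme above: for a function with a
   3000-byte frame that needs a frame pointer, the prologue copies the
   incoming sp into OLD_SP_REGNO and addresses B through that copy
   (base_offset 3000), while A is still addressed through sp and C through
   the new fp.  */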
1787
1788 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1789 isn't used, the same method will serve for C. */
1790 accessor.op = FRV_STORE;
1791 if (frame_pointer_needed && info->total_size > 2048)
1792 {
1793 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1794 accessor.base_offset = info->total_size;
1795 emit_insn (gen_movsi (accessor.base, sp));
1796 }
1797 else
1798 {
1799 accessor.base = stack_pointer_rtx;
1800 accessor.base_offset = 0;
1801 }
1802
1803 /* Allocate the stack space. */
1804 {
1805 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1806 rtx dwarf_offset = GEN_INT (-info->total_size);
1807
1808 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1809 gen_rtx_SET (Pmode,
1810 sp,
1811 gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1812 }
1813
1814 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1815 and point the new one to that location. */
1816 if (frame_pointer_needed)
1817 {
1818 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1819
1820 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1821 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1822 pointer. */
1823 rtx asm_src = plus_constant (Pmode, accessor.base,
1824 fp_offset - accessor.base_offset);
1825 rtx dwarf_src = plus_constant (Pmode, sp, fp_offset);
1826
1827 /* Store the old frame pointer at (sp + FP_OFFSET). */
1828 frv_frame_access (&accessor, fp, fp_offset);
1829
1830 /* Set up the new frame pointer. */
1831 frv_frame_insn (gen_rtx_SET (VOIDmode, fp, asm_src),
1832 gen_rtx_SET (VOIDmode, fp, dwarf_src));
1833
1834 /* Access region C from the frame pointer. */
1835 accessor.base = fp;
1836 accessor.base_offset = fp_offset;
1837 }
1838
1839 /* Set up region C. */
1840 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1841 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1842 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1843
1844 /* Set up region A. */
1845 frv_frame_access_standard_regs (FRV_STORE, info);
1846
1847 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1848 scheduler from moving loads before the stores saving the registers. */
1849 if (info->stdarg_size > 0)
1850 emit_insn (gen_blockage ());
1851
1852 /* Set up pic register/small data register for this function. */
1853 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1854 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1855 gen_rtx_REG (Pmode, LR_REGNO),
1856 gen_rtx_REG (SImode, OFFSET_REGNO)));
1857 }
1858
1859 \f
1860 /* Under frv, all of the work is done via frv_expand_epilogue, but
1861 this function provides a convenient place to do cleanup. */
1862
1863 static void
1864 frv_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
1865 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1866 {
1867 frv_stack_cache = (frv_stack_t *)0;
1868
1869 /* Zap last used registers for conditional execution. */
1870 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1871
1872 /* Release the bitmap of created insns. */
1873 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1874 }
1875
1876 \f
1877 /* Called after register allocation to add any instructions needed for the
1878 epilogue. Using an epilogue insn is favored compared to putting all of the
1879 instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1880 it allows the scheduler to intermix instructions with the restores of
1881 the call-saved registers. In some cases, it might be necessary
1882 to emit a barrier instruction as the last insn to prevent such
1883 scheduling. */
1884
1885 void
1886 frv_expand_epilogue (bool emit_return)
1887 {
1888 frv_stack_t *info = frv_stack_info ();
1889 rtx fp = frame_pointer_rtx;
1890 rtx sp = stack_pointer_rtx;
1891 rtx return_addr;
1892 int fp_offset;
1893
1894 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1895
1896 /* Restore the stack pointer to its original value if alloca or the like
1897 is used. */
1898 if (! crtl->sp_is_unchanging)
1899 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1900
1901 /* Restore the callee-saved registers that were used in this function. */
1902 frv_frame_access_standard_regs (FRV_LOAD, info);
1903
1904 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1905 no return instruction should be emitted. */
1906 if (info->save_p[LR_REGNO])
1907 {
1908 int lr_offset;
1909 rtx mem;
1910
1911 /* Use the same method to access the link register's slot as we did in
1912 the prologue. In other words, use the frame pointer if available,
1913 otherwise use the stack pointer.
1914
1915 LR_OFFSET is the offset of the link register's slot from the start
1916 of the frame and MEM is a memory rtx for it. */
1917 lr_offset = info->reg_offset[LR_REGNO];
1918 if (frame_pointer_needed)
1919 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1920 else
1921 mem = frv_frame_mem (Pmode, sp, lr_offset);
1922
1923 /* Load the old link register into a GPR. */
1924 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1925 emit_insn (gen_rtx_SET (VOIDmode, return_addr, mem));
1926 }
1927 else
1928 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1929
1930 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1931 the load is preserved. */
1932 if (frame_pointer_needed)
1933 {
1934 emit_insn (gen_rtx_SET (VOIDmode, fp, gen_rtx_MEM (Pmode, fp)));
1935 emit_use (fp);
1936 }
1937
1938 /* Deallocate the stack frame. */
1939 if (info->total_size != 0)
1940 {
1941 rtx offset = frv_frame_offset_rtx (info->total_size);
1942 emit_insn (gen_stack_adjust (sp, sp, offset));
1943 }
1944
1945 /* If this function uses eh_return, add the final stack adjustment now. */
1946 if (crtl->calls_eh_return)
1947 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1948
1949 if (emit_return)
1950 emit_jump_insn (gen_epilogue_return (return_addr));
1951 else
1952 {
1953 rtx lr = return_addr;
1954
1955 if (REGNO (return_addr) != LR_REGNO)
1956 {
1957 lr = gen_rtx_REG (Pmode, LR_REGNO);
1958 emit_move_insn (lr, return_addr);
1959 }
1960
1961 emit_use (lr);
1962 }
1963 }
1964
1965 \f
1966 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
1967
1968 static void
1969 frv_asm_output_mi_thunk (FILE *file,
1970 tree thunk_fndecl ATTRIBUTE_UNUSED,
1971 HOST_WIDE_INT delta,
1972 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1973 tree function)
1974 {
1975 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
1976 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
1977 const char *name_jmp = reg_names[JUMP_REGNO];
1978 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
1979
1980 /* Do the add using an addi if possible. */
1981 if (IN_RANGE (delta, -2048, 2047))
1982 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
1983 else
1984 {
1985 const char *const name_add = reg_names[TEMP_REGNO];
1986 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1987 parallel, delta, name_add);
1988 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1989 delta, name_add);
1990 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
1991 }
1992
1993 if (TARGET_FDPIC)
1994 {
1995 const char *name_pic = reg_names[FDPIC_REGNO];
1996 name_jmp = reg_names[FDPIC_FPTR_REGNO];
1997
1998 if (flag_pic != 1)
1999 {
2000 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
2001 assemble_name (file, name_func);
2002 fprintf (file, "),%s\n", name_jmp);
2003
2004 fprintf (file, "\tsetlo #gotofffuncdesclo(");
2005 assemble_name (file, name_func);
2006 fprintf (file, "),%s\n", name_jmp);
2007
2008 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
2009 }
2010 else
2011 {
2012 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
2013 assemble_name (file, name_func);
2014 fprintf (file, "\t)), %s\n", name_jmp);
2015 }
2016 }
2017 else if (!flag_pic)
2018 {
2019 fprintf (file, "\tsethi%s #hi(", parallel);
2020 assemble_name (file, name_func);
2021 fprintf (file, "),%s\n", name_jmp);
2022
2023 fprintf (file, "\tsetlo #lo(");
2024 assemble_name (file, name_func);
2025 fprintf (file, "),%s\n", name_jmp);
2026 }
2027 else
2028 {
2029 /* Use JUMP_REGNO as a temporary PIC register. */
2030 const char *name_lr = reg_names[LR_REGNO];
2031 const char *name_gppic = name_jmp;
2032 const char *name_tmp = reg_names[TEMP_REGNO];
2033
2034 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2035 fprintf (file, "\tcall 1f\n");
2036 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2037 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2038 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2039 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2040 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2041
2042 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2043 assemble_name (file, name_func);
2044 fprintf (file, "),%s\n", name_tmp);
2045
2046 fprintf (file, "\tsetlo #gprello(");
2047 assemble_name (file, name_func);
2048 fprintf (file, "),%s\n", name_tmp);
2049
2050 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2051 }
2052
2053 /* Jump to the function address. */
2054 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2055 }
2056
2057 \f
2058
2059 /* On frv, create a frame whenever the function needs to allocate stack space. */
2060
2061 static bool
2062 frv_frame_pointer_required (void)
2063 {
2064 /* If we are forgoing the usual linkage requirements, we only need
2065 a frame pointer if the stack pointer might change. */
2066 if (!TARGET_LINKED_FP)
2067 return !crtl->sp_is_unchanging;
2068
2069 if (! crtl->is_leaf)
2070 return true;
2071
2072 if (get_frame_size () != 0)
2073 return true;
2074
2075 if (cfun->stdarg)
2076 return true;
2077
2078 if (!crtl->sp_is_unchanging)
2079 return true;
2080
2081 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2082 return true;
2083
2084 if (profile_flag)
2085 return true;
2086
2087 if (cfun->machine->frame_needed)
2088 return true;
2089
2090 return false;
2091 }
2092
2093 \f
2094 /* Worker function for TARGET_CAN_ELIMINATE. */
2095
2096 bool
2097 frv_can_eliminate (const int from, const int to)
2098 {
2099 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2100 ? ! frame_pointer_needed
2101 : true);
2102 }
2103
2104 /* This macro is similar to `INITIAL_FRAME_POINTER_OFFSET'. It specifies the
2105 initial difference between the specified pair of registers. This macro must
2106 be defined if `ELIMINABLE_REGS' is defined. */
2107
2108 /* See frv_stack_info for more details on the frv stack frame. */
2109
2110 int
2111 frv_initial_elimination_offset (int from, int to)
2112 {
2113 frv_stack_t *info = frv_stack_info ();
2114 int ret = 0;
2115
2116 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2117 ret = info->total_size - info->pretend_size;
2118
2119 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2120 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2121
2122 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2123 ret = (info->total_size
2124 - info->reg_offset[FRAME_POINTER_REGNUM]
2125 - info->pretend_size);
2126
2127 else
2128 gcc_unreachable ();
2129
2130 if (TARGET_DEBUG_STACK)
2131 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2132 reg_names [from], reg_names[to], ret);
2133
2134 return ret;
2135 }
2136
2137 \f
2138 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2139
2140 static void
2141 frv_setup_incoming_varargs (cumulative_args_t cum_v,
2142 machine_mode mode,
2143 tree type ATTRIBUTE_UNUSED,
2144 int *pretend_size,
2145 int second_time)
2146 {
2147 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2148
2149 if (TARGET_DEBUG_ARG)
2150 fprintf (stderr,
2151 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2152 *cum, GET_MODE_NAME (mode), *pretend_size, second_time);
2153 }
2154
2155 \f
2156 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2157
2158 static rtx
2159 frv_expand_builtin_saveregs (void)
2160 {
2161 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2162
2163 if (TARGET_DEBUG_ARG)
2164 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2165 offset);
2166
2167 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2168 }
2169
2170 \f
2171 /* Expand __builtin_va_start to do the va_start macro. */
2172
2173 static void
2174 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2175 {
2176 tree t;
2177 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2178
2179 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2180 GEN_INT (UNITS_PER_WORD * num));
2181
2182 if (TARGET_DEBUG_ARG)
2183 {
2184 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2185 crtl->args.info, num);
2186
2187 debug_rtx (nextarg);
2188 }
2189
2190 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2191 fold_convert (TREE_TYPE (valist),
2192 make_tree (sizetype, nextarg)));
2193 TREE_SIDE_EFFECTS (t) = 1;
2194
2195 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2196 }
2197
2198 \f
2199 /* Expand a block move operation, and return 1 if successful. Return 0
2200 if we should let the compiler generate normal code.
2201
2202 operands[0] is the destination
2203 operands[1] is the source
2204 operands[2] is the length
2205 operands[3] is the alignment */
2206
2207 /* Maximum number of loads to do before doing the stores */
2208 #ifndef MAX_MOVE_REG
2209 #define MAX_MOVE_REG 4
2210 #endif
2211
2212 /* Maximum number of total loads to do. */
2213 #ifndef TOTAL_MOVE_REG
2214 #define TOTAL_MOVE_REG 8
2215 #endif
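/* A worked example of the splitting below (sizes illustrative): a 10-byte
   copy with 4-byte alignment is split into two SImode moves and one HImode
   move (move_bytes = 4, 4, 2).  The loads are emitted as they are
   generated, but the matching stores are buffered in stores[] and flushed
   in groups of at most MAX_MOVE_REG, so that several loads are issued
   before their corresponding stores.  */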
2216
2217 int
2218 frv_expand_block_move (rtx operands[])
2219 {
2220 rtx orig_dest = operands[0];
2221 rtx orig_src = operands[1];
2222 rtx bytes_rtx = operands[2];
2223 rtx align_rtx = operands[3];
2224 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2225 int align;
2226 int bytes;
2227 int offset;
2228 int num_reg;
2229 int i;
2230 rtx src_reg;
2231 rtx dest_reg;
2232 rtx src_addr;
2233 rtx dest_addr;
2234 rtx src_mem;
2235 rtx dest_mem;
2236 rtx tmp_reg;
2237 rtx stores[MAX_MOVE_REG];
2238 int move_bytes;
2239 machine_mode mode;
2240
2241 /* If this is not a fixed size move, just call memcpy. */
2242 if (! constp)
2243 return FALSE;
2244
2245 /* This should be a fixed size alignment. */
2246 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2247
2248 align = INTVAL (align_rtx);
2249
2250 /* Anything to move? */
2251 bytes = INTVAL (bytes_rtx);
2252 if (bytes <= 0)
2253 return TRUE;
2254
2255 /* Don't support really large moves. */
2256 if (bytes > TOTAL_MOVE_REG*align)
2257 return FALSE;
2258
2259 /* Move the address into scratch registers. */
2260 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2261 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2262
2263 num_reg = offset = 0;
2264 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2265 {
2266 /* Calculate the correct offset for src/dest. */
2267 if (offset == 0)
2268 {
2269 src_addr = src_reg;
2270 dest_addr = dest_reg;
2271 }
2272 else
2273 {
2274 src_addr = plus_constant (Pmode, src_reg, offset);
2275 dest_addr = plus_constant (Pmode, dest_reg, offset);
2276 }
2277
2278 /* Generate the appropriate load and store, saving the stores
2279 for later. */
2280 if (bytes >= 4 && align >= 4)
2281 mode = SImode;
2282 else if (bytes >= 2 && align >= 2)
2283 mode = HImode;
2284 else
2285 mode = QImode;
2286
2287 move_bytes = GET_MODE_SIZE (mode);
2288 tmp_reg = gen_reg_rtx (mode);
2289 src_mem = change_address (orig_src, mode, src_addr);
2290 dest_mem = change_address (orig_dest, mode, dest_addr);
2291 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg, src_mem));
2292 stores[num_reg++] = gen_rtx_SET (VOIDmode, dest_mem, tmp_reg);
2293
2294 if (num_reg >= MAX_MOVE_REG)
2295 {
2296 for (i = 0; i < num_reg; i++)
2297 emit_insn (stores[i]);
2298 num_reg = 0;
2299 }
2300 }
2301
2302 for (i = 0; i < num_reg; i++)
2303 emit_insn (stores[i]);
2304
2305 return TRUE;
2306 }
2307
2308 \f
2309 /* Expand a block clear operation, and return 1 if successful. Return 0
2310 if we should let the compiler generate normal code.
2311
2312 operands[0] is the destination
2313 operands[1] is the length
2314 operands[3] is the alignment */
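/* For example (sizes illustrative), a 6-byte clear with 2-byte alignment
   is emitted as three HImode stores of gr0, and a 16-byte clear with
   4-byte alignment as four SImode stores.  */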
2315
2316 int
2317 frv_expand_block_clear (rtx operands[])
2318 {
2319 rtx orig_dest = operands[0];
2320 rtx bytes_rtx = operands[1];
2321 rtx align_rtx = operands[3];
2322 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2323 int align;
2324 int bytes;
2325 int offset;
2326 rtx dest_reg;
2327 rtx dest_addr;
2328 rtx dest_mem;
2329 int clear_bytes;
2330 machine_mode mode;
2331
2332 /* If this is not a fixed size clear, just call memset. */
2333 if (! constp)
2334 return FALSE;
2335
2336 /* This should be a fixed size alignment. */
2337 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2338
2339 align = INTVAL (align_rtx);
2340
2341 /* Anything to move? */
2342 bytes = INTVAL (bytes_rtx);
2343 if (bytes <= 0)
2344 return TRUE;
2345
2346 /* Don't support really large clears. */
2347 if (bytes > TOTAL_MOVE_REG*align)
2348 return FALSE;
2349
2350 /* Move the address into a scratch register. */
2351 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2352
2353 offset = 0;
2354 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2355 {
2356 /* Calculate the correct offset for src/dest. */
2357 dest_addr = ((offset == 0)
2358 ? dest_reg
2359 : plus_constant (Pmode, dest_reg, offset));
2360
2361 /* Generate the appropriate store of gr0. */
2362 if (bytes >= 4 && align >= 4)
2363 mode = SImode;
2364 else if (bytes >= 2 && align >= 2)
2365 mode = HImode;
2366 else
2367 mode = QImode;
2368
2369 clear_bytes = GET_MODE_SIZE (mode);
2370 dest_mem = change_address (orig_dest, mode, dest_addr);
2371 emit_insn (gen_rtx_SET (VOIDmode, dest_mem, const0_rtx));
2372 }
2373
2374 return TRUE;
2375 }
2376
2377 \f
2378 /* The following variable holds the operands of the current output insn;
2379 it is used when printing assembler code modifiers for that insn. */
2380
2381 static rtx *frv_insn_operands;
2382
2383 /* The following function adds the assembler insn suffix ".p" to the
2384 current opcode when it is necessary. */
2385
2386 const char *
2387 frv_asm_output_opcode (FILE *f, const char *ptr)
2388 {
2389 int c;
2390
2391 if (frv_insn_packing_flag <= 0)
2392 return ptr;
2393
2394 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2395 {
2396 c = *ptr++;
2397 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2398 || (*ptr >= 'A' && *ptr <= 'Z')))
2399 {
2400 int letter = *ptr++;
2401
2402 c = atoi (ptr);
2403 frv_print_operand (f, frv_insn_operands [c], letter);
2404 while ((c = *ptr) >= '0' && c <= '9')
2405 ptr++;
2406 }
2407 else
2408 fputc (c, f);
2409 }
2410
2411 fprintf (f, ".p");
2412
2413 return ptr;
2414 }
2415
2416 /* Set up the packing bit for the current output insn. Note that this
2417 function is not called for asm insns. */
2418
2419 void
2420 frv_final_prescan_insn (rtx_insn *insn, rtx *opvec,
2421 int noperands ATTRIBUTE_UNUSED)
2422 {
2423 if (INSN_P (insn))
2424 {
2425 if (frv_insn_packing_flag >= 0)
2426 {
2427 frv_insn_operands = opvec;
2428 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2429 }
2430 else if (recog_memoized (insn) >= 0
2431 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2432 /* Packing optimizations have been disabled, but INSN can only
2433 be issued in M1. Insert an mnop in M0. */
2434 fprintf (asm_out_file, "\tmnop.p\n");
2435 }
2436 }
2437
2438
2439 \f
2440 /* A C expression whose value is RTL representing the address in a stack frame
2441 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2442 an RTL expression for the address of the stack frame itself.
2443
2444 If you don't define this macro, the default is to return the value of
2445 FRAMEADDR--that is, the stack frame address is also the address of the stack
2446 word that points to the previous frame. */
2447
2448 /* The default is correct, but we need to make sure the frame gets created. */
2449 rtx
2450 frv_dynamic_chain_address (rtx frame)
2451 {
2452 cfun->machine->frame_needed = 1;
2453 return frame;
2454 }
2455
2456
2457 /* A C expression whose value is RTL representing the value of the return
2458 address for the frame COUNT steps up from the current frame, after the
2459 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2460 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2461 defined.
2462
2463 The value of the expression must always be the correct address when COUNT is
2464 zero, but may be `NULL_RTX' if there is no way to determine the return
2465 address of other frames. */
2466
2467 rtx
2468 frv_return_addr_rtx (int count, rtx frame)
2469 {
2470 if (count != 0)
2471 return const0_rtx;
2472 cfun->machine->frame_needed = 1;
2473 return gen_rtx_MEM (Pmode, plus_constant (Pmode, frame, 8));
2474 }
2475
2476 /* Given a memory reference MEMREF, interpret the referenced memory as
2477 an array of MODE values, and return a reference to the element
2478 specified by INDEX. Assume that any pre-modification implicit in
2479 MEMREF has already happened.
2480
2481 MEMREF must be a legitimate operand for modes larger than SImode.
2482 frv_legitimate_address_p forbids register+register addresses, which
2483 this function cannot handle. */
2484 rtx
2485 frv_index_memory (rtx memref, machine_mode mode, int index)
2486 {
2487 rtx base = XEXP (memref, 0);
2488 if (GET_CODE (base) == PRE_MODIFY)
2489 base = XEXP (base, 0);
2490 return change_address (memref, mode,
2491 plus_constant (Pmode, base,
2492 index * GET_MODE_SIZE (mode)));
2493 }
2494
2495 \f
2496 /* Print a memory address as an operand to reference that memory location. */
2497 static void
2498 frv_print_operand_address (FILE * stream, rtx x)
2499 {
2500 if (GET_CODE (x) == MEM)
2501 x = XEXP (x, 0);
2502
2503 switch (GET_CODE (x))
2504 {
2505 case REG:
2506 fputs (reg_names [ REGNO (x)], stream);
2507 return;
2508
2509 case CONST_INT:
2510 fprintf (stream, "%ld", (long) INTVAL (x));
2511 return;
2512
2513 case SYMBOL_REF:
2514 assemble_name (stream, XSTR (x, 0));
2515 return;
2516
2517 case LABEL_REF:
2518 case CONST:
2519 output_addr_const (stream, x);
2520 return;
2521
2522 case PLUS:
2523 /* Poorly constructed asm statements can trigger this alternative.
2524 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2525 frv_print_operand_memory_reference (stream, x, 0);
2526 return;
2527
2528 default:
2529 break;
2530 }
2531
2532 fatal_insn ("bad insn to frv_print_operand_address:", x);
2533 }
2534
2535 \f
2536 static void
2537 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2538 {
2539 int regno = true_regnum (x);
2540 if (GPR_P (regno))
2541 fputs (reg_names[regno], stream);
2542 else
2543 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2544 }
2545
2546 /* Print a memory reference suitable for the ld/st instructions. */
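/* For example, a (plus (reg gr5) (const_int 8)) address is printed as
   "@(gr5,8)", a lone register as "@(gr5,gr0)", and a pre_modify address
   prints its base register and index register.  (Register numbers here are
   only illustrative.)  */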
2547
2548 static void
2549 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2550 {
2551 struct frv_unspec unspec;
2552 rtx x0 = NULL_RTX;
2553 rtx x1 = NULL_RTX;
2554
2555 switch (GET_CODE (x))
2556 {
2557 case SUBREG:
2558 case REG:
2559 x0 = x;
2560 break;
2561
2562 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2563 x0 = XEXP (x, 0);
2564 x1 = XEXP (XEXP (x, 1), 1);
2565 break;
2566
2567 case CONST_INT:
2568 x1 = x;
2569 break;
2570
2571 case PLUS:
2572 x0 = XEXP (x, 0);
2573 x1 = XEXP (x, 1);
2574 if (GET_CODE (x0) == CONST_INT)
2575 {
2576 x0 = XEXP (x, 1);
2577 x1 = XEXP (x, 0);
2578 }
2579 break;
2580
2581 default:
2582 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2583 break;
2584
2585 }
2586
2587 if (addr_offset)
2588 {
2589 if (!x1)
2590 x1 = const0_rtx;
2591 else if (GET_CODE (x1) != CONST_INT)
2592 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2593 }
2594
2595 fputs ("@(", stream);
2596 if (!x0)
2597 fputs (reg_names[GPR_R0], stream);
2598 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2599 frv_print_operand_memory_reference_reg (stream, x0);
2600 else
2601 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2602
2603 fputs (",", stream);
2604 if (!x1)
2605 fputs (reg_names [GPR_R0], stream);
2606
2607 else
2608 {
2609 switch (GET_CODE (x1))
2610 {
2611 case SUBREG:
2612 case REG:
2613 frv_print_operand_memory_reference_reg (stream, x1);
2614 break;
2615
2616 case CONST_INT:
2617 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2618 break;
2619
2620 case CONST:
2621 if (!frv_const_unspec_p (x1, &unspec))
2622 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2623 frv_output_const_unspec (stream, &unspec);
2624 break;
2625
2626 default:
2627 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2628 }
2629 }
2630
2631 fputs (")", stream);
2632 }
2633
2634 \f
2635 /* Return 2 for likely branches and 0 for non-likely branches */
2636
2637 #define FRV_JUMP_LIKELY 2
2638 #define FRV_JUMP_NOT_LIKELY 0
2639
2640 static int
2641 frv_print_operand_jump_hint (rtx_insn *insn)
2642 {
2643 rtx note;
2644 rtx labelref;
2645 int ret;
2646 int prob = -1;
2647 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2648
2649 gcc_assert (JUMP_P (insn));
2650
2651 /* Assume any non-conditional jump is likely. */
2652 if (! any_condjump_p (insn))
2653 ret = FRV_JUMP_LIKELY;
2654
2655 else
2656 {
2657 labelref = condjump_label (insn);
2658 if (labelref)
2659 {
2660 rtx label = XEXP (labelref, 0);
2661 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2662 ? BACKWARD
2663 : FORWARD);
2664 }
2665
2666 note = find_reg_note (insn, REG_BR_PROB, 0);
2667 if (!note)
2668 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2669
2670 else
2671 {
2672 prob = XINT (note, 0);
2673 ret = ((prob >= (REG_BR_PROB_BASE / 2))
2674 ? FRV_JUMP_LIKELY
2675 : FRV_JUMP_NOT_LIKELY);
2676 }
2677 }
2678
2679 #if 0
2680 if (TARGET_DEBUG)
2681 {
2682 char *direction;
2683
2684 switch (jump_type)
2685 {
2686 default:
2687 case UNKNOWN: direction = "unknown jump direction"; break;
2688 case BACKWARD: direction = "jump backward"; break;
2689 case FORWARD: direction = "jump forward"; break;
2690 }
2691
2692 fprintf (stderr,
2693 "%s: uid %ld, %s, probability = %d, max prob. = %d, hint = %d\n",
2694 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2695 (long)INSN_UID (insn), direction, prob,
2696 REG_BR_PROB_BASE, ret);
2697 }
2698 #endif
2699
2700 return ret;
2701 }
2702
2703 \f
2704 /* Return the comparison operator to use for CODE given that the ICC
2705 register is OP0. */
2706
2707 static const char *
2708 comparison_string (enum rtx_code code, rtx op0)
2709 {
2710 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2711 switch (code)
2712 {
2713 default: output_operand_lossage ("bad condition code");
2714 case EQ: return "eq";
2715 case NE: return "ne";
2716 case LT: return is_nz_p ? "n" : "lt";
2717 case LE: return "le";
2718 case GT: return "gt";
2719 case GE: return is_nz_p ? "p" : "ge";
2720 case LTU: return is_nz_p ? "no" : "c";
2721 case LEU: return is_nz_p ? "eq" : "ls";
2722 case GTU: return is_nz_p ? "ne" : "hi";
2723 case GEU: return is_nz_p ? "ra" : "nc";
2724 }
2725 }
2726
2727 /* Print an operand to an assembler instruction.
2728
2729 `%' followed by a letter and a digit says to output an operand in an
2730 alternate fashion. Four letters have standard, built-in meanings
2731 described below. The hook `TARGET_PRINT_OPERAND' can define
2732 additional letters with nonstandard meanings.
2733
2734 `%cDIGIT' can be used to substitute an operand that is a constant value
2735 without the syntax that normally indicates an immediate operand.
2736
2737 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2738 before printing.
2739
2740 `%aDIGIT' can be used to substitute an operand as if it were a memory
2741 reference, with the actual operand treated as the address. This may be
2742 useful when outputting a "load address" instruction, because often the
2743 assembler syntax for such an instruction requires you to write the operand
2744 as if it were a memory reference.
2745
2746 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2747
2748 `%=' outputs a number which is unique to each instruction in the entire
2749 compilation. This is useful for making local labels to be referred to more
2750 than once in a single template that generates multiple assembler
2751 instructions.
2752
2753 `%' followed by a punctuation character specifies a substitution that
2754 does not use an operand. Only one case is standard: `%%' outputs a
2755 `%' into the assembler code. Other nonstandard cases can be defined
2756 in the `TARGET_PRINT_OPERAND' hook. You must also define which
2757 punctuation characters are valid with the
2758 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */
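/* For example, "%z1" in a template prints gr0 when operand 1 is the
   constant zero and the register name otherwise, while "%I2" appends "i"
   when operand 2 is an immediate, so a pattern can select between e.g.
   "add" and "addi" forms.  (These templates are illustrative, not taken
   verbatim from frv.md.)  */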
2759
2760 static void
2761 frv_print_operand (FILE * file, rtx x, int code)
2762 {
2763 struct frv_unspec unspec;
2764 HOST_WIDE_INT value;
2765 int offset;
2766
2767 if (code != 0 && !ISALPHA (code))
2768 value = 0;
2769
2770 else if (GET_CODE (x) == CONST_INT)
2771 value = INTVAL (x);
2772
2773 else if (GET_CODE (x) == CONST_DOUBLE)
2774 {
2775 if (GET_MODE (x) == SFmode)
2776 {
2777 REAL_VALUE_TYPE rv;
2778 long l;
2779
2780 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2781 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2782 value = l;
2783 }
2784
2785 else if (GET_MODE (x) == VOIDmode)
2786 value = CONST_DOUBLE_LOW (x);
2787
2788 else
2789 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2790 }
2791
2792 else
2793 value = 0;
2794
2795 switch (code)
2796 {
2797
2798 case '.':
2799 /* Output r0. */
2800 fputs (reg_names[GPR_R0], file);
2801 break;
2802
2803 case '#':
2804 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2805 break;
2806
2807 case '@':
2808 /* Output small data area base register (gr16). */
2809 fputs (reg_names[SDA_BASE_REG], file);
2810 break;
2811
2812 case '~':
2813 /* Output pic register (gr17). */
2814 fputs (reg_names[PIC_REGNO], file);
2815 break;
2816
2817 case '*':
2818 /* Output the temporary integer CCR register. */
2819 fputs (reg_names[ICR_TEMP], file);
2820 break;
2821
2822 case '&':
2823 /* Output the temporary integer CC register. */
2824 fputs (reg_names[ICC_TEMP], file);
2825 break;
2826
2827 /* case 'a': print an address. */
2828
2829 case 'C':
2830 /* Print appropriate test for integer branch false operation. */
2831 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2832 XEXP (x, 0)), file);
2833 break;
2834
2835 case 'c':
2836 /* Print appropriate test for integer branch true operation. */
2837 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2838 break;
2839
2840 case 'e':
2841 /* Print 1 for a NE and 0 for an EQ to give the final argument
2842 for a conditional instruction. */
2843 if (GET_CODE (x) == NE)
2844 fputs ("1", file);
2845
2846 else if (GET_CODE (x) == EQ)
2847 fputs ("0", file);
2848
2849 else
2850 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2851 break;
2852
2853 case 'F':
2854 /* Print appropriate test for floating point branch false operation. */
2855 switch (GET_CODE (x))
2856 {
2857 default:
2858 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2859
2860 case EQ: fputs ("ne", file); break;
2861 case NE: fputs ("eq", file); break;
2862 case LT: fputs ("uge", file); break;
2863 case LE: fputs ("ug", file); break;
2864 case GT: fputs ("ule", file); break;
2865 case GE: fputs ("ul", file); break;
2866 }
2867 break;
2868
2869 case 'f':
2870 /* Print appropriate test for floating point branch true operation. */
2871 switch (GET_CODE (x))
2872 {
2873 default:
2874 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2875
2876 case EQ: fputs ("eq", file); break;
2877 case NE: fputs ("ne", file); break;
2878 case LT: fputs ("lt", file); break;
2879 case LE: fputs ("le", file); break;
2880 case GT: fputs ("gt", file); break;
2881 case GE: fputs ("ge", file); break;
2882 }
2883 break;
2884
2885 case 'g':
2886 /* Print appropriate GOT function. */
2887 if (GET_CODE (x) != CONST_INT)
2888 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2889 fputs (unspec_got_name (INTVAL (x)), file);
2890 break;
2891
2892 case 'I':
2893 /* Print 'i' if the operand is a constant, or is a memory reference that
2894 adds a constant. */
2895 if (GET_CODE (x) == MEM)
2896 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2897 ? XEXP (XEXP (x, 0), 1)
2898 : XEXP (x, 0));
2899 else if (GET_CODE (x) == PLUS)
2900 x = XEXP (x, 1);
2901
2902 switch (GET_CODE (x))
2903 {
2904 default:
2905 break;
2906
2907 case CONST_INT:
2908 case SYMBOL_REF:
2909 case CONST:
2910 fputs ("i", file);
2911 break;
2912 }
2913 break;
2914
2915 case 'i':
2916 /* For jump instructions, print 'i' if the operand is a constant or
2917 is an expression that adds a constant. */
2918 if (GET_CODE (x) == CONST_INT)
2919 fputs ("i", file);
2920
2921 else
2922 {
2923 if (GET_CODE (x) == CONST_INT
2924 || (GET_CODE (x) == PLUS
2925 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2926 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2927 fputs ("i", file);
2928 }
2929 break;
2930
2931 case 'L':
2932 /* Print the lower register of a double word register pair */
2933 if (GET_CODE (x) == REG)
2934 fputs (reg_names[ REGNO (x)+1 ], file);
2935 else
2936 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2937 break;
2938
2939 /* case 'l': print a LABEL_REF. */
2940
2941 case 'M':
2942 case 'N':
2943 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
2944 for the second word of double memory operations. */
2945 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
2946 switch (GET_CODE (x))
2947 {
2948 default:
2949 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
2950
2951 case MEM:
2952 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
2953 break;
2954
2955 case REG:
2956 case SUBREG:
2957 case CONST_INT:
2958 case PLUS:
2959 case SYMBOL_REF:
2960 frv_print_operand_memory_reference (file, x, offset);
2961 break;
2962 }
2963 break;
2964
2965 case 'O':
2966 /* Print the opcode of a command. */
2967 switch (GET_CODE (x))
2968 {
2969 default:
2970 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
2971
2972 case PLUS: fputs ("add", file); break;
2973 case MINUS: fputs ("sub", file); break;
2974 case AND: fputs ("and", file); break;
2975 case IOR: fputs ("or", file); break;
2976 case XOR: fputs ("xor", file); break;
2977 case ASHIFT: fputs ("sll", file); break;
2978 case ASHIFTRT: fputs ("sra", file); break;
2979 case LSHIFTRT: fputs ("srl", file); break;
2980 }
2981 break;
2982
2983 /* case 'n': negate and print a constant int. */
2984
2985 case 'P':
2986 /* Print PIC label using operand as the number. */
2987 if (GET_CODE (x) != CONST_INT)
2988 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
2989
2990 fprintf (file, ".LCF%ld", (long)INTVAL (x));
2991 break;
2992
2993 case 'U':
2994 /* Print 'u' if the operand is an update load/store. */
2995 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
2996 fputs ("u", file);
2997 break;
2998
2999 case 'z':
3000 /* If value is 0, print gr0, otherwise it must be a register. */
3001 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
3002 fputs (reg_names[GPR_R0], file);
3003
3004 else if (GET_CODE (x) == REG)
3005 fputs (reg_names [REGNO (x)], file);
3006
3007 else
3008 fatal_insn ("bad insn in frv_print_operand, z case", x);
3009 break;
3010
3011 case 'x':
3012 /* Print constant in hex. */
3013 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3014 {
3015 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
3016 break;
3017 }
3018
3019 /* Fall through. */
3020
3021 case '\0':
3022 if (GET_CODE (x) == REG)
3023 fputs (reg_names [REGNO (x)], file);
3024
3025 else if (GET_CODE (x) == CONST_INT
3026 || GET_CODE (x) == CONST_DOUBLE)
3027 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
3028
3029 else if (frv_const_unspec_p (x, &unspec))
3030 frv_output_const_unspec (file, &unspec);
3031
3032 else if (GET_CODE (x) == MEM)
3033 frv_print_operand_address (file, XEXP (x, 0));
3034
3035 else if (CONSTANT_ADDRESS_P (x))
3036 frv_print_operand_address (file, x);
3037
3038 else
3039 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3040
3041 break;
3042
3043 default:
3044 fatal_insn ("frv_print_operand: unknown code", x);
3045 break;
3046 }
3047
3048 return;
3049 }
3050
3051 static bool
3052 frv_print_operand_punct_valid_p (unsigned char code)
3053 {
3054 return (code == '.' || code == '#' || code == '@' || code == '~'
3055 || code == '*' || code == '&');
3056 }
3057
3058 \f
3059 /* A C statement (sans semicolon) for initializing the variable CUM for the
3060 state at the beginning of the argument list. The variable has type
3061 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3062 of the function which will receive the args, or 0 if the args are to a
3063 compiler support library function. The value of INDIRECT is nonzero when
3064 processing an indirect call, for example a call through a function pointer.
3065 The value of INDIRECT is zero for a call to an explicitly named function, a
3066 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3067 arguments for the function being compiled.
3068
3069 When processing a call to a compiler support library function, LIBNAME
3070 identifies which one. It is a `symbol_ref' rtx which contains the name of
3071 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3072 being processed. Thus, each time this macro is called, either LIBNAME or
3073 FNTYPE is nonzero, but never both of them at once. */
3074
3075 void
3076 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3077 tree fntype,
3078 rtx libname,
3079 tree fndecl,
3080 int incoming)
3081 {
3082 *cum = FIRST_ARG_REGNUM;
3083
3084 if (TARGET_DEBUG_ARG)
3085 {
3086 fprintf (stderr, "\ninit_cumulative_args:");
3087 if (!fndecl && fntype)
3088 fputs (" indirect", stderr);
3089
3090 if (incoming)
3091 fputs (" incoming", stderr);
3092
3093 if (fntype)
3094 {
3095 tree ret_type = TREE_TYPE (fntype);
3096 fprintf (stderr, " return=%s,",
3097 get_tree_code_name (TREE_CODE (ret_type)));
3098 }
3099
3100 if (libname && GET_CODE (libname) == SYMBOL_REF)
3101 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3102
3103 if (cfun->returns_struct)
3104 fprintf (stderr, " return-struct");
3105
3106 putc ('\n', stderr);
3107 }
3108 }
3109
3110 \f
3111 /* Return true if we should pass an argument on the stack rather than
3112 in registers. */
3113
3114 static bool
3115 frv_must_pass_in_stack (machine_mode mode, const_tree type)
3116 {
3117 if (mode == BLKmode)
3118 return true;
3119 if (type == NULL)
3120 return false;
3121 return AGGREGATE_TYPE_P (type);
3122 }
3123
3124 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3125 argument with the specified mode and type. If it is not defined,
3126 `PARM_BOUNDARY' is used for all arguments. */
3127
3128 static unsigned int
3129 frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
3130 const_tree type ATTRIBUTE_UNUSED)
3131 {
3132 return BITS_PER_WORD;
3133 }
3134
3135 static rtx
3136 frv_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
3137 const_tree type ATTRIBUTE_UNUSED, bool named,
3138 bool incoming ATTRIBUTE_UNUSED)
3139 {
3140 const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3141
3142 machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3143 int arg_num = *cum;
3144 rtx ret;
3145 const char *debstr;
3146
3147 /* Return a marker for use in the call instruction. */
3148 if (xmode == VOIDmode)
3149 {
3150 ret = const0_rtx;
3151 debstr = "<0>";
3152 }
3153
3154 else if (arg_num <= LAST_ARG_REGNUM)
3155 {
3156 ret = gen_rtx_REG (xmode, arg_num);
3157 debstr = reg_names[arg_num];
3158 }
3159
3160 else
3161 {
3162 ret = NULL_RTX;
3163 debstr = "memory";
3164 }
3165
3166 if (TARGET_DEBUG_ARG)
3167 fprintf (stderr,
3168 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3169 arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr);
3170
3171 return ret;
3172 }
3173
3174 static rtx
3175 frv_function_arg (cumulative_args_t cum, machine_mode mode,
3176 const_tree type, bool named)
3177 {
3178 return frv_function_arg_1 (cum, mode, type, named, false);
3179 }
3180
3181 static rtx
3182 frv_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
3183 const_tree type, bool named)
3184 {
3185 return frv_function_arg_1 (cum, mode, type, named, true);
3186 }
3187
3188 \f
3189 /* A C statement (sans semicolon) to update the summarizer variable CUM to
3190 advance past an argument in the argument list. The values MODE, TYPE and
3191 NAMED describe that argument. Once this is done, the variable CUM is
3192 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
3193
3194 This macro need not do anything if the argument in question was passed on
3195 the stack. The compiler knows how to track the amount of stack space used
3196 for arguments without any special help. */
3197
3198 static void
3199 frv_function_arg_advance (cumulative_args_t cum_v,
3200 machine_mode mode,
3201 const_tree type ATTRIBUTE_UNUSED,
3202 bool named)
3203 {
3204 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3205
3206 machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3207 int bytes = GET_MODE_SIZE (xmode);
3208 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3209 int arg_num = *cum;
3210
3211 *cum = arg_num + words;
3212
3213 if (TARGET_DEBUG_ARG)
3214 fprintf (stderr,
3215 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3216 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
3217 }
3218
3219 \f
3220 /* A C expression for the number of words, at the beginning of an argument,
3221 that must be put in registers. The value must be zero for arguments that are
3222 passed entirely in registers or that are entirely pushed on the stack.
3223
3224 On some machines, certain arguments must be passed partially in registers
3225 and partially in memory. On these machines, typically the first N words of
3226 arguments are passed in registers, and the rest on the stack. If a
3227 multi-word argument (a `double' or a structure) crosses that boundary, its
3228 first few words must be passed in registers and the rest must be pushed.
3229 This macro tells the compiler when this occurs, and how many of the words
3230 should go in registers.
3231
3232 `FUNCTION_ARG' for these arguments should return the first register to be
3233 used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for
3234 the called function. */
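/* As a worked example (register numbering illustrative): if a DImode
   argument starts in the last argument register, one of its two words is
   passed in that register and the second word goes on the stack, so the
   function below returns UNITS_PER_WORD bytes.  Arguments that fit entirely
   in registers, or that start beyond LAST_ARG_REGNUM, yield 0.  */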
3235
3236 static int
3237 frv_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
3238 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
3239 {
3240
3241 machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3242 int bytes = GET_MODE_SIZE (xmode);
3243 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3244 int arg_num = *get_cumulative_args (cum);
3245 int ret;
3246
3247 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3248 ? LAST_ARG_REGNUM - arg_num + 1
3249 : 0);
3250 ret *= UNITS_PER_WORD;
3251
3252 if (TARGET_DEBUG_ARG && ret)
3253 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3254
3255 return ret;
3256 }
3257
3258 \f
3259 /* Implements TARGET_FUNCTION_VALUE. */
3260
3261 static rtx
3262 frv_function_value (const_tree valtype,
3263 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3264 bool outgoing ATTRIBUTE_UNUSED)
3265 {
3266 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM);
3267 }
3268
3269 \f
3270 /* Implements TARGET_LIBCALL_VALUE. */
3271
3272 static rtx
3273 frv_libcall_value (machine_mode mode,
3274 const_rtx fun ATTRIBUTE_UNUSED)
3275 {
3276 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3277 }
3278
3279 \f
3280 /* Implements FUNCTION_VALUE_REGNO_P. */
3281
3282 bool
3283 frv_function_value_regno_p (const unsigned int regno)
3284 {
3285 return (regno == RETURN_VALUE_REGNUM);
3286 }
3287 \f
3288 /* Return true if a register is ok to use as a base or index register. */
3289
3290 static FRV_INLINE int
3291 frv_regno_ok_for_base_p (int regno, int strict_p)
3292 {
3293 if (GPR_P (regno))
3294 return TRUE;
3295
3296 if (strict_p)
3297 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3298
3299 if (regno == ARG_POINTER_REGNUM)
3300 return TRUE;
3301
3302 return (regno >= FIRST_PSEUDO_REGISTER);
3303 }
3304
3305 \f
3306 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3307 RTX) is a legitimate memory address on the target machine for a memory
3308 operand of mode MODE.
3309
3310 It usually pays to define several simpler macros to serve as subroutines for
3311 this one. Otherwise it may be too complicated to understand.
3312
3313 This macro must exist in two variants: a strict variant and a non-strict
3314 one. The strict variant is used in the reload pass. It must be defined so
3315 that any pseudo-register that has not been allocated a hard register is
3316 considered a memory reference. In contexts where some kind of register is
3317 required, a pseudo-register with no hard register must be rejected.
3318
3319 The non-strict variant is used in other passes. It must be defined to
3320 accept all pseudo-registers in every context where some kind of register is
3321 required.
3322
3323 Compiler source files that want to use the strict variant of this macro
3324 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3325 conditional to define the strict variant in that case and the non-strict
3326 variant otherwise.
3327
3328 Normally, constant addresses which are the sum of a `symbol_ref' and an
3329 integer are stored inside a `const' RTX to mark them as constant.
3330 Therefore, there is no need to recognize such sums specifically as
3331 legitimate addresses. Normally you would simply recognize any `const' as
3332 legitimate.
3333
3334 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle
3335 constant sums that are not marked with `const'. It assumes that a
3336 naked `plus' indicates indexing. If so, then you *must* reject such
3337 naked constant sums as illegitimate addresses, so that none of them
3338 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
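/* Two illustrative consequences of the checks below: for SImode,
   (plus (reg gr4) (const_int 2047)) is a legitimate address because the
   offset fits the signed 12-bit range, but the same address is rejected for
   DImode because the second word could not be addressed within that range;
   and reg+reg addressing is rejected for modes wider than a word unless
   ALLOW_DOUBLE_REG_P is set.  (gr4 is only an example register.)  */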
3339
3340 int
3341 frv_legitimate_address_p_1 (machine_mode mode,
3342 rtx x,
3343 int strict_p,
3344 int condexec_p,
3345 int allow_double_reg_p)
3346 {
3347 rtx x0, x1;
3348 int ret = 0;
3349 HOST_WIDE_INT value;
3350 unsigned regno0;
3351
3352 if (FRV_SYMBOL_REF_TLS_P (x))
3353 return 0;
3354
3355 switch (GET_CODE (x))
3356 {
3357 default:
3358 break;
3359
3360 case SUBREG:
3361 x = SUBREG_REG (x);
3362 if (GET_CODE (x) != REG)
3363 break;
3364
3365 /* Fall through. */
3366
3367 case REG:
3368 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3369 break;
3370
3371 case PRE_MODIFY:
3372 x0 = XEXP (x, 0);
3373 x1 = XEXP (x, 1);
3374 if (GET_CODE (x0) != REG
3375 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3376 || GET_CODE (x1) != PLUS
3377 || ! rtx_equal_p (x0, XEXP (x1, 0))
3378 || GET_CODE (XEXP (x1, 1)) != REG
3379 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3380 break;
3381
3382 ret = 1;
3383 break;
3384
3385 case CONST_INT:
3386 /* 12-bit immediate */
3387 if (condexec_p)
3388 ret = FALSE;
3389 else
3390 {
3391 ret = IN_RANGE (INTVAL (x), -2048, 2047);
3392
3393 /* If we can't use load/store double operations, make sure we can
3394 address the second word. */
3395 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3396 ret = IN_RANGE (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3397 -2048, 2047);
3398 }
3399 break;
3400
3401 case PLUS:
3402 x0 = XEXP (x, 0);
3403 x1 = XEXP (x, 1);
3404
3405 if (GET_CODE (x0) == SUBREG)
3406 x0 = SUBREG_REG (x0);
3407
3408 if (GET_CODE (x0) != REG)
3409 break;
3410
3411 regno0 = REGNO (x0);
3412 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3413 break;
3414
3415 switch (GET_CODE (x1))
3416 {
3417 default:
3418 break;
3419
3420 case SUBREG:
3421 x1 = SUBREG_REG (x1);
3422 if (GET_CODE (x1) != REG)
3423 break;
3424
3425 /* Fall through. */
3426
3427 case REG:
3428 /* Do not allow reg+reg addressing for modes > 1 word if we
3429 can't depend on having move double instructions. */
3430 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3431 ret = FALSE;
3432 else
3433 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3434 break;
3435
3436 case CONST_INT:
3437 /* 12-bit immediate */
3438 if (condexec_p)
3439 ret = FALSE;
3440 else
3441 {
3442 value = INTVAL (x1);
3443 ret = IN_RANGE (value, -2048, 2047);
3444
3445 /* If we can't use load/store double operations, make sure we can
3446 address the second word. */
3447 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3448 ret = IN_RANGE (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3449 }
3450 break;
3451
3452 case CONST:
3453 if (!condexec_p && got12_operand (x1, VOIDmode))
3454 ret = TRUE;
3455 break;
3456
3457 }
3458 break;
3459 }
3460
3461 if (TARGET_DEBUG_ADDR)
3462 {
3463 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n",
3464 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3465 (condexec_p) ? ", inside conditional code" : "");
3466 debug_rtx (x);
3467 }
3468
3469 return ret;
3470 }
3471
3472 bool
3473 frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
3474 {
3475 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
3476 }
3477
3478 /* Given an ADDR, generate code to inline the PLT. */
3479 static rtx
3480 gen_inlined_tls_plt (rtx addr)
3481 {
3482 rtx retval, dest;
3483 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3484
3485
3486 dest = gen_reg_rtx (DImode);
3487
3488 if (flag_pic == 1)
3489 {
3490 /*
3491 -fpic version:
3492
3493 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3494 calll #gettlsoff(ADDR)@(gr8, gr0)
3495 */
3496 emit_insn (gen_tls_lddi (dest, addr, picreg));
3497 }
3498 else
3499 {
3500 /*
3501 -fPIC version:
3502
3503 sethi.p #gottlsdeschi(ADDR), gr8
3504 setlo #gottlsdesclo(ADDR), gr8
3505 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3506 calll #gettlsoff(ADDR)@(gr8, gr0)
3507 */
3508 rtx reguse = gen_reg_rtx (Pmode);
3509 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3510 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3511 }
3512
3513 retval = gen_reg_rtx (Pmode);
3514 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3515 return retval;
3516 }
3517
3518 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3519 the destination address. */
3520 static rtx
3521 gen_tlsmoff (rtx addr, rtx reg)
3522 {
3523 rtx dest = gen_reg_rtx (Pmode);
3524
3525 if (TARGET_BIG_TLS)
3526 {
3527 /* sethi.p #tlsmoffhi(x), grA
3528 setlo #tlsmofflo(x), grA
3529 */
3530 dest = gen_reg_rtx (Pmode);
3531 emit_insn (gen_tlsoff_hilo (dest, addr,
3532 GEN_INT (R_FRV_TLSMOFFHI)));
3533 dest = gen_rtx_PLUS (Pmode, dest, reg);
3534 }
3535 else
3536 {
3537 /* addi grB, #tlsmoff12(x), grC
3538 -or-
3539 ld/st @(grB, #tlsmoff12(x)), grC
3540 */
3541 dest = gen_reg_rtx (Pmode);
3542 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3543 GEN_INT (R_FRV_TLSMOFF12)));
3544 }
3545 return dest;
3546 }
3547
3548 /* Generate code for a TLS address. */
3549 static rtx
3550 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3551 {
3552 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3553 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3554
3555 switch (model)
3556 {
3557 case TLS_MODEL_INITIAL_EXEC:
3558 if (flag_pic == 1)
3559 {
3560 /* -fpic version.
3561 ldi @(gr15, #gottlsoff12(x)), gr5
3562 */
3563 dest = gen_reg_rtx (Pmode);
3564 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3565 dest = gen_rtx_PLUS (Pmode, tp, dest);
3566 }
3567 else
3568 {
3569 /* -fPIC or anything else.
3570
3571 sethi.p #gottlsoffhi(x), gr14
3572 setlo #gottlsofflo(x), gr14
3573 ld #tlsoff(x)@(gr15, gr14), gr9
3574 */
3575 rtx tmp = gen_reg_rtx (Pmode);
3576 dest = gen_reg_rtx (Pmode);
3577 emit_insn (gen_tlsoff_hilo (tmp, addr,
3578 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3579
3580 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3581 dest = gen_rtx_PLUS (Pmode, tp, dest);
3582 }
3583 break;
3584 case TLS_MODEL_LOCAL_DYNAMIC:
3585 {
3586 rtx reg, retval;
3587
3588 if (TARGET_INLINE_PLT)
3589 retval = gen_inlined_tls_plt (GEN_INT (0));
3590 else
3591 {
3592 /* call #gettlsoff(0) */
3593 retval = gen_reg_rtx (Pmode);
3594 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3595 }
3596
3597 reg = gen_reg_rtx (Pmode);
3598 emit_insn (gen_rtx_SET (VOIDmode, reg,
3599 gen_rtx_PLUS (Pmode,
3600 retval, tp)));
3601
3602 dest = gen_tlsmoff (addr, reg);
3603
3604 /*
3605 dest = gen_reg_rtx (Pmode);
3606 emit_insn (gen_tlsoff_hilo (dest, addr,
3607 GEN_INT (R_FRV_TLSMOFFHI)));
3608 dest = gen_rtx_PLUS (Pmode, dest, reg);
3609 */
3610 break;
3611 }
3612 case TLS_MODEL_LOCAL_EXEC:
3613 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3614 break;
3615 case TLS_MODEL_GLOBAL_DYNAMIC:
3616 {
3617 rtx retval;
3618
3619 if (TARGET_INLINE_PLT)
3620 retval = gen_inlined_tls_plt (addr);
3621 else
3622 {
3623 /* call #gettlsoff(x) */
3624 retval = gen_reg_rtx (Pmode);
3625 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3626 }
3627 dest = gen_rtx_PLUS (Pmode, retval, tp);
3628 break;
3629 }
3630 default:
3631 gcc_unreachable ();
3632 }
3633
3634 return dest;
3635 }
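/* Illustrative summary (not part of the original source): under
   TLS_MODEL_LOCAL_EXEC the address of a thread-local variable is simply the
   thread pointer (register 29 above) plus a #tlsmoff-style offset computed
   by gen_tlsmoff.  The other models go through the GOT and/or a call to
   #gettlsoff, as sketched in the assembly comments inside the switch.  */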
3636
3637 rtx
3638 frv_legitimize_address (rtx x,
3639 rtx oldx ATTRIBUTE_UNUSED,
3640 machine_mode mode ATTRIBUTE_UNUSED)
3641 {
3642 if (GET_CODE (x) == SYMBOL_REF)
3643 {
3644 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3645 if (model != 0)
3646 return frv_legitimize_tls_address (x, model);
3647 }
3648
3649 return x;
3650 }
3651 \f
3652 /* Test whether a local function descriptor is canonical, i.e.,
3653 whether we can use FUNCDESC_GOTOFF to compute the address of the
3654 function. */
3655
3656 static bool
3657 frv_local_funcdesc_p (rtx fnx)
3658 {
3659 tree fn;
3660 enum symbol_visibility vis;
3661 bool ret;
3662
3663 if (! SYMBOL_REF_LOCAL_P (fnx))
3664 return FALSE;
3665
3666 fn = SYMBOL_REF_DECL (fnx);
3667
3668 if (! fn)
3669 return FALSE;
3670
3671 vis = DECL_VISIBILITY (fn);
3672
3673 if (vis == VISIBILITY_PROTECTED)
3674 /* Private function descriptors for protected functions are not
3675 canonical. Temporarily change the visibility to global. */
3676 vis = VISIBILITY_DEFAULT;
3677 else if (flag_shlib)
3678 /* If we're already compiling for a shared library (that, unlike
3679 executables, can't assume that the existence of a definition
3680 implies local binding), we can skip the re-testing. */
3681 return TRUE;
3682
3683 ret = default_binds_local_p_1 (fn, flag_pic);
3684
3685 DECL_VISIBILITY (fn) = vis;
3686
3687 return ret;
3688 }
3689
3690 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3691 register. */
3692
3693 rtx
3694 frv_gen_GPsym2reg (rtx dest, rtx src)
3695 {
3696 tree gp = get_identifier ("_gp");
3697 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3698
3699 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3700 }
3701
3702 static const char *
3703 unspec_got_name (int i)
3704 {
3705 switch (i)
3706 {
3707 case R_FRV_GOT12: return "got12";
3708 case R_FRV_GOTHI: return "gothi";
3709 case R_FRV_GOTLO: return "gotlo";
3710 case R_FRV_FUNCDESC: return "funcdesc";
3711 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3712 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3713 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3714 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3715 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3716 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3717 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3718 case R_FRV_GOTOFF12: return "gotoff12";
3719 case R_FRV_GOTOFFHI: return "gotoffhi";
3720 case R_FRV_GOTOFFLO: return "gotofflo";
3721 case R_FRV_GPREL12: return "gprel12";
3722 case R_FRV_GPRELHI: return "gprelhi";
3723 case R_FRV_GPRELLO: return "gprello";
3724 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3725 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3726 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3727 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3728 case R_FRV_TLSMOFF12: return "tlsmoff12";
3729 case R_FRV_TLSDESCHI: return "tlsdeschi";
3730 case R_FRV_TLSDESCLO: return "tlsdesclo";
3731 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3732 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3733 default: gcc_unreachable ();
3734 }
3735 }
3736
3737 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3738 is added inside the relocation operator. */
3739
3740 static void
3741 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3742 {
3743 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3744 output_addr_const (stream, plus_constant (Pmode, unspec->symbol,
3745 unspec->offset));
3746 fputs (")", stream);
3747 }
3748
3749 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3750 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3751 otherwise return ORIG_X. */
3752
3753 rtx
3754 frv_find_base_term (rtx x)
3755 {
3756 struct frv_unspec unspec;
3757
3758 if (frv_const_unspec_p (x, &unspec)
3759 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3760 return plus_constant (Pmode, unspec.symbol, unspec.offset);
3761
3762 return x;
3763 }
3764
3765 /* Return 1 if OP is a valid FRV memory operand (a MEM with a legitimate
3766 address). CONDEXEC_P is true if the operand is used by a predicated instruction. */
3767
3768 int
3769 frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p)
3770 {
3771 return ((GET_MODE (op) == mode || mode == VOIDmode)
3772 && GET_CODE (op) == MEM
3773 && frv_legitimate_address_p_1 (mode, XEXP (op, 0),
3774 reload_completed, condexec_p, FALSE));
3775 }
3776
3777 void
3778 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3779 {
3780 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3781 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3782 rtx c, rvrtx=0;
3783 rtx addr;
3784
3785 if (ret_value)
3786 {
3787 rvrtx = operands[0];
3788 operands ++;
3789 }
3790
3791 addr = XEXP (operands[0], 0);
3792
3793 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3794 any calls that would involve a PLT, but can't tell, since we
3795 don't know whether an extern function is going to be provided by
3796 a separate translation unit or imported from a separate module.
3797 When compiling for shared libraries, if the function has default
3798 visibility, we assume it's overridable, so we inline the PLT, but
3799 for executables, we don't really have a way to make a good
3800 decision: a function is as likely to be imported from a shared
3801 library as it is to be defined in the executable itself. We
3802 assume executables will get global functions defined locally,
3803 whereas shared libraries will have them potentially overridden,
3804 so we only inline PLTs when compiling for shared libraries.
3805
3806 In order to mark a function as local to a shared library, any
3807 non-default visibility attribute suffices. Unfortunately,
3808 there's no simple way to tag a function declaration as ``in a
3809 different module'', which we could then use to trigger PLT
3810 inlining on executables. There's -minline-plt, but it affects
3811 all external functions, so one would have to also mark function
3812 declarations available in the same module with non-default
3813 visibility, which is advantageous in itself. */
3814 if (GET_CODE (addr) == SYMBOL_REF
3815 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3816 || sibcall))
3817 {
3818 rtx x, dest;
3819 dest = gen_reg_rtx (SImode);
3820 if (flag_pic != 1)
3821 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3822 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3823 else
3824 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3825 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3826 emit_insn (x);
3827 crtl->uses_pic_offset_table = TRUE;
3828 addr = dest;
3829 }
3830 else if (GET_CODE (addr) == SYMBOL_REF)
3831 {
3832 /* These are always either local, or handled through a local
3833 PLT. */
3834 if (ret_value)
3835 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3836 operands[2], picreg, lr);
3837 else
3838 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3839 emit_call_insn (c);
3840 return;
3841 }
3842 else if (! ldd_address_operand (addr, Pmode))
3843 addr = force_reg (Pmode, addr);
3844
3845 picreg = gen_reg_rtx (DImode);
3846 emit_insn (gen_movdi_ldd (picreg, addr));
3847
3848 if (sibcall && ret_value)
3849 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3850 else if (sibcall)
3851 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3852 else if (ret_value)
3853 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3854 else
3855 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3856 emit_call_insn (c);
3857 }
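/* Illustrative note (not part of the original source): in the general case
   above, the call goes through an FDPIC function descriptor: a 64-bit ldd
   loads the descriptor (the function entry point together with its GOT
   pointer) into a register pair, and the *_fdpicdi call patterns then call
   through that pair.  Direct SYMBOL_REF calls skip the descriptor load and
   use the *_fdpicsi patterns instead.  */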
3858 \f
3859 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3860 process these separately from any offsets, such that we add any
3861 offsets to the function descriptor (the actual pointer), not to the
3862 function address. */
3863
3864 static bool
3865 frv_function_symbol_referenced_p (rtx x)
3866 {
3867 const char *format;
3868 int length;
3869 int j;
3870
3871 if (GET_CODE (x) == SYMBOL_REF)
3872 return SYMBOL_REF_FUNCTION_P (x);
3873
3874 length = GET_RTX_LENGTH (GET_CODE (x));
3875 format = GET_RTX_FORMAT (GET_CODE (x));
3876
3877 for (j = 0; j < length; ++j)
3878 {
3879 switch (format[j])
3880 {
3881 case 'e':
3882 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3883 return TRUE;
3884 break;
3885
3886 case 'V':
3887 case 'E':
3888 if (XVEC (x, j) != 0)
3889 {
3890 int k;
3891 for (k = 0; k < XVECLEN (x, j); ++k)
3892 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3893 return TRUE;
3894 }
3895 break;
3896
3897 default:
3898 /* Nothing to do. */
3899 break;
3900 }
3901 }
3902
3903 return FALSE;
3904 }
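/* Illustrative example (not part of the original source): for an address
   such as (const (plus (symbol_ref "f") (const_int 4))) where "f" is a
   function, the walk above finds the SYMBOL_REF and returns true, so
   frv_emit_movsi forces the expression into a register and the +4 is applied
   to the function descriptor pointer rather than to the code address.  */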
3905
3906 /* Return true if the memory operand is one that can be conditionally
3907 executed. */
3908
3909 int
3910 condexec_memory_operand (rtx op, machine_mode mode)
3911 {
3912 machine_mode op_mode = GET_MODE (op);
3913 rtx addr;
3914
3915 if (mode != VOIDmode && op_mode != mode)
3916 return FALSE;
3917
3918 switch (op_mode)
3919 {
3920 default:
3921 return FALSE;
3922
3923 case QImode:
3924 case HImode:
3925 case SImode:
3926 case SFmode:
3927 break;
3928 }
3929
3930 if (GET_CODE (op) != MEM)
3931 return FALSE;
3932
3933 addr = XEXP (op, 0);
3934 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE);
3935 }
3936 \f
3937 /* Return true if the bare return instruction can be used outside of the
3938 epilogue code. For frv, we only do it if there was no stack allocation. */
3939
3940 int
3941 direct_return_p (void)
3942 {
3943 frv_stack_t *info;
3944
3945 if (!reload_completed)
3946 return FALSE;
3947
3948 info = frv_stack_info ();
3949 return (info->total_size == 0);
3950 }
3951
3952 \f
3953 void
3954 frv_emit_move (machine_mode mode, rtx dest, rtx src)
3955 {
3956 if (GET_CODE (src) == SYMBOL_REF)
3957 {
3958 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3959 if (model != 0)
3960 src = frv_legitimize_tls_address (src, model);
3961 }
3962
3963 switch (mode)
3964 {
3965 case SImode:
3966 if (frv_emit_movsi (dest, src))
3967 return;
3968 break;
3969
3970 case QImode:
3971 case HImode:
3972 case DImode:
3973 case SFmode:
3974 case DFmode:
3975 if (!reload_in_progress
3976 && !reload_completed
3977 && !register_operand (dest, mode)
3978 && !reg_or_0_operand (src, mode))
3979 src = copy_to_mode_reg (mode, src);
3980 break;
3981
3982 default:
3983 gcc_unreachable ();
3984 }
3985
3986 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
3987 }
3988
3989 /* Emit code to handle a MOVSI, adding in the small data register or pic
3990 register if needed to load up addresses. Return TRUE if the appropriate
3991 instructions are emitted. */
3992
3993 int
3994 frv_emit_movsi (rtx dest, rtx src)
3995 {
3996 int base_regno = -1;
3997 int unspec = 0;
3998 rtx sym = src;
3999 struct frv_unspec old_unspec;
4000
4001 if (!reload_in_progress
4002 && !reload_completed
4003 && !register_operand (dest, SImode)
4004 && (!reg_or_0_operand (src, SImode)
4005 /* Virtual registers will almost always be replaced by an
4006 add instruction, so expose this to CSE by copying to
4007 an intermediate register. */
4008 || (GET_CODE (src) == REG
4009 && IN_RANGE (REGNO (src),
4010 FIRST_VIRTUAL_REGISTER,
4011 LAST_VIRTUAL_POINTER_REGISTER))))
4012 {
4013 emit_insn (gen_rtx_SET (VOIDmode, dest, copy_to_mode_reg (SImode, src)));
4014 return TRUE;
4015 }
4016
4017 /* Explicitly add in the PIC or small data register if needed. */
4018 switch (GET_CODE (src))
4019 {
4020 default:
4021 break;
4022
4023 case LABEL_REF:
4024 handle_label:
4025 if (TARGET_FDPIC)
4026 {
4027 /* Using GPREL12, we use a single GOT entry for all symbols
4028 in read-only sections, but trade sequences such as:
4029
4030 sethi #gothi(label), gr#
4031 setlo #gotlo(label), gr#
4032 ld @(gr15,gr#), gr#
4033
4034 for
4035
4036 ld @(gr15,#got12(_gp)), gr#
4037 sethi #gprelhi(label), gr##
4038 setlo #gprello(label), gr##
4039 add gr#, gr##, gr##
4040
4041 We may often be able to share gr# for multiple
4042 computations of GPREL addresses, and we may often fold
4043 the final add into the pair of registers of a load or
4044 store instruction, so it's often profitable. Even when
4045 optimizing for size, we're trading a GOT entry for an
4046 additional instruction, which trades GOT space
4047 (read-write) for code size (read-only, shareable), as
4048 long as the symbol is not used in more than two different
4049 locations.
4050
4051 With -fpie/-fpic, we'd be trading a single load for a
4052 sequence of 4 instructions, because the offset of the
4053 label can't be assumed to be addressable with 12 bits, so
4054 we don't do this. */
4055 if (TARGET_GPREL_RO)
4056 unspec = R_FRV_GPREL12;
4057 else
4058 unspec = R_FRV_GOT12;
4059 }
4060 else if (flag_pic)
4061 base_regno = PIC_REGNO;
4062
4063 break;
4064
4065 case CONST:
4066 if (frv_const_unspec_p (src, &old_unspec))
4067 break;
4068
4069 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4070 {
4071 handle_whatever:
4072 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4073 emit_move_insn (dest, src);
4074 return TRUE;
4075 }
4076 else
4077 {
4078 sym = XEXP (sym, 0);
4079 if (GET_CODE (sym) == PLUS
4080 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4081 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4082 sym = XEXP (sym, 0);
4083 if (GET_CODE (sym) == SYMBOL_REF)
4084 goto handle_sym;
4085 else if (GET_CODE (sym) == LABEL_REF)
4086 goto handle_label;
4087 else
4088 goto handle_whatever;
4089 }
4090 break;
4091
4092 case SYMBOL_REF:
4093 handle_sym:
4094 if (TARGET_FDPIC)
4095 {
4096 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4097
4098 if (model != 0)
4099 {
4100 src = frv_legitimize_tls_address (src, model);
4101 emit_move_insn (dest, src);
4102 return TRUE;
4103 }
4104
4105 if (SYMBOL_REF_FUNCTION_P (sym))
4106 {
4107 if (frv_local_funcdesc_p (sym))
4108 unspec = R_FRV_FUNCDESC_GOTOFF12;
4109 else
4110 unspec = R_FRV_FUNCDESC_GOT12;
4111 }
4112 else
4113 {
4114 if (CONSTANT_POOL_ADDRESS_P (sym))
4115 switch (GET_CODE (get_pool_constant (sym)))
4116 {
4117 case CONST:
4118 case SYMBOL_REF:
4119 case LABEL_REF:
4120 if (flag_pic)
4121 {
4122 unspec = R_FRV_GOTOFF12;
4123 break;
4124 }
4125 /* Fall through. */
4126 default:
4127 if (TARGET_GPREL_RO)
4128 unspec = R_FRV_GPREL12;
4129 else
4130 unspec = R_FRV_GOT12;
4131 break;
4132 }
4133 else if (SYMBOL_REF_LOCAL_P (sym)
4134 && !SYMBOL_REF_EXTERNAL_P (sym)
4135 && SYMBOL_REF_DECL (sym)
4136 && (!DECL_P (SYMBOL_REF_DECL (sym))
4137 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4138 {
4139 tree decl = SYMBOL_REF_DECL (sym);
4140 tree init = TREE_CODE (decl) == VAR_DECL
4141 ? DECL_INITIAL (decl)
4142 : TREE_CODE (decl) == CONSTRUCTOR
4143 ? decl : 0;
4144 int reloc = 0;
4145 bool named_section, readonly;
4146
4147 if (init && init != error_mark_node)
4148 reloc = compute_reloc_for_constant (init);
4149
4150 named_section = TREE_CODE (decl) == VAR_DECL
4151 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4152 readonly = decl_readonly_section (decl, reloc);
4153
4154 if (named_section)
4155 unspec = R_FRV_GOT12;
4156 else if (!readonly)
4157 unspec = R_FRV_GOTOFF12;
4158 else if (readonly && TARGET_GPREL_RO)
4159 unspec = R_FRV_GPREL12;
4160 else
4161 unspec = R_FRV_GOT12;
4162 }
4163 else
4164 unspec = R_FRV_GOT12;
4165 }
4166 }
4167
4168 else if (SYMBOL_REF_SMALL_P (sym))
4169 base_regno = SDA_BASE_REG;
4170
4171 else if (flag_pic)
4172 base_regno = PIC_REGNO;
4173
4174 break;
4175 }
4176
4177 if (base_regno >= 0)
4178 {
4179 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4180 emit_insn (gen_symGOTOFF2reg (dest, src,
4181 gen_rtx_REG (Pmode, base_regno),
4182 GEN_INT (R_FRV_GPREL12)));
4183 else
4184 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4185 gen_rtx_REG (Pmode, base_regno),
4186 GEN_INT (R_FRV_GPREL12)));
4187 if (base_regno == PIC_REGNO)
4188 crtl->uses_pic_offset_table = TRUE;
4189 return TRUE;
4190 }
4191
4192 if (unspec)
4193 {
4194 rtx x;
4195
4196 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4197 new uses of it once reload has begun. */
4198 gcc_assert (!reload_in_progress && !reload_completed);
4199
4200 switch (unspec)
4201 {
4202 case R_FRV_GOTOFF12:
4203 if (!frv_small_data_reloc_p (sym, unspec))
4204 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4205 GEN_INT (unspec));
4206 else
4207 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4208 break;
4209 case R_FRV_GPREL12:
4210 if (!frv_small_data_reloc_p (sym, unspec))
4211 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4212 GEN_INT (unspec));
4213 else
4214 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4215 break;
4216 case R_FRV_FUNCDESC_GOTOFF12:
4217 if (flag_pic != 1)
4218 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4219 GEN_INT (unspec));
4220 else
4221 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4222 break;
4223 default:
4224 if (flag_pic != 1)
4225 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4226 GEN_INT (unspec));
4227 else
4228 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4229 break;
4230 }
4231 emit_insn (x);
4232 crtl->uses_pic_offset_table = TRUE;
4233 return TRUE;
4234 }
4235
4236
4237 return FALSE;
4238 }
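/* Illustrative note (not part of the original source): tracing the
   TARGET_FDPIC path above, a MOVSI of a local function symbol ends up using
   a single #gotofffuncdesc12 load relative to the FDPIC register, a
   non-local function uses a #gotfuncdesc12 GOT entry, and a data symbol
   picks #gotoff12, #gprel12 or #got12 depending on whether it is local,
   read-only and whether TARGET_GPREL_RO is in effect (the operator names
   follow unspec_got_name above).  */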
4239
4240 \f
4241 /* Return a string to output a single word move. */
4242
4243 const char *
4244 output_move_single (rtx operands[], rtx insn)
4245 {
4246 rtx dest = operands[0];
4247 rtx src = operands[1];
4248
4249 if (GET_CODE (dest) == REG)
4250 {
4251 int dest_regno = REGNO (dest);
4252 machine_mode mode = GET_MODE (dest);
4253
4254 if (GPR_P (dest_regno))
4255 {
4256 if (GET_CODE (src) == REG)
4257 {
4258 /* gpr <- some sort of register */
4259 int src_regno = REGNO (src);
4260
4261 if (GPR_P (src_regno))
4262 return "mov %1, %0";
4263
4264 else if (FPR_P (src_regno))
4265 return "movfg %1, %0";
4266
4267 else if (SPR_P (src_regno))
4268 return "movsg %1, %0";
4269 }
4270
4271 else if (GET_CODE (src) == MEM)
4272 {
4273 /* gpr <- memory */
4274 switch (mode)
4275 {
4276 default:
4277 break;
4278
4279 case QImode:
4280 return "ldsb%I1%U1 %M1,%0";
4281
4282 case HImode:
4283 return "ldsh%I1%U1 %M1,%0";
4284
4285 case SImode:
4286 case SFmode:
4287 return "ld%I1%U1 %M1, %0";
4288 }
4289 }
4290
4291 else if (GET_CODE (src) == CONST_INT
4292 || GET_CODE (src) == CONST_DOUBLE)
4293 {
4294 /* gpr <- integer/floating constant */
4295 HOST_WIDE_INT value;
4296
4297 if (GET_CODE (src) == CONST_INT)
4298 value = INTVAL (src);
4299
4300 else if (mode == SFmode)
4301 {
4302 REAL_VALUE_TYPE rv;
4303 long l;
4304
4305 REAL_VALUE_FROM_CONST_DOUBLE (rv, src);
4306 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
4307 value = l;
4308 }
4309
4310 else
4311 value = CONST_DOUBLE_LOW (src);
4312
4313 if (IN_RANGE (value, -32768, 32767))
4314 return "setlos %1, %0";
4315
4316 return "#";
4317 }
4318
4319 else if (GET_CODE (src) == SYMBOL_REF
4320 || GET_CODE (src) == LABEL_REF
4321 || GET_CODE (src) == CONST)
4322 {
4323 return "#";
4324 }
4325 }
4326
4327 else if (FPR_P (dest_regno))
4328 {
4329 if (GET_CODE (src) == REG)
4330 {
4331 /* fpr <- some sort of register */
4332 int src_regno = REGNO (src);
4333
4334 if (GPR_P (src_regno))
4335 return "movgf %1, %0";
4336
4337 else if (FPR_P (src_regno))
4338 {
4339 if (TARGET_HARD_FLOAT)
4340 return "fmovs %1, %0";
4341 else
4342 return "mor %1, %1, %0";
4343 }
4344 }
4345
4346 else if (GET_CODE (src) == MEM)
4347 {
4348 /* fpr <- memory */
4349 switch (mode)
4350 {
4351 default:
4352 break;
4353
4354 case QImode:
4355 return "ldbf%I1%U1 %M1,%0";
4356
4357 case HImode:
4358 return "ldhf%I1%U1 %M1,%0";
4359
4360 case SImode:
4361 case SFmode:
4362 return "ldf%I1%U1 %M1, %0";
4363 }
4364 }
4365
4366 else if (ZERO_P (src))
4367 return "movgf %., %0";
4368 }
4369
4370 else if (SPR_P (dest_regno))
4371 {
4372 if (GET_CODE (src) == REG)
4373 {
4374 /* spr <- some sort of register */
4375 int src_regno = REGNO (src);
4376
4377 if (GPR_P (src_regno))
4378 return "movgs %1, %0";
4379 }
4380 else if (ZERO_P (src))
4381 return "movgs %., %0";
4382 }
4383 }
4384
4385 else if (GET_CODE (dest) == MEM)
4386 {
4387 if (GET_CODE (src) == REG)
4388 {
4389 int src_regno = REGNO (src);
4390 machine_mode mode = GET_MODE (dest);
4391
4392 if (GPR_P (src_regno))
4393 {
4394 switch (mode)
4395 {
4396 default:
4397 break;
4398
4399 case QImode:
4400 return "stb%I0%U0 %1, %M0";
4401
4402 case HImode:
4403 return "sth%I0%U0 %1, %M0";
4404
4405 case SImode:
4406 case SFmode:
4407 return "st%I0%U0 %1, %M0";
4408 }
4409 }
4410
4411 else if (FPR_P (src_regno))
4412 {
4413 switch (mode)
4414 {
4415 default:
4416 break;
4417
4418 case QImode:
4419 return "stbf%I0%U0 %1, %M0";
4420
4421 case HImode:
4422 return "sthf%I0%U0 %1, %M0";
4423
4424 case SImode:
4425 case SFmode:
4426 return "stf%I0%U0 %1, %M0";
4427 }
4428 }
4429 }
4430
4431 else if (ZERO_P (src))
4432 {
4433 switch (GET_MODE (dest))
4434 {
4435 default:
4436 break;
4437
4438 case QImode:
4439 return "stb%I0%U0 %., %M0";
4440
4441 case HImode:
4442 return "sth%I0%U0 %., %M0";
4443
4444 case SImode:
4445 case SFmode:
4446 return "st%I0%U0 %., %M0";
4447 }
4448 }
4449 }
4450
4451 fatal_insn ("bad output_move_single operand", insn);
4452 return "";
4453 }
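/* Illustrative note (not part of the original source): a GPR load of an
   integer constant gets a single "setlos" only when the value fits in 16
   signed bits (-32768 .. 32767); anything larger returns "#" above so that
   the move is split later (presumably into a sethi/setlo style pair by the
   machine description).  */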
4454
4455 \f
4456 /* Return a string to output a double word move. */
4457
4458 const char *
4459 output_move_double (rtx operands[], rtx insn)
4460 {
4461 rtx dest = operands[0];
4462 rtx src = operands[1];
4463 machine_mode mode = GET_MODE (dest);
4464
4465 if (GET_CODE (dest) == REG)
4466 {
4467 int dest_regno = REGNO (dest);
4468
4469 if (GPR_P (dest_regno))
4470 {
4471 if (GET_CODE (src) == REG)
4472 {
4473 /* gpr <- some sort of register */
4474 int src_regno = REGNO (src);
4475
4476 if (GPR_P (src_regno))
4477 return "#";
4478
4479 else if (FPR_P (src_regno))
4480 {
4481 if (((dest_regno - GPR_FIRST) & 1) == 0
4482 && ((src_regno - FPR_FIRST) & 1) == 0)
4483 return "movfgd %1, %0";
4484
4485 return "#";
4486 }
4487 }
4488
4489 else if (GET_CODE (src) == MEM)
4490 {
4491 /* gpr <- memory */
4492 if (dbl_memory_one_insn_operand (src, mode))
4493 return "ldd%I1%U1 %M1, %0";
4494
4495 return "#";
4496 }
4497
4498 else if (GET_CODE (src) == CONST_INT
4499 || GET_CODE (src) == CONST_DOUBLE)
4500 return "#";
4501 }
4502
4503 else if (FPR_P (dest_regno))
4504 {
4505 if (GET_CODE (src) == REG)
4506 {
4507 /* fpr <- some sort of register */
4508 int src_regno = REGNO (src);
4509
4510 if (GPR_P (src_regno))
4511 {
4512 if (((dest_regno - FPR_FIRST) & 1) == 0
4513 && ((src_regno - GPR_FIRST) & 1) == 0)
4514 return "movgfd %1, %0";
4515
4516 return "#";
4517 }
4518
4519 else if (FPR_P (src_regno))
4520 {
4521 if (TARGET_DOUBLE
4522 && ((dest_regno - FPR_FIRST) & 1) == 0
4523 && ((src_regno - FPR_FIRST) & 1) == 0)
4524 return "fmovd %1, %0";
4525
4526 return "#";
4527 }
4528 }
4529
4530 else if (GET_CODE (src) == MEM)
4531 {
4532 /* fpr <- memory */
4533 if (dbl_memory_one_insn_operand (src, mode))
4534 return "lddf%I1%U1 %M1, %0";
4535
4536 return "#";
4537 }
4538
4539 else if (ZERO_P (src))
4540 return "#";
4541 }
4542 }
4543
4544 else if (GET_CODE (dest) == MEM)
4545 {
4546 if (GET_CODE (src) == REG)
4547 {
4548 int src_regno = REGNO (src);
4549
4550 if (GPR_P (src_regno))
4551 {
4552 if (((src_regno - GPR_FIRST) & 1) == 0
4553 && dbl_memory_one_insn_operand (dest, mode))
4554 return "std%I0%U0 %1, %M0";
4555
4556 return "#";
4557 }
4558
4559 if (FPR_P (src_regno))
4560 {
4561 if (((src_regno - FPR_FIRST) & 1) == 0
4562 && dbl_memory_one_insn_operand (dest, mode))
4563 return "stdf%I0%U0 %1, %M0";
4564
4565 return "#";
4566 }
4567 }
4568
4569 else if (ZERO_P (src))
4570 {
4571 if (dbl_memory_one_insn_operand (dest, mode))
4572 return "std%I0%U0 %., %M0";
4573
4574 return "#";
4575 }
4576 }
4577
4578 fatal_insn ("bad output_move_double operand", insn);
4579 return "";
4580 }
4581
4582 \f
4583 /* Return a string to output a single word conditional move.
4584 Operand0 -- EQ/NE of ccr register and 0
4585 Operand1 -- CCR register
4586 Operand2 -- destination
4587 Operand3 -- source */
4588
4589 const char *
4590 output_condmove_single (rtx operands[], rtx insn)
4591 {
4592 rtx dest = operands[2];
4593 rtx src = operands[3];
4594
4595 if (GET_CODE (dest) == REG)
4596 {
4597 int dest_regno = REGNO (dest);
4598 machine_mode mode = GET_MODE (dest);
4599
4600 if (GPR_P (dest_regno))
4601 {
4602 if (GET_CODE (src) == REG)
4603 {
4604 /* gpr <- some sort of register */
4605 int src_regno = REGNO (src);
4606
4607 if (GPR_P (src_regno))
4608 return "cmov %z3, %2, %1, %e0";
4609
4610 else if (FPR_P (src_regno))
4611 return "cmovfg %3, %2, %1, %e0";
4612 }
4613
4614 else if (GET_CODE (src) == MEM)
4615 {
4616 /* gpr <- memory */
4617 switch (mode)
4618 {
4619 default:
4620 break;
4621
4622 case QImode:
4623 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4624
4625 case HImode:
4626 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4627
4628 case SImode:
4629 case SFmode:
4630 return "cld%I3%U3 %M3, %2, %1, %e0";
4631 }
4632 }
4633
4634 else if (ZERO_P (src))
4635 return "cmov %., %2, %1, %e0";
4636 }
4637
4638 else if (FPR_P (dest_regno))
4639 {
4640 if (GET_CODE (src) == REG)
4641 {
4642 /* fpr <- some sort of register */
4643 int src_regno = REGNO (src);
4644
4645 if (GPR_P (src_regno))
4646 return "cmovgf %3, %2, %1, %e0";
4647
4648 else if (FPR_P (src_regno))
4649 {
4650 if (TARGET_HARD_FLOAT)
4651 return "cfmovs %3,%2,%1,%e0";
4652 else
4653 return "cmor %3, %3, %2, %1, %e0";
4654 }
4655 }
4656
4657 else if (GET_CODE (src) == MEM)
4658 {
4659 /* fpr <- memory */
4660 if (mode == SImode || mode == SFmode)
4661 return "cldf%I3%U3 %M3, %2, %1, %e0";
4662 }
4663
4664 else if (ZERO_P (src))
4665 return "cmovgf %., %2, %1, %e0";
4666 }
4667 }
4668
4669 else if (GET_CODE (dest) == MEM)
4670 {
4671 if (GET_CODE (src) == REG)
4672 {
4673 int src_regno = REGNO (src);
4674 machine_mode mode = GET_MODE (dest);
4675
4676 if (GPR_P (src_regno))
4677 {
4678 switch (mode)
4679 {
4680 default:
4681 break;
4682
4683 case QImode:
4684 return "cstb%I2%U2 %3, %M2, %1, %e0";
4685
4686 case HImode:
4687 return "csth%I2%U2 %3, %M2, %1, %e0";
4688
4689 case SImode:
4690 case SFmode:
4691 return "cst%I2%U2 %3, %M2, %1, %e0";
4692 }
4693 }
4694
4695 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4696 return "cstf%I2%U2 %3, %M2, %1, %e0";
4697 }
4698
4699 else if (ZERO_P (src))
4700 {
4701 machine_mode mode = GET_MODE (dest);
4702 switch (mode)
4703 {
4704 default:
4705 break;
4706
4707 case QImode:
4708 return "cstb%I2%U2 %., %M2, %1, %e0";
4709
4710 case HImode:
4711 return "csth%I2%U2 %., %M2, %1, %e0";
4712
4713 case SImode:
4714 case SFmode:
4715 return "cst%I2%U2 %., %M2, %1, %e0";
4716 }
4717 }
4718 }
4719
4720 fatal_insn ("bad output_condmove_single operand", insn);
4721 return "";
4722 }
4723
4724 \f
4725 /* Emit the appropriate code to do a comparison, returning the register the
4726 comparison was done in. */
4727
4728 static rtx
4729 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4730 {
4731 machine_mode cc_mode;
4732 rtx cc_reg;
4733
4734 /* Floating point doesn't have comparison against a constant. */
4735 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4736 op1 = force_reg (GET_MODE (op0), op1);
4737
4738 /* Possibly disable using anything but a fixed register in order to work
4739 around cse moving comparisons past function calls. */
4740 cc_mode = SELECT_CC_MODE (test, op0, op1);
4741 cc_reg = ((TARGET_ALLOC_CC)
4742 ? gen_reg_rtx (cc_mode)
4743 : gen_rtx_REG (cc_mode,
4744 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4745
4746 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4747 gen_rtx_COMPARE (cc_mode, op0, op1)));
4748
4749 return cc_reg;
4750 }
4751
4752 \f
4753 /* Emit code for a conditional branch.
4754 XXX: I originally wanted to add a clobber of a CCR register to use in
4755 conditional execution, but that confuses the rest of the compiler. */
4756
4757 int
4758 frv_emit_cond_branch (rtx operands[])
4759 {
4760 rtx test_rtx;
4761 rtx label_ref;
4762 rtx if_else;
4763 enum rtx_code test = GET_CODE (operands[0]);
4764 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
4765 machine_mode cc_mode = GET_MODE (cc_reg);
4766
4767 /* Branches generate:
4768 (set (pc)
4769 (if_then_else (<test>, <cc_reg>, (const_int 0))
4770 (label_ref <branch_label>)
4771 (pc))) */
4772 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
4773 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4774 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4775 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, if_else));
4776 return TRUE;
4777 }
4778
4779 \f
4780 /* Emit code to set a gpr to 1/0 based on a comparison. */
4781
4782 int
4783 frv_emit_scc (rtx operands[])
4784 {
4785 rtx set;
4786 rtx test_rtx;
4787 rtx clobber;
4788 rtx cr_reg;
4789 enum rtx_code test = GET_CODE (operands[1]);
4790 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]);
4791
4792 /* SCC instructions generate:
4793 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
4794 (clobber (<ccr_reg>))]) */
4795 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4796 set = gen_rtx_SET (VOIDmode, operands[0], test_rtx);
4797
4798 cr_reg = ((TARGET_ALLOC_CC)
4799 ? gen_reg_rtx (CC_CCRmode)
4800 : gen_rtx_REG (CC_CCRmode,
4801 ((GET_MODE (cc_reg) == CC_FPmode)
4802 ? FCR_FIRST
4803 : ICR_FIRST)));
4804
4805 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4806 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4807 return TRUE;
4808 }
4809
4810 \f
4811 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4812 the separate insns. */
4813
4814 rtx
4815 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4816 {
4817 rtx ret;
4818
4819 start_sequence ();
4820
4821 /* Set the appropriate CCR bit. */
4822 emit_insn (gen_rtx_SET (VOIDmode,
4823 cr_reg,
4824 gen_rtx_fmt_ee (GET_CODE (test),
4825 GET_MODE (cr_reg),
4826 cc_reg,
4827 const0_rtx)));
4828
4829 /* Move the value into the destination. */
4830 emit_move_insn (dest, GEN_INT (value));
4831
4832 /* Move 0 into the destination if the test failed. */
4833 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4834 gen_rtx_EQ (GET_MODE (cr_reg),
4835 cr_reg,
4836 const0_rtx),
4837 gen_rtx_SET (VOIDmode, dest, const0_rtx)));
4838
4839 /* Finish up, return sequence. */
4840 ret = get_insns ();
4841 end_sequence ();
4842 return ret;
4843 }
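/* Illustrative example (not part of the original source): for a test such as
   "dest = (a < b)" the sequence built above is roughly

       cr   = LT (cc, 0)          ; set the CCR bit from the comparison
       dest = VALUE               ; normally 1, assuming the test holds
       dest = 0   if cr == 0      ; conditionally undo it otherwise

   i.e. one CCR-setting insn, an unconditional move of VALUE and a
   conditional move of zero.  */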
4844
4845 \f
4846 /* Emit the code for a conditional move, return TRUE if we could do the
4847 move. */
4848
4849 int
4850 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4851 {
4852 rtx set;
4853 rtx clobber_cc;
4854 rtx test2;
4855 rtx cr_reg;
4856 rtx if_rtx;
4857 enum rtx_code test = GET_CODE (test_rtx);
4858 rtx cc_reg = frv_emit_comparison (test,
4859 XEXP (test_rtx, 0), XEXP (test_rtx, 1));
4860 machine_mode cc_mode = GET_MODE (cc_reg);
4861
4862 /* Conditional move instructions generate:
4863 (parallel [(set <target>
4864 (if_then_else (<test> <cc_reg> (const_int 0))
4865 <src1>
4866 <src2>))
4867 (clobber (<ccr_reg>))]) */
4868
4869 /* Handle various cases of conditional move involving two constants. */
4870 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4871 {
4872 HOST_WIDE_INT value1 = INTVAL (src1);
4873 HOST_WIDE_INT value2 = INTVAL (src2);
4874
4875 /* Having 0 as one of the constants can be done by loading the other
4876 constant, and optionally moving in gr0. */
4877 if (value1 == 0 || value2 == 0)
4878 ;
4879
4880 /* If the first value is within an addi range and also the difference
4881 between the two fits in an addi's range, load up the difference, then
4882 conditionally move in 0, and then unconditionally add the first
4883 value. */
4884 else if (IN_RANGE (value1, -2048, 2047)
4885 && IN_RANGE (value2 - value1, -2048, 2047))
4886 ;
4887
4888 /* If neither condition holds, just force the constant into a
4889 register. */
4890 else
4891 {
4892 src1 = force_reg (GET_MODE (dest), src1);
4893 src2 = force_reg (GET_MODE (dest), src2);
4894 }
4895 }
4896
4897 /* If one value is a register, ensure the other value is either 0 or a
4898 register. */
4899 else
4900 {
4901 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4902 src1 = force_reg (GET_MODE (dest), src1);
4903
4904 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4905 src2 = force_reg (GET_MODE (dest), src2);
4906 }
4907
4908 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4909 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4910
4911 set = gen_rtx_SET (VOIDmode, dest, if_rtx);
4912
4913 cr_reg = ((TARGET_ALLOC_CC)
4914 ? gen_reg_rtx (CC_CCRmode)
4915 : gen_rtx_REG (CC_CCRmode,
4916 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4917
4918 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4919 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4920 return TRUE;
4921 }
4922
4923 \f
4924 /* Split a conditional move into constituent parts, returning a SEQUENCE
4925 containing all of the insns. */
4926
4927 rtx
4928 frv_split_cond_move (rtx operands[])
4929 {
4930 rtx dest = operands[0];
4931 rtx test = operands[1];
4932 rtx cc_reg = operands[2];
4933 rtx src1 = operands[3];
4934 rtx src2 = operands[4];
4935 rtx cr_reg = operands[5];
4936 rtx ret;
4937 machine_mode cr_mode = GET_MODE (cr_reg);
4938
4939 start_sequence ();
4940
4941 /* Set the appropriate CCR bit. */
4942 emit_insn (gen_rtx_SET (VOIDmode,
4943 cr_reg,
4944 gen_rtx_fmt_ee (GET_CODE (test),
4945 GET_MODE (cr_reg),
4946 cc_reg,
4947 const0_rtx)));
4948
4949 /* Handle various cases of conditional move involving two constants. */
4950 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4951 {
4952 HOST_WIDE_INT value1 = INTVAL (src1);
4953 HOST_WIDE_INT value2 = INTVAL (src2);
4954
4955 /* Having 0 as one of the constants can be done by loading the other
4956 constant, and optionally moving in gr0. */
4957 if (value1 == 0)
4958 {
4959 emit_move_insn (dest, src2);
4960 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4961 gen_rtx_NE (cr_mode, cr_reg,
4962 const0_rtx),
4963 gen_rtx_SET (VOIDmode, dest, src1)));
4964 }
4965
4966 else if (value2 == 0)
4967 {
4968 emit_move_insn (dest, src1);
4969 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4970 gen_rtx_EQ (cr_mode, cr_reg,
4971 const0_rtx),
4972 gen_rtx_SET (VOIDmode, dest, src2)));
4973 }
4974
4975 /* If the first value is within an addi range and also the difference
4976 between the two fits in an addi's range, load up the difference, then
4977 conditionally move in 0, and then unconditionally add the first
4978 value. */
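         /* Illustrative arithmetic (not part of the original source): with
            value1 = 5 and value2 = 100 the code below loads 95, conditionally
            overwrites it with 0 when the CR bit is set, and then adds 5,
            yielding 5 when the test is true and 100 otherwise -- exactly the
            two original constants.  */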
4979 else if (IN_RANGE (value1, -2048, 2047)
4980 && IN_RANGE (value2 - value1, -2048, 2047))
4981 {
4982 rtx dest_si = ((GET_MODE (dest) == SImode)
4983 ? dest
4984 : gen_rtx_SUBREG (SImode, dest, 0));
4985
4986 emit_move_insn (dest_si, GEN_INT (value2 - value1));
4987 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4988 gen_rtx_NE (cr_mode, cr_reg,
4989 const0_rtx),
4990 gen_rtx_SET (VOIDmode, dest_si,
4991 const0_rtx)));
4992 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
4993 }
4994
4995 else
4996 gcc_unreachable ();
4997 }
4998 else
4999 {
5000 /* Emit the conditional move for the test being true if needed. */
5001 if (! rtx_equal_p (dest, src1))
5002 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5003 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5004 gen_rtx_SET (VOIDmode, dest, src1)));
5005
5006 /* Emit the conditional move for the test being false if needed. */
5007 if (! rtx_equal_p (dest, src2))
5008 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5009 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5010 gen_rtx_SET (VOIDmode, dest, src2)));
5011 }
5012
5013 /* Finish up, return sequence. */
5014 ret = get_insns ();
5015 end_sequence ();
5016 return ret;
5017 }
5018
5019 \f
5020 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
5021 memory location that is not known to be dword-aligned. */
5022 void
5023 frv_split_double_load (rtx dest, rtx source)
5024 {
5025 int regno = REGNO (dest);
5026 rtx dest1 = gen_highpart (SImode, dest);
5027 rtx dest2 = gen_lowpart (SImode, dest);
5028 rtx address = XEXP (source, 0);
5029
5030 /* If the address is pre-modified, load the lower-numbered register
5031 first, then load the other register using an integer offset from
5032 the modified base register. This order should always be safe,
5033 since the pre-modification cannot affect the same registers as the
5034 load does.
5035
5036 The situation for other loads is more complicated. Loading one
5037 of the registers could affect the value of ADDRESS, so we must
5038 be careful which order we do them in. */
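     /* Illustrative example (not part of the original source): when loading
        a DImode value into gr8/gr9 from an address that uses gr8, the code
        below loads gr9 from offset 4 first and only then loads gr8 from
        offset 0, so the base register is not clobbered before its last use.
        In the pre-modify case the lower-numbered register can always be
        loaded first, because the pre-modification cannot affect the
        destination registers of the load.  */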
5039 if (GET_CODE (address) == PRE_MODIFY
5040 || ! refers_to_regno_p (regno, regno + 1, address, NULL))
5041 {
5042 /* It is safe to load the lower-numbered register first. */
5043 emit_move_insn (dest1, change_address (source, SImode, NULL));
5044 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5045 }
5046 else
5047 {
5048 /* ADDRESS is not pre-modified and the address depends on the
5049 lower-numbered register. Load the higher-numbered register
5050 first. */
5051 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5052 emit_move_insn (dest1, change_address (source, SImode, NULL));
5053 }
5054 }
5055
5056 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
5057 and SOURCE is either a double register or the constant zero. */
5058 void
5059 frv_split_double_store (rtx dest, rtx source)
5060 {
5061 rtx dest1 = change_address (dest, SImode, NULL);
5062 rtx dest2 = frv_index_memory (dest, SImode, 1);
5063 if (ZERO_P (source))
5064 {
5065 emit_move_insn (dest1, CONST0_RTX (SImode));
5066 emit_move_insn (dest2, CONST0_RTX (SImode));
5067 }
5068 else
5069 {
5070 emit_move_insn (dest1, gen_highpart (SImode, source));
5071 emit_move_insn (dest2, gen_lowpart (SImode, source));
5072 }
5073 }
5074
5075 \f
5076 /* Split a min/max operation returning a SEQUENCE containing all of the
5077 insns. */
5078
5079 rtx
5080 frv_split_minmax (rtx operands[])
5081 {
5082 rtx dest = operands[0];
5083 rtx minmax = operands[1];
5084 rtx src1 = operands[2];
5085 rtx src2 = operands[3];
5086 rtx cc_reg = operands[4];
5087 rtx cr_reg = operands[5];
5088 rtx ret;
5089 enum rtx_code test_code;
5090 machine_mode cr_mode = GET_MODE (cr_reg);
5091
5092 start_sequence ();
5093
5094 /* Figure out which test to use. */
5095 switch (GET_CODE (minmax))
5096 {
5097 default:
5098 gcc_unreachable ();
5099
5100 case SMIN: test_code = LT; break;
5101 case SMAX: test_code = GT; break;
5102 case UMIN: test_code = LTU; break;
5103 case UMAX: test_code = GTU; break;
5104 }
5105
5106 /* Issue the compare instruction. */
5107 emit_insn (gen_rtx_SET (VOIDmode,
5108 cc_reg,
5109 gen_rtx_COMPARE (GET_MODE (cc_reg),
5110 src1, src2)));
5111
5112 /* Set the appropriate CCR bit. */
5113 emit_insn (gen_rtx_SET (VOIDmode,
5114 cr_reg,
5115 gen_rtx_fmt_ee (test_code,
5116 GET_MODE (cr_reg),
5117 cc_reg,
5118 const0_rtx)));
5119
5120 /* If we are taking the min/max of a nonzero constant, load that first, and
5121 then do a conditional move of the other value. */
5122 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5123 {
5124 gcc_assert (!rtx_equal_p (dest, src1));
5125
5126 emit_move_insn (dest, src2);
5127 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5128 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5129 gen_rtx_SET (VOIDmode, dest, src1)));
5130 }
5131
5132 /* Otherwise, do each half of the move. */
5133 else
5134 {
5135 /* Emit the conditional move for the test being true if needed. */
5136 if (! rtx_equal_p (dest, src1))
5137 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5138 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5139 gen_rtx_SET (VOIDmode, dest, src1)));
5140
5141 /* Emit the conditional move for the test being false if needed. */
5142 if (! rtx_equal_p (dest, src2))
5143 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5144 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5145 gen_rtx_SET (VOIDmode, dest, src2)));
5146 }
5147
5148 /* Finish up, return sequence. */
5149 ret = get_insns ();
5150 end_sequence ();
5151 return ret;
5152 }
5153
5154 \f
5155 /* Split an integer abs operation returning a SEQUENCE containing all of the
5156 insns. */
5157
5158 rtx
5159 frv_split_abs (rtx operands[])
5160 {
5161 rtx dest = operands[0];
5162 rtx src = operands[1];
5163 rtx cc_reg = operands[2];
5164 rtx cr_reg = operands[3];
5165 rtx ret;
5166
5167 start_sequence ();
5168
5169 /* Issue the compare < 0 instruction. */
5170 emit_insn (gen_rtx_SET (VOIDmode,
5171 cc_reg,
5172 gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5173
5174 /* Set the appropriate CCR bit. */
5175 emit_insn (gen_rtx_SET (VOIDmode,
5176 cr_reg,
5177 gen_rtx_fmt_ee (LT, CC_CCRmode, cc_reg, const0_rtx)));
5178
5179 /* Emit the conditional negate if the value is negative. */
5180 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5181 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5182 gen_negsi2 (dest, src)));
5183
5184 /* Emit the conditional move for the test being false if needed. */
5185 if (! rtx_equal_p (dest, src))
5186 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5187 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5188 gen_rtx_SET (VOIDmode, dest, src)));
5189
5190 /* Finish up, return sequence. */
5191 ret = get_insns ();
5192 end_sequence ();
5193 return ret;
5194 }
5195
5196 \f
5197 /* An internal function called by for_each_rtx to clear, in a HARD_REG_SET,
5198 each register used in an insn. */
5199
5200 static int
5201 frv_clear_registers_used (rtx *ptr, void *data)
5202 {
5203 if (GET_CODE (*ptr) == REG)
5204 {
5205 int regno = REGNO (*ptr);
5206 HARD_REG_SET *p_regs = (HARD_REG_SET *)data;
5207
5208 if (regno < FIRST_PSEUDO_REGISTER)
5209 {
5210 int reg_max = regno + HARD_REGNO_NREGS (regno, GET_MODE (*ptr));
5211
5212 while (regno < reg_max)
5213 {
5214 CLEAR_HARD_REG_BIT (*p_regs, regno);
5215 regno++;
5216 }
5217 }
5218 }
5219
5220 return 0;
5221 }
5222
5223 \f
5224 /* Initialize machine-specific if-conversion data.
5225 On the FR-V, we don't have any extra fields per se, but it is a useful hook to
5226 initialize the static storage. */
5227 void
5228 frv_ifcvt_machdep_init (void *ce_info ATTRIBUTE_UNUSED)
5229 {
5230 frv_ifcvt.added_insns_list = NULL_RTX;
5231 frv_ifcvt.cur_scratch_regs = 0;
5232 frv_ifcvt.num_nested_cond_exec = 0;
5233 frv_ifcvt.cr_reg = NULL_RTX;
5234 frv_ifcvt.nested_cc_reg = NULL_RTX;
5235 frv_ifcvt.extra_int_cr = NULL_RTX;
5236 frv_ifcvt.extra_fp_cr = NULL_RTX;
5237 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5238 }
5239
5240 \f
5241 /* Internal function to add a potential insn to the list of insns to be inserted
5242 if the conditional execution conversion is successful. */
5243
5244 static void
5245 frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p)
5246 {
5247 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5248
5249 link->jump = before_p; /* Mark to add this before or after insn. */
5250 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5251 frv_ifcvt.added_insns_list);
5252
5253 if (TARGET_DEBUG_COND_EXEC)
5254 {
5255 fprintf (stderr,
5256 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5257 (before_p) ? "before" : "after",
5258 (int)INSN_UID (insn));
5259
5260 debug_rtx (pattern);
5261 }
5262 }
5263
5264 \f
5265 /* A C expression to modify the code described by the conditional if
5266 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5267 FALSE_EXPR for converting if-then and if-then-else code to conditional
5268 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5269 tests cannot be converted. */
5270
5271 void
5272 frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false)
5273 {
5274 basic_block test_bb = ce_info->test_bb; /* test basic block */
5275 basic_block then_bb = ce_info->then_bb; /* THEN */
5276 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5277 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5278 rtx true_expr = *p_true;
5279 rtx cr;
5280 rtx cc;
5281 rtx nested_cc;
5282 machine_mode mode = GET_MODE (true_expr);
5283 int j;
5284 basic_block *bb;
5285 int num_bb;
5286 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5287 rtx check_insn;
5288 rtx sub_cond_exec_reg;
5289 enum rtx_code code;
5290 enum rtx_code code_true;
5291 enum rtx_code code_false;
5292 enum reg_class cc_class;
5293 enum reg_class cr_class;
5294 int cc_first;
5295 int cc_last;
5296 reg_set_iterator rsi;
5297
5298 /* Make sure we are only dealing with hard registers. Also honor the
5299 -mno-cond-exec and -mno-nested-cond-exec switches if
5300 applicable. */
5301 if (!reload_completed || !TARGET_COND_EXEC
5302 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5303 goto fail;
5304
5305 /* Figure out which registers we can allocate for our own purposes. Only
5306 consider registers that are not preserved across function calls and are
5307 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5308 if we did not need to use them in reloading other registers. */
5309 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5310 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
5311 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
5312 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5313 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5314
5315 /* If this is a nested IF, we need to discover whether the CC registers that
5316 are set/used inside of the block are used anywhere else. If not, we can
5317 change them to be the CC register that is paired with the CR register that
5318 controls the outermost IF block. */
5319 if (ce_info->pass > 1)
5320 {
5321 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5322 for (j = CC_FIRST; j <= CC_LAST; j++)
5323 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5324 {
5325 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5326 continue;
5327
5328 if (else_bb
5329 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5330 continue;
5331
5332 if (join_bb
5333 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5334 continue;
5335
5336 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5337 }
5338 }
5339
5340 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5341 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5342
5343 frv_ifcvt.added_insns_list = NULL_RTX;
5344 frv_ifcvt.cur_scratch_regs = 0;
5345
5346 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5347 * sizeof (basic_block));
5348
5349 if (join_bb)
5350 {
5351 unsigned int regno;
5352
5353 /* Remove anything live at the beginning of the join block from being
5354 available for allocation. */
5355 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5356 {
5357 if (regno < FIRST_PSEUDO_REGISTER)
5358 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5359 }
5360 }
5361
5362 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5363 num_bb = 0;
5364 if (ce_info->num_multiple_test_blocks)
5365 {
5366 basic_block multiple_test_bb = ce_info->last_test_bb;
5367
5368 while (multiple_test_bb != test_bb)
5369 {
5370 bb[num_bb++] = multiple_test_bb;
5371 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5372 }
5373 }
5374
5375 /* Add in the THEN and ELSE blocks to be scanned. */
5376 bb[num_bb++] = then_bb;
5377 if (else_bb)
5378 bb[num_bb++] = else_bb;
5379
5380 sub_cond_exec_reg = NULL_RTX;
5381 frv_ifcvt.num_nested_cond_exec = 0;
5382
5383 /* Scan all of the blocks for registers that must not be allocated. */
5384 for (j = 0; j < num_bb; j++)
5385 {
5386 rtx_insn *last_insn = BB_END (bb[j]);
5387 rtx_insn *insn = BB_HEAD (bb[j]);
5388 unsigned int regno;
5389
5390 if (dump_file)
5391 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5392 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5393 (int) bb[j]->index,
5394 (int) INSN_UID (BB_HEAD (bb[j])),
5395 (int) INSN_UID (BB_END (bb[j])));
5396
5397 /* Anything live at the beginning of the block is obviously unavailable
5398 for allocation. */
5399 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5400 {
5401 if (regno < FIRST_PSEUDO_REGISTER)
5402 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5403 }
5404
5405 /* Loop through the insns in the block. */
5406 for (;;)
5407 {
5408 /* Mark any new registers that are created as being unavailable for
5409 allocation. Also see if the CC register used in nested IFs can be
5410 reallocated. */
5411 if (INSN_P (insn))
5412 {
5413 rtx pattern;
5414 rtx set;
5415 int skip_nested_if = FALSE;
5416
5417 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5418 (void *)&tmp_reg->regs);
5419
5420 pattern = PATTERN (insn);
5421 if (GET_CODE (pattern) == COND_EXEC)
5422 {
5423 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5424
5425 if (reg != sub_cond_exec_reg)
5426 {
5427 sub_cond_exec_reg = reg;
5428 frv_ifcvt.num_nested_cond_exec++;
5429 }
5430 }
5431
5432 set = single_set_pattern (pattern);
5433 if (set)
5434 {
5435 rtx dest = SET_DEST (set);
5436 rtx src = SET_SRC (set);
5437
5438 if (GET_CODE (dest) == REG)
5439 {
5440 int regno = REGNO (dest);
5441 enum rtx_code src_code = GET_CODE (src);
5442
5443 if (CC_P (regno) && src_code == COMPARE)
5444 skip_nested_if = TRUE;
5445
5446 else if (CR_P (regno)
5447 && (src_code == IF_THEN_ELSE
5448 || COMPARISON_P (src)))
5449 skip_nested_if = TRUE;
5450 }
5451 }
5452
5453 if (! skip_nested_if)
5454 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5455 (void *)&frv_ifcvt.nested_cc_ok_rewrite);
5456 }
5457
5458 if (insn == last_insn)
5459 break;
5460
5461 insn = NEXT_INSN (insn);
5462 }
5463 }
5464
5465 /* If this is a nested if, rewrite the CC registers that are available to
5466 include the ones that can be rewritten, to increase the chance of being
5467 able to allocate a paired CC/CR register combination. */
5468 if (ce_info->pass > 1)
5469 {
5470 for (j = CC_FIRST; j <= CC_LAST; j++)
5471 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5472 SET_HARD_REG_BIT (tmp_reg->regs, j);
5473 else
5474 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5475 }
5476
5477 if (dump_file)
5478 {
5479 int num_gprs = 0;
5480 fprintf (dump_file, "Available GPRs: ");
5481
5482 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5483 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5484 {
5485 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5486 if (++num_gprs > GPR_TEMP_NUM+2)
5487 break;
5488 }
5489
5490 fprintf (dump_file, "%s\nAvailable CRs: ",
5491 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5492
5493 for (j = CR_FIRST; j <= CR_LAST; j++)
5494 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5495 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5496
5497 fputs ("\n", dump_file);
5498
5499 if (ce_info->pass > 1)
5500 {
5501 fprintf (dump_file, "Modifiable CCs: ");
5502 for (j = CC_FIRST; j <= CC_LAST; j++)
5503 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5504 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5505
5506 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5507 frv_ifcvt.num_nested_cond_exec);
5508 }
5509 }
5510
5511 /* Allocate the appropriate temporary condition code register. Try to
5512 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5513 that conditional cmp's can be done. */
5514 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5515 {
5516 cr_class = ICR_REGS;
5517 cc_class = ICC_REGS;
5518 cc_first = ICC_FIRST;
5519 cc_last = ICC_LAST;
5520 }
5521 else if (mode == CC_FPmode)
5522 {
5523 cr_class = FCR_REGS;
5524 cc_class = FCC_REGS;
5525 cc_first = FCC_FIRST;
5526 cc_last = FCC_LAST;
5527 }
5528 else
5529 {
5530 cc_first = cc_last = 0;
5531 cr_class = cc_class = NO_REGS;
5532 }
5533
5534 cc = XEXP (true_expr, 0);
5535 nested_cc = cr = NULL_RTX;
5536 if (cc_class != NO_REGS)
5537 {
5538 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5539 so we can execute a csubcc/caddcc/cfcmps instruction. */
5540 int cc_regno;
5541
5542 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5543 {
5544 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5545
5546 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5547 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5548 {
5549 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5550 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5551 TRUE);
5552
5553 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5554 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5555 TRUE, TRUE);
5556 break;
5557 }
5558 }
5559 }
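  /* Illustrative note (not part of the original source): the pairing above
     relies on the Nth CC register corresponding to the Nth CR register
     (cc_regno - CC_FIRST + CR_FIRST), so that a conditional compare such as
     csubcc can target the CC register whose paired CR bit is the one
     controlling the conditional execution.  */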
5560
5561 if (! cr)
5562 {
5563 if (dump_file)
5564 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5565
5566 goto fail;
5567 }
5568
5569 if (dump_file)
5570 fprintf (dump_file,
5571 "Will use %s for conditional execution, %s for nested comparisons\n",
5572 reg_names[ REGNO (cr)],
5573 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5574
5575 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5576 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5577 bit being true. We don't do this for floating point, because of NaNs. */
5578 code = GET_CODE (true_expr);
5579 if (GET_MODE (cc) != CC_FPmode)
5580 {
5581 code = reverse_condition (code);
5582 code_true = EQ;
5583 code_false = NE;
5584 }
5585 else
5586 {
5587 code_true = NE;
5588 code_false = EQ;
5589 }
5590
5591 check_insn = gen_rtx_SET (VOIDmode, cr,
5592 gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
5593
5594 /* Record the check insn to be inserted later. */
5595 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5596
5597 /* Update the tests. */
5598 frv_ifcvt.cr_reg = cr;
5599 frv_ifcvt.nested_cc_reg = nested_cc;
5600 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5601 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5602 return;
5603
5604 /* Fail, don't do this conditional execution. */
5605 fail:
5606 *p_true = NULL_RTX;
5607 *p_false = NULL_RTX;
5608 if (dump_file)
5609 fprintf (dump_file, "Disabling this conditional execution.\n");
5610
5611 return;
5612 }
5613
5614 \f
5615 /* A C expression to modify the code described by the conditional if
5616 information CE_INFO, for the basic block BB, possibly updating the tests in
5617 TRUE_EXPR and FALSE_EXPR, for converting the && and || parts of if-then or
5618 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5619 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5620
5621 /* p_true and p_false are given expressions of the form:
5622
5623 (and (eq:CC_CCR (reg:CC_CCR)
5624 (const_int 0))
5625 (eq:CC (reg:CC)
5626 (const_int 0))) */
5627
5628 void
5629 frv_ifcvt_modify_multiple_tests (ce_if_block *ce_info,
5630 basic_block bb,
5631 rtx *p_true,
5632 rtx *p_false)
5633 {
5634 rtx old_true = XEXP (*p_true, 0);
5635 rtx old_false = XEXP (*p_false, 0);
5636 rtx true_expr = XEXP (*p_true, 1);
5637 rtx false_expr = XEXP (*p_false, 1);
5638 rtx test_expr;
5639 rtx old_test;
5640 rtx cr = XEXP (old_true, 0);
5641 rtx check_insn;
5642 rtx new_cr = NULL_RTX;
5643 rtx *p_new_cr = (rtx *)0;
5644 rtx if_else;
5645 rtx compare;
5646 rtx cc;
5647 enum reg_class cr_class;
5648 machine_mode mode = GET_MODE (true_expr);
5649 rtx (*logical_func)(rtx, rtx, rtx);
5650
5651 if (TARGET_DEBUG_COND_EXEC)
5652 {
5653 fprintf (stderr,
5654 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5655 ce_info->and_and_p ? "&&" : "||");
5656
5657 debug_rtx (*p_true);
5658
5659 fputs ("\nfalse insn:\n", stderr);
5660 debug_rtx (*p_false);
5661 }
5662
5663 if (!TARGET_MULTI_CE)
5664 goto fail;
5665
5666 if (GET_CODE (cr) != REG)
5667 goto fail;
5668
5669 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5670 {
5671 cr_class = ICR_REGS;
5672 p_new_cr = &frv_ifcvt.extra_int_cr;
5673 }
5674 else if (mode == CC_FPmode)
5675 {
5676 cr_class = FCR_REGS;
5677 p_new_cr = &frv_ifcvt.extra_fp_cr;
5678 }
5679 else
5680 goto fail;
5681
5682 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5683 more &&/|| tests. */
5684 new_cr = *p_new_cr;
5685 if (! new_cr)
5686 {
5687 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5688 CC_CCRmode, TRUE, TRUE);
5689 if (! new_cr)
5690 goto fail;
5691 }
5692
5693 if (ce_info->and_and_p)
5694 {
5695 old_test = old_false;
5696 test_expr = true_expr;
5697 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5698 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5699 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5700 }
5701 else
5702 {
5703 old_test = old_false;
5704 test_expr = false_expr;
5705 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5706 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5707 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5708 }
5709
5710 /* Add the andcr/andncr/orcr/orncr first; since frv_ifcvt_add_insn maintains
5711 a LIFO stack, it will actually be emitted after the conditional check
5712 instruction added below. */
5713 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5714
5715 /* Now add the conditional check insn. */
5716 cc = XEXP (test_expr, 0);
5717 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5718 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5719
5720 check_insn = gen_rtx_SET (VOIDmode, new_cr, if_else);
5721
5722 /* Add the new check insn to the list of check insns that need to be
5723 inserted. */
5724 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5725
5726 if (TARGET_DEBUG_COND_EXEC)
5727 {
5728 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5729 stderr);
5730
5731 debug_rtx (*p_true);
5732
5733 fputs ("\nfalse insn:\n", stderr);
5734 debug_rtx (*p_false);
5735 }
5736
5737 return;
5738
5739 fail:
5740 *p_true = *p_false = NULL_RTX;
5741
5742 /* If we allocated a CR register, release it. */
5743 if (new_cr)
5744 {
5745 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5746 *p_new_cr = NULL_RTX;
5747 }
5748
5749 if (TARGET_DEBUG_COND_EXEC)
5750 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5751
5752 return;
5753 }
5754
5755 \f
5756 /* Return a register which will be loaded with a value if an IF block is
5757 converted to conditional execution. This is used to rewrite instructions
5758 that use constants to ones that just use registers. */
5759
5760 static rtx
5761 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5762 {
5763 int num_alloc = frv_ifcvt.cur_scratch_regs;
5764 int i;
5765 rtx reg;
5766
5767 /* We know gr0 == 0, so replace any errant uses. */
5768 if (value == const0_rtx)
5769 return gen_rtx_REG (SImode, GPR_FIRST);
5770
5771 /* First search all registers currently loaded to see if we have an
5772 applicable constant. */
5773 if (CONSTANT_P (value)
5774 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5775 {
5776 for (i = 0; i < num_alloc; i++)
5777 {
5778 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5779 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5780 }
5781 }
5782
5783 /* Have we exhausted the number of registers available? */
5784 if (num_alloc >= GPR_TEMP_NUM)
5785 {
5786 if (dump_file)
5787 fprintf (dump_file, "Too many temporary registers allocated\n");
5788
5789 return NULL_RTX;
5790 }
5791
5792 /* Allocate the new register. */
5793 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5794 if (! reg)
5795 {
5796 if (dump_file)
5797 fputs ("Could not find a scratch register\n", dump_file);
5798
5799 return NULL_RTX;
5800 }
5801
5802 frv_ifcvt.cur_scratch_regs++;
5803 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (VOIDmode, reg, value);
5804
5805 if (dump_file)
5806 {
5807 if (GET_CODE (value) == CONST_INT)
5808 fprintf (dump_file, "Register %s will hold %ld\n",
5809 reg_names[ REGNO (reg)], (long)INTVAL (value));
5810
5811 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5812 fprintf (dump_file, "Register %s will hold LR\n",
5813 reg_names[ REGNO (reg)]);
5814
5815 else
5816 fprintf (dump_file, "Register %s will hold a saved value\n",
5817 reg_names[ REGNO (reg)]);
5818 }
5819
5820 return reg;
5821 }
5822
5823 \f
5824 /* Update a MEM used in conditional code that might contain an offset to put
5825 the offset into a scratch register, so that the conditional load/store
5826 operations can be used. This function returns the original pointer if the
5827 MEM is valid to use in conditional code, NULL if we can't load up the offset
5828 into a temporary register, or the new MEM if we were successful. */
5829
5830 static rtx
5831 frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn)
5832 {
5833 rtx addr = XEXP (mem, 0);
5834
5835 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE))
5836 {
5837 if (GET_CODE (addr) == PLUS)
5838 {
5839 rtx addr_op0 = XEXP (addr, 0);
5840 rtx addr_op1 = XEXP (addr, 1);
5841
5842 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5843 {
5844 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5845 if (!reg)
5846 return NULL_RTX;
5847
5848 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5849 }
5850
5851 else
5852 return NULL_RTX;
5853 }
5854
5855 else if (CONSTANT_P (addr))
5856 addr = frv_ifcvt_load_value (addr, insn);
5857
5858 else
5859 return NULL_RTX;
5860
5861 if (addr == NULL_RTX)
5862 return NULL_RTX;
5863
5864 else if (XEXP (mem, 0) != addr)
5865 return change_address (mem, mode, addr);
5866 }
5867
5868 return mem;
5869 }
5870
5871 \f
5872 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5873 SET, possibly conditionally executed. It may also have CLOBBERs and USEs. */
5874
5875 static rtx
5876 single_set_pattern (rtx pattern)
5877 {
5878 rtx set;
5879 int i;
5880
5881 if (GET_CODE (pattern) == COND_EXEC)
5882 pattern = COND_EXEC_CODE (pattern);
5883
5884 if (GET_CODE (pattern) == SET)
5885 return pattern;
5886
5887 else if (GET_CODE (pattern) == PARALLEL)
5888 {
5889 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5890 {
5891 rtx sub = XVECEXP (pattern, 0, i);
5892
5893 switch (GET_CODE (sub))
5894 {
5895 case USE:
5896 case CLOBBER:
5897 break;
5898
5899 case SET:
5900 if (set)
5901 return 0;
5902 else
5903 set = sub;
5904 break;
5905
5906 default:
5907 return 0;
5908 }
5909 }
5910 return set;
5911 }
5912
5913 return 0;
5914 }
5915
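/* Illustrative note (not part of the original source): a typical pattern
   accepted here looks like

     (parallel [(set (reg:SI 10) (plus:SI (reg:SI 11) (reg:SI 12)))
                (clobber (reg:CC 145))])

   for which single_set_pattern returns the inner SET.  A PARALLEL that
   contains two SETs, or anything other than SET/USE/CLOBBER, makes it
   return 0; a COND_EXEC wrapper is stripped before the check.  The
   register numbers above are purely hypothetical.  */
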
5916 \f
5917 /* A C expression to modify the code described by the conditional if
5918 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5919 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5920 insn cannot be converted to be executed conditionally. */
5921
5922 rtx
5923 frv_ifcvt_modify_insn (ce_if_block *ce_info,
5924 rtx pattern,
5925 rtx insn)
5926 {
5927 rtx orig_ce_pattern = pattern;
5928 rtx set;
5929 rtx op0;
5930 rtx op1;
5931 rtx test;
5932
5933 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5934
5935 test = COND_EXEC_TEST (pattern);
5936 if (GET_CODE (test) == AND)
5937 {
5938 rtx cr = frv_ifcvt.cr_reg;
5939 rtx test_reg;
5940
5941 op0 = XEXP (test, 0);
5942 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5943 goto fail;
5944
5945 op1 = XEXP (test, 1);
5946 test_reg = XEXP (op1, 0);
5947 if (GET_CODE (test_reg) != REG)
5948 goto fail;
5949
5950 /* Is this the first nested if block in this sequence? If so, generate
5951 an andcr or andncr. */
5952 if (! frv_ifcvt.last_nested_if_cr)
5953 {
5954 rtx and_op;
5955
5956 frv_ifcvt.last_nested_if_cr = test_reg;
5957 if (GET_CODE (op0) == NE)
5958 and_op = gen_andcr (test_reg, cr, test_reg);
5959 else
5960 and_op = gen_andncr (test_reg, cr, test_reg);
5961
5962 frv_ifcvt_add_insn (and_op, insn, TRUE);
5963 }
5964
5965 /* If this isn't the first statement in the nested if sequence, see if we
5966 are dealing with the same register. */
5967 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5968 goto fail;
5969
5970 COND_EXEC_TEST (pattern) = test = op1;
5971 }
5972
5973 /* If this isn't a nested if, reset state variables. */
5974 else
5975 {
5976 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5977 }
5978
5979 set = single_set_pattern (pattern);
5980 if (set)
5981 {
5982 rtx dest = SET_DEST (set);
5983 rtx src = SET_SRC (set);
5984 machine_mode mode = GET_MODE (dest);
5985
5986 /* Check for normal binary operators. */
5987 if (mode == SImode && ARITHMETIC_P (src))
5988 {
5989 op0 = XEXP (src, 0);
5990 op1 = XEXP (src, 1);
5991
5992 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
5993 {
5994 op1 = frv_ifcvt_load_value (op1, insn);
5995 if (op1)
5996 COND_EXEC_CODE (pattern)
5997 = gen_rtx_SET (VOIDmode, dest, gen_rtx_fmt_ee (GET_CODE (src),
5998 GET_MODE (src),
5999 op0, op1));
6000 else
6001 goto fail;
6002 }
6003 }
6004
6005 /* For multiplication by a constant, we need to handle the sign extension
6006 correctly. Add a USE of the value after the multiply to prevent flow
6007 from cratering because only one register out of the two was used. */
6008 else if (mode == DImode && GET_CODE (src) == MULT)
6009 {
6010 op0 = XEXP (src, 0);
6011 op1 = XEXP (src, 1);
6012 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
6013 {
6014 op1 = frv_ifcvt_load_value (op1, insn);
6015 if (op1)
6016 {
6017 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
6018 COND_EXEC_CODE (pattern)
6019 = gen_rtx_SET (VOIDmode, dest,
6020 gen_rtx_MULT (DImode, op0, op1));
6021 }
6022 else
6023 goto fail;
6024 }
6025
6026 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
6027 }
6028
6029 /* If we are just loading a constant created for a nested conditional
6030 execution statement, just load the constant without any conditional
6031 execution, since we know that the constant will not interfere with any
6032 other registers. */
6033 else if (frv_ifcvt.scratch_insns_bitmap
6034 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
6035 INSN_UID (insn))
6036 && REG_P (SET_DEST (set))
6037 /* We must not unconditionally set a scratch reg chosen
6038 for a nested if-converted block if its incoming
6039 value from the TEST block (or the result of the THEN
6040 branch) could/should propagate to the JOIN block.
6041 It suffices to test whether the register is live at
6042 the JOIN point: if it's live there, we can infer
6043 that we set it in the former JOIN block of the
6044 nested if-converted block (otherwise it wouldn't
6045 have been available as a scratch register), and it
6046 is either propagated through or set in the other
6047 conditional block. It's probably not worth trying
6048 to catch the latter case, and it could actually
6049 limit scheduling of the combined block quite
6050 severely. */
6051 && ce_info->join_bb
6052 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
6053 REGNO (SET_DEST (set))))
6054 /* Similarly, we must not unconditionally set a reg
6055 used as scratch in the THEN branch if the same reg
6056 is live in the ELSE branch. */
6057 && (! ce_info->else_bb
6058 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
6059 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
6060 REGNO (SET_DEST (set))))))
6061 pattern = set;
6062
6063 else if (mode == QImode || mode == HImode || mode == SImode
6064 || mode == SFmode)
6065 {
6066 int changed_p = FALSE;
6067
6068 /* Check for just loading up a constant */
6069 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
6070 {
6071 src = frv_ifcvt_load_value (src, insn);
6072 if (!src)
6073 goto fail;
6074
6075 changed_p = TRUE;
6076 }
6077
6078 /* See if we need to fix up stores */
6079 if (GET_CODE (dest) == MEM)
6080 {
6081 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
6082
6083 if (!new_mem)
6084 goto fail;
6085
6086 else if (new_mem != dest)
6087 {
6088 changed_p = TRUE;
6089 dest = new_mem;
6090 }
6091 }
6092
6093 /* See if we need to fix up loads */
6094 if (GET_CODE (src) == MEM)
6095 {
6096 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
6097
6098 if (!new_mem)
6099 goto fail;
6100
6101 else if (new_mem != src)
6102 {
6103 changed_p = TRUE;
6104 src = new_mem;
6105 }
6106 }
6107
6108 /* If either src or destination changed, redo SET. */
6109 if (changed_p)
6110 COND_EXEC_CODE (pattern) = gen_rtx_SET (VOIDmode, dest, src);
6111 }
6112
6113 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
6114 rewriting the CC register to be the same as the paired CC/CR register
6115 for nested ifs. */
6116 else if (mode == CC_CCRmode && COMPARISON_P (src))
6117 {
6118 int regno = REGNO (XEXP (src, 0));
6119 rtx if_else;
6120
6121 if (ce_info->pass > 1
6122 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6123 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6124 {
6125 src = gen_rtx_fmt_ee (GET_CODE (src),
6126 CC_CCRmode,
6127 frv_ifcvt.nested_cc_reg,
6128 XEXP (src, 1));
6129 }
6130
6131 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6132 pattern = gen_rtx_SET (VOIDmode, dest, if_else);
6133 }
6134
6135 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6136 else if (ce_info->pass > 1
6137 && GET_CODE (dest) == REG
6138 && CC_P (REGNO (dest))
6139 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6140 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6141 REGNO (dest))
6142 && GET_CODE (src) == COMPARE)
6143 {
6144 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6145 COND_EXEC_CODE (pattern)
6146 = gen_rtx_SET (VOIDmode, frv_ifcvt.nested_cc_reg, copy_rtx (src));
6147 }
6148 }
6149
6150 if (TARGET_DEBUG_COND_EXEC)
6151 {
6152 rtx orig_pattern = PATTERN (insn);
6153
6154 PATTERN (insn) = pattern;
6155 fprintf (stderr,
6156 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6157 ce_info->pass);
6158
6159 debug_rtx (insn);
6160 PATTERN (insn) = orig_pattern;
6161 }
6162
6163 return pattern;
6164
6165 fail:
6166 if (TARGET_DEBUG_COND_EXEC)
6167 {
6168 rtx orig_pattern = PATTERN (insn);
6169
6170 PATTERN (insn) = orig_ce_pattern;
6171 fprintf (stderr,
6172 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6173 ce_info->pass);
6174
6175 debug_rtx (insn);
6176 PATTERN (insn) = orig_pattern;
6177 }
6178
6179 return NULL_RTX;
6180 }
6181
6182 \f
6183 /* A C expression to perform any final machine dependent modifications in
6184 converting code to conditional execution in the code described by the
6185 conditional if information CE_INFO. */
6186
6187 void
6188 frv_ifcvt_modify_final (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6189 {
6190 rtx existing_insn;
6191 rtx check_insn;
6192 rtx p = frv_ifcvt.added_insns_list;
6193 int i;
6194
6195 /* Loop inserting the check insns. The last check insn is the first test,
6196 and is the appropriate place to insert constants. */
6197 gcc_assert (p);
6198
6199 do
6200 {
6201 rtx check_and_insert_insns = XEXP (p, 0);
6202 rtx old_p = p;
6203
6204 check_insn = XEXP (check_and_insert_insns, 0);
6205 existing_insn = XEXP (check_and_insert_insns, 1);
6206 p = XEXP (p, 1);
6207
6208 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6209 the existing insn, otherwise it is to be inserted AFTER. */
6210 if (check_and_insert_insns->jump)
6211 {
6212 emit_insn_before (check_insn, existing_insn);
6213 check_and_insert_insns->jump = 0;
6214 }
6215 else
6216 emit_insn_after (check_insn, existing_insn);
6217
6218 free_EXPR_LIST_node (check_and_insert_insns);
6219 free_EXPR_LIST_node (old_p);
6220 }
6221 while (p != NULL_RTX);
6222
6223 /* Load up any constants needed into temp gprs */
6224 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6225 {
6226 rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6227 if (! frv_ifcvt.scratch_insns_bitmap)
6228 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6229 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6230 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6231 }
6232
6233 frv_ifcvt.added_insns_list = NULL_RTX;
6234 frv_ifcvt.cur_scratch_regs = 0;
6235 }
6236
6237 \f
6238 /* A C expression to cancel any machine dependent modifications in converting
6239 code to conditional execution in the code described by the conditional if
6240 information CE_INFO. */
6241
6242 void
6243 frv_ifcvt_modify_cancel (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6244 {
6245 int i;
6246 rtx p = frv_ifcvt.added_insns_list;
6247
6248 /* Loop freeing up the EXPR_LIST's allocated. */
6249 while (p != NULL_RTX)
6250 {
6251 rtx check_and_jump = XEXP (p, 0);
6252 rtx old_p = p;
6253
6254 p = XEXP (p, 1);
6255 free_EXPR_LIST_node (check_and_jump);
6256 free_EXPR_LIST_node (old_p);
6257 }
6258
6259 /* Release any temporary gprs allocated. */
6260 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6261 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6262
6263 frv_ifcvt.added_insns_list = NULL_RTX;
6264 frv_ifcvt.cur_scratch_regs = 0;
6265 return;
6266 }
6267 \f
6268 /* A C expression for the size in bytes of the trampoline, as an integer.
6269 The template is:
6270
6271 setlo #0, <jmp_reg>
6272 setlo #0, <static_chain>
6273 sethi #0, <jmp_reg>
6274 sethi #0, <static_chain>
6275 jmpl @(gr0,<jmp_reg>) */
6276
6277 int
6278 frv_trampoline_size (void)
6279 {
6280 if (TARGET_FDPIC)
6281 /* Allocate room for the function descriptor and the lddi
6282 instruction. */
6283 return 8 + 6 * 4;
6284 return 5 /* instructions */ * 4 /* instruction size. */;
6285 }
6286
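/* Illustrative arithmetic (not part of the original source): with 4-byte
   instructions, the non-FDPIC trampoline is 5 * 4 = 20 bytes for the five
   instructions in the template above, while the FDPIC variant needs 8
   bytes for the function descriptor plus 6 * 4 = 24 bytes of code, giving
   32 bytes in total.  */
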
6287 \f
6288 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6289 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6290 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6291 should be passed to the function when it is called.
6292
6293 The template is:
6294
6295 setlo #0, <jmp_reg>
6296 setlo #0, <static_chain>
6297 sethi #0, <jmp_reg>
6298 sethi #0, <static_chain>
6299 jmpl @(gr0,<jmp_reg>) */
6300
6301 static void
6302 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
6303 {
6304 rtx addr = XEXP (m_tramp, 0);
6305 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6306 rtx sc_reg = force_reg (Pmode, static_chain);
6307
6308 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6309 LCT_NORMAL, VOIDmode, 4,
6310 addr, Pmode,
6311 GEN_INT (frv_trampoline_size ()), SImode,
6312 fnaddr, Pmode,
6313 sc_reg, Pmode);
6314 }
6315
6316 \f
6317 /* Many machines have some registers that cannot be copied directly to or from
6318 memory or even from other types of registers. An example is the `MQ'
6319 register, which on most machines, can only be copied to or from general
6320 registers, but not memory. Some machines allow copying all registers to and
6321 from memory, but require a scratch register for stores to some memory
6322 locations (e.g., those with symbolic address on the RT, and those with
6323 certain symbolic address on the SPARC when compiling PIC). In some cases,
6324 both an intermediate and a scratch register are required.
6325
6326 You should define these macros to indicate to the reload phase that it may
6327 need to allocate at least one register for a reload in addition to the
6328 register to contain the data. Specifically, if copying X to a register
6329 RCLASS in MODE requires an intermediate register, you should define
6330 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6331 whose registers can be used as intermediate registers or scratch registers.
6332
6333 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6334 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6335 largest register class required. If the requirements for input and output
6336 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6337 instead of defining both macros identically.
6338
6339 The values returned by these macros are often `GENERAL_REGS'. Return
6340 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6341 to or from a register of RCLASS in MODE without requiring a scratch register.
6342 Do not define this macro if it would always return `NO_REGS'.
6343
6344 If a scratch register is required (either with or without an intermediate
6345 register), you should define patterns for `reload_inM' or `reload_outM', as
6346 required. These patterns, which will normally be implemented with a
6347 `define_expand', should be similar to the `movM' patterns, except that
6348 operand 2 is the scratch register.
6349
6350 Define constraints for the reload register and scratch register that contain
6351 a single register class. If the original reload register (whose class is
6352 RCLASS) can meet the constraint given in the pattern, the value returned by
6353 these macros is used for the class of the scratch register. Otherwise, two
6354 additional reload registers are required. Their classes are obtained from
6355 the constraints in the insn pattern.
6356
6357 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6358 either be in a hard register or in memory. Use `true_regnum' to find out;
6359 it will return -1 if the pseudo is in memory and the hard register number if
6360 it is in a register.
6361
6362 These macros should not be used in the case where a particular class of
6363 registers can only be copied to memory and not to another class of
6364 registers. In that case, secondary reload registers are not needed and
6365 would not be helpful. Instead, a stack location must be used to perform the
6366 copy and the `movM' pattern should use memory as an intermediate storage.
6367 This case often occurs between floating-point and general registers. */
6368
6369 enum reg_class
6370 frv_secondary_reload_class (enum reg_class rclass,
6371 machine_mode mode ATTRIBUTE_UNUSED,
6372 rtx x)
6373 {
6374 enum reg_class ret;
6375
6376 switch (rclass)
6377 {
6378 default:
6379 ret = NO_REGS;
6380 break;
6381
6382 /* Accumulators/Accumulator guard registers need to go through floating
6383 point registers. */
6384 case QUAD_REGS:
6385 case GPR_REGS:
6386 ret = NO_REGS;
6387 if (x && GET_CODE (x) == REG)
6388 {
6389 int regno = REGNO (x);
6390
6391 if (ACC_P (regno) || ACCG_P (regno))
6392 ret = FPR_REGS;
6393 }
6394 break;
6395
6396 /* Nonzero constants should be loaded into an FPR through a GPR. */
6397 case QUAD_FPR_REGS:
6398 if (x && CONSTANT_P (x) && !ZERO_P (x))
6399 ret = GPR_REGS;
6400 else
6401 ret = NO_REGS;
6402 break;
6403
6404 /* All of these types need gpr registers. */
6405 case ICC_REGS:
6406 case FCC_REGS:
6407 case CC_REGS:
6408 case ICR_REGS:
6409 case FCR_REGS:
6410 case CR_REGS:
6411 case LCR_REG:
6412 case LR_REG:
6413 ret = GPR_REGS;
6414 break;
6415
6416 /* The accumulators need fpr registers. */
6417 case QUAD_ACC_REGS:
6418 case ACCG_REGS:
6419 ret = FPR_REGS;
6420 break;
6421 }
6422
6423 return ret;
6424 }
6425
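/* Illustrative note (not part of the original source): a call such as

     frv_secondary_reload_class (GPR_REGS, SImode, x)

   returns FPR_REGS when X is an accumulator or accumulator-guard register,
   because such values can only reach the GPRs via the FPRs.  Reloads for
   the condition-code and CR classes (ICC_REGS, FCR_REGS, ...) always go
   through GPR_REGS, and a plain GPR<->GPR or GPR<->memory copy needs no
   intermediate, so NO_REGS is returned.  */
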
6426 /* This hook exists to catch the case where secondary_reload_class() is
6427 called from init_reg_autoinc() in regclass.c - before the reload optabs
6428 have been initialised. */
6429
6430 static reg_class_t
6431 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
6432 machine_mode reload_mode,
6433 secondary_reload_info * sri)
6434 {
6435 enum reg_class rclass = NO_REGS;
6436 enum reg_class reload_class = (enum reg_class) reload_class_i;
6437
6438 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6439 {
6440 sri->icode = sri->prev_sri->t_icode;
6441 return NO_REGS;
6442 }
6443
6444 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6445
6446 if (rclass != NO_REGS)
6447 {
6448 enum insn_code icode
6449 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
6450 reload_mode);
6451 if (icode == 0)
6452 {
6453 /* This happens when the reload_[in|out]_optabs have
6454 not been initialised. */
6455 sri->t_icode = CODE_FOR_nothing;
6456 return rclass;
6457 }
6458 }
6459
6460 /* Fall back to the default secondary reload handler. */
6461 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6462
6463 }
6464 \f
6465 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */
6466
6467 static bool
6468 frv_class_likely_spilled_p (reg_class_t rclass)
6469 {
6470 switch (rclass)
6471 {
6472 default:
6473 break;
6474
6475 case GR8_REGS:
6476 case GR9_REGS:
6477 case GR89_REGS:
6478 case FDPIC_FPTR_REGS:
6479 case FDPIC_REGS:
6480 case ICC_REGS:
6481 case FCC_REGS:
6482 case CC_REGS:
6483 case ICR_REGS:
6484 case FCR_REGS:
6485 case CR_REGS:
6486 case LCR_REG:
6487 case LR_REG:
6488 case SPR_REGS:
6489 case QUAD_ACC_REGS:
6490 case ACCG_REGS:
6491 return true;
6492 }
6493
6494 return false;
6495 }
6496
6497 \f
6498 /* An expression for the alignment of a structure field FIELD if the
6499 alignment computed in the usual way is COMPUTED. GCC uses this
6500 value instead of the value in `BIGGEST_ALIGNMENT' or
6501 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6502
6503 /* A bit field is declared with type char, short, long or long long; its
6504 maximum width is the number of bits of that type.
6505 
6506 A bit field is assigned to the lowest-addressed storage unit that is
6507 large enough to hold it.
6508 
6509 Consecutive bit fields are packed into consecutive bits of the same
6510 storage unit, according to their type, beginning with the MSB and
6511 continuing toward the LSB.
6512 
6513 If a field would straddle a boundary of its bit field type, its
6514 assignment is completed by aligning it with the next boundary suitable
6515 for that type.
6516 
6517 When a bit field with a width of 0 is declared, allocation is forced to
6518 continue at the next storage unit.
6519
6520 e.g.)
6521 struct {
6522 int a:2;
6523 int b:6;
6524 char c:4;
6525 int d:10;
6526 int :0;
6527 int f:2;
6528 } x;
6529
6530 +0 +1 +2 +3
6531 &x 00000000 00000000 00000000 00000000
6532 MLM----L
6533 a b
6534 &x+4 00000000 00000000 00000000 00000000
6535 M--L
6536 c
6537 &x+8 00000000 00000000 00000000 00000000
6538 M----------L
6539 d
6540 &x+12 00000000 00000000 00000000 00000000
6541 ML
6542 f
6543 */
6544
6545 int
6546 frv_adjust_field_align (tree field, int computed)
6547 {
6548 /* Make sure that the bitfield is not wider than the type. */
6549 if (DECL_BIT_FIELD (field)
6550 && !DECL_ARTIFICIAL (field))
6551 {
6552 tree parent = DECL_CONTEXT (field);
6553 tree prev = NULL_TREE;
6554 tree cur;
6555
6556 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur))
6557 {
6558 if (TREE_CODE (cur) != FIELD_DECL)
6559 continue;
6560
6561 prev = cur;
6562 }
6563
6564 gcc_assert (cur);
6565
6566 /* If this isn't a :0 field and if the previous element is a bitfield
6567 also, see if the type is different, if so, we will need to align the
6568 bit-field to the next boundary. */
6569 if (prev
6570 && ! DECL_PACKED (field)
6571 && ! integer_zerop (DECL_SIZE (field))
6572 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6573 {
6574 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6575 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6576 computed = (prev_align > cur_align) ? prev_align : cur_align;
6577 }
6578 }
6579
6580 return computed;
6581 }
6582
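/* Illustrative note (not part of the original source): in the sample
   structure shown above, field `c' is a char bit-field that follows the
   int bit-field `b'.  DECL_BIT_FIELD_TYPE therefore differs between the
   two fields, so the alignment is bumped to the larger of
   TYPE_ALIGN (int) and TYPE_ALIGN (char), i.e. 32 bits, which is why `c'
   starts at &x+4 instead of being packed immediately after `b'.  */
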
6583 \f
6584 /* A C expression that is nonzero if it is permissible to store a value of mode
6585 MODE in hard register number REGNO (or in several registers starting with
6586 that one). For a machine where all registers are equivalent, a suitable
6587 definition is
6588
6589 #define HARD_REGNO_MODE_OK(REGNO, MODE) 1
6590
6591 It is not necessary for this macro to check for the numbers of fixed
6592 registers, because the allocation mechanism considers them to be always
6593 occupied.
6594
6595 On some machines, double-precision values must be kept in even/odd register
6596 pairs. The way to implement that is to define this macro to reject odd
6597 register numbers for such modes.
6598
6599 The minimum requirement for a mode to be OK in a register is that the
6600 `movMODE' instruction pattern support moves between the register and any
6601 other hard register for which the mode is OK; and that moving a value into
6602 the register and back out not alter it.
6603
6604 Since the same instruction used to move `SImode' will work for all narrower
6605 integer modes, it is not necessary on any machine for `HARD_REGNO_MODE_OK'
6606 to distinguish between these modes, provided you define patterns `movhi',
6607 etc., to take advantage of this. This is useful because of the interaction
6608 between `HARD_REGNO_MODE_OK' and `MODES_TIEABLE_P'; it is very desirable for
6609 all integer modes to be tieable.
6610
6611 Many machines have special registers for floating point arithmetic. Often
6612 people assume that floating point machine modes are allowed only in floating
6613 point registers. This is not true. Any registers that can hold integers
6614 can safely *hold* a floating point machine mode, whether or not floating
6615 arithmetic can be done on it in those registers. Integer move instructions
6616 can be used to move the values.
6617
6618 On some machines, though, the converse is true: fixed-point machine modes
6619 may not go in floating registers. This is true if the floating registers
6620 normalize any value stored in them, because storing a non-floating value
6621 there would garble it. In this case, `HARD_REGNO_MODE_OK' should reject
6622 fixed-point machine modes in floating registers. But if the floating
6623 registers do not automatically normalize, if you can store any bit pattern
6624 in one and retrieve it unchanged without a trap, then any machine mode may
6625 go in a floating register, so you can define this macro to say so.
6626
6627 The primary significance of special floating registers is rather that they
6628 are the registers acceptable in floating point arithmetic instructions.
6629 However, this is of no concern to `HARD_REGNO_MODE_OK'. You handle it by
6630 writing the proper constraints for those instructions.
6631
6632 On some machines, the floating registers are especially slow to access, so
6633 that it is better to store a value in a stack frame than in such a register
6634 if floating point arithmetic is not being done. As long as the floating
6635 registers are not in class `GENERAL_REGS', they will not be used unless some
6636 pattern's constraint asks for one. */
6637
6638 int
6639 frv_hard_regno_mode_ok (int regno, machine_mode mode)
6640 {
6641 int base;
6642 int mask;
6643
6644 switch (mode)
6645 {
6646 case CCmode:
6647 case CC_UNSmode:
6648 case CC_NZmode:
6649 return ICC_P (regno) || GPR_P (regno);
6650
6651 case CC_CCRmode:
6652 return CR_P (regno) || GPR_P (regno);
6653
6654 case CC_FPmode:
6655 return FCC_P (regno) || GPR_P (regno);
6656
6657 default:
6658 break;
6659 }
6660
6661 /* Set BASE to the first register in REGNO's class. Set MASK to the
6662 bits that must be clear in (REGNO - BASE) for the register to be
6663 well-aligned. */
6664 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6665 {
6666 if (ACCG_P (regno))
6667 {
6668 /* ACCGs store one byte. Two-byte quantities must start in
6669 even-numbered registers, four-byte ones in registers whose
6670 numbers are divisible by four, and so on. */
6671 base = ACCG_FIRST;
6672 mask = GET_MODE_SIZE (mode) - 1;
6673 }
6674 else
6675 {
6676 /* The other registers store one word. */
6677 if (GPR_P (regno) || regno == AP_FIRST)
6678 base = GPR_FIRST;
6679
6680 else if (FPR_P (regno))
6681 base = FPR_FIRST;
6682
6683 else if (ACC_P (regno))
6684 base = ACC_FIRST;
6685
6686 else if (SPR_P (regno))
6687 return mode == SImode;
6688
6689 /* Fill in the table. */
6690 else
6691 return 0;
6692
6693 /* Anything smaller than an SI is OK in any word-sized register. */
6694 if (GET_MODE_SIZE (mode) < 4)
6695 return 1;
6696
6697 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6698 }
6699 return (((regno - base) & mask) == 0);
6700 }
6701
6702 return 0;
6703 }
6704
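/* Illustrative note (not part of the original source): for the word-sized
   register files the mask above is GET_MODE_SIZE (mode) / 4 - 1.  A DImode
   value (8 bytes) therefore gets mask 1 and is only OK at even offsets
   from the start of the class, a TImode value (16 bytes) gets mask 3 and
   must start at an offset divisible by four, and anything of word size or
   smaller is accepted in any word-sized register.  */
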
6705 \f
6706 /* A C expression for the number of consecutive hard registers, starting at
6707 register number REGNO, required to hold a value of mode MODE.
6708
6709 On a machine where all registers are exactly one word, a suitable definition
6710 of this macro is
6711
6712 #define HARD_REGNO_NREGS(REGNO, MODE) \
6713 ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
6714 / UNITS_PER_WORD) */
6715
6716 /* On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6717 that we can build the appropriate instructions to properly reload the
6718 values. Also, make the byte-sized accumulator guards use one guard
6719 for each byte. */
6720
6721 int
6722 frv_hard_regno_nregs (int regno, machine_mode mode)
6723 {
6724 if (ACCG_P (regno))
6725 return GET_MODE_SIZE (mode);
6726 else
6727 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6728 }
6729
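/* Illustrative arithmetic (not part of the original source): with
   UNITS_PER_WORD == 4, a DImode value occupies (8 + 3) / 4 = 2 ordinary
   registers, but 8 accumulator guards, since each ACCG holds a single
   byte.  */
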
6730 \f
6731 /* A C expression for the maximum number of consecutive registers of
6732 class RCLASS needed to hold a value of mode MODE.
6733
6734 This is closely related to the macro `HARD_REGNO_NREGS'. In fact, the value
6735 of the macro `CLASS_MAX_NREGS (RCLASS, MODE)' should be the maximum value of
6736 `HARD_REGNO_NREGS (REGNO, MODE)' for all REGNO values in the class RCLASS.
6737
6738 This macro helps control the handling of multiple-word values in
6739 the reload pass.
6740
6741 This declaration is required. */
6742
6743 int
6744 frv_class_max_nregs (enum reg_class rclass, machine_mode mode)
6745 {
6746 if (rclass == ACCG_REGS)
6747 /* An N-byte value requires N accumulator guards. */
6748 return GET_MODE_SIZE (mode);
6749 else
6750 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6751 }
6752
6753 \f
6754 /* A C expression that is nonzero if X is a legitimate constant for an
6755 immediate operand on the target machine. You can assume that X satisfies
6756 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6757 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6758
6759 static bool
6760 frv_legitimate_constant_p (machine_mode mode, rtx x)
6761 {
6762 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6763 means that the move expanders will be expected to deal with most
6764 kinds of constant, regardless of what we return here.
6765
6766 However, among its other duties, frv_legitimate_constant_p decides whether
6767 a constant can be entered into reg_equiv_constant[]. If we return true,
6768 reload can create new instances of the constant whenever it likes.
6769
6770 The idea is therefore to accept as many constants as possible (to give
6771 reload more freedom) while rejecting constants that can only be created
6772 at certain times. In particular, anything with a symbolic component will
6773 require use of the pseudo FDPIC register, which is only available before
6774 reload. */
6775 if (TARGET_FDPIC)
6776 return LEGITIMATE_PIC_OPERAND_P (x);
6777
6778 /* All of the integer constants are ok. */
6779 if (GET_CODE (x) != CONST_DOUBLE)
6780 return TRUE;
6781
6782 /* double integer constants are ok. */
6783 if (GET_MODE (x) == VOIDmode || mode == DImode)
6784 return TRUE;
6785
6786 /* 0 is always ok. */
6787 if (x == CONST0_RTX (mode))
6788 return TRUE;
6789
6790 /* If floating point is just emulated, allow any constant, since it will be
6791 constructed in the GPRs. */
6792 if (!TARGET_HAS_FPRS)
6793 return TRUE;
6794
6795 if (mode == DFmode && !TARGET_DOUBLE)
6796 return TRUE;
6797
6798 /* Otherwise store the constant away and do a load. */
6799 return FALSE;
6800 }
6801
6802 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6803 CC_NZ for comparisons against zero in which a single Z or N flag test
6804 is enough, CC_UNS for other unsigned comparisons, and CC for other
6805 signed comparisons. */
6806
6807 machine_mode
6808 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6809 {
6810 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6811 return CC_FPmode;
6812
6813 switch (code)
6814 {
6815 case EQ:
6816 case NE:
6817 case LT:
6818 case GE:
6819 return y == const0_rtx ? CC_NZmode : CCmode;
6820
6821 case GTU:
6822 case GEU:
6823 case LTU:
6824 case LEU:
6825 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6826
6827 default:
6828 return CCmode;
6829 }
6830 }
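
/* Illustrative note (not part of the original source): a few sample
   choices made by frv_select_cc_mode:

     eq/ne/lt/ge against const0_rtx      -> CC_NZmode
     gtu/geu/ltu/leu against const0_rtx  -> CC_NZmode
     other unsigned comparisons          -> CC_UNSmode
     gt/le and other signed comparisons  -> CCmode
     any floating-point operands         -> CC_FPmode  */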
6831 \f
6832
6833 /* Worker function for TARGET_REGISTER_MOVE_COST. */
6834
6835 #define HIGH_COST 40
6836 #define MEDIUM_COST 3
6837 #define LOW_COST 1
6838
6839 static int
6840 frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6841 reg_class_t from, reg_class_t to)
6842 {
6843 switch (from)
6844 {
6845 default:
6846 break;
6847
6848 case QUAD_REGS:
6849 case GPR_REGS:
6850 case GR8_REGS:
6851 case GR9_REGS:
6852 case GR89_REGS:
6853 case FDPIC_REGS:
6854 case FDPIC_FPTR_REGS:
6855 case FDPIC_CALL_REGS:
6856
6857 switch (to)
6858 {
6859 default:
6860 break;
6861
6862 case QUAD_REGS:
6863 case GPR_REGS:
6864 case GR8_REGS:
6865 case GR9_REGS:
6866 case GR89_REGS:
6867 case FDPIC_REGS:
6868 case FDPIC_FPTR_REGS:
6869 case FDPIC_CALL_REGS:
6870
6871 return LOW_COST;
6872
6873 case FPR_REGS:
6874 return LOW_COST;
6875
6876 case LCR_REG:
6877 case LR_REG:
6878 case SPR_REGS:
6879 return LOW_COST;
6880 }
6881
6882 case QUAD_FPR_REGS:
6883 switch (to)
6884 {
6885 default:
6886 break;
6887
6888 case QUAD_REGS:
6889 case GPR_REGS:
6890 case GR8_REGS:
6891 case GR9_REGS:
6892 case GR89_REGS:
6893 case FDPIC_REGS:
6894 case FDPIC_FPTR_REGS:
6895 case FDPIC_CALL_REGS:
6896
6897 case QUAD_ACC_REGS:
6898 case ACCG_REGS:
6899 return MEDIUM_COST;
6900
6901 case QUAD_FPR_REGS:
6902 return LOW_COST;
6903 }
6904
6905 case LCR_REG:
6906 case LR_REG:
6907 case SPR_REGS:
6908 switch (to)
6909 {
6910 default:
6911 break;
6912
6913 case QUAD_REGS:
6914 case GPR_REGS:
6915 case GR8_REGS:
6916 case GR9_REGS:
6917 case GR89_REGS:
6918 case FDPIC_REGS:
6919 case FDPIC_FPTR_REGS:
6920 case FDPIC_CALL_REGS:
6921
6922 return MEDIUM_COST;
6923 }
6924
6925 case QUAD_ACC_REGS:
6926 case ACCG_REGS:
6927 switch (to)
6928 {
6929 default:
6930 break;
6931
6932 case QUAD_FPR_REGS:
6933 return MEDIUM_COST;
6934
6935 }
6936 }
6937
6938 return HIGH_COST;
6939 }
6940
6941 /* Worker function for TARGET_MEMORY_MOVE_COST. */
6942
6943 static int
6944 frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6945 reg_class_t rclass ATTRIBUTE_UNUSED,
6946 bool in ATTRIBUTE_UNUSED)
6947 {
6948 return 4;
6949 }
6950
6951 \f
6952 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6953 use ".picptr" to generate safe relocations for PIC code. We also
6954 need a fixup entry for aligned (non-debugging) code. */
6955
6956 static bool
6957 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6958 {
6959 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6960 {
6961 if (GET_CODE (value) == CONST
6962 || GET_CODE (value) == SYMBOL_REF
6963 || GET_CODE (value) == LABEL_REF)
6964 {
6965 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6966 && SYMBOL_REF_FUNCTION_P (value))
6967 {
6968 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6969 output_addr_const (asm_out_file, value);
6970 fputs (")\n", asm_out_file);
6971 return true;
6972 }
6973 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6974 && frv_function_symbol_referenced_p (value))
6975 return false;
6976 if (aligned_p && !TARGET_FDPIC)
6977 {
6978 static int label_num = 0;
6979 char buf[256];
6980 const char *p;
6981
6982 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
6983 p = (* targetm.strip_name_encoding) (buf);
6984
6985 fprintf (asm_out_file, "%s:\n", p);
6986 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
6987 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
6988 fprintf (asm_out_file, "\t.previous\n");
6989 }
6990 assemble_integer_with_op ("\t.picptr\t", value);
6991 return true;
6992 }
6993 if (!aligned_p)
6994 {
6995 /* We've set the unaligned SI op to NULL, so we always have to
6996 handle the unaligned case here. */
6997 assemble_integer_with_op ("\t.4byte\t", value);
6998 return true;
6999 }
7000 }
7001 return default_assemble_integer (value, size, aligned_p);
7002 }
7003
7004 /* Function to set up the backend function structure. */
7005
7006 static struct machine_function *
7007 frv_init_machine_status (void)
7008 {
7009 return ggc_cleared_alloc<machine_function> ();
7010 }
7011 \f
7012 /* Implement TARGET_SCHED_ISSUE_RATE. */
7013
7014 int
7015 frv_issue_rate (void)
7016 {
7017 if (!TARGET_PACK)
7018 return 1;
7019
7020 switch (frv_cpu_type)
7021 {
7022 default:
7023 case FRV_CPU_FR300:
7024 case FRV_CPU_SIMPLE:
7025 return 1;
7026
7027 case FRV_CPU_FR400:
7028 case FRV_CPU_FR405:
7029 case FRV_CPU_FR450:
7030 return 2;
7031
7032 case FRV_CPU_GENERIC:
7033 case FRV_CPU_FR500:
7034 case FRV_CPU_TOMCAT:
7035 return 4;
7036
7037 case FRV_CPU_FR550:
7038 return 8;
7039 }
7040 }
7041 \f
7042 /* A for_each_rtx callback. If X refers to an accumulator, return
7043 ACC_GROUP_ODD if bit 2 of the register number is set and
7044 ACC_GROUP_EVEN if it is clear. Return 0 (ACC_GROUP_NONE)
7045 otherwise. */
7046
7047 static int
7048 frv_acc_group_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
7049 {
7050 if (REG_P (*x))
7051 {
7052 if (ACC_P (REGNO (*x)))
7053 return (REGNO (*x) - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7054 if (ACCG_P (REGNO (*x)))
7055 return (REGNO (*x) - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7056 }
7057 return 0;
7058 }
7059
7060 /* Return the value of INSN's acc_group attribute. */
7061
7062 int
7063 frv_acc_group (rtx insn)
7064 {
7065 /* This distinction only applies to the FR550 packing constraints. */
7066 if (frv_cpu_type != FRV_CPU_FR550)
7067 return ACC_GROUP_NONE;
7068 return for_each_rtx (&PATTERN (insn), frv_acc_group_1, 0);
7069 }
7070
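/* Illustrative note (not part of the original source): the test above keys
   off bit 2 of the offset from ACC_FIRST (or ACCG_FIRST), so accumulators
   0-3 and 8-11 report ACC_GROUP_EVEN, accumulators 4-7 report
   ACC_GROUP_ODD, and any CPU other than the FR550 simply gets
   ACC_GROUP_NONE.  */
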
7071 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
7072 INSN will try to claim first. Since this value depends only on the
7073 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
7074
7075 static unsigned int
7076 frv_insn_unit (rtx_insn *insn)
7077 {
7078 enum attr_type type;
7079
7080 type = get_attr_type (insn);
7081 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
7082 {
7083 /* We haven't seen this type of instruction before. */
7084 state_t state;
7085 unsigned int unit;
7086
7087 /* Issue the instruction on its own to see which unit it prefers. */
7088 state = alloca (state_size ());
7089 state_reset (state);
7090 state_transition (state, insn);
7091
7092 /* Find out which unit was taken. */
7093 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
7094 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
7095 break;
7096
7097 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
7098
7099 frv_type_to_unit[type] = unit;
7100 }
7101 return frv_type_to_unit[type];
7102 }
7103
7104 /* Return true if INSN issues to a branch unit. */
7105
7106 static bool
7107 frv_issues_to_branch_unit_p (rtx_insn *insn)
7108 {
7109 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
7110 }
7111 \f
7112 /* The instructions in the packet, partitioned into groups. */
7113 struct frv_packet_group {
7114 /* How many instructions in the packet belong to this group. */
7115 unsigned int num_insns;
7116
7117 /* A list of the instructions that belong to this group, in the order
7118 they appear in the rtl stream. */
7119 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
7120
7121 /* The contents of INSNS after they have been sorted into the correct
7122 assembly-language order. Element X issues to unit X. The list may
7123 contain extra nops. */
7124 rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)];
7125
7126 /* The member of frv_nops[] to use in sorted[]. */
7127 rtx_insn *nop;
7128 };
7129
7130 /* The current state of the packing pass, implemented by frv_pack_insns. */
7131 static struct {
7132 /* The state of the pipeline DFA. */
7133 state_t dfa_state;
7134
7135 /* Which hardware registers are set within the current packet,
7136 and the conditions under which they are set. */
7137 regstate_t regstate[FIRST_PSEUDO_REGISTER];
7138
7139 /* The memory locations that have been modified so far in this
7140 packet. MEM is the memref and COND is the regstate_t condition
7141 under which it is set. */
7142 struct {
7143 rtx mem;
7144 regstate_t cond;
7145 } mems[2];
7146
7147 /* The number of valid entries in MEMS. The value is larger than
7148 ARRAY_SIZE (mems) if there were too many mems to record. */
7149 unsigned int num_mems;
7150
7151 /* The maximum number of instructions that can be packed together. */
7152 unsigned int issue_rate;
7153
7154 /* The instructions in the packet, partitioned into groups. */
7155 struct frv_packet_group groups[NUM_GROUPS];
7156
7157 /* The instructions that make up the current packet. */
7158 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
7159 unsigned int num_insns;
7160 } frv_packet;
7161
7162 /* Return the regstate_t flags for the given COND_EXEC condition.
7163 Abort if the condition isn't in the right form. */
7164
7165 static int
7166 frv_cond_flags (rtx cond)
7167 {
7168 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7169 && GET_CODE (XEXP (cond, 0)) == REG
7170 && CR_P (REGNO (XEXP (cond, 0)))
7171 && XEXP (cond, 1) == const0_rtx);
7172 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7173 | (GET_CODE (cond) == NE
7174 ? REGSTATE_IF_TRUE
7175 : REGSTATE_IF_FALSE));
7176 }
7177
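/* Illustrative note (not part of the original source): for a condition of
   the form (ne (reg:CC_CCR <cr>) (const_int 0)), where <cr> is the fourth
   CR register, frv_cond_flags returns offset 3 ORed with REGSTATE_IF_TRUE;
   an EQ comparison would contribute REGSTATE_IF_FALSE instead.  */
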
7178
7179 /* Return true if something accessed under condition COND2 can
7180 conflict with something written under condition COND1. */
7181
7182 static bool
7183 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7184 {
7185 /* If either reference was unconditional, we have a conflict. */
7186 if ((cond1 & REGSTATE_IF_EITHER) == 0
7187 || (cond2 & REGSTATE_IF_EITHER) == 0)
7188 return true;
7189
7190 /* The references might conflict if they were controlled by
7191 different CRs. */
7192 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7193 return true;
7194
7195 /* They definitely conflict if they are controlled by the
7196 same condition. */
7197 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7198 return true;
7199
7200 return false;
7201 }
7202
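/* Illustrative note (not part of the original source): the only
   combination treated as conflict-free above is two accesses guarded by
   the same CR with opposite senses (one under REGSTATE_IF_TRUE, the other
   under REGSTATE_IF_FALSE).  An unconditional access, a different CR, or
   a shared sense is always reported as a conflict.  */
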
7203
7204 /* A for_each_rtx callback. Return 1 if *X depends on an instruction in
7205 the current packet. DATA points to a regstate_t that describes the
7206 condition under which *X might be set or used. */
7207
7208 static int
7209 frv_registers_conflict_p_1 (rtx *x, void *data)
7210 {
7211 unsigned int regno, i;
7212 regstate_t cond;
7213
7214 cond = *(regstate_t *) data;
7215
7216 if (GET_CODE (*x) == REG)
7217 FOR_EACH_REGNO (regno, *x)
7218 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7219 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7220 return 1;
7221
7222 if (GET_CODE (*x) == MEM)
7223 {
7224 /* If we ran out of memory slots, assume a conflict. */
7225 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7226 return 1;
7227
7228 /* Check for output or true dependencies with earlier MEMs. */
7229 for (i = 0; i < frv_packet.num_mems; i++)
7230 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7231 {
7232 if (true_dependence (frv_packet.mems[i].mem, VOIDmode, *x))
7233 return 1;
7234
7235 if (output_dependence (frv_packet.mems[i].mem, *x))
7236 return 1;
7237 }
7238 }
7239
7240 /* The return values of calls aren't significant: they describe
7241 the effect of the call as a whole, not of the insn itself. */
7242 if (GET_CODE (*x) == SET && GET_CODE (SET_SRC (*x)) == CALL)
7243 {
7244 if (for_each_rtx (&SET_SRC (*x), frv_registers_conflict_p_1, data))
7245 return 1;
7246 return -1;
7247 }
7248
7249 /* Check subexpressions. */
7250 return 0;
7251 }
7252
7253
7254 /* Return true if something in X might depend on an instruction
7255 in the current packet. */
7256
7257 static bool
7258 frv_registers_conflict_p (rtx x)
7259 {
7260 regstate_t flags;
7261
7262 flags = 0;
7263 if (GET_CODE (x) == COND_EXEC)
7264 {
7265 if (for_each_rtx (&XEXP (x, 0), frv_registers_conflict_p_1, &flags))
7266 return true;
7267
7268 flags |= frv_cond_flags (XEXP (x, 0));
7269 x = XEXP (x, 1);
7270 }
7271 return for_each_rtx (&x, frv_registers_conflict_p_1, &flags);
7272 }
7273
7274
7275 /* A note_stores callback. DATA points to the regstate_t condition
7276 under which X is modified. Update FRV_PACKET accordingly. */
7277
7278 static void
7279 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7280 {
7281 unsigned int regno;
7282
7283 if (GET_CODE (x) == REG)
7284 FOR_EACH_REGNO (regno, x)
7285 frv_packet.regstate[regno] |= *(regstate_t *) data;
7286
7287 if (GET_CODE (x) == MEM)
7288 {
7289 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7290 {
7291 frv_packet.mems[frv_packet.num_mems].mem = x;
7292 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7293 }
7294 frv_packet.num_mems++;
7295 }
7296 }
7297
7298
7299 /* Update the register state information for an instruction whose
7300 body is X. */
7301
7302 static void
7303 frv_registers_update (rtx x)
7304 {
7305 regstate_t flags;
7306
7307 flags = REGSTATE_MODIFIED;
7308 if (GET_CODE (x) == COND_EXEC)
7309 {
7310 flags |= frv_cond_flags (XEXP (x, 0));
7311 x = XEXP (x, 1);
7312 }
7313 note_stores (x, frv_registers_update_1, &flags);
7314 }
7315
7316
7317 /* Initialize frv_packet for the start of a new packet. */
7318
7319 static void
7320 frv_start_packet (void)
7321 {
7322 enum frv_insn_group group;
7323
7324 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7325 frv_packet.num_mems = 0;
7326 frv_packet.num_insns = 0;
7327 for (group = GROUP_I; group < NUM_GROUPS;
7328 group = (enum frv_insn_group) (group + 1))
7329 frv_packet.groups[group].num_insns = 0;
7330 }
7331
7332
7333 /* Likewise for the start of a new basic block. */
7334
7335 static void
7336 frv_start_packet_block (void)
7337 {
7338 state_reset (frv_packet.dfa_state);
7339 frv_start_packet ();
7340 }
7341
7342
7343 /* Finish the current packet, if any, and start a new one. Call
7344 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7345
7346 static void
7347 frv_finish_packet (void (*handle_packet) (void))
7348 {
7349 if (frv_packet.num_insns > 0)
7350 {
7351 handle_packet ();
7352 state_transition (frv_packet.dfa_state, 0);
7353 frv_start_packet ();
7354 }
7355 }
7356
7357
7358 /* Return true if INSN can be added to the current packet. Update
7359 the DFA state on success. */
7360
7361 static bool
7362 frv_pack_insn_p (rtx_insn *insn)
7363 {
7364 /* See if the packet is already as long as it can be. */
7365 if (frv_packet.num_insns == frv_packet.issue_rate)
7366 return false;
7367
7368 /* If the scheduler thought that an instruction should start a packet,
7369 it's usually a good idea to believe it. It knows much more about
7370 the latencies than we do.
7371
7372 There are some exceptions though:
7373
7374 - Conditional instructions are scheduled on the assumption that
7375 they will be executed. This is usually a good thing, since it
7376 tends to avoid unnecessary stalls in the conditional code.
7377 But we want to pack conditional instructions as tightly as
7378 possible, in order to optimize the case where they aren't
7379 executed.
7380
7381 - The scheduler will always put branches on their own, even
7382 if there's no real dependency.
7383
7384 - There's no point putting a call in its own packet unless
7385 we have to. */
7386 if (frv_packet.num_insns > 0
7387 && NONJUMP_INSN_P (insn)
7388 && GET_MODE (insn) == TImode
7389 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7390 return false;
7391
7392 /* Check for register conflicts. Don't do this for setlo since any
7393 conflict will be with the partnering sethi, with which it can
7394 be packed. */
7395 if (get_attr_type (insn) != TYPE_SETLO)
7396 if (frv_registers_conflict_p (PATTERN (insn)))
7397 return false;
7398
7399 return state_transition (frv_packet.dfa_state, insn) < 0;
7400 }
7401
7402
7403 /* Add instruction INSN to the current packet. */
7404
7405 static void
7406 frv_add_insn_to_packet (rtx_insn *insn)
7407 {
7408 struct frv_packet_group *packet_group;
7409
7410 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7411 packet_group->insns[packet_group->num_insns++] = insn;
7412 frv_packet.insns[frv_packet.num_insns++] = insn;
7413
7414 frv_registers_update (PATTERN (insn));
7415 }
7416
7417
7418 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7419 packet ends in a branch or call, insert the nop before it, otherwise
7420 add to the end. */
7421
7422 static void
7423 frv_insert_nop_in_packet (rtx_insn *insn)
7424 {
7425 struct frv_packet_group *packet_group;
7426 rtx_insn *last;
7427
7428 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7429 last = frv_packet.insns[frv_packet.num_insns - 1];
7430 if (! NONJUMP_INSN_P (last))
7431 {
7432 insn = emit_insn_before (PATTERN (insn), last);
7433 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7434 frv_packet.insns[frv_packet.num_insns++] = last;
7435 }
7436 else
7437 {
7438 insn = emit_insn_after (PATTERN (insn), last);
7439 frv_packet.insns[frv_packet.num_insns++] = insn;
7440 }
7441 packet_group->insns[packet_group->num_insns++] = insn;
7442 }
7443
7444
7445 /* If packing is enabled, divide the instructions into packets and
7446 return true. Call HANDLE_PACKET for each complete packet. */
7447
7448 static bool
7449 frv_for_each_packet (void (*handle_packet) (void))
7450 {
7451 rtx_insn *insn, *next_insn;
7452
7453 frv_packet.issue_rate = frv_issue_rate ();
7454
7455 /* Early exit if we don't want to pack insns. */
7456 if (!optimize
7457 || !flag_schedule_insns_after_reload
7458 || !TARGET_VLIW_BRANCH
7459 || frv_packet.issue_rate == 1)
7460 return false;
7461
7462 /* Set up the initial packing state. */
7463 dfa_start ();
7464 frv_packet.dfa_state = alloca (state_size ());
7465
7466 frv_start_packet_block ();
7467 for (insn = get_insns (); insn != 0; insn = next_insn)
7468 {
7469 enum rtx_code code;
7470 bool eh_insn_p;
7471
7472 code = GET_CODE (insn);
7473 next_insn = NEXT_INSN (insn);
7474
7475 if (code == CODE_LABEL)
7476 {
7477 frv_finish_packet (handle_packet);
7478 frv_start_packet_block ();
7479 }
7480
7481 if (INSN_P (insn))
7482 switch (GET_CODE (PATTERN (insn)))
7483 {
7484 case USE:
7485 case CLOBBER:
7486 break;
7487
7488 default:
7489 /* Calls mustn't be packed on a TOMCAT. */
7490 if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
7491 frv_finish_packet (handle_packet);
7492
7493 /* Since the last instruction in a packet determines the EH
7494 region, any exception-throwing instruction must come at
7495 the end of the reordered packet. Insns that issue to a
7496 branch unit are bound to come last; for others it's
7497 too hard to predict. */
7498 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7499 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7500 frv_finish_packet (handle_packet);
7501
7502 /* Finish the current packet if we can't add INSN to it.
7503 Simulate cycles until INSN is ready to issue. */
7504 if (!frv_pack_insn_p (insn))
7505 {
7506 frv_finish_packet (handle_packet);
7507 while (!frv_pack_insn_p (insn))
7508 state_transition (frv_packet.dfa_state, 0);
7509 }
7510
7511 /* Add the instruction to the packet. */
7512 frv_add_insn_to_packet (insn);
7513
7514 /* Calls and jumps end a packet, as do insns that throw
7515 an exception. */
7516 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7517 frv_finish_packet (handle_packet);
7518 break;
7519 }
7520 }
7521 frv_finish_packet (handle_packet);
7522 dfa_finish ();
7523 return true;
7524 }
7525 \f
7526 /* Subroutine of frv_sort_insn_group. We are trying to sort
7527 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7528 language order. We have already picked a new position for
7529 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7530 These instructions will occupy elements [0, LOWER_SLOT) and
7531 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7532 the DFA state after issuing these instructions.
7533
7534 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7535 of the unused instructions. Return true if one such permutation gives
7536 a valid ordering, leaving the successful permutation in sorted[].
7537 Do not modify sorted[] until a valid permutation is found. */
7538
7539 static bool
7540 frv_sort_insn_group_1 (enum frv_insn_group group,
7541 unsigned int lower_slot, unsigned int upper_slot,
7542 unsigned int issued, unsigned int num_insns,
7543 state_t state)
7544 {
7545 struct frv_packet_group *packet_group;
7546 unsigned int i;
7547 state_t test_state;
7548 size_t dfa_size;
7549 rtx_insn *insn;
7550
7551 /* Early success if we've filled all the slots. */
7552 if (lower_slot == upper_slot)
7553 return true;
7554
7555 packet_group = &frv_packet.groups[group];
7556 dfa_size = state_size ();
7557 test_state = alloca (dfa_size);
7558
7559 /* Try issuing each unused instruction. */
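   /* (I is unsigned, so the "i + 1 != 0" test below is what lets the loop
      count down through zero: it stops once I has wrapped around.)  */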
7560 for (i = num_insns - 1; i + 1 != 0; i--)
7561 if (~issued & (1 << i))
7562 {
7563 insn = packet_group->sorted[i];
7564 memcpy (test_state, state, dfa_size);
7565 if (state_transition (test_state, insn) < 0
7566 && cpu_unit_reservation_p (test_state,
7567 NTH_UNIT (group, upper_slot - 1))
7568 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7569 issued | (1 << i), num_insns,
7570 test_state))
7571 {
7572 packet_group->sorted[upper_slot - 1] = insn;
7573 return true;
7574 }
7575 }
7576
7577 return false;
7578 }
7579
7580 /* Compare two instructions by their frv_insn_unit. */
7581
7582 static int
7583 frv_compare_insns (const void *first, const void *second)
7584 {
7585 rtx_insn * const *insn1 = (rtx_insn * const *) first;
7586 rtx_insn * const *insn2 = (rtx_insn * const *) second;
7587 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7588 }
7589
7590 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7591 and sort it into assembly language order. See frv.md for a description of
7592 the algorithm. */
7593
7594 static void
7595 frv_sort_insn_group (enum frv_insn_group group)
7596 {
7597 struct frv_packet_group *packet_group;
7598 unsigned int first, i, nop, max_unit, num_slots;
7599 state_t state, test_state;
7600 size_t dfa_size;
7601
7602 packet_group = &frv_packet.groups[group];
7603
7604 /* Assume no nop is needed. */
7605 packet_group->nop = 0;
7606
7607 if (packet_group->num_insns == 0)
7608 return;
7609
7610 /* Copy insns[] to sorted[]. */
7611 memcpy (packet_group->sorted, packet_group->insns,
7612 sizeof (rtx) * packet_group->num_insns);
7613
7614 /* Sort sorted[] by the unit that each insn tries to take first. */
7615 if (packet_group->num_insns > 1)
7616 qsort (packet_group->sorted, packet_group->num_insns,
7617 sizeof (rtx), frv_compare_insns);
7618
7619 /* That's always enough for branch and control insns. */
7620 if (group == GROUP_B || group == GROUP_C)
7621 return;
7622
7623 dfa_size = state_size ();
7624 state = alloca (dfa_size);
7625 test_state = alloca (dfa_size);
7626
7627 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7628 consecutively and such that the DFA takes unit X when sorted[X]
7629 is added. Set STATE to the new DFA state. */
7630 state_reset (test_state);
7631 for (first = 0; first < packet_group->num_insns; first++)
7632 {
7633 memcpy (state, test_state, dfa_size);
7634 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7635 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7636 break;
7637 }
7638
7639 /* If all the instructions issued in ascending order, we're done. */
7640 if (first == packet_group->num_insns)
7641 return;
7642
7643 /* Add nops to the end of sorted[] and try each permutation until
7644 we find one that works. */
7645 for (nop = 0; nop < frv_num_nops; nop++)
7646 {
7647 max_unit = frv_insn_unit (frv_nops[nop]);
7648 if (frv_unit_groups[max_unit] == group)
7649 {
7650 packet_group->nop = frv_nops[nop];
7651 num_slots = UNIT_NUMBER (max_unit) + 1;
7652 for (i = packet_group->num_insns; i < num_slots; i++)
7653 packet_group->sorted[i] = frv_nops[nop];
7654 if (frv_sort_insn_group_1 (group, first, num_slots,
7655 (1 << first) - 1, num_slots, state))
7656 return;
7657 }
7658 }
7659 gcc_unreachable ();
7660 }
7661 \f
7662 /* Sort the current packet into assembly-language order. Set packing
7663 flags as appropriate. */
7664
7665 static void
7666 frv_reorder_packet (void)
7667 {
7668 unsigned int cursor[NUM_GROUPS];
7669 rtx insns[ARRAY_SIZE (frv_unit_groups)];
7670 unsigned int unit, to, from;
7671 enum frv_insn_group group;
7672 struct frv_packet_group *packet_group;
7673
7674 /* First sort each group individually. */
7675 for (group = GROUP_I; group < NUM_GROUPS;
7676 group = (enum frv_insn_group) (group + 1))
7677 {
7678 cursor[group] = 0;
7679 frv_sort_insn_group (group);
7680 }
7681
7682 /* Go through the unit template and try to add an instruction from
7683 that unit's group. */
7684 to = 0;
7685 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7686 {
7687 group = frv_unit_groups[unit];
7688 packet_group = &frv_packet.groups[group];
7689 if (cursor[group] < packet_group->num_insns)
7690 {
7691 /* frv_reorg should have added nops for us. */
7692 gcc_assert (packet_group->sorted[cursor[group]]
7693 != packet_group->nop);
7694 insns[to++] = packet_group->sorted[cursor[group]++];
7695 }
7696 }
7697
7698 gcc_assert (to == frv_packet.num_insns);
7699
7700 /* Clear the last instruction's packing flag, thus marking the end of
7701 a packet. Reorder the other instructions relative to it. */
7702 CLEAR_PACKING_FLAG (insns[to - 1]);
7703 for (from = 0; from < to - 1; from++)
7704 {
7705 remove_insn (insns[from]);
7706 add_insn_before (insns[from], insns[to - 1], NULL);
7707 SET_PACKING_FLAG (insns[from]);
7708 }
7709 }
7710
7711
7712 /* Divide instructions into packets. Reorder the contents of each
7713 packet so that they are in the correct assembly-language order.
7714
7715 Since this pass can change the raw meaning of the rtl stream, it must
7716 only be called at the last minute, just before the instructions are
7717 written out. */
7718
7719 static void
7720 frv_pack_insns (void)
7721 {
7722 if (frv_for_each_packet (frv_reorder_packet))
7723 frv_insn_packing_flag = 0;
7724 else
7725 frv_insn_packing_flag = -1;
7726 }
7727 \f
7728 /* See whether we need to add nops to group GROUP in order to
7729 make a valid packet. */
7730
7731 static void
7732 frv_fill_unused_units (enum frv_insn_group group)
7733 {
7734 unsigned int non_nops, nops, i;
7735 struct frv_packet_group *packet_group;
7736
7737 packet_group = &frv_packet.groups[group];
7738
7739 /* Sort the instructions into assembly-language order.
7740 Use nops to fill slots that are otherwise unused. */
7741 frv_sort_insn_group (group);
7742
7743 /* See how many nops are needed before the final useful instruction. */
7744 i = nops = 0;
7745 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7746 while (packet_group->sorted[i++] == packet_group->nop)
7747 nops++;
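
   /* For instance (an illustrative trace, not from the original sources):
      if frv_sort_insn_group placed a nop in the first slot, so that
      sorted[] is { nop, insn1, insn2 } with num_insns == 2, the loops
      above leave NOPS == 1 and a single nop is inserted below.  */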
7748
7749 /* Insert that many nops into the instruction stream. */
7750 while (nops-- > 0)
7751 frv_insert_nop_in_packet (packet_group->nop);
7752 }
7753
7754 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7755
7756 static bool
7757 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7758 {
7759 if (io1->const_address != 0 && io2->const_address != 0)
7760 return io1->const_address == io2->const_address;
7761
7762 if (io1->var_address != 0 && io2->var_address != 0)
7763 return rtx_equal_p (io1->var_address, io2->var_address);
7764
7765 return false;
7766 }
7767
7768 /* Return true if operations IO1 and IO2 are guaranteed to complete
7769 in order. */
7770
7771 static bool
7772 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7773 {
7774 /* The order of writes is always preserved. */
7775 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7776 return true;
7777
7778 /* The order of reads isn't preserved. */
7779 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7780 return false;
7781
7782 /* One operation is a write and the other is (or could be) a read.
7783 The order is only guaranteed if the accesses are to the same
7784 doubleword. */
7785 return frv_same_doubleword_p (io1, io2);
7786 }
7787
7788 /* Generalize I/O operation X so that it covers both X and Y. */
7789
7790 static void
7791 frv_io_union (struct frv_io *x, const struct frv_io *y)
7792 {
7793 if (x->type != y->type)
7794 x->type = FRV_IO_UNKNOWN;
7795 if (!frv_same_doubleword_p (x, y))
7796 {
7797 x->const_address = 0;
7798 x->var_address = 0;
7799 }
7800 }
7801
7802 /* Fill IO with information about the load or store associated with
7803 membar instruction INSN. */
7804
7805 static void
7806 frv_extract_membar (struct frv_io *io, rtx_insn *insn)
7807 {
7808 extract_insn (insn);
7809 io->type = (enum frv_io_type) INTVAL (recog_data.operand[2]);
7810 io->const_address = INTVAL (recog_data.operand[1]);
7811 io->var_address = XEXP (recog_data.operand[0], 0);
7812 }
7813
7814 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7815 if X is a register and *DATA depends on X. */
7816
7817 static void
7818 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7819 {
7820 rtx *other = (rtx *) data;
7821
7822 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7823 *other = 0;
7824 }
7825
7826 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7827 Remove every modified register from the set. */
7828
7829 static void
7830 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7831 {
7832 HARD_REG_SET *set = (HARD_REG_SET *) data;
7833 unsigned int regno;
7834
7835 if (REG_P (x))
7836 FOR_EACH_REGNO (regno, x)
7837 CLEAR_HARD_REG_BIT (*set, regno);
7838 }
7839
7840 /* A for_each_rtx callback for which DATA points to a HARD_REG_SET.
7841 Add every register in *X to the set. */
7842
7843 static int
7844 frv_io_handle_use_1 (rtx *x, void *data)
7845 {
7846 HARD_REG_SET *set = (HARD_REG_SET *) data;
7847 unsigned int regno;
7848
7849 if (REG_P (*x))
7850 FOR_EACH_REGNO (regno, *x)
7851 SET_HARD_REG_BIT (*set, regno);
7852
7853 return 0;
7854 }
7855
7856 /* A note_uses callback that applies frv_io_handle_use_1 to an
7857 entire rhs value. */
7858
7859 static void
7860 frv_io_handle_use (rtx *x, void *data)
7861 {
7862 for_each_rtx (x, frv_io_handle_use_1, data);
7863 }
7864
7865 /* Go through block BB looking for membars to remove. There are two
7866 cases where intra-block analysis is enough:
7867
7868 - a membar is redundant if it occurs between two consecutive I/O
7869 operations and if those operations are guaranteed to complete
7870 in order.
7871
7872 - a membar for a __builtin_read is redundant if the result is
7873 used before the next I/O operation is issued.
7874
7875 If the last membar in the block could not be removed, and there
7876 are guaranteed to be no I/O operations between that membar and
7877 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7878 store null.
7879
7880 Describe the block's first I/O operation in *NEXT_IO. Describe
7881 an unknown operation if the block doesn't do any I/O. */
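
/* For example (an illustrative sketch, not taken from these sources): two
   consecutive calls

       __builtin_write32 (p, x);
       __builtin_write32 (q, y);

   each expand to a store followed by a membar.  The membar belonging to
   the first write separates two writes, whose order is always preserved
   (see frv_io_fixed_order_p), so the local pass below can delete it.  */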
7882
7883 static void
7884 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7885 rtx_insn **last_membar)
7886 {
7887 HARD_REG_SET used_regs;
7888 rtx next_membar, set;
7889 rtx_insn *insn;
7890 bool next_is_end_p;
7891
7892 /* NEXT_IO is the next I/O operation to be performed after the current
7893 instruction. It starts off as being an unknown operation. */
7894 memset (next_io, 0, sizeof (*next_io));
7895
7896 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7897 next_is_end_p = true;
7898
7899 /* If the current instruction is a __builtin_read or __builtin_write,
7900 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7901 is null if the membar has already been deleted.
7902
7903 Note that the initialization here should only be needed to
7904 suppress warnings. */
7905 next_membar = 0;
7906
7907 /* USED_REGS is the set of registers that are used before the
7908 next I/O instruction. */
7909 CLEAR_HARD_REG_SET (used_regs);
7910
7911 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7912 if (CALL_P (insn))
7913 {
7914 /* We can't predict what a call will do to volatile memory. */
7915 memset (next_io, 0, sizeof (struct frv_io));
7916 next_is_end_p = false;
7917 CLEAR_HARD_REG_SET (used_regs);
7918 }
7919 else if (INSN_P (insn))
7920 switch (recog_memoized (insn))
7921 {
7922 case CODE_FOR_optional_membar_qi:
7923 case CODE_FOR_optional_membar_hi:
7924 case CODE_FOR_optional_membar_si:
7925 case CODE_FOR_optional_membar_di:
7926 next_membar = insn;
7927 if (next_is_end_p)
7928 {
7929 /* Local information isn't enough to decide whether this
7930 membar is needed. Stash it away for later. */
7931 *last_membar = insn;
7932 frv_extract_membar (next_io, insn);
7933 next_is_end_p = false;
7934 }
7935 else
7936 {
7937 /* Check whether the I/O operation before INSN could be
7938 reordered with one described by NEXT_IO. If it can't,
7939 INSN will not be needed. */
7940 struct frv_io prev_io;
7941
7942 frv_extract_membar (&prev_io, insn);
7943 if (frv_io_fixed_order_p (&prev_io, next_io))
7944 {
7945 if (dump_file)
7946 fprintf (dump_file,
7947 ";; [Local] Removing membar %d since order"
7948 " of accesses is guaranteed\n",
7949 INSN_UID (next_membar));
7950
7951 insn = NEXT_INSN (insn);
7952 delete_insn (next_membar);
7953 next_membar = 0;
7954 }
7955 *next_io = prev_io;
7956 }
7957 break;
7958
7959 default:
7960 /* Invalidate NEXT_IO's address if it depends on something that
7961 is clobbered by INSN. */
7962 if (next_io->var_address)
7963 note_stores (PATTERN (insn), frv_io_check_address,
7964 &next_io->var_address);
7965
7966 /* If the next membar is associated with a __builtin_read,
7967 see if INSN reads from that address. If it does, and if
7968 the destination register is used before the next I/O access,
7969 there is no need for the membar. */
7970 set = PATTERN (insn);
7971 if (next_io->type == FRV_IO_READ
7972 && next_io->var_address != 0
7973 && next_membar != 0
7974 && GET_CODE (set) == SET
7975 && GET_CODE (SET_DEST (set)) == REG
7976 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7977 {
7978 rtx src;
7979
7980 src = SET_SRC (set);
7981 if (GET_CODE (src) == ZERO_EXTEND)
7982 src = XEXP (src, 0);
7983
7984 if (GET_CODE (src) == MEM
7985 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
7986 {
7987 if (dump_file)
7988 fprintf (dump_file,
7989 ";; [Local] Removing membar %d since the target"
7990 " of %d is used before the I/O operation\n",
7991 INSN_UID (next_membar), INSN_UID (insn));
7992
7993 if (next_membar == *last_membar)
7994 *last_membar = 0;
7995
7996 delete_insn (next_membar);
7997 next_membar = 0;
7998 }
7999 }
8000
8001 /* If INSN has volatile references, forget about any registers
8002 that are used after it. Otherwise forget about uses that
8003 are (or might be) defined by INSN. */
8004 if (volatile_refs_p (PATTERN (insn)))
8005 CLEAR_HARD_REG_SET (used_regs);
8006 else
8007 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
8008
8009 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
8010 break;
8011 }
8012 }
8013
8014 /* See if MEMBAR, the last membar instruction in BB, can be removed.
8015 FIRST_IO[X] describes the first operation performed by basic block X. */
8016
8017 static void
8018 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
8019 rtx_insn *membar)
8020 {
8021 struct frv_io this_io, next_io;
8022 edge succ;
8023 edge_iterator ei;
8024
8025 /* We need to keep the membar if there is an edge to the exit block. */
8026 FOR_EACH_EDGE (succ, ei, bb->succs)
8027 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
8028 if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8029 return;
8030
8031 /* Work out the union of all successor blocks. */
8032 ei = ei_start (bb->succs);
8033 ei_cond (ei, &succ);
8034 /* next_io = first_io[bb->succ->dest->index]; */
8035 next_io = first_io[succ->dest->index];
8036 ei = ei_start (bb->succs);
8037 if (ei_cond (ei, &succ))
8038 {
8039 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
8040 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
8041 frv_io_union (&next_io, &first_io[succ->dest->index]);
8042 }
8043 else
8044 gcc_unreachable ();
8045
8046 frv_extract_membar (&this_io, membar);
8047 if (frv_io_fixed_order_p (&this_io, &next_io))
8048 {
8049 if (dump_file)
8050 fprintf (dump_file,
8051 ";; [Global] Removing membar %d since order of accesses"
8052 " is guaranteed\n", INSN_UID (membar));
8053
8054 delete_insn (membar);
8055 }
8056 }
8057
8058 /* Remove redundant membars from the current function. */
8059
8060 static void
8061 frv_optimize_membar (void)
8062 {
8063 basic_block bb;
8064 struct frv_io *first_io;
8065 rtx_insn **last_membar;
8066
8067 compute_bb_for_insn ();
8068 first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
8069 last_membar = XCNEWVEC (rtx_insn *, last_basic_block_for_fn (cfun));
8070
8071 FOR_EACH_BB_FN (bb, cfun)
8072 frv_optimize_membar_local (bb, &first_io[bb->index],
8073 &last_membar[bb->index]);
8074
8075 FOR_EACH_BB_FN (bb, cfun)
8076 if (last_membar[bb->index] != 0)
8077 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
8078
8079 free (first_io);
8080 free (last_membar);
8081 }
8082 \f
8083 /* Used by frv_reorg to keep track of the current packet's address. */
8084 static unsigned int frv_packet_address;
8085
8086 /* If the current packet falls through to a label, try to pad the packet
8087 with nops in order to fit the label's alignment requirements. */
8088
8089 static void
8090 frv_align_label (void)
8091 {
8092 unsigned int alignment, target, nop;
8093 rtx_insn *x, *last, *barrier, *label;
8094
8095 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
8096 maximum alignment of that packet, LABEL to the last label between
8097 the packets, and BARRIER to the last barrier. */
8098 last = frv_packet.insns[frv_packet.num_insns - 1];
8099 label = barrier = 0;
8100 alignment = 4;
8101 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
8102 {
8103 if (LABEL_P (x))
8104 {
8105 unsigned int subalign = 1 << label_to_alignment (x);
8106 alignment = MAX (alignment, subalign);
8107 label = x;
8108 }
8109 if (BARRIER_P (x))
8110 barrier = x;
8111 }
8112
8113 /* If -malign-labels, and the packet falls through to an unaligned
8114 label, try introducing a nop to align that label to 8 bytes. */
8115 if (TARGET_ALIGN_LABELS
8116 && label != 0
8117 && barrier == 0
8118 && frv_packet.num_insns < frv_packet.issue_rate)
8119 alignment = MAX (alignment, 8);
8120
8121 /* Advance the address to the end of the current packet. */
8122 frv_packet_address += frv_packet.num_insns * 4;
8123
8124 /* Work out the target address, after alignment. */
8125 target = (frv_packet_address + alignment - 1) & -alignment;
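   /* For example (illustrative): with frv_packet_address == 20 and
      ALIGNMENT == 8, TARGET becomes (20 + 7) & -8 == 24.  */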
8126
8127 /* If the packet falls through to the label, try to find an efficient
8128 padding sequence. */
8129 if (barrier == 0)
8130 {
8131 /* First try adding nops to the current packet. */
8132 for (nop = 0; nop < frv_num_nops; nop++)
8133 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
8134 {
8135 frv_insert_nop_in_packet (frv_nops[nop]);
8136 frv_packet_address += 4;
8137 }
8138
8139 /* If we still haven't reached the target, add some new packets that
8140 contain only nops. If there are two types of nop, insert an
8141 alternating sequence of frv_nops[0] and frv_nops[1], which will
8142 lead to packets like:
8143
8144 nop.p
8145 mnop.p/fnop.p
8146 nop.p
8147 mnop/fnop
8148
8149 etc. Just emit frv_nops[0] if that's the only nop we have. */
8150 last = frv_packet.insns[frv_packet.num_insns - 1];
8151 nop = 0;
8152 while (frv_packet_address < target)
8153 {
8154 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
8155 frv_packet_address += 4;
8156 if (frv_num_nops > 1)
8157 nop ^= 1;
8158 }
8159 }
8160
8161 frv_packet_address = target;
8162 }
8163
8164 /* Subroutine of frv_reorg, called after each packet has been constructed
8165 in frv_packet. */
8166
8167 static void
8168 frv_reorg_packet (void)
8169 {
8170 frv_fill_unused_units (GROUP_I);
8171 frv_fill_unused_units (GROUP_FM);
8172 frv_align_label ();
8173 }
8174
8175 /* Add an instruction with pattern NOP to frv_nops[]. */
8176
8177 static void
8178 frv_register_nop (rtx nop)
8179 {
8180 rtx_insn *nop_insn = make_insn_raw (nop);
8181 SET_NEXT_INSN (nop_insn) = 0;
8182 SET_PREV_INSN (nop_insn) = 0;
8183 frv_nops[frv_num_nops++] = nop_insn;
8184 }
8185
8186 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
8187 into packets and check whether we need to insert nops in order to
8188 fulfill the processor's issue requirements. Also, if the user has
8189 requested a certain alignment for a label, try to meet that alignment
8190 by inserting nops in the previous packet. */
8191
8192 static void
8193 frv_reorg (void)
8194 {
8195 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8196 frv_optimize_membar ();
8197
8198 frv_num_nops = 0;
8199 frv_register_nop (gen_nop ());
8200 if (TARGET_MEDIA)
8201 frv_register_nop (gen_mnop ());
8202 if (TARGET_HARD_FLOAT)
8203 frv_register_nop (gen_fnop ());
8204
8205 /* Estimate the length of each branch. Although this may change after
8206 we've inserted nops, it will only do so in big functions. */
8207 shorten_branches (get_insns ());
8208
8209 frv_packet_address = 0;
8210 frv_for_each_packet (frv_reorg_packet);
8211 }
8212 \f
8213 #define def_builtin(name, type, code) \
8214 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
8215
8216 struct builtin_description
8217 {
8218 enum insn_code icode;
8219 const char *name;
8220 enum frv_builtins code;
8221 enum rtx_code comparison;
8222 unsigned int flag;
8223 };
8224
8225 /* Media intrinsics that take a single, constant argument. */
8226
8227 static struct builtin_description bdesc_set[] =
8228 {
8229 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, UNKNOWN, 0 }
8230 };
8231
8232 /* Media intrinsics that take just one argument. */
8233
8234 static struct builtin_description bdesc_1arg[] =
8235 {
8236 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, UNKNOWN, 0 },
8237 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, UNKNOWN, 0 },
8238 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, UNKNOWN, 0 },
8239 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, UNKNOWN, 0},
8240 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, UNKNOWN, 0 },
8241 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, UNKNOWN, 0 }
8242 };
8243
8244 /* Media intrinsics that take two arguments. */
8245
8246 static struct builtin_description bdesc_2arg[] =
8247 {
8248 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, UNKNOWN, 0},
8249 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, UNKNOWN, 0},
8250 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, UNKNOWN, 0},
8251 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, UNKNOWN, 0},
8252 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, UNKNOWN, 0},
8253 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, UNKNOWN, 0},
8254 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, UNKNOWN, 0},
8255 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, UNKNOWN, 0},
8256 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, UNKNOWN, 0},
8257 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, UNKNOWN, 0},
8258 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, UNKNOWN, 0},
8259 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, UNKNOWN, 0},
8260 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, UNKNOWN, 0},
8261 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, UNKNOWN, 0},
8262 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, UNKNOWN, 0},
8263 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, UNKNOWN, 0},
8264 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, UNKNOWN, 0},
8265 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, UNKNOWN, 0},
8266 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, UNKNOWN, 0},
8267 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, UNKNOWN, 0},
8268 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, UNKNOWN, 0},
8269 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, UNKNOWN, 0},
8270 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, UNKNOWN, 0},
8271 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, UNKNOWN, 0},
8272 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, UNKNOWN, 0},
8273 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, UNKNOWN, 0},
8274 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, UNKNOWN, 0}
8275 };
8276
8277 /* Integer intrinsics that take two arguments and have no return value. */
8278
8279 static struct builtin_description bdesc_int_void2arg[] =
8280 {
8281 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, UNKNOWN, 0},
8282 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, UNKNOWN, 0},
8283 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, UNKNOWN, 0}
8284 };
8285
8286 static struct builtin_description bdesc_prefetches[] =
8287 {
8288 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, UNKNOWN,
8289 0},
8290 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, UNKNOWN, 0}
8291 };
8292
8293 /* Media intrinsics that take two arguments, the first being an ACC number. */
8294
8295 static struct builtin_description bdesc_cut[] =
8296 {
8297 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, UNKNOWN, 0},
8298 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, UNKNOWN, 0},
8299 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, UNKNOWN, 0}
8300 };
8301
8302 /* Two-argument media intrinsics with an immediate second argument. */
8303
8304 static struct builtin_description bdesc_2argimm[] =
8305 {
8306 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, UNKNOWN, 0},
8307 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, UNKNOWN, 0},
8308 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, UNKNOWN, 0},
8309 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, UNKNOWN, 0},
8310 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, UNKNOWN, 0},
8311 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, UNKNOWN, 0},
8312 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, UNKNOWN, 0},
8313 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, UNKNOWN, 0},
8314 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, UNKNOWN, 0},
8315 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, UNKNOWN, 0},
8316 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, UNKNOWN, 0},
8317 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, UNKNOWN, 0},
8318 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, UNKNOWN, 0},
8319 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, UNKNOWN, 0},
8320 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, UNKNOWN, 0},
8321 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, UNKNOWN, 0},
8322 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, UNKNOWN, 0}
8323 };
8324
8325 /* Media intrinsics that take two arguments and return void, the first argument
8326 being a pointer to 4 words in memory. */
8327
8328 static struct builtin_description bdesc_void2arg[] =
8329 {
8330 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, UNKNOWN, 0},
8331 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, UNKNOWN, 0},
8332 };
8333
8334 /* Media intrinsics that take three arguments, the first being a const_int that
8335 denotes an accumulator, and that return void. */
8336
8337 static struct builtin_description bdesc_void3arg[] =
8338 {
8339 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, UNKNOWN, 0},
8340 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, UNKNOWN, 0},
8341 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, UNKNOWN, 0},
8342 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, UNKNOWN, 0},
8343 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, UNKNOWN, 0},
8344 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, UNKNOWN, 0},
8345 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, UNKNOWN, 0},
8346 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, UNKNOWN, 0},
8347 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, UNKNOWN, 0},
8348 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, UNKNOWN, 0},
8349 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, UNKNOWN, 0},
8350 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, UNKNOWN, 0},
8351 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, UNKNOWN, 0},
8352 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, UNKNOWN, 0},
8353 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, UNKNOWN, 0},
8354 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, UNKNOWN, 0},
8355 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, UNKNOWN, 0},
8356 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, UNKNOWN, 0},
8357 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, UNKNOWN, 0},
8358 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, UNKNOWN, 0},
8359 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, UNKNOWN, 0},
8360 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, UNKNOWN, 0},
8361 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, UNKNOWN, 0},
8362 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, UNKNOWN, 0},
8363 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, UNKNOWN, 0}
8364 };
8365
8366 /* Media intrinsics that take two accumulator numbers as argument and
8367 return void. */
8368
8369 static struct builtin_description bdesc_voidacc[] =
8370 {
8371 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, UNKNOWN, 0},
8372 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, UNKNOWN, 0},
8373 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, UNKNOWN, 0},
8374 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, UNKNOWN, 0},
8375 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, UNKNOWN, 0},
8376 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, UNKNOWN, 0}
8377 };
8378
8379 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8380 a normal move and the ICODE is for the membar. */
8381
8382 static struct builtin_description bdesc_loads[] =
8383 {
8384 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8385 FRV_BUILTIN_READ8, UNKNOWN, 0},
8386 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8387 FRV_BUILTIN_READ16, UNKNOWN, 0},
8388 { CODE_FOR_optional_membar_si, "__builtin_read32",
8389 FRV_BUILTIN_READ32, UNKNOWN, 0},
8390 { CODE_FOR_optional_membar_di, "__builtin_read64",
8391 FRV_BUILTIN_READ64, UNKNOWN, 0}
8392 };
8393
8394 /* Likewise stores. */
8395
8396 static struct builtin_description bdesc_stores[] =
8397 {
8398 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8399 FRV_BUILTIN_WRITE8, UNKNOWN, 0},
8400 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8401 FRV_BUILTIN_WRITE16, UNKNOWN, 0},
8402 { CODE_FOR_optional_membar_si, "__builtin_write32",
8403 FRV_BUILTIN_WRITE32, UNKNOWN, 0},
8404 { CODE_FOR_optional_membar_di, "__builtin_write64",
8405 FRV_BUILTIN_WRITE64, UNKNOWN, 0},
8406 };
8407
8408 /* Initialize media builtins. */
8409
8410 static void
8411 frv_init_builtins (void)
8412 {
8413 tree accumulator = integer_type_node;
8414 tree integer = integer_type_node;
8415 tree voidt = void_type_node;
8416 tree uhalf = short_unsigned_type_node;
8417 tree sword1 = long_integer_type_node;
8418 tree uword1 = long_unsigned_type_node;
8419 tree sword2 = long_long_integer_type_node;
8420 tree uword2 = long_long_unsigned_type_node;
8421 tree uword4 = build_pointer_type (uword1);
8422 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8423 tree ubyte = unsigned_char_type_node;
8424 tree iacc = integer_type_node;
8425
8426 #define UNARY(RET, T1) \
8427 build_function_type_list (RET, T1, NULL_TREE)
8428
8429 #define BINARY(RET, T1, T2) \
8430 build_function_type_list (RET, T1, T2, NULL_TREE)
8431
8432 #define TRINARY(RET, T1, T2, T3) \
8433 build_function_type_list (RET, T1, T2, T3, NULL_TREE)
8434
8435 #define QUAD(RET, T1, T2, T3, T4) \
8436 build_function_type_list (RET, T1, T2, T3, T4, NULL_TREE)
8437
8438 tree void_ftype_void = build_function_type_list (voidt, NULL_TREE);
8439
8440 tree void_ftype_acc = UNARY (voidt, accumulator);
8441 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8442 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8443 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8444 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8445 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8446 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8447 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8448 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8449
8450 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8451 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8452 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8453 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8454 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8455 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8456 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8457 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8458 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8459 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8460 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8461
8462 tree sw1_ftype_int = UNARY (sword1, integer);
8463 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8464 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8465
8466 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8467 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8468 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8469 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8470 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8471 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8472
8473 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8474 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8475 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8476 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8477 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8478 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8479 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8480 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8481 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8482 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8483 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8484 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8485 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8486 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8487 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8488 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8489 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8490
8491 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8492 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8493 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8494 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8495 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8496 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8497 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8498 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8499 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8500 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8501 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8502 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8503 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8504 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8505 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8506 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8507 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8508 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8509 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8510 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8511 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8512 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8513 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8514 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8515 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8516 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8517 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8518 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8519 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8520 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8521 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8522 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8523 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8524 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8525 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8526 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8527 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8528 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8529 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8530 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8531 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8532 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8533 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8534 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8535 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8536 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8537 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8538 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8539 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8540 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8541 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8542 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8543 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8544 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8545 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8546 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8547 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8548 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8549 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8550 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8551 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8552 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8553 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8554 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8555 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8556 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8557 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8558 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8559 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8560 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8561 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8562 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8563 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8564 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8565 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8566 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8567 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8568 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8569 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8570 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8571 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8572 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8573 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8574 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8575 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8576 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8577 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8578 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8579 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8580 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8581 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8582 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8583 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8584 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8585 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8586 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8587 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8588 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8589 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8590 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8591 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8592 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8593 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8594 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8595 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8596 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8597 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8598 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8599
8600 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8601 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8602 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8603 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8604
8605 #undef UNARY
8606 #undef BINARY
8607 #undef TRINARY
8608 #undef QUAD
8609 }
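
/* A usage sketch (illustrative, not part of the original sources): once
   frv_init_builtins has run, FRV code can call the media intrinsics
   directly, e.g.

       unsigned long a, b, c;
       c = __MAND (a, b);

   which is expanded via frv_expand_binop_builtin into the "mand"
   pattern listed in bdesc_2arg above.  */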
8610
8611 /* Set the names for various arithmetic operations according to the
8612 FRV ABI. */
8613 static void
8614 frv_init_libfuncs (void)
8615 {
8616 set_optab_libfunc (smod_optab, SImode, "__modi");
8617 set_optab_libfunc (umod_optab, SImode, "__umodi");
8618
8619 set_optab_libfunc (add_optab, DImode, "__addll");
8620 set_optab_libfunc (sub_optab, DImode, "__subll");
8621 set_optab_libfunc (smul_optab, DImode, "__mulll");
8622 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8623 set_optab_libfunc (smod_optab, DImode, "__modll");
8624 set_optab_libfunc (umod_optab, DImode, "__umodll");
8625 set_optab_libfunc (and_optab, DImode, "__andll");
8626 set_optab_libfunc (ior_optab, DImode, "__orll");
8627 set_optab_libfunc (xor_optab, DImode, "__xorll");
8628 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8629
8630 set_optab_libfunc (add_optab, SFmode, "__addf");
8631 set_optab_libfunc (sub_optab, SFmode, "__subf");
8632 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8633 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8634
8635 set_optab_libfunc (add_optab, DFmode, "__addd");
8636 set_optab_libfunc (sub_optab, DFmode, "__subd");
8637 set_optab_libfunc (smul_optab, DFmode, "__muld");
8638 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8639
8640 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8641 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8642
8643 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8644 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8645 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8646 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8647
8648 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8649 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8650 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8651 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8652
8653 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8654 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8655 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8656 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8657 }
8658
8659 /* Convert an integer constant to an accumulator register. ICODE is the
8660 code of the target instruction, OPNUM is the number of the
8661 accumulator operand and OPVAL is the constant integer. Try both
8662 ACC and ACCG registers; only report an error if neither fits the
8663 instruction. */
8664
8665 static rtx
8666 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8667 {
8668 rtx reg;
8669 int i;
8670
8671 /* ACCs and ACCGs are implicit global registers if media intrinsics
8672 are being used. We set this up lazily to avoid creating lots of
8673 unnecessary call_insn rtl in non-media code. */
8674 for (i = 0; i <= ACC_MASK; i++)
8675 if ((i & ACC_MASK) == i)
8676 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8677
8678 if (GET_CODE (opval) != CONST_INT)
8679 {
8680 error ("accumulator is not a constant integer");
8681 return NULL_RTX;
8682 }
8683 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8684 {
8685 error ("accumulator number is out of bounds");
8686 return NULL_RTX;
8687 }
8688
8689 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8690 ACC_FIRST + INTVAL (opval));
8691 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8692 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8693
8694 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8695 {
8696 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8697 return NULL_RTX;
8698 }
8699 return reg;
8700 }
8701
8702 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8703 should have. */
8704
8705 static machine_mode
8706 frv_matching_accg_mode (machine_mode mode)
8707 {
8708 switch (mode)
8709 {
8710 case V4SImode:
8711 return V4QImode;
8712
8713 case DImode:
8714 return HImode;
8715
8716 case SImode:
8717 return QImode;
8718
8719 default:
8720 gcc_unreachable ();
8721 }
8722 }
8723
8724 /* Given that a __builtin_read or __builtin_write function is accessing
8725 address ADDRESS, return the value that should be used as operand 1
8726 of the membar. */
8727
8728 static rtx
8729 frv_io_address_cookie (rtx address)
8730 {
8731 return (GET_CODE (address) == CONST_INT
8732 ? GEN_INT (INTVAL (address) / 8 * 8)
8733 : const0_rtx);
8734 }
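
/* For example (illustrative): a constant ADDRESS of 0x1234 yields the
   cookie 0x1230, i.e. the address rounded down to its doubleword;
   a non-constant address yields const0_rtx.  */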
8735
8736 /* Return the accumulator guard that should be paired with accumulator
8737 register ACC. The mode of the returned register is in the same
8738 class as ACC, but is four times smaller. */
8739
8740 rtx
8741 frv_matching_accg_for_acc (rtx acc)
8742 {
8743 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8744 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8745 }
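
/* For example (illustrative): a DImode ACC register with index N pairs
   with the HImode ACCG register of the same index N, reflecting the
   four-to-one size ratio described above.  */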
8746
8747 /* Read from call expression EXP the argument given by INDEX.
8748    Return its value as an rtx. */
8749
8750 static rtx
8751 frv_read_argument (tree exp, unsigned int index)
8752 {
8753 return expand_normal (CALL_EXPR_ARG (exp, index));
8754 }
8755
8756 /* Like frv_read_argument, but interpret the argument as the number
8757 of an IACC register and return a (reg:MODE ...) rtx for it. */
8758
8759 static rtx
8760 frv_read_iacc_argument (machine_mode mode, tree call,
8761 unsigned int index)
8762 {
8763 int i, regno;
8764 rtx op;
8765
8766 op = frv_read_argument (call, index);
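
   /* Validate the IACC number.  For example (illustrative), with MODE ==
      DImode the (INTVAL (op) * 4) alignment test below accepts only even
      register numbers.  */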
8767 if (GET_CODE (op) != CONST_INT
8768 || INTVAL (op) < 0
8769 || INTVAL (op) > IACC_LAST - IACC_FIRST
8770 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8771 {
8772 error ("invalid IACC argument");
8773 op = const0_rtx;
8774 }
8775
8776 /* IACCs are implicit global registers. We set this up lazily to
8777 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8778 being used. */
8779 regno = INTVAL (op) + IACC_FIRST;
8780 for (i = 0; i < HARD_REGNO_NREGS (regno, mode); i++)
8781 global_regs[regno + i] = 1;
8782
8783 return gen_rtx_REG (mode, regno);
8784 }
8785
8786 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8787 The instruction should require a constant operand of some sort. The
8788 function prints an error if OPVAL is not valid. */
8789
8790 static int
8791 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8792 {
8793 if (GET_CODE (opval) != CONST_INT)
8794 {
8795 error ("%qs expects a constant argument", insn_data[icode].name);
8796 return FALSE;
8797 }
8798 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8799 {
8800 error ("constant argument out of range for %qs", insn_data[icode].name);
8801 return FALSE;
8802 }
8803 return TRUE;
8804 }
8805
8806 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8807 if it's not null, has the right mode, and satisfies operand 0's
8808 predicate. */
8809
8810 static rtx
8811 frv_legitimize_target (enum insn_code icode, rtx target)
8812 {
8813 machine_mode mode = insn_data[icode].operand[0].mode;
8814
8815 if (! target
8816 || GET_MODE (target) != mode
8817 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8818 return gen_reg_rtx (mode);
8819 else
8820 return target;
8821 }
8822
8823 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8824 check whether ARG satisfies the operand's constraints. If it doesn't,
8825 copy ARG to a temporary register and return that. Otherwise return ARG
8826 itself. */
8827
8828 static rtx
8829 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8830 {
8831 machine_mode mode = insn_data[icode].operand[opnum].mode;
8832
8833 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8834 return arg;
8835 else
8836 return copy_to_mode_reg (mode, arg);
8837 }
8838
8839 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8840
8841 static rtx
8842 frv_volatile_memref (machine_mode mode, rtx arg)
8843 {
8844 rtx mem;
8845
8846 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8847 MEM_VOLATILE_P (mem) = 1;
8848 return mem;
8849 }
8850
8851 /* Expand builtins that take a single, constant argument. At the moment,
8852 only MHDSETS falls into this category. */
8853
8854 static rtx
8855 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8856 {
8857 rtx pat;
8858 rtx op0 = frv_read_argument (call, 0);
8859
8860 if (! frv_check_constant_argument (icode, 1, op0))
8861 return NULL_RTX;
8862
8863 target = frv_legitimize_target (icode, target);
8864 pat = GEN_FCN (icode) (target, op0);
8865 if (! pat)
8866 return NULL_RTX;
8867
8868 emit_insn (pat);
8869 return target;
8870 }
8871
8872 /* Expand builtins that take one operand. */
8873
8874 static rtx
8875 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8876 {
8877 rtx pat;
8878 rtx op0 = frv_read_argument (call, 0);
8879
8880 target = frv_legitimize_target (icode, target);
8881 op0 = frv_legitimize_argument (icode, 1, op0);
8882 pat = GEN_FCN (icode) (target, op0);
8883 if (! pat)
8884 return NULL_RTX;
8885
8886 emit_insn (pat);
8887 return target;
8888 }
8889
8890 /* Expand builtins that take two operands. */
8891
8892 static rtx
8893 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8894 {
8895 rtx pat;
8896 rtx op0 = frv_read_argument (call, 0);
8897 rtx op1 = frv_read_argument (call, 1);
8898
8899 target = frv_legitimize_target (icode, target);
8900 op0 = frv_legitimize_argument (icode, 1, op0);
8901 op1 = frv_legitimize_argument (icode, 2, op1);
8902 pat = GEN_FCN (icode) (target, op0, op1);
8903 if (! pat)
8904 return NULL_RTX;
8905
8906 emit_insn (pat);
8907 return target;
8908 }
8909
8910 /* Expand cut-style builtins, which take two operands and an implicit ACCG
8911 operand. */
8912
8913 static rtx
8914 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8915 {
8916 rtx pat;
8917 rtx op0 = frv_read_argument (call, 0);
8918 rtx op1 = frv_read_argument (call, 1);
8919 rtx op2;
8920
8921 target = frv_legitimize_target (icode, target);
8922 op0 = frv_int_to_acc (icode, 1, op0);
8923 if (! op0)
8924 return NULL_RTX;
8925
8926 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8927 {
8928 if (! frv_check_constant_argument (icode, 2, op1))
8929 return NULL_RTX;
8930 }
8931 else
8932 op1 = frv_legitimize_argument (icode, 2, op1);
8933
8934 op2 = frv_matching_accg_for_acc (op0);
8935 pat = GEN_FCN (icode) (target, op0, op1, op2);
8936 if (! pat)
8937 return NULL_RTX;
8938
8939 emit_insn (pat);
8940 return target;
8941 }
8942
8943 /* Expand builtins that take two operands, the second of which is an immediate. */
8944
8945 static rtx
8946 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8947 {
8948 rtx pat;
8949 rtx op0 = frv_read_argument (call, 0);
8950 rtx op1 = frv_read_argument (call, 1);
8951
8952 if (! frv_check_constant_argument (icode, 2, op1))
8953 return NULL_RTX;
8954
8955 target = frv_legitimize_target (icode, target);
8956 op0 = frv_legitimize_argument (icode, 1, op0);
8957 pat = GEN_FCN (icode) (target, op0, op1);
8958 if (! pat)
8959 return NULL_RTX;
8960
8961 emit_insn (pat);
8962 return target;
8963 }
8964
8965 /* Expand builtins that take two operands and return void, the first
8966    operand being a pointer to ints. */
8967
8968 static rtx
8969 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8970 {
8971 rtx pat;
8972 rtx op0 = frv_read_argument (call, 0);
8973 rtx op1 = frv_read_argument (call, 1);
8974 machine_mode mode0 = insn_data[icode].operand[0].mode;
8975 rtx addr;
8976
8977 if (GET_CODE (op0) != MEM)
8978 {
8979 rtx reg = op0;
8980
8981 if (! offsettable_address_p (0, mode0, op0))
8982 {
8983 reg = gen_reg_rtx (Pmode);
8984 emit_insn (gen_rtx_SET (VOIDmode, reg, op0));
8985 }
8986
8987 op0 = gen_rtx_MEM (SImode, reg);
8988 }
8989
8990 addr = XEXP (op0, 0);
8991 if (! offsettable_address_p (0, mode0, addr))
8992 addr = copy_to_mode_reg (Pmode, op0);
8993
8994 op0 = change_address (op0, V4SImode, addr);
8995 op1 = frv_legitimize_argument (icode, 1, op1);
8996 pat = GEN_FCN (icode) (op0, op1);
8997 if (! pat)
8998 return 0;
8999
9000 emit_insn (pat);
9001 return 0;
9002 }
9003
9004 /* Expand builtins that take two long operands and return void. */
9005
9006 static rtx
9007 frv_expand_int_void2arg (enum insn_code icode, tree call)
9008 {
9009 rtx pat;
9010 rtx op0 = frv_read_argument (call, 0);
9011 rtx op1 = frv_read_argument (call, 1);
9012
9013 op0 = frv_legitimize_argument (icode, 1, op0);
9014 op1 = frv_legitimize_argument (icode, 1, op1);
9015 pat = GEN_FCN (icode) (op0, op1);
9016 if (! pat)
9017 return NULL_RTX;
9018
9019 emit_insn (pat);
9020 return NULL_RTX;
9021 }
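
/* Informal example: the fr405/fr450 scalar builtins that reach this
   expander operate on the IACC0 accumulator and return nothing.  Assuming
   the documented prototype void __SMASS (sw1, sw1), one step of a
   multiply-accumulate loop is simply

       __SMASS (a[i], b[i]);  */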
9022
9023 /* Expand prefetch builtins. These take a single address as argument. */
9024
9025 static rtx
9026 frv_expand_prefetches (enum insn_code icode, tree call)
9027 {
9028 rtx pat;
9029 rtx op0 = frv_read_argument (call, 0);
9030
9031 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
9032 if (! pat)
9033 return 0;
9034
9035 emit_insn (pat);
9036 return 0;
9037 }
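
/* Hedged note: the user-level form expected here is a data-prefetch
   builtin taking just an address, e.g. __data_prefetch0 (p) as described
   in the FRV builtin documentation; the address is forced into a register
   and passed straight to the prefetch pattern.  */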
9038
9039 /* Expand builtins that take three operands and return void. The first
9040 argument must be a constant that describes a pair or quad of accumulators.
9041 A fourth operand, the accumulator guard register that corresponds to the
9042 accumulator, is created automatically. */
9043
9044 static rtx
9045 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
9046 {
9047 rtx pat;
9048 rtx op0 = frv_read_argument (call, 0);
9049 rtx op1 = frv_read_argument (call, 1);
9050 rtx op2 = frv_read_argument (call, 2);
9051 rtx op3;
9052
9053 op0 = frv_int_to_acc (icode, 0, op0);
9054 if (! op0)
9055 return NULL_RTX;
9056
9057 op1 = frv_legitimize_argument (icode, 1, op1);
9058 op2 = frv_legitimize_argument (icode, 2, op2);
9059 op3 = frv_matching_accg_for_acc (op0);
9060 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9061 if (! pat)
9062 return NULL_RTX;
9063
9064 emit_insn (pat);
9065 return NULL_RTX;
9066 }
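
/* Illustration, names assumed from the FRV builtin documentation: the
   multiply-and-accumulate builtins have this shape, e.g.
   void __MMACHS (acc, sw1, sw1), so a call such as

       __MMACHS (0, x, y);

   names only the accumulator; the corresponding guard register operand is
   appended here by frv_matching_accg_for_acc.  */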
9067
9068 /* Expand builtins that perform accumulator-to-accumulator operations.
9069 These builtins take two accumulator numbers as arguments and return
9070 void. */
9071
9072 static rtx
9073 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
9074 {
9075 rtx pat;
9076 rtx op0 = frv_read_argument (call, 0);
9077 rtx op1 = frv_read_argument (call, 1);
9078 rtx op2;
9079 rtx op3;
9080
9081 op0 = frv_int_to_acc (icode, 0, op0);
9082 if (! op0)
9083 return NULL_RTX;
9084
9085 op1 = frv_int_to_acc (icode, 1, op1);
9086 if (! op1)
9087 return NULL_RTX;
9088
9089 op2 = frv_matching_accg_for_acc (op0);
9090 op3 = frv_matching_accg_for_acc (op1);
9091 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9092 if (! pat)
9093 return NULL_RTX;
9094
9095 emit_insn (pat);
9096 return NULL_RTX;
9097 }
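
/* Sketch: an accumulator-to-accumulator builtin such as
   void __MADDACCS (acc, acc) (prototype assumed from the documentation)
   expands to a four-operand insn here: the two accumulators named by the
   user plus their two matching guard registers.  */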
9098
9099 /* Expand a __builtin_read* function. ICODE is the instruction code for the
9100 membar and TARGET_MODE is the mode that the loaded value should have. */
9101
9102 static rtx
9103 frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode,
9104 tree call, rtx target)
9105 {
9106 rtx op0 = frv_read_argument (call, 0);
9107 rtx cookie = frv_io_address_cookie (op0);
9108
9109 if (target == 0 || !REG_P (target))
9110 target = gen_reg_rtx (target_mode);
9111 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9112 convert_move (target, op0, 1);
9113 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
9114 cfun->machine->has_membar_p = 1;
9115 return target;
9116 }
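
/* Usage sketch, assuming the documented I/O builtins such as
   unsigned char __builtin_read8 (void *):

       status = __builtin_read8 (port);

   The value itself comes from the ordinary volatile memory reference
   created above; the insn emitted afterwards is the membar that models
   the I/O ordering constraint.  */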
9117
9118 /* Likewise __builtin_write* functions. */
9119
9120 static rtx
9121 frv_expand_store_builtin (enum insn_code icode, tree call)
9122 {
9123 rtx op0 = frv_read_argument (call, 0);
9124 rtx op1 = frv_read_argument (call, 1);
9125 rtx cookie = frv_io_address_cookie (op0);
9126
9127 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9128 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
9129 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
9130 cfun->machine->has_membar_p = 1;
9131 return NULL_RTX;
9132 }
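
/* Likewise for the stores; assuming the documented prototype
   void __builtin_write8 (void *, unsigned char):

       __builtin_write8 (port, cmd);  */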
9133
9134 /* Expand the MDPACKH builtin. It takes four unsigned short arguments;
9135 each argument supplies the low halfword of one word of the two double-word input registers.
9136 CALL is the tree for the call and TARGET, if nonnull, suggests a good place
9137 to put the return value. */
9138
9139 static rtx
9140 frv_expand_mdpackh_builtin (tree call, rtx target)
9141 {
9142 enum insn_code icode = CODE_FOR_mdpackh;
9143 rtx pat, op0, op1;
9144 rtx arg1 = frv_read_argument (call, 0);
9145 rtx arg2 = frv_read_argument (call, 1);
9146 rtx arg3 = frv_read_argument (call, 2);
9147 rtx arg4 = frv_read_argument (call, 3);
9148
9149 target = frv_legitimize_target (icode, target);
9150 op0 = gen_reg_rtx (DImode);
9151 op1 = gen_reg_rtx (DImode);
9152
9153 /* The high half of each word is not explicitly initialized, so indicate
9154 that the input operands are not live before this point. */
9155 emit_clobber (op0);
9156 emit_clobber (op1);
9157
9158 /* Move each argument into the low half of its associated input word. */
9159 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
9160 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
9161 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
9162 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
9163
9164 pat = GEN_FCN (icode) (target, op0, op1);
9165 if (! pat)
9166 return NULL_RTX;
9167
9168 emit_insn (pat);
9169 return target;
9170 }
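
/* Informal example, assuming the documented prototype
   uw2 __MDPACKH (uh, uh, uh, uh):

       packed = __MDPACKH (h0, h1, h2, h3);

   Each halfword argument lands in the low half of one of the four words
   spread across the two DImode inputs constructed above.  */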
9171
9172 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
9173 number as argument. */
9174
9175 static rtx
9176 frv_expand_mclracc_builtin (tree call)
9177 {
9178 enum insn_code icode = CODE_FOR_mclracc;
9179 rtx pat;
9180 rtx op0 = frv_read_argument (call, 0);
9181
9182 op0 = frv_int_to_acc (icode, 0, op0);
9183 if (! op0)
9184 return NULL_RTX;
9185
9186 pat = GEN_FCN (icode) (op0);
9187 if (pat)
9188 emit_insn (pat);
9189
9190 return NULL_RTX;
9191 }
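
/* For reference, the corresponding user-level calls are expected to be
   __MCLRACC (0) for a single accumulator and __MCLRACCA () for all of
   them; the latter goes through frv_expand_noargs_builtin below.  */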
9192
9193 /* Expand builtins that take no arguments. */
9194
9195 static rtx
9196 frv_expand_noargs_builtin (enum insn_code icode)
9197 {
9198 rtx pat = GEN_FCN (icode) (const0_rtx);
9199 if (pat)
9200 emit_insn (pat);
9201
9202 return NULL_RTX;
9203 }
9204
9205 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9206 number or accumulator guard number as argument and return an SI integer. */
9207
9208 static rtx
9209 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9210 {
9211 rtx pat;
9212 rtx target = gen_reg_rtx (SImode);
9213 rtx op0 = frv_read_argument (call, 0);
9214
9215 op0 = frv_int_to_acc (icode, 1, op0);
9216 if (! op0)
9217 return NULL_RTX;
9218
9219 pat = GEN_FCN (icode) (target, op0);
9220 if (! pat)
9221 return NULL_RTX;
9222
9223 emit_insn (pat);
9224 return target;
9225 }
9226
9227 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9228 accumulator guard as their first argument and an SImode value as their
9229 second. */
9230
9231 static rtx
9232 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9233 {
9234 rtx pat;
9235 rtx op0 = frv_read_argument (call, 0);
9236 rtx op1 = frv_read_argument (call, 1);
9237
9238 op0 = frv_int_to_acc (icode, 0, op0);
9239 if (! op0)
9240 return NULL_RTX;
9241
9242 op1 = frv_legitimize_argument (icode, 1, op1);
9243 pat = GEN_FCN (icode) (op0, op1);
9244 if (pat)
9245 emit_insn (pat);
9246
9247 return NULL_RTX;
9248 }
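
/* Example of the read/write pair, prototypes assumed from the FRV builtin
   documentation (uw1 __MRDACC (acc), void __MWTACC (acc, uw1)):

       unsigned int saved = __MRDACC (0);
       __MWTACC (0, 0);

   The accumulator numbers shown are purely illustrative.  */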
9249
9250 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9251 to move DImode values into and out of IACC0. */
9252
9253 static void
9254 frv_split_iacc_move (rtx dest, rtx src)
9255 {
9256 machine_mode inner;
9257 int i;
9258
9259 inner = GET_MODE (dest);
9260 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
9261 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
9262 simplify_gen_subreg (SImode, src, inner, i));
9263 }
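
/* Minimal sketch of the effect for a DImode source: the loop above emits
   one SImode move per word, so moving IACC0 into a DImode pseudo becomes
   roughly (the exact RTL depends on whether hard registers are involved)

       (set (subreg:SI dest 0) (subreg:SI src 0))
       (set (subreg:SI dest 4) (subreg:SI src 4))  */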
9264
9265 /* Expand builtins. */
9266
9267 static rtx
9268 frv_expand_builtin (tree exp,
9269 rtx target,
9270 rtx subtarget ATTRIBUTE_UNUSED,
9271 machine_mode mode ATTRIBUTE_UNUSED,
9272 int ignore ATTRIBUTE_UNUSED)
9273 {
9274 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9275 unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl);
9276 unsigned i;
9277 struct builtin_description *d;
9278
9279 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
9280 {
9281 error ("media functions are not available unless -mmedia is used");
9282 return NULL_RTX;
9283 }
9284
9285 switch (fcode)
9286 {
9287 case FRV_BUILTIN_MCOP1:
9288 case FRV_BUILTIN_MCOP2:
9289 case FRV_BUILTIN_MDUNPACKH:
9290 case FRV_BUILTIN_MBTOHE:
9291 if (! TARGET_MEDIA_REV1)
9292 {
9293 error ("this media function is only available on the fr500");
9294 return NULL_RTX;
9295 }
9296 break;
9297
9298 case FRV_BUILTIN_MQXMACHS:
9299 case FRV_BUILTIN_MQXMACXHS:
9300 case FRV_BUILTIN_MQMACXHS:
9301 case FRV_BUILTIN_MADDACCS:
9302 case FRV_BUILTIN_MSUBACCS:
9303 case FRV_BUILTIN_MASACCS:
9304 case FRV_BUILTIN_MDADDACCS:
9305 case FRV_BUILTIN_MDSUBACCS:
9306 case FRV_BUILTIN_MDASACCS:
9307 case FRV_BUILTIN_MABSHS:
9308 case FRV_BUILTIN_MDROTLI:
9309 case FRV_BUILTIN_MCPLHI:
9310 case FRV_BUILTIN_MCPLI:
9311 case FRV_BUILTIN_MDCUTSSI:
9312 case FRV_BUILTIN_MQSATHS:
9313 case FRV_BUILTIN_MHSETLOS:
9314 case FRV_BUILTIN_MHSETLOH:
9315 case FRV_BUILTIN_MHSETHIS:
9316 case FRV_BUILTIN_MHSETHIH:
9317 case FRV_BUILTIN_MHDSETS:
9318 case FRV_BUILTIN_MHDSETH:
9319 if (! TARGET_MEDIA_REV2)
9320 {
9321 error ("this media function is only available on the fr400"
9322 " and fr550");
9323 return NULL_RTX;
9324 }
9325 break;
9326
9327 case FRV_BUILTIN_SMASS:
9328 case FRV_BUILTIN_SMSSS:
9329 case FRV_BUILTIN_SMU:
9330 case FRV_BUILTIN_ADDSS:
9331 case FRV_BUILTIN_SUBSS:
9332 case FRV_BUILTIN_SLASS:
9333 case FRV_BUILTIN_SCUTSS:
9334 case FRV_BUILTIN_IACCreadll:
9335 case FRV_BUILTIN_IACCreadl:
9336 case FRV_BUILTIN_IACCsetll:
9337 case FRV_BUILTIN_IACCsetl:
9338 if (!TARGET_FR405_BUILTINS)
9339 {
9340 error ("this builtin function is only available"
9341 " on the fr405 and fr450");
9342 return NULL_RTX;
9343 }
9344 break;
9345
9346 case FRV_BUILTIN_PREFETCH:
9347 if (!TARGET_FR500_FR550_BUILTINS)
9348 {
9349 error ("this builtin function is only available on the fr500"
9350 " and fr550");
9351 return NULL_RTX;
9352 }
9353 break;
9354
9355 case FRV_BUILTIN_MQLCLRHS:
9356 case FRV_BUILTIN_MQLMTHS:
9357 case FRV_BUILTIN_MQSLLHI:
9358 case FRV_BUILTIN_MQSRAHI:
9359 if (!TARGET_MEDIA_FR450)
9360 {
9361 error ("this builtin function is only available on the fr450");
9362 return NULL_RTX;
9363 }
9364 break;
9365
9366 default:
9367 break;
9368 }
9369
9370 /* Expand unique builtins. */
9371
9372 switch (fcode)
9373 {
9374 case FRV_BUILTIN_MTRAP:
9375 return frv_expand_noargs_builtin (CODE_FOR_mtrap);
9376
9377 case FRV_BUILTIN_MCLRACC:
9378 return frv_expand_mclracc_builtin (exp);
9379
9380 case FRV_BUILTIN_MCLRACCA:
9381 if (TARGET_ACC_8)
9382 return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
9383 else
9384 return frv_expand_noargs_builtin (CODE_FOR_mclracca4);
9385
9386 case FRV_BUILTIN_MRDACC:
9387 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);
9388
9389 case FRV_BUILTIN_MRDACCG:
9390 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);
9391
9392 case FRV_BUILTIN_MWTACC:
9393 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);
9394
9395 case FRV_BUILTIN_MWTACCG:
9396 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);
9397
9398 case FRV_BUILTIN_MDPACKH:
9399 return frv_expand_mdpackh_builtin (exp, target);
9400
9401 case FRV_BUILTIN_IACCreadll:
9402 {
9403 rtx src = frv_read_iacc_argument (DImode, exp, 0);
9404 if (target == 0 || !REG_P (target))
9405 target = gen_reg_rtx (DImode);
9406 frv_split_iacc_move (target, src);
9407 return target;
9408 }
9409
9410 case FRV_BUILTIN_IACCreadl:
9411 return frv_read_iacc_argument (SImode, exp, 0);
9412
9413 case FRV_BUILTIN_IACCsetll:
9414 {
9415 rtx dest = frv_read_iacc_argument (DImode, exp, 0);
9416 rtx src = frv_read_argument (exp, 1);
9417 frv_split_iacc_move (dest, force_reg (DImode, src));
9418 return 0;
9419 }
9420
9421 case FRV_BUILTIN_IACCsetl:
9422 {
9423 rtx dest = frv_read_iacc_argument (SImode, exp, 0);
9424 rtx src = frv_read_argument (exp, 1);
9425 emit_move_insn (dest, force_reg (SImode, src));
9426 return 0;
9427 }
9428
9429 default:
9430 break;
9431 }
9432
9433 /* Expand groups of builtins. */
9434
9435 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
9436 if (d->code == fcode)
9437 return frv_expand_set_builtin (d->icode, exp, target);
9438
9439 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9440 if (d->code == fcode)
9441 return frv_expand_unop_builtin (d->icode, exp, target);
9442
9443 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9444 if (d->code == fcode)
9445 return frv_expand_binop_builtin (d->icode, exp, target);
9446
9447 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
9448 if (d->code == fcode)
9449 return frv_expand_cut_builtin (d->icode, exp, target);
9450
9451 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
9452 if (d->code == fcode)
9453 return frv_expand_binopimm_builtin (d->icode, exp, target);
9454
9455 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
9456 if (d->code == fcode)
9457 return frv_expand_voidbinop_builtin (d->icode, exp);
9458
9459 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
9460 if (d->code == fcode)
9461 return frv_expand_voidtriop_builtin (d->icode, exp);
9462
9463 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
9464 if (d->code == fcode)
9465 return frv_expand_voidaccop_builtin (d->icode, exp);
9466
9467 for (i = 0, d = bdesc_int_void2arg;
9468 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
9469 if (d->code == fcode)
9470 return frv_expand_int_void2arg (d->icode, exp);
9471
9472 for (i = 0, d = bdesc_prefetches;
9473 i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
9474 if (d->code == fcode)
9475 return frv_expand_prefetches (d->icode, exp);
9476
9477 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
9478 if (d->code == fcode)
9479 return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
9480 exp, target);
9481
9482 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
9483 if (d->code == fcode)
9484 return frv_expand_store_builtin (d->icode, exp);
9485
9486 return 0;
9487 }
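
/* A worked example of the checks at the top of frv_expand_builtin:
   compiling a call to a media builtin such as __MAND without -mmedia
   fails with "media functions are not available unless -mmedia is used",
   and a revision-2-only builtin such as __MQSATHS additionally requires
   an fr400- or fr550-class -mcpu setting (TARGET_MEDIA_REV2).  */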
9488
9489 static bool
9490 frv_in_small_data_p (const_tree decl)
9491 {
9492 HOST_WIDE_INT size;
9493 const char *section_name;
9494
9495 /* Don't apply the -G flag to internal compiler structures. We
9496 should leave such structures in the main data section, partly
9497 for efficiency and partly because the size of some of them
9498 (such as C++ typeinfos) is not known until later. */
9499 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
9500 return false;
9501
9502 /* If we already know which section the decl should be in, see if
9503 it's a small data section. */
9504 section_name = DECL_SECTION_NAME (decl);
9505 if (section_name)
9506 {
9507 if (frv_string_begins_with (section_name, ".sdata"))
9508 return true;
9509 if (frv_string_begins_with (section_name, ".sbss"))
9510 return true;
9511 return false;
9512 }
9513
9514 size = int_size_in_bytes (TREE_TYPE (decl));
9515 if (size > 0 && size <= g_switch_value)
9516 return true;
9517
9518 return false;
9519 }
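
/* For example, with -G 8 in effect, a file-scope "int pair[2];" (8 bytes,
   no explicit section) satisfies the size test and is treated as small
   data, while "int quad[4];" (16 bytes) is not; anything placed in .sdata
   or .sbss by an explicit section attribute always qualifies.  */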
9520 \f
9521 static bool
9522 frv_rtx_costs (rtx x,
9523 int code ATTRIBUTE_UNUSED,
9524 int outer_code ATTRIBUTE_UNUSED,
9525 int opno ATTRIBUTE_UNUSED,
9526 int *total,
9527 bool speed ATTRIBUTE_UNUSED)
9528 {
9529 if (outer_code == MEM)
9530 {
9531 /* Don't differentiate between memory addresses. All the ones
9532 we accept have equal cost. */
9533 *total = COSTS_N_INSNS (0);
9534 return true;
9535 }
9536
9537 switch (code)
9538 {
9539 case CONST_INT:
9540 /* Make 12-bit integers really cheap. */
9541 if (IN_RANGE (INTVAL (x), -2048, 2047))
9542 {
9543 *total = 0;
9544 return true;
9545 }
9546 /* Fall through. */
9547
9548 case CONST:
9549 case LABEL_REF:
9550 case SYMBOL_REF:
9551 case CONST_DOUBLE:
9552 *total = COSTS_N_INSNS (2);
9553 return true;
9554
9555 case PLUS:
9556 case MINUS:
9557 case AND:
9558 case IOR:
9559 case XOR:
9560 case ASHIFT:
9561 case ASHIFTRT:
9562 case LSHIFTRT:
9563 case NOT:
9564 case NEG:
9565 case COMPARE:
9566 if (GET_MODE (x) == SImode)
9567 *total = COSTS_N_INSNS (1);
9568 else if (GET_MODE (x) == DImode)
9569 *total = COSTS_N_INSNS (2);
9570 else
9571 *total = COSTS_N_INSNS (3);
9572 return true;
9573
9574 case MULT:
9575 if (GET_MODE (x) == SImode)
9576 *total = COSTS_N_INSNS (2);
9577 else
9578 *total = COSTS_N_INSNS (6); /* guess */
9579 return true;
9580
9581 case DIV:
9582 case UDIV:
9583 case MOD:
9584 case UMOD:
9585 *total = COSTS_N_INSNS (18);
9586 return true;
9587
9588 case MEM:
9589 *total = COSTS_N_INSNS (3);
9590 return true;
9591
9592 default:
9593 return false;
9594 }
9595 }
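
/* A quick worked example of these costs: the 12-bit immediate 100 is free
   (*total = 0), a SYMBOL_REF costs COSTS_N_INSNS (2), an SImode PLUS
   costs COSTS_N_INSNS (1) and an SImode MULT COSTS_N_INSNS (2), so the
   optimizers are biased towards small inline immediates and simple
   SImode arithmetic.  */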
9596 \f
9597 static void
9598 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9599 {
9600 switch_to_section (ctors_section);
9601 assemble_align (POINTER_SIZE);
9602 if (TARGET_FDPIC)
9603 {
9604 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9605
9606 gcc_assert (ok);
9607 return;
9608 }
9609 assemble_integer_with_op ("\t.picptr\t", symbol);
9610 }
9611
9612 static void
9613 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9614 {
9615 switch_to_section (dtors_section);
9616 assemble_align (POINTER_SIZE);
9617 if (TARGET_FDPIC)
9618 {
9619 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9620
9621 gcc_assert (ok);
9622 return;
9623 }
9624 assemble_integer_with_op ("\t.picptr\t", symbol);
9625 }
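
/* In both cases the constructor/destructor table entry for a non-FDPIC
   build is emitted literally as

       .picptr  symbol

   while -mfdpic builds route the entry through frv_assemble_integer
   (defined earlier in this file) so the FDPIC-specific relocation
   handling is applied.  */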
9626
9627 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9628
9629 static rtx
9630 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9631 int incoming ATTRIBUTE_UNUSED)
9632 {
9633 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
9634 }
9635
9636 #define TLS_BIAS (2048 - 16)
9637
9638 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
9639 We need to emit DTP-relative relocations. */
9640
9641 static void
9642 frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
9643 {
9644 gcc_assert (size == 4);
9645 fputs ("\t.picptr\ttlsmoff(", file);
9646 /* We want the unbiased TLS offset, so add the bias to the
9647 expression, such that the implicit biasing cancels out. */
9648 output_addr_const (file, plus_constant (Pmode, x, TLS_BIAS));
9649 fputs (")", file);
9650 }
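
/* With TLS_BIAS equal to 2032, a DTP-relative reference to a symbol x is
   therefore printed as roughly

       .picptr tlsmoff(x+2032)

   and, per the comment above, the implicit biasing of tlsmoff values
   cancels the added constant, leaving the unbiased offset.  */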
9651
9652 #include "gt-frv.h"