1 /* Copyright (C) 1997-2024 Free Software Foundation, Inc.
2 Contributed by Red Hat, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "cfgrtl.h"
47 #include "langhooks.h"
48 #include "dumpfile.h"
49 #include "builtins.h"
50 #include "ifcvt.h"
51 #include "rtl-iter.h"
52 #include "calls.h"
53 #include "opts.h"
54
55 /* This file should be included last. */
56 #include "target-def.h"
57
58 #ifndef FRV_INLINE
59 #define FRV_INLINE inline
60 #endif
61
62 /* The maximum number of distinct NOP patterns. There are three:
63 nop, fnop and mnop. */
64 #define NUM_NOP_PATTERNS 3
65
66 /* Classification of instructions and units: integer, floating-point/media,
67 branch and control. */
68 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
69
70 /* The DFA names of the units, in packet order. */
71 static const char *const frv_unit_names[] =
72 {
73 "c",
74 "i0", "f0",
75 "i1", "f1",
76 "i2", "f2",
77 "i3", "f3",
78 "b0", "b1"
79 };
80
81 /* The classification of each unit in frv_unit_names[]. */
82 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
83 {
84 GROUP_C,
85 GROUP_I, GROUP_FM,
86 GROUP_I, GROUP_FM,
87 GROUP_I, GROUP_FM,
88 GROUP_I, GROUP_FM,
89 GROUP_B, GROUP_B
90 };
91
92 /* Return the DFA unit code associated with the Nth unit of integer
 93    or floating-point group GROUP.  */
94 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
95
96 /* Return the number of integer or floating-point unit UNIT
97 (1 for I1, 2 for F2, etc.). */
98 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
99
100 /* The DFA unit number for each unit in frv_unit_names[]. */
101 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
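/* Illustrative expansion (added for clarity; not part of the original
   source): with the packet order above, GROUP_I is 0 and GROUP_FM is 1,
   so

       NTH_UNIT (GROUP_I, 0)    is   frv_unit_codes[1]   -- the "i0" unit
       NTH_UNIT (GROUP_FM, 1)   is   frv_unit_codes[4]   -- the "f1" unit

   and, in the other direction, UNIT_NUMBER applied to the index of "f2"
   in frv_unit_names[] gives (6 - 1) / 2 == 2.  */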
102
103 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
104 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
105 no instruction of type T has been seen. */
106 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
107
108 /* An array of dummy nop INSNs, one for each type of nop that the
109 target supports. */
110 static GTY(()) rtx_insn *frv_nops[NUM_NOP_PATTERNS];
111
112 /* The number of nop instructions in frv_nops[]. */
113 static unsigned int frv_num_nops;
114
115 /* The type of access. FRV_IO_UNKNOWN means the access can be either
116 a read or a write. */
117 enum frv_io_type { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE };
118
119 /* Information about one __builtin_read or __builtin_write access, or
120 the combination of several such accesses. The most general value
121 is all-zeros (an unknown access to an unknown address). */
122 struct frv_io {
123 enum frv_io_type type;
124
125 /* The constant address being accessed, or zero if not known. */
126 HOST_WIDE_INT const_address;
127
128 /* The run-time address, as used in operand 0 of the membar pattern. */
129 rtx var_address;
130 };
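/* Hedged example (not from the original source): for a call such as

       __builtin_write32 ((void *) 0xffc00000, value);

   the access would be summarized as type == FRV_IO_WRITE with
   const_address == 0xffc00000, whereas an access through a pointer
   variable would leave const_address zero and record the address rtx
   in var_address instead.  */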
131
132 /* Return true if instruction INSN should be packed with the following
133 instruction. */
134 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
135
136 /* Set the value of PACKING_FLAG_P(INSN). */
137 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
138 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
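/* Usage sketch (added for illustration; not from the original source):
   the VLIW packing code below records packet boundaries roughly as

       if (<insn is the last one in its packet>)
         CLEAR_PACKING_FLAG (insn);
       else
         SET_PACKING_FLAG (insn);

   and the output code then consults PACKING_FLAG_P (insn) to decide
   whether an instruction should be marked as packed with its
   successor.  */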
139
140 /* Loop with REG set to each hard register in rtx X. */
141 #define FOR_EACH_REGNO(REG, X) \
142 for (REG = REGNO (X); REG < END_REGNO (X); REG++)
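/* Example (added for illustration; mark_regno is a hypothetical helper):
   for a DImode hard register rtx X such as (reg:DI gr16), END_REGNO (X)
   is REGNO (X) + 2, so

       unsigned int regno;
       FOR_EACH_REGNO (regno, x)
         mark_regno (regno);

   visits gr16 and then gr17.  */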
143
144 /* This structure contains machine specific function data. */
145 struct GTY(()) machine_function
146 {
147 /* True if we have created an rtx that relies on the stack frame. */
148 int frame_needed;
149
150 /* True if this function contains at least one __builtin_{read,write}*. */
151 bool has_membar_p;
152 };
153
154 /* Temporary register allocation support structure. */
155 typedef struct frv_tmp_reg_struct
156 {
157 HARD_REG_SET regs; /* possible registers to allocate */
158 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
159 }
160 frv_tmp_reg_t;
161
162 /* Register state information for VLIW re-packing phase. */
163 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
164 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
165 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
166 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
167
168 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
169
170 typedef unsigned char regstate_t;
171
172 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
173 memory move. */
174 enum frv_stack_op
175 {
176 FRV_LOAD,
177 FRV_STORE
178 };
179
180 /* Information required by frv_frame_access. */
181 typedef struct
182 {
183 /* This field is FRV_LOAD if registers are to be loaded from the stack and
184 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
185 the move is being done by the prologue code while FRV_LOAD implies it
186 is being done by the epilogue. */
187 enum frv_stack_op op;
188
189 /* The base register to use when accessing the stack. This may be the
190 frame pointer, stack pointer, or a temporary. The choice of register
191 depends on which part of the frame is being accessed and how big the
192 frame is. */
193 rtx base;
194
195 /* The offset of BASE from the bottom of the current frame, in bytes. */
196 int base_offset;
197 } frv_frame_accessor_t;
198
199 /* Conditional execution support gathered together in one structure. */
200 typedef struct
201 {
202 /* Linked list of insns to add if the conditional execution conversion was
203 successful. Each link points to an EXPR_LIST which points to the pattern
204 of the insn to add, and the insn to be inserted before. */
205 rtx added_insns_list;
206
207 /* Identify which registers are safe to allocate for if conversions to
208 conditional execution. We keep the last allocated register in the
209 register classes between COND_EXEC statements. This will mean we allocate
210 different registers for each different COND_EXEC group if we can. This
211 might allow the scheduler to intermix two different COND_EXEC sections. */
212 frv_tmp_reg_t tmp_reg;
213
214 /* For nested IFs, identify which CC registers are used outside of setting
 215    via a compare insn, and using via a check insn.  This will allow us to
216 know if we can rewrite the register to use a different register that will
217 be paired with the CR register controlling the nested IF-THEN blocks. */
218 HARD_REG_SET nested_cc_ok_rewrite;
219
220 /* Temporary registers allocated to hold constants during conditional
221 execution. */
222 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
223
224 /* Current number of temp registers available. */
225 int cur_scratch_regs;
226
227 /* Number of nested conditional execution blocks. */
228 int num_nested_cond_exec;
229
230 /* Map of insns that set up constants in scratch registers. */
231 bitmap scratch_insns_bitmap;
232
233 /* Conditional execution test register (CC0..CC7). */
234 rtx cr_reg;
235
236 /* Conditional execution compare register that is paired with cr_reg, so that
237 nested compares can be done. The csubcc and caddcc instructions don't
238 have enough bits to specify both a CC register to be set and a CR register
239 to do the test on, so the same bit number is used for both. Needless to
240 say, this is rather inconvenient for GCC. */
241 rtx nested_cc_reg;
242
243 /* Extra CR registers used for &&, ||. */
244 rtx extra_int_cr;
245 rtx extra_fp_cr;
246
247 /* Previous CR used in nested if, to make sure we are dealing with the same
248 nested if as the previous statement. */
249 rtx last_nested_if_cr;
250 }
251 frv_ifcvt_t;
252
253 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
254
255 /* Map register number to smallest register class. */
256 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
257
258 /* Cached value of frv_stack_info. */
259 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
260
261 /* Forward references */
262
263 static void frv_option_override (void);
264 static bool frv_legitimate_address_p (machine_mode, rtx, bool,
265 code_helper = ERROR_MARK);
266 static int frv_default_flags_for_cpu (void);
267 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
268 static void frv_print_operand (FILE *, rtx, int);
269 static void frv_print_operand_address (FILE *, machine_mode, rtx);
270 static bool frv_print_operand_punct_valid_p (unsigned char code);
271 static void frv_print_operand_memory_reference_reg
272 (FILE *, rtx);
273 static void frv_print_operand_memory_reference (FILE *, rtx, int);
274 static int frv_print_operand_jump_hint (rtx_insn *);
275 static const char *comparison_string (enum rtx_code, rtx);
276 static rtx frv_function_value (const_tree, const_tree,
277 bool);
278 static rtx frv_libcall_value (machine_mode,
279 const_rtx);
280 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
281 static rtx single_set_pattern (rtx);
282 static int frv_function_contains_far_jump (void);
283 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
284 enum reg_class,
285 machine_mode,
286 int, int);
287 static rtx frv_frame_offset_rtx (int);
288 static rtx frv_frame_mem (machine_mode, rtx, int);
289 static rtx frv_dwarf_store (rtx, int);
290 static void frv_frame_insn (rtx, rtx);
291 static void frv_frame_access (frv_frame_accessor_t*,
292 rtx, int);
293 static void frv_frame_access_multi (frv_frame_accessor_t*,
294 frv_stack_t *, int);
295 static void frv_frame_access_standard_regs (enum frv_stack_op,
296 frv_stack_t *);
297 static struct machine_function *frv_init_machine_status (void);
298 static rtx frv_int_to_acc (enum insn_code, int, rtx);
299 static machine_mode frv_matching_accg_mode (machine_mode);
300 static rtx frv_read_argument (tree, unsigned int);
301 static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
302 static int frv_check_constant_argument (enum insn_code, int, rtx);
303 static rtx frv_legitimize_target (enum insn_code, rtx);
304 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
305 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
306 static rtx frv_legitimize_address (rtx, rtx, machine_mode);
307 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
308 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
309 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
310 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
311 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
312 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
313 static rtx frv_expand_int_void2arg (enum insn_code, tree);
314 static rtx frv_expand_prefetches (enum insn_code, tree);
315 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
316 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
317 static rtx frv_expand_mclracc_builtin (tree);
318 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
319 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
320 static rtx frv_expand_noargs_builtin (enum insn_code);
321 static void frv_split_iacc_move (rtx, rtx);
322 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
323 static void frv_ifcvt_add_insn (rtx, rtx_insn *, int);
324 static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
325 static rtx frv_ifcvt_load_value (rtx, rtx);
326 static unsigned int frv_insn_unit (rtx_insn *);
327 static bool frv_issues_to_branch_unit_p (rtx_insn *);
328 static int frv_cond_flags (rtx);
329 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
330 static bool frv_registers_conflict_p (rtx);
331 static void frv_registers_update_1 (rtx, const_rtx, void *);
332 static void frv_registers_update (rtx);
333 static void frv_start_packet (void);
334 static void frv_start_packet_block (void);
335 static void frv_finish_packet (void (*) (void));
336 static bool frv_pack_insn_p (rtx_insn *);
337 static void frv_add_insn_to_packet (rtx_insn *);
338 static void frv_insert_nop_in_packet (rtx_insn *);
339 static bool frv_for_each_packet (void (*) (void));
340 static bool frv_sort_insn_group_1 (enum frv_insn_group,
341 unsigned int, unsigned int,
342 unsigned int, unsigned int,
343 state_t);
344 static int frv_compare_insns (const void *, const void *);
345 static void frv_sort_insn_group (enum frv_insn_group);
346 static void frv_reorder_packet (void);
347 static void frv_fill_unused_units (enum frv_insn_group);
348 static void frv_align_label (void);
349 static void frv_reorg_packet (void);
350 static void frv_register_nop (rtx);
351 static void frv_reorg (void);
352 static void frv_pack_insns (void);
353 static void frv_function_prologue (FILE *);
354 static void frv_function_epilogue (FILE *);
355 static bool frv_assemble_integer (rtx, unsigned, int);
356 static void frv_init_builtins (void);
357 static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
358 static void frv_init_libfuncs (void);
359 static bool frv_in_small_data_p (const_tree);
360 static void frv_asm_output_mi_thunk
361 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
362 static void frv_setup_incoming_varargs (cumulative_args_t,
363 const function_arg_info &,
364 int *, int);
365 static rtx frv_expand_builtin_saveregs (void);
366 static void frv_expand_builtin_va_start (tree, rtx);
367 static bool frv_rtx_costs (rtx, machine_mode, int, int,
368 int*, bool);
369 static int frv_register_move_cost (machine_mode,
370 reg_class_t, reg_class_t);
371 static int frv_memory_move_cost (machine_mode,
372 reg_class_t, bool);
373 static void frv_asm_out_constructor (rtx, int);
374 static void frv_asm_out_destructor (rtx, int);
375 static bool frv_function_symbol_referenced_p (rtx);
376 static bool frv_legitimate_constant_p (machine_mode, rtx);
377 static bool frv_cannot_force_const_mem (machine_mode, rtx);
378 static const char *unspec_got_name (int);
379 static void frv_output_const_unspec (FILE *,
380 const struct frv_unspec *);
381 static bool frv_function_ok_for_sibcall (tree, tree);
382 static rtx frv_struct_value_rtx (tree, int);
383 static bool frv_must_pass_in_stack (const function_arg_info &);
384 static int frv_arg_partial_bytes (cumulative_args_t,
385 const function_arg_info &);
386 static rtx frv_function_arg (cumulative_args_t, const function_arg_info &);
387 static rtx frv_function_incoming_arg (cumulative_args_t,
388 const function_arg_info &);
389 static void frv_function_arg_advance (cumulative_args_t,
390 const function_arg_info &);
391 static unsigned int frv_function_arg_boundary (machine_mode,
392 const_tree);
393 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
394 ATTRIBUTE_UNUSED;
395 static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
396 machine_mode,
397 secondary_reload_info *);
398 static bool frv_frame_pointer_required (void);
399 static bool frv_can_eliminate (const int, const int);
400 static void frv_conditional_register_usage (void);
401 static void frv_trampoline_init (rtx, tree, rtx);
402 static bool frv_class_likely_spilled_p (reg_class_t);
403 static unsigned int frv_hard_regno_nregs (unsigned int, machine_mode);
404 static bool frv_hard_regno_mode_ok (unsigned int, machine_mode);
405 static bool frv_modes_tieable_p (machine_mode, machine_mode);
406 \f
407 /* Initialize the GCC target structure. */
408 #undef TARGET_PRINT_OPERAND
409 #define TARGET_PRINT_OPERAND frv_print_operand
410 #undef TARGET_PRINT_OPERAND_ADDRESS
411 #define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
412 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
413 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
414 #undef TARGET_ASM_FUNCTION_PROLOGUE
415 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
416 #undef TARGET_ASM_FUNCTION_EPILOGUE
417 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
418 #undef TARGET_ASM_INTEGER
419 #define TARGET_ASM_INTEGER frv_assemble_integer
420 #undef TARGET_OPTION_OVERRIDE
421 #define TARGET_OPTION_OVERRIDE frv_option_override
422 #undef TARGET_INIT_BUILTINS
423 #define TARGET_INIT_BUILTINS frv_init_builtins
424 #undef TARGET_EXPAND_BUILTIN
425 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
426 #undef TARGET_INIT_LIBFUNCS
427 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
428 #undef TARGET_IN_SMALL_DATA_P
429 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
430 #undef TARGET_REGISTER_MOVE_COST
431 #define TARGET_REGISTER_MOVE_COST frv_register_move_cost
432 #undef TARGET_MEMORY_MOVE_COST
433 #define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
434 #undef TARGET_RTX_COSTS
435 #define TARGET_RTX_COSTS frv_rtx_costs
436 #undef TARGET_ASM_CONSTRUCTOR
437 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
438 #undef TARGET_ASM_DESTRUCTOR
439 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
440
441 #undef TARGET_ASM_OUTPUT_MI_THUNK
442 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
443 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
444 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
445
446 #undef TARGET_SCHED_ISSUE_RATE
447 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
448
449 #undef TARGET_LEGITIMIZE_ADDRESS
450 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
451
452 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
453 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
454 #undef TARGET_LEGITIMATE_CONSTANT_P
455 #define TARGET_LEGITIMATE_CONSTANT_P frv_legitimate_constant_p
456 #undef TARGET_CANNOT_FORCE_CONST_MEM
457 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
458
459 #undef TARGET_HAVE_TLS
460 #define TARGET_HAVE_TLS HAVE_AS_TLS
461
462 #undef TARGET_STRUCT_VALUE_RTX
463 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
464 #undef TARGET_MUST_PASS_IN_STACK
465 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
466 #undef TARGET_PASS_BY_REFERENCE
467 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
468 #undef TARGET_ARG_PARTIAL_BYTES
469 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
470 #undef TARGET_FUNCTION_ARG
471 #define TARGET_FUNCTION_ARG frv_function_arg
472 #undef TARGET_FUNCTION_INCOMING_ARG
473 #define TARGET_FUNCTION_INCOMING_ARG frv_function_incoming_arg
474 #undef TARGET_FUNCTION_ARG_ADVANCE
475 #define TARGET_FUNCTION_ARG_ADVANCE frv_function_arg_advance
476 #undef TARGET_FUNCTION_ARG_BOUNDARY
477 #define TARGET_FUNCTION_ARG_BOUNDARY frv_function_arg_boundary
478
479 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
480 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
481 #undef TARGET_SETUP_INCOMING_VARARGS
482 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
483 #undef TARGET_MACHINE_DEPENDENT_REORG
484 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
485
486 #undef TARGET_EXPAND_BUILTIN_VA_START
487 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
488
489 #if HAVE_AS_TLS
490 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
491 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
492 #endif
493
494 #undef TARGET_CLASS_LIKELY_SPILLED_P
495 #define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p
496
497 #undef TARGET_SECONDARY_RELOAD
498 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
499
500 #undef TARGET_LRA_P
501 #define TARGET_LRA_P hook_bool_void_false
502
503 #undef TARGET_LEGITIMATE_ADDRESS_P
504 #define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p
505
506 #undef TARGET_FRAME_POINTER_REQUIRED
507 #define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required
508
509 #undef TARGET_CAN_ELIMINATE
510 #define TARGET_CAN_ELIMINATE frv_can_eliminate
511
512 #undef TARGET_CONDITIONAL_REGISTER_USAGE
513 #define TARGET_CONDITIONAL_REGISTER_USAGE frv_conditional_register_usage
514
515 #undef TARGET_TRAMPOLINE_INIT
516 #define TARGET_TRAMPOLINE_INIT frv_trampoline_init
517
518 #undef TARGET_FUNCTION_VALUE
519 #define TARGET_FUNCTION_VALUE frv_function_value
520 #undef TARGET_LIBCALL_VALUE
521 #define TARGET_LIBCALL_VALUE frv_libcall_value
522
523 #undef TARGET_HARD_REGNO_NREGS
524 #define TARGET_HARD_REGNO_NREGS frv_hard_regno_nregs
525 #undef TARGET_HARD_REGNO_MODE_OK
526 #define TARGET_HARD_REGNO_MODE_OK frv_hard_regno_mode_ok
527 #undef TARGET_MODES_TIEABLE_P
528 #define TARGET_MODES_TIEABLE_P frv_modes_tieable_p
529 #undef TARGET_CONSTANT_ALIGNMENT
530 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
531
532 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
533 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
534
535 struct gcc_target targetm = TARGET_INITIALIZER;
536
537 #define FRV_SYMBOL_REF_TLS_P(RTX) \
538 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
539
540 \f
541 /* Any function call that satisfies the machine-independent
542 requirements is eligible on FR-V. */
543
544 static bool
545 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
546 tree exp ATTRIBUTE_UNUSED)
547 {
548 return true;
549 }
550
551 /* Return true if SYMBOL is a small data symbol and relocation RELOC
552 can be used to access it directly in a load or store. */
553
554 static FRV_INLINE bool
555 frv_small_data_reloc_p (rtx symbol, int reloc)
556 {
557 return (GET_CODE (symbol) == SYMBOL_REF
558 && SYMBOL_REF_SMALL_P (symbol)
559 && (!TARGET_FDPIC || flag_pic == 1)
560 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
561 }
562
563 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
564 appropriately. */
565
566 bool
567 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
568 {
569 if (GET_CODE (x) == CONST)
570 {
571 unspec->offset = 0;
572 x = XEXP (x, 0);
573 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
574 {
575 unspec->offset += INTVAL (XEXP (x, 1));
576 x = XEXP (x, 0);
577 }
578 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
579 {
580 unspec->symbol = XVECEXP (x, 0, 0);
581 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
582
583 if (unspec->offset == 0)
584 return true;
585
586 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
587 && unspec->offset > 0
588 && unspec->offset < g_switch_value)
589 return true;
590 }
591 }
592 return false;
593 }
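/* Example of the form accepted above (added for clarity; not part of
   the original source): a small-data symbol plus a byte offset is
   wrapped as

       (const (plus (unspec [(symbol_ref "x")
                             (const_int R_FRV_GPREL12)] UNSPEC_GOT)
                    (const_int 4)))

   which yields unspec->symbol == the symbol_ref, unspec->reloc ==
   R_FRV_GPREL12 and unspec->offset == 4; the nonzero offset is only
   accepted because the relocation is a small-data one and the offset
   is below g_switch_value.  */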
594
595 /* Decide whether we can force certain constants to memory. If we
596 decide we can't, the caller should be able to cope with it in
597 another way.
598
599 We never allow constants to be forced into memory for TARGET_FDPIC.
600 This is necessary for several reasons:
601
602 1. Since frv_legitimate_constant_p rejects constant pool addresses, the
603 target-independent code will try to force them into the constant
604 pool, thus leading to infinite recursion.
605
606 2. We can never introduce new constant pool references during reload.
607 Any such reference would require use of the pseudo FDPIC register.
608
609 3. We can't represent a constant added to a function pointer (which is
610 not the same as a pointer to a function+constant).
611
612 4. In many cases, it's more efficient to calculate the constant in-line. */
613
614 static bool
615 frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
616 rtx x ATTRIBUTE_UNUSED)
617 {
618 return TARGET_FDPIC;
619 }
620 \f
621 static int
622 frv_default_flags_for_cpu (void)
623 {
624 switch (frv_cpu_type)
625 {
626 case FRV_CPU_GENERIC:
627 return MASK_DEFAULT_FRV;
628
629 case FRV_CPU_FR550:
630 return MASK_DEFAULT_FR550;
631
632 case FRV_CPU_FR500:
633 case FRV_CPU_TOMCAT:
634 return MASK_DEFAULT_FR500;
635
636 case FRV_CPU_FR450:
637 return MASK_DEFAULT_FR450;
638
639 case FRV_CPU_FR405:
640 case FRV_CPU_FR400:
641 return MASK_DEFAULT_FR400;
642
643 case FRV_CPU_FR300:
644 case FRV_CPU_SIMPLE:
645 return MASK_DEFAULT_SIMPLE;
646
647 default:
648 gcc_unreachable ();
649 }
650 }
651
652 /* Implement TARGET_OPTION_OVERRIDE. */
653
654 static void
655 frv_option_override (void)
656 {
657 int regno;
658 unsigned int i;
659
660 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
661
662 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
663 linker about linking pic and non-pic code. */
664 if (TARGET_LIBPIC)
665 {
666 if (!flag_pic) /* -fPIC */
667 flag_pic = 2;
668
669 if (!OPTION_SET_P (g_switch_value)) /* -G0 */
670 {
671 g_switch_value = 0;
672 }
673 }
674
675 /* A C expression whose value is a register class containing hard
676 register REGNO. In general there is more than one such class;
677 choose a class which is "minimal", meaning that no smaller class
678 also contains the register. */
679
680 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
681 {
682 enum reg_class rclass;
683
684 if (GPR_P (regno))
685 {
686 int gpr_reg = regno - GPR_FIRST;
687
688 if (gpr_reg == GR8_REG)
689 rclass = GR8_REGS;
690
691 else if (gpr_reg == GR9_REG)
692 rclass = GR9_REGS;
693
694 else if (gpr_reg == GR14_REG)
695 rclass = FDPIC_FPTR_REGS;
696
697 else if (gpr_reg == FDPIC_REGNO)
698 rclass = FDPIC_REGS;
699
700 else if ((gpr_reg & 3) == 0)
701 rclass = QUAD_REGS;
702
703 else if ((gpr_reg & 1) == 0)
704 rclass = EVEN_REGS;
705
706 else
707 rclass = GPR_REGS;
708 }
709
710 else if (FPR_P (regno))
711 {
712 int fpr_reg = regno - GPR_FIRST;
713 if ((fpr_reg & 3) == 0)
714 rclass = QUAD_FPR_REGS;
715
716 else if ((fpr_reg & 1) == 0)
717 rclass = FEVEN_REGS;
718
719 else
720 rclass = FPR_REGS;
721 }
722
723 else if (regno == LR_REGNO)
724 rclass = LR_REG;
725
726 else if (regno == LCR_REGNO)
727 rclass = LCR_REG;
728
729 else if (ICC_P (regno))
730 rclass = ICC_REGS;
731
732 else if (FCC_P (regno))
733 rclass = FCC_REGS;
734
735 else if (ICR_P (regno))
736 rclass = ICR_REGS;
737
738 else if (FCR_P (regno))
739 rclass = FCR_REGS;
740
741 else if (ACC_P (regno))
742 {
743 int r = regno - ACC_FIRST;
744 if ((r & 3) == 0)
745 rclass = QUAD_ACC_REGS;
746 else if ((r & 1) == 0)
747 rclass = EVEN_ACC_REGS;
748 else
749 rclass = ACC_REGS;
750 }
751
752 else if (ACCG_P (regno))
753 rclass = ACCG_REGS;
754
755 else
756 rclass = NO_REGS;
757
758 regno_reg_class[regno] = rclass;
759 }
760
761 /* Check for small data option */
762 if (!OPTION_SET_P (g_switch_value) && !TARGET_LIBPIC)
763 g_switch_value = SDATA_DEFAULT_SIZE;
764
765 /* There is no single unaligned SI op for PIC code. Sometimes we
766 need to use ".4byte" and sometimes we need to use ".picptr".
767 See frv_assemble_integer for details. */
768 if (flag_pic || TARGET_FDPIC)
769 targetm.asm_out.unaligned_op.si = 0;
770
771 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
772 target_flags |= MASK_LINKED_FP;
773
774 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
775 target_flags |= MASK_OPTIMIZE_MEMBAR;
776
777 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
778 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
779
780 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
781 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
782
783 init_machine_status = frv_init_machine_status;
784 }
785
786 \f
787 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
788
789 static void
790 frv_conditional_register_usage (void)
791 {
792 int i;
793
794 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
795 fixed_regs[i] = call_used_regs[i] = 1;
796
797 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
798 fixed_regs[i] = call_used_regs[i] = 1;
799
800 /* Reserve the registers used for conditional execution. At present, we need
801 1 ICC and 1 ICR register. */
802 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
803 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
804
805 if (TARGET_FIXED_CC)
806 {
807 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
808 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
809 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
810 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
811 }
812
813 if (TARGET_FDPIC)
814 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
815 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
816
817 #if 0
818 /* If -fpic, SDA_BASE_REG is the PIC register. */
819 if (g_switch_value == 0 && !flag_pic)
820 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
821
822 if (!flag_pic)
823 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
824 #endif
825 }
826
827 \f
828 /*
829 * Compute the stack frame layout
830 *
831 * Register setup:
832 * +---------------+-----------------------+-----------------------+
833 * |Register |type |caller-save/callee-save|
834 * +---------------+-----------------------+-----------------------+
835 * |GR0 |Zero register | - |
836 * |GR1 |Stack pointer(SP) | - |
837 * |GR2 |Frame pointer(FP) | - |
838 * |GR3 |Hidden parameter | caller save |
839 * |GR4-GR7 | - | caller save |
840 * |GR8-GR13 |Argument register | caller save |
841 * |GR14-GR15 | - | caller save |
842 * |GR16-GR31 | - | callee save |
843 * |GR32-GR47 | - | caller save |
844 * |GR48-GR63 | - | callee save |
845 * |FR0-FR15 | - | caller save |
846 * |FR16-FR31 | - | callee save |
847 * |FR32-FR47 | - | caller save |
848 * |FR48-FR63 | - | callee save |
849 * +---------------+-----------------------+-----------------------+
850 *
851 * Stack frame setup:
852 * Low
853 * SP-> |-----------------------------------|
854 * | Argument area |
855 * |-----------------------------------|
856 * | Register save area |
857 * |-----------------------------------|
858 * | Local variable save area |
859 * FP-> |-----------------------------------|
860 * | Old FP |
861 * |-----------------------------------|
862 * | Hidden parameter save area |
863 * |-----------------------------------|
864 * | Return address(LR) storage area |
865 * |-----------------------------------|
866 * | Padding for alignment |
867 * |-----------------------------------|
868 * | Register argument area |
869 * OLD SP-> |-----------------------------------|
870 * | Parameter area |
871 * |-----------------------------------|
872 * High
873 *
874 * Argument area/Parameter area:
875 *
876 * When a function is called, this area is used for argument transfer. When
877 * the argument is set up by the caller function, this area is referred to as
878 * the argument area. When the argument is referenced by the callee function,
879 * this area is referred to as the parameter area. The area is allocated when
880 * all arguments cannot be placed on the argument register at the time of
881 * argument transfer.
882 *
883 * Register save area:
884 *
885 * This is a register save area that must be guaranteed for the caller
886 * function. This area is not secured when the register save operation is not
887 * needed.
888 *
889 * Local variable save area:
890 *
891 * This is the area for local variables and temporary variables.
892 *
893 * Old FP:
894 *
895 * This area stores the FP value of the caller function.
896 *
897 * Hidden parameter save area:
898 *
899 * This area stores the start address of the return value storage
900 * area for a struct/union return function.
901 * When a struct/union is used as the return value, the caller
902 * function stores the return value storage area start address in
903 * register GR3 and passes it to the caller function.
904 * The callee function interprets the address stored in the GR3
905 * as the return value storage area start address.
906 * When register GR3 needs to be saved into memory, the callee
907 * function saves it in the hidden parameter save area. This
908 * area is not secured when the save operation is not needed.
909 *
910 * Return address(LR) storage area:
911 *
912 * This area saves the LR. The LR stores the address of a return to the caller
913 * function for the purpose of function calling.
914 *
915 * Argument register area:
916 *
917 * This area saves the argument register. This area is not secured when the
918 * save operation is not needed.
919 *
920 * Argument:
921 *
922 * Arguments, the count of which equals the count of argument registers (6
923 * words), are positioned in registers GR8 to GR13 and delivered to the callee
924 * function. When a struct/union return function is called, the return value
925 * area address is stored in register GR3. Arguments not placed in the
926 * argument registers will be stored in the stack argument area for transfer
927 * purposes. When an 8-byte type argument is to be delivered using registers,
928 * it is divided into two and placed in two registers for transfer. When
929 * argument registers must be saved to memory, the callee function secures an
930 * argument register save area in the stack. In this case, a continuous
931 * argument register save area must be established in the parameter area. The
932 * argument register save area must be allocated as needed to cover the size of
933 * the argument register to be saved. If the function has a variable count of
934 * arguments, it saves all argument registers in the argument register save
935 * area.
936 *
937 * Argument Extension Format:
938 *
939 * When an argument is to be stored in the stack, its type is converted to an
940 * extended type in accordance with the individual argument type. The argument
941 * is freed by the caller function after the return from the callee function is
942 * made.
943 *
944 * +-----------------------+---------------+------------------------+
945 * | Argument Type |Extended Type |Stack Storage Size(byte)|
946 * +-----------------------+---------------+------------------------+
947 * |char |int | 4 |
948 * |signed char |int | 4 |
949 * |unsigned char |int | 4 |
950 * |[signed] short int |int | 4 |
951 * |unsigned short int |int | 4 |
952 * |[signed] int |No extension | 4 |
953 * |unsigned int |No extension | 4 |
954 * |[signed] long int |No extension | 4 |
955 * |unsigned long int |No extension | 4 |
956 * |[signed] long long int |No extension | 8 |
957 * |unsigned long long int |No extension | 8 |
958 * |float |double | 8 |
959 * |double |No extension | 8 |
960 * |long double |No extension | 8 |
961 * |pointer |No extension | 4 |
962 * |struct/union |- | 4 (*1) |
963 * +-----------------------+---------------+------------------------+
964 *
965 * When a struct/union is to be delivered as an argument, the caller copies it
966 * to the local variable area and delivers the address of that area.
967 *
968 * Return Value:
969 *
970 * +-------------------------------+----------------------+
971 * |Return Value Type |Return Value Interface|
972 * +-------------------------------+----------------------+
973 * |void |None |
974 * |[signed|unsigned] char |GR8 |
975 * |[signed|unsigned] short int |GR8 |
976 * |[signed|unsigned] int |GR8 |
977 * |[signed|unsigned] long int |GR8 |
978 * |pointer |GR8 |
979 * |[signed|unsigned] long long int|GR8 & GR9 |
980 * |float |GR8 |
981 * |double |GR8 & GR9 |
982 * |long double |GR8 & GR9 |
983 * |struct/union |(*1) |
984 * +-------------------------------+----------------------+
985 *
986 * When a struct/union is used as the return value, the caller function stores
987 * the start address of the return value storage area into GR3 and then passes
988 * it to the callee function. The callee function interprets GR3 as the start
989 * address of the return value storage area. When this address needs to be
990 * saved in memory, the callee function secures the hidden parameter save area
991 * and saves the address in that area.
992 */
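/* Worked example of the conventions above (added for illustration; not
   part of the original source):

       struct point { int x, y, z; };
       struct point make_point (int, int, int);

       struct point p = make_point (1, 2, 3);

   The caller allocates space for p and puts its address in GR3, the
   three integer arguments travel in GR8, GR9 and GR10, and the callee
   writes the result through the address it received in GR3.  */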
993
994 frv_stack_t *
995 frv_stack_info (void)
996 {
997 static frv_stack_t info, zero_info;
998 frv_stack_t *info_ptr = &info;
999 tree fndecl = current_function_decl;
1000 int varargs_p = 0;
1001 tree cur_arg;
1002 tree next_arg;
1003 int range;
1004 int alignment;
1005 int offset;
1006
1007 /* If we've already calculated the values and reload is complete,
1008 just return now. */
1009 if (frv_stack_cache)
1010 return frv_stack_cache;
1011
1012 /* Zero all fields. */
1013 info = zero_info;
1014
1015 /* Set up the register range information. */
1016 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1017 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1018 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1019 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1020
1021 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1022 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1023 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1024 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1025
1026 info_ptr->regs[STACK_REGS_LR].name = "lr";
1027 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1028 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1029 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1030
1031 info_ptr->regs[STACK_REGS_CC].name = "cc";
1032 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1033 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1034 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1035
1036 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1037 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1038 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1039
1040 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1041 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1042 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1043 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1044 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1045
1046 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1047 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1048 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1049 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1050
1051 info_ptr->regs[STACK_REGS_FP].name = "fp";
1052 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1053 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1054 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1055
1056 /* Determine if this is a stdarg function. If so, allocate space to store
1057 the 6 arguments. */
1058 if (cfun->stdarg)
1059 varargs_p = 1;
1060
1061 else
1062 {
1063 /* Find the last argument, and see if it is __builtin_va_alist. */
1064 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1065 {
1066 next_arg = DECL_CHAIN (cur_arg);
1067 if (next_arg == (tree)0)
1068 {
1069 if (DECL_NAME (cur_arg)
1070 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1071 varargs_p = 1;
1072
1073 break;
1074 }
1075 }
1076 }
1077
1078 /* Iterate over all of the register ranges. */
1079 for (range = 0; range < STACK_REGS_MAX; range++)
1080 {
1081 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1082 int first = reg_ptr->first;
1083 int last = reg_ptr->last;
1084 int size_1word = 0;
1085 int size_2words = 0;
1086 int regno;
1087
1088 /* Calculate which registers need to be saved & save area size. */
1089 switch (range)
1090 {
1091 default:
1092 for (regno = first; regno <= last; regno++)
1093 {
1094 if ((df_regs_ever_live_p (regno)
1095 && !call_used_or_fixed_reg_p (regno))
1096 || (crtl->calls_eh_return
1097 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1098 || (!TARGET_FDPIC && flag_pic
1099 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1100 {
1101 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1102 size_1word += UNITS_PER_WORD;
1103 }
1104 }
1105 break;
1106
1107 /* Calculate whether we need to create a frame after everything else
1108 has been processed. */
1109 case STACK_REGS_FP:
1110 break;
1111
1112 case STACK_REGS_LR:
1113 if (df_regs_ever_live_p (LR_REGNO)
1114 || profile_flag
1115 /* This is set for __builtin_return_address, etc. */
1116 || cfun->machine->frame_needed
1117 || (TARGET_LINKED_FP && frame_pointer_needed)
1118 || (!TARGET_FDPIC && flag_pic
1119 && crtl->uses_pic_offset_table))
1120 {
1121 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1122 size_1word += UNITS_PER_WORD;
1123 }
1124 break;
1125
1126 case STACK_REGS_STDARG:
1127 if (varargs_p)
1128 {
1129          /* If this is a stdarg function with a non-variadic
1130 argument split between registers and the stack,
1131 adjust the saved registers downward. */
1132 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1133 / UNITS_PER_WORD);
1134
1135 for (regno = first; regno <= last; regno++)
1136 {
1137 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1138 size_1word += UNITS_PER_WORD;
1139 }
1140
1141 info_ptr->stdarg_size = size_1word;
1142 }
1143 break;
1144
1145 case STACK_REGS_STRUCT:
1146 if (cfun->returns_struct)
1147 {
1148 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1149 size_1word += UNITS_PER_WORD;
1150 }
1151 break;
1152 }
1153
1154
1155 if (size_1word)
1156 {
1157 /* If this is a field, it only takes one word. */
1158 if (reg_ptr->field_p)
1159 size_1word = UNITS_PER_WORD;
1160
1161 /* Determine which register pairs can be saved together. */
1162 else if (reg_ptr->dword_p && TARGET_DWORD)
1163 {
1164 for (regno = first; regno < last; regno += 2)
1165 {
1166 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1167 {
1168 size_2words += 2 * UNITS_PER_WORD;
1169 size_1word -= 2 * UNITS_PER_WORD;
1170 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1171 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1172 }
1173 }
1174 }
1175
1176 reg_ptr->size_1word = size_1word;
1177 reg_ptr->size_2words = size_2words;
1178
1179 if (! reg_ptr->special_p)
1180 {
1181 info_ptr->regs_size_1word += size_1word;
1182 info_ptr->regs_size_2words += size_2words;
1183 }
1184 }
1185 }
1186
1187 /* Set up the sizes of each field in the frame body, making the sizes
1188 of each be divisible by the size of a dword if dword operations might
1189 be used, or the size of a word otherwise. */
1190 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1191
1192 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1193 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1194 + info_ptr->regs_size_1word,
1195 alignment);
1196 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1197
1198 info_ptr->pretend_size = crtl->args.pretend_args_size;
1199
1200 /* Work out the size of the frame, excluding the header. Both the frame
1201 body and register parameter area will be dword-aligned. */
1202 info_ptr->total_size
1203 = (ADDR_ALIGN (info_ptr->parameter_size
1204 + info_ptr->regs_size
1205 + info_ptr->vars_size,
1206 2 * UNITS_PER_WORD)
1207 + ADDR_ALIGN (info_ptr->pretend_size
1208 + info_ptr->stdarg_size,
1209 2 * UNITS_PER_WORD));
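  /* Illustrative arithmetic (added for clarity, not part of the original
     source), assuming UNITS_PER_WORD == 4 and word-only saves: with
     parameter_size == 8, regs_size == 12 and vars_size == 4 the frame
     body is ADDR_ALIGN (24, 8) == 24 bytes; with no pretend or stdarg
     bytes the running total is 24, and the 16-byte header added below
     brings it to 40.  */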
1210
1211 /* See if we need to create a frame at all, if so add header area. */
1212 if (info_ptr->total_size > 0
1213 || frame_pointer_needed
1214 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1215 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1216 {
1217 offset = info_ptr->parameter_size;
1218 info_ptr->header_size = 4 * UNITS_PER_WORD;
1219 info_ptr->total_size += 4 * UNITS_PER_WORD;
1220
1221 /* Calculate the offsets to save normal register pairs. */
1222 for (range = 0; range < STACK_REGS_MAX; range++)
1223 {
1224 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1225 if (! reg_ptr->special_p)
1226 {
1227 int first = reg_ptr->first;
1228 int last = reg_ptr->last;
1229 int regno;
1230
1231 for (regno = first; regno <= last; regno++)
1232 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1233 && regno != FRAME_POINTER_REGNUM
1234 && (regno < FIRST_ARG_REGNUM
1235 || regno > LAST_ARG_REGNUM))
1236 {
1237 info_ptr->reg_offset[regno] = offset;
1238 offset += 2 * UNITS_PER_WORD;
1239 }
1240 }
1241 }
1242
1243 /* Calculate the offsets to save normal single registers. */
1244 for (range = 0; range < STACK_REGS_MAX; range++)
1245 {
1246 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1247 if (! reg_ptr->special_p)
1248 {
1249 int first = reg_ptr->first;
1250 int last = reg_ptr->last;
1251 int regno;
1252
1253 for (regno = first; regno <= last; regno++)
1254 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1255 && regno != FRAME_POINTER_REGNUM
1256 && (regno < FIRST_ARG_REGNUM
1257 || regno > LAST_ARG_REGNUM))
1258 {
1259 info_ptr->reg_offset[regno] = offset;
1260 offset += UNITS_PER_WORD;
1261 }
1262 }
1263 }
1264
1265 /* Calculate the offset to save the local variables at. */
1266 offset = ADDR_ALIGN (offset, alignment);
1267 if (info_ptr->vars_size)
1268 {
1269 info_ptr->vars_offset = offset;
1270 offset += info_ptr->vars_size;
1271 }
1272
1273 /* Align header to a dword-boundary. */
1274 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1275
1276 /* Calculate the offsets in the fixed frame. */
1277 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1278 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1279 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1280
1281 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1282 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1283 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1284
1285 if (cfun->returns_struct)
1286 {
1287 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1288 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1289 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1290 }
1291
1292 /* Calculate the offsets to store the arguments passed in registers
1293 for stdarg functions. The register pairs are first and the single
1294 register if any is last. The register save area starts on a
1295 dword-boundary. */
1296 if (info_ptr->stdarg_size)
1297 {
1298 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1299 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1300 int regno;
1301
1302 /* Skip the header. */
1303 offset += 4 * UNITS_PER_WORD;
1304 for (regno = first; regno <= last; regno++)
1305 {
1306 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1307 {
1308 info_ptr->reg_offset[regno] = offset;
1309 offset += 2 * UNITS_PER_WORD;
1310 }
1311 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1312 {
1313 info_ptr->reg_offset[regno] = offset;
1314 offset += UNITS_PER_WORD;
1315 }
1316 }
1317 }
1318 }
1319
1320 if (reload_completed)
1321 frv_stack_cache = info_ptr;
1322
1323 return info_ptr;
1324 }
1325
1326 \f
1327 /* Print the information about the frv stack offsets, etc. when debugging. */
1328
1329 void
1330 frv_debug_stack (frv_stack_t *info)
1331 {
1332 int range;
1333
1334 if (!info)
1335 info = frv_stack_info ();
1336
1337 fprintf (stderr, "\nStack information for function %s:\n",
1338 ((current_function_decl && DECL_NAME (current_function_decl))
1339 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1340 : "<unknown>"));
1341
1342 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1343 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1344 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1345 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1346 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1347
1348 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1349 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1350 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1351 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1352
1353 for (range = 0; range < STACK_REGS_MAX; range++)
1354 {
1355 frv_stack_regs_t *regs = &(info->regs[range]);
1356 if ((regs->size_1word + regs->size_2words) > 0)
1357 {
1358 int first = regs->first;
1359 int last = regs->last;
1360 int regno;
1361
1362 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1363 regs->name, regs->size_1word + regs->size_2words,
1364 regs->size_1word, regs->size_2words);
1365
1366 for (regno = first; regno <= last; regno++)
1367 {
1368 if (info->save_p[regno] == REG_SAVE_1WORD)
1369 fprintf (stderr, " %s (%d)", reg_names[regno],
1370 info->reg_offset[regno]);
1371
1372 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1373 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1374 reg_names[regno+1], info->reg_offset[regno]);
1375 }
1376
1377 fputc ('\n', stderr);
1378 }
1379 }
1380
1381 fflush (stderr);
1382 }
1383
1384
1385 \f
1386
1387 /* Used during final to control the packing of insns. The value is
1388 1 if the current instruction should be packed with the next one,
1389 0 if it shouldn't or -1 if packing is disabled altogether. */
1390
1391 static int frv_insn_packing_flag;
1392
1393 /* True if the current function contains a far jump. */
1394
1395 static int
1396 frv_function_contains_far_jump (void)
1397 {
1398 rtx_insn *insn = get_insns ();
1399 while (insn != NULL
1400 && !(JUMP_P (insn)
1401 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1402 insn = NEXT_INSN (insn);
1403 return (insn != NULL);
1404 }
1405
1406 /* For the FRV, this function makes sure that a function with far jumps
1407 will return correctly. It also does the VLIW packing. */
1408
1409 static void
1410 frv_function_prologue (FILE *file)
1411 {
1412 /* If no frame was created, check whether the function uses a call
1413 instruction to implement a far jump. If so, save the link in gr3 and
1414 replace all returns to LR with returns to GR3. GR3 is used because it
1415      is call-clobbered, because it is not available to the register allocator,
1416 and because all functions that take a hidden argument pointer will have
1417 a stack frame. */
1418 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1419 {
1420 rtx_insn *insn;
1421
1422 /* Just to check that the above comment is true. */
1423 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1424
1425 /* Generate the instruction that saves the link register. */
1426 fprintf (file, "\tmovsg lr,gr3\n");
1427
1428 /* Replace the LR with GR3 in *return_internal patterns. The insn
1429 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1430 simply emit a different assembly directive because bralr and jmpl
1431 execute in different units. */
1432 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1433 if (JUMP_P (insn))
1434 {
1435 rtx pattern = PATTERN (insn);
1436 if (GET_CODE (pattern) == PARALLEL
1437 && XVECLEN (pattern, 0) >= 2
1438 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1439 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1440 {
1441 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1442 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1443 SET_REGNO (address, GPR_FIRST + 3);
1444 }
1445 }
1446 }
1447
1448 frv_pack_insns ();
1449
1450 /* Allow the garbage collector to free the nops created by frv_reorg. */
1451 memset (frv_nops, 0, sizeof (frv_nops));
1452 }
1453
1454 \f
1455 /* Return the next available temporary register in a given class. */
1456
1457 static rtx
1458 frv_alloc_temp_reg (
1459 frv_tmp_reg_t *info, /* which registers are available */
1460 enum reg_class rclass, /* register class desired */
1461 machine_mode mode, /* mode to allocate register with */
1462 int mark_as_used, /* register not available after allocation */
1463 int no_abort) /* return NULL instead of aborting */
1464 {
1465 int regno = info->next_reg[ (int)rclass ];
1466 int orig_regno = regno;
1467 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1468 int i, nr;
1469
1470 for (;;)
1471 {
1472 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1473 && TEST_HARD_REG_BIT (info->regs, regno))
1474 break;
1475
1476 if (++regno >= FIRST_PSEUDO_REGISTER)
1477 regno = 0;
1478 if (regno == orig_regno)
1479 {
1480 gcc_assert (no_abort);
1481 return NULL_RTX;
1482 }
1483 }
1484
1485 nr = hard_regno_nregs (regno, mode);
1486 info->next_reg[ (int)rclass ] = regno + nr;
1487
1488 if (mark_as_used)
1489 for (i = 0; i < nr; i++)
1490 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1491
1492 return gen_rtx_REG (mode, regno);
1493 }
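/* Usage sketch (added for illustration; the if-conversion code later in
   this file is the real caller): once a frv_tmp_reg_t such as
   frv_ifcvt.tmp_reg has its regs set filled with the allocatable hard
   registers, a call like

       rtx tmp = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS,
                                     SImode, TRUE, TRUE);

   returns an SImode GPR and removes it from the pool, or NULL_RTX when
   the class is exhausted, because no_abort is nonzero.  */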
1494
1495 \f
1496 /* Return an rtx with the value OFFSET, which will either be a register or a
1497 signed 12-bit integer. It can be used as the second operand in an "add"
1498 instruction, or as the index in a load or store.
1499
1500 The function returns a constant rtx if OFFSET is small enough, otherwise
1501 it loads the constant into register OFFSET_REGNO and returns that. */
1502 static rtx
1503 frv_frame_offset_rtx (int offset)
1504 {
1505 rtx offset_rtx = GEN_INT (offset);
1506 if (IN_RANGE (offset, -2048, 2047))
1507 return offset_rtx;
1508 else
1509 {
1510 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1511 if (IN_RANGE (offset, -32768, 32767))
1512 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1513 else
1514 {
1515 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1516 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1517 }
1518 return reg_rtx;
1519 }
1520 }
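/* Worked example (added for clarity; not part of the original source):
   an offset of 2000 fits the signed 12-bit range and is returned as a
   const_int; an offset of 20000 is loaded into OFFSET_REGNO with a
   single movsi; an offset of 100000 is outside the signed 16-bit range
   and needs the movsi_high/movsi_lo_sum pair.  */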
1521
1522 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
1523    prologue and epilogue use such expressions to access the stack.  */
1524 static rtx
1525 frv_frame_mem (machine_mode mode, rtx base, int offset)
1526 {
1527 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1528 base,
1529 frv_frame_offset_rtx (offset)));
1530 }
1531
1532 /* Generate a frame-related expression:
1533
1534 (set REG (mem (plus (sp) (const_int OFFSET)))).
1535
1536 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1537 instructions. Marking the expressions as frame-related is superfluous if
1538 the note contains just a single set. But if the note contains a PARALLEL
1539 or SEQUENCE that has several sets, each set must be individually marked
1540 as frame-related. */
1541 static rtx
1542 frv_dwarf_store (rtx reg, int offset)
1543 {
1544 rtx set = gen_rtx_SET (gen_rtx_MEM (GET_MODE (reg),
1545 plus_constant (Pmode, stack_pointer_rtx,
1546 offset)),
1547 reg);
1548 RTX_FRAME_RELATED_P (set) = 1;
1549 return set;
1550 }
1551
1552 /* Emit a frame-related instruction whose pattern is PATTERN. The
1553 instruction is the last in a sequence that cumulatively performs the
1554 operation described by DWARF_PATTERN. The instruction is marked as
1555 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1556 DWARF_PATTERN. */
1557 static void
1558 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1559 {
1560 rtx insn = emit_insn (pattern);
1561 RTX_FRAME_RELATED_P (insn) = 1;
1562 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1563 dwarf_pattern,
1564 REG_NOTES (insn));
1565 }
1566
1567 /* Emit instructions that transfer REG to or from the memory location (sp +
1568 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1569 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1570 function to store registers and only the epilogue uses it to load them.
1571
1572 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1573 The generated instruction will use BASE as its base register. BASE may
1574 simply be the stack pointer, but if several accesses are being made to a
1575 region far away from the stack pointer, it may be more efficient to set
1576 up a temporary instead.
1577
1578 Store instructions will be frame-related and will be annotated with the
1579 overall effect of the store. Load instructions will be followed by a
1580 (use) to prevent later optimizations from zapping them.
1581
1582 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1583 as a temporary in such cases. */
1584 static void
1585 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1586 {
1587 machine_mode mode = GET_MODE (reg);
1588 rtx mem = frv_frame_mem (mode,
1589 accessor->base,
1590 stack_offset - accessor->base_offset);
1591
1592 if (accessor->op == FRV_LOAD)
1593 {
1594 if (SPR_P (REGNO (reg)))
1595 {
1596 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1597 emit_insn (gen_rtx_SET (temp, mem));
1598 emit_insn (gen_rtx_SET (reg, temp));
1599 }
1600 else
1601 {
1602 /* We cannot use reg+reg addressing for DImode access. */
1603 if (mode == DImode
1604 && GET_CODE (XEXP (mem, 0)) == PLUS
1605 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1606 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1607 {
1608 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1609
1610 emit_move_insn (temp,
1611 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1612 XEXP (XEXP (mem, 0), 1)));
1613 mem = gen_rtx_MEM (DImode, temp);
1614 }
1615 emit_insn (gen_rtx_SET (reg, mem));
1616 }
1617 emit_use (reg);
1618 }
1619 else
1620 {
1621 if (SPR_P (REGNO (reg)))
1622 {
1623 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1624 emit_insn (gen_rtx_SET (temp, reg));
1625 frv_frame_insn (gen_rtx_SET (mem, temp),
1626 frv_dwarf_store (reg, stack_offset));
1627 }
1628 else if (mode == DImode)
1629 {
1630 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1631 with a separate save for each register. */
1632 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1633 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1634 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1635 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1636
1637 /* Again, we cannot use reg+reg addressing for DImode access. */
1638 if (GET_CODE (XEXP (mem, 0)) == PLUS
1639 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1640 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1641 {
1642 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1643 emit_move_insn (temp,
1644 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1645 XEXP (XEXP (mem, 0), 1)));
1646 mem = gen_rtx_MEM (DImode, temp);
1647 }
1648
1649 frv_frame_insn (gen_rtx_SET (mem, reg),
1650 gen_rtx_PARALLEL (VOIDmode,
1651 gen_rtvec (2, set1, set2)));
1652 }
1653 else
1654 frv_frame_insn (gen_rtx_SET (mem, reg),
1655 frv_dwarf_store (reg, stack_offset));
1656 }
1657 }
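/* For a DImode callee save (illustrative only), the single doubleword
   store above is annotated with a PARALLEL of two word-sized dwarf sets,
   e.g. for a register pair saved at sp + 24:

	(parallel [(set (mem:SI (plus (sp) (const_int 24))) (reg:SI N))
		   (set (mem:SI (plus (sp) (const_int 28))) (reg:SI N+1))])

   so that each hard register gets its own save rule in the unwind info.  */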
1658
1659 /* A function that uses frv_frame_access to transfer a group of registers to
1660 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1661 is the stack information generated by frv_stack_info, and REG_SET is the
1662 number of the register set to transfer. */
1663 static void
1664 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1665 frv_stack_t *info,
1666 int reg_set)
1667 {
1668 frv_stack_regs_t *regs_info;
1669 int regno;
1670
1671 regs_info = &info->regs[reg_set];
1672 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1673 if (info->save_p[regno])
1674 frv_frame_access (accessor,
1675 info->save_p[regno] == REG_SAVE_2WORDS
1676 ? gen_rtx_REG (DImode, regno)
1677 : gen_rtx_REG (SImode, regno),
1678 info->reg_offset[regno]);
1679 }
1680
1681 /* Save or restore callee-saved registers that are kept outside the frame
1682 header. The function saves the registers if OP is FRV_STORE and restores
1683 them if OP is FRV_LOAD. INFO is the stack information generated by
1684 frv_stack_info. */
1685 static void
1686 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1687 {
1688 frv_frame_accessor_t accessor;
1689
1690 accessor.op = op;
1691 accessor.base = stack_pointer_rtx;
1692 accessor.base_offset = 0;
1693 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1694 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1695 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1696 }
1697
1698
1699 /* Called after register allocation to add any instructions needed for the
1700 prologue. Using a prologue insn is preferred to putting all of the
1701 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1702 it allows the scheduler to intermix other instructions with the saves of
1703 the call-saved registers. In some cases, it might be necessary
1704 to emit a barrier instruction as the last insn to prevent such
1705 scheduling.
1706
1707 Also, any insns generated here should have RTX_FRAME_RELATED_P (insn) = 1
1708 so that the debug info generation code can handle them properly. */
1709 void
1710 frv_expand_prologue (void)
1711 {
1712 frv_stack_t *info = frv_stack_info ();
1713 rtx sp = stack_pointer_rtx;
1714 rtx fp = frame_pointer_rtx;
1715 frv_frame_accessor_t accessor;
1716
1717 if (TARGET_DEBUG_STACK)
1718 frv_debug_stack (info);
1719
1720 if (flag_stack_usage_info)
1721 current_function_static_stack_size = info->total_size;
1722
1723 if (info->total_size == 0)
1724 return;
1725
1726 /* We're interested in three areas of the frame here:
1727
1728 A: the register save area
1729 B: the old FP
1730 C: the header after B
1731
1732 If the frame pointer isn't used, we'll have to set up A, B and C
1733 using the stack pointer. If the frame pointer is used, we'll access
1734 them as follows:
1735
1736 A: set up using sp
1737 B: set up using sp or a temporary (see below)
1738 C: set up using fp
1739
1740 We set up B using the stack pointer if the frame is small enough.
1741 Otherwise, it's more efficient to copy the old stack pointer into a
1742 temporary and use that.
1743
1744 Note that it's important to make sure the prologue and epilogue use the
1745 same registers to access A and C, since doing otherwise will confuse
1746 the aliasing code. */
1747
1748 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1749 isn't used, the same method will serve for C. */
1750 accessor.op = FRV_STORE;
1751 if (frame_pointer_needed && info->total_size > 2048)
1752 {
1753 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1754 accessor.base_offset = info->total_size;
1755 emit_insn (gen_movsi (accessor.base, sp));
1756 }
1757 else
1758 {
1759 accessor.base = stack_pointer_rtx;
1760 accessor.base_offset = 0;
1761 }
1762
1763 /* Allocate the stack space. */
1764 {
1765 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1766 rtx dwarf_offset = GEN_INT (-info->total_size);
1767
1768 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1769 gen_rtx_SET (sp, gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1770 }
1771
1772 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1773 and point the new one to that location. */
1774 if (frame_pointer_needed)
1775 {
1776 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1777
1778 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1779 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1780 pointer. */
1781 rtx asm_src = plus_constant (Pmode, accessor.base,
1782 fp_offset - accessor.base_offset);
1783 rtx dwarf_src = plus_constant (Pmode, sp, fp_offset);
1784
1785 /* Store the old frame pointer at (sp + FP_OFFSET). */
1786 frv_frame_access (&accessor, fp, fp_offset);
1787
1788 /* Set up the new frame pointer. */
1789 frv_frame_insn (gen_rtx_SET (fp, asm_src),
1790 gen_rtx_SET (fp, dwarf_src));
1791
1792 /* Access region C from the frame pointer. */
1793 accessor.base = fp;
1794 accessor.base_offset = fp_offset;
1795 }
1796
1797 /* Set up region C. */
1798 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1799 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1800 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1801
1802 /* Set up region A. */
1803 frv_frame_access_standard_regs (FRV_STORE, info);
1804
1805 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1806 scheduler from moving loads before the stores saving the registers. */
1807 if (info->stdarg_size > 0)
1808 emit_insn (gen_blockage ());
1809
1810 /* Set up pic register/small data register for this function. */
1811 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1812 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1813 gen_rtx_REG (Pmode, LR_REGNO),
1814 gen_rtx_REG (SImode, OFFSET_REGNO)));
1815 }
1816
1817 \f
1818 /* Under frv, all of the work is done via frv_expand_epilogue, but
1819 this function provides a convenient place to do cleanup. */
1820
1821 static void
1822 frv_function_epilogue (FILE *)
1823 {
1824 frv_stack_cache = (frv_stack_t *)0;
1825
1826 /* Zap last used registers for conditional execution. */
1827 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1828
1829 /* Release the bitmap of created insns. */
1830 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1831 }
1832
1833 \f
1834 /* Called after register allocation to add any instructions needed for the
1835 epilogue. Using an epilogue insn is preferred to putting all of the
1836 instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1837 it allows the scheduler to intermix instructions with the restores of
1838 the call-saved registers. In some cases, it might be necessary
1839 to emit a barrier instruction as the last insn to prevent such
1840 scheduling. */
1841
1842 void
1843 frv_expand_epilogue (bool emit_return)
1844 {
1845 frv_stack_t *info = frv_stack_info ();
1846 rtx fp = frame_pointer_rtx;
1847 rtx sp = stack_pointer_rtx;
1848 rtx return_addr;
1849 int fp_offset;
1850
1851 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1852
1853 /* Restore the stack pointer to its original value if alloca or the like
1854 is used. */
1855 if (! crtl->sp_is_unchanging)
1856 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1857
1858 /* Restore the callee-saved registers that were used in this function. */
1859 frv_frame_access_standard_regs (FRV_LOAD, info);
1860
1861 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1862 no return instruction should be emitted. */
1863 if (info->save_p[LR_REGNO])
1864 {
1865 int lr_offset;
1866 rtx mem;
1867
1868 /* Use the same method to access the link register's slot as we did in
1869 the prologue. In other words, use the frame pointer if available,
1870 otherwise use the stack pointer.
1871
1872 LR_OFFSET is the offset of the link register's slot from the start
1873 of the frame and MEM is a memory rtx for it. */
1874 lr_offset = info->reg_offset[LR_REGNO];
1875 if (frame_pointer_needed)
1876 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1877 else
1878 mem = frv_frame_mem (Pmode, sp, lr_offset);
1879
1880 /* Load the old link register into a GPR. */
1881 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1882 emit_insn (gen_rtx_SET (return_addr, mem));
1883 }
1884 else
1885 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1886
1887 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1888 the load is preserved. */
1889 if (frame_pointer_needed)
1890 {
1891 emit_insn (gen_rtx_SET (fp, gen_rtx_MEM (Pmode, fp)));
1892 emit_use (fp);
1893 }
1894
1895 /* Deallocate the stack frame. */
1896 if (info->total_size != 0)
1897 {
1898 rtx offset = frv_frame_offset_rtx (info->total_size);
1899 emit_insn (gen_stack_adjust (sp, sp, offset));
1900 }
1901
1902 /* If this function uses eh_return, add the final stack adjustment now. */
1903 if (crtl->calls_eh_return)
1904 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1905
1906 if (emit_return)
1907 emit_jump_insn (gen_epilogue_return (return_addr));
1908 else
1909 {
1910 rtx lr = return_addr;
1911
1912 if (REGNO (return_addr) != LR_REGNO)
1913 {
1914 lr = gen_rtx_REG (Pmode, LR_REGNO);
1915 emit_move_insn (lr, return_addr);
1916 }
1917
1918 emit_use (lr);
1919 }
1920 }
1921
1922 \f
1923 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
1924
1925 static void
1926 frv_asm_output_mi_thunk (FILE *file,
1927 tree thunk_fndecl ATTRIBUTE_UNUSED,
1928 HOST_WIDE_INT delta,
1929 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1930 tree function)
1931 {
1932 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1933 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
1934 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
1935 const char *name_jmp = reg_names[JUMP_REGNO];
1936 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
1937
1938 assemble_start_function (thunk_fndecl, fnname);
1939
1940 /* Do the add using an addi if possible. */
1941 if (IN_RANGE (delta, -2048, 2047))
1942 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
1943 else
1944 {
1945 const char *const name_add = reg_names[TEMP_REGNO];
1946 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1947 parallel, delta, name_add);
1948 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1949 delta, name_add);
1950 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
1951 }
1952
1953 if (TARGET_FDPIC)
1954 {
1955 const char *name_pic = reg_names[FDPIC_REGNO];
1956 name_jmp = reg_names[FDPIC_FPTR_REGNO];
1957
1958 if (flag_pic != 1)
1959 {
1960 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
1961 assemble_name (file, name_func);
1962 fprintf (file, "),%s\n", name_jmp);
1963
1964 fprintf (file, "\tsetlo #gotofffuncdesclo(");
1965 assemble_name (file, name_func);
1966 fprintf (file, "),%s\n", name_jmp);
1967
1968 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
1969 }
1970 else
1971 {
1972 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
1973 assemble_name (file, name_func);
1974 fprintf (file, "\t)), %s\n", name_jmp);
1975 }
1976 }
1977 else if (!flag_pic)
1978 {
1979 fprintf (file, "\tsethi%s #hi(", parallel);
1980 assemble_name (file, name_func);
1981 fprintf (file, "),%s\n", name_jmp);
1982
1983 fprintf (file, "\tsetlo #lo(");
1984 assemble_name (file, name_func);
1985 fprintf (file, "),%s\n", name_jmp);
1986 }
1987 else
1988 {
1989 /* Use JUMP_REGNO as a temporary PIC register. */
1990 const char *name_lr = reg_names[LR_REGNO];
1991 const char *name_gppic = name_jmp;
1992 const char *name_tmp = reg_names[TEMP_REGNO];
1993
1994 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
1995 fprintf (file, "\tcall 1f\n");
1996 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
1997 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
1998 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
1999 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2000 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2001
2002 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2003 assemble_name (file, name_func);
2004 fprintf (file, "),%s\n", name_tmp);
2005
2006 fprintf (file, "\tsetlo #gprello(");
2007 assemble_name (file, name_func);
2008 fprintf (file, "),%s\n", name_tmp);
2009
2010 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2011 }
2012
2013 /* Jump to the function address. */
2014 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2015 assemble_end_function (thunk_fndecl, fnname);
2016 }
2017
2018 \f
2019
2020 /* On frv, create a frame whenever we need to create stack. */
2021
2022 static bool
2023 frv_frame_pointer_required (void)
2024 {
2025 /* If we are forgoing the usual linkage requirements, we only need
2026 a frame pointer if the stack pointer might change. */
2027 if (!TARGET_LINKED_FP)
2028 return !crtl->sp_is_unchanging;
2029
2030 if (! crtl->is_leaf)
2031 return true;
2032
2033 if (get_frame_size () != 0)
2034 return true;
2035
2036 if (cfun->stdarg)
2037 return true;
2038
2039 if (!crtl->sp_is_unchanging)
2040 return true;
2041
2042 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2043 return true;
2044
2045 if (profile_flag)
2046 return true;
2047
2048 if (cfun->machine->frame_needed)
2049 return true;
2050
2051 return false;
2052 }
2053
2054 \f
2055 /* Worker function for TARGET_CAN_ELIMINATE. */
2056
2057 bool
2058 frv_can_eliminate (const int from, const int to)
2059 {
2060 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2061 ? ! frame_pointer_needed
2062 : true);
2063 }
2064
2065 /* This function returns the initial difference between the specified
2066 pair of registers. */
2067
2068 /* See frv_stack_info for more details on the frv stack frame. */
2069
2070 int
2071 frv_initial_elimination_offset (int from, int to)
2072 {
2073 frv_stack_t *info = frv_stack_info ();
2074 int ret = 0;
2075
2076 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2077 ret = info->total_size - info->pretend_size;
2078
2079 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2080 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2081
2082 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2083 ret = (info->total_size
2084 - info->reg_offset[FRAME_POINTER_REGNUM]
2085 - info->pretend_size);
2086
2087 else
2088 gcc_unreachable ();
2089
2090 if (TARGET_DEBUG_STACK)
2091 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2092 reg_names [from], reg_names[to], ret);
2093
2094 return ret;
2095 }
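/* Illustrative consistency check of the three cases above:

	(arg -> sp) = (arg -> fp) + (fp -> sp), i.e.
	total_size - pretend_size
	  = (total_size - reg_offset[FP] - pretend_size) + reg_offset[FP]

   so eliminating through the frame pointer gives the same result as
   eliminating directly to the stack pointer.  */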
2096
2097 \f
2098 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2099
2100 static void
2101 frv_setup_incoming_varargs (cumulative_args_t cum_v,
2102 const function_arg_info &arg,
2103 int *pretend_size,
2104 int second_time)
2105 {
2106 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2107
2108 if (!TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (current_function_decl))
2109 && TARGET_DEBUG_ARG)
2110 fprintf (stderr,
2111 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2112 *cum, GET_MODE_NAME (arg.mode), *pretend_size, second_time);
2113 }
2114
2115 \f
2116 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2117
2118 static rtx
2119 frv_expand_builtin_saveregs (void)
2120 {
2121 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2122
2123 if (TARGET_DEBUG_ARG)
2124 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2125 offset);
2126
2127 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2128 }
2129
2130 \f
2131 /* Expand __builtin_va_start to do the va_start macro. */
2132
2133 static void
2134 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2135 {
2136 tree t;
2137 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2138
2139 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2140 GEN_INT (UNITS_PER_WORD * num));
2141
2142 if (TARGET_DEBUG_ARG)
2143 {
2144 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2145 crtl->args.info, num);
2146
2147 debug_rtx (nextarg);
2148 }
2149
2150 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2151 fold_convert (TREE_TYPE (valist),
2152 make_tree (sizetype, nextarg)));
2153 TREE_SIDE_EFFECTS (t) = 1;
2154
2155 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2156 }
2157
2158 \f
2159 /* Expand a block move operation, and return 1 if successful. Return 0
2160 if we should let the compiler generate normal code.
2161
2162 operands[0] is the destination
2163 operands[1] is the source
2164 operands[2] is the length
2165 operands[3] is the alignment */
2166
2167 /* Maximum number of loads to do before doing the stores. */
2168 #ifndef MAX_MOVE_REG
2169 #define MAX_MOVE_REG 4
2170 #endif
2171
2172 /* Maximum number of total loads to do. */
2173 #ifndef TOTAL_MOVE_REG
2174 #define TOTAL_MOVE_REG 8
2175 #endif
2176
2177 int
2178 frv_expand_block_move (rtx operands[])
2179 {
2180 rtx orig_dest = operands[0];
2181 rtx orig_src = operands[1];
2182 rtx bytes_rtx = operands[2];
2183 rtx align_rtx = operands[3];
2184 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2185 int align;
2186 int bytes;
2187 int offset;
2188 int num_reg;
2189 int i;
2190 rtx src_reg;
2191 rtx dest_reg;
2192 rtx src_addr;
2193 rtx dest_addr;
2194 rtx src_mem;
2195 rtx dest_mem;
2196 rtx tmp_reg;
2197 rtx stores[MAX_MOVE_REG];
2198 int move_bytes;
2199 machine_mode mode;
2200
2201 /* If this is not a fixed size move, just call memcpy. */
2202 if (! constp)
2203 return FALSE;
2204
2205 /* This should be a fixed size alignment. */
2206 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2207
2208 align = INTVAL (align_rtx);
2209
2210 /* Anything to move? */
2211 bytes = INTVAL (bytes_rtx);
2212 if (bytes <= 0)
2213 return TRUE;
2214
2215 /* Don't support really large moves. */
2216 if (bytes > TOTAL_MOVE_REG*align)
2217 return FALSE;
2218
2219 /* Move the address into scratch registers. */
2220 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2221 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2222
2223 num_reg = offset = 0;
2224 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2225 {
2226 /* Calculate the correct offset for src/dest. */
2227 if (offset == 0)
2228 {
2229 src_addr = src_reg;
2230 dest_addr = dest_reg;
2231 }
2232 else
2233 {
2234 src_addr = plus_constant (Pmode, src_reg, offset);
2235 dest_addr = plus_constant (Pmode, dest_reg, offset);
2236 }
2237
2238 /* Generate the appropriate load and store, saving the stores
2239 for later. */
2240 if (bytes >= 4 && align >= 4)
2241 mode = SImode;
2242 else if (bytes >= 2 && align >= 2)
2243 mode = HImode;
2244 else
2245 mode = QImode;
2246
2247 move_bytes = GET_MODE_SIZE (mode);
2248 tmp_reg = gen_reg_rtx (mode);
2249 src_mem = change_address (orig_src, mode, src_addr);
2250 dest_mem = change_address (orig_dest, mode, dest_addr);
2251 emit_insn (gen_rtx_SET (tmp_reg, src_mem));
2252 stores[num_reg++] = gen_rtx_SET (dest_mem, tmp_reg);
2253
2254 if (num_reg >= MAX_MOVE_REG)
2255 {
2256 for (i = 0; i < num_reg; i++)
2257 emit_insn (stores[i]);
2258 num_reg = 0;
2259 }
2260 }
2261
2262 for (i = 0; i < num_reg; i++)
2263 emit_insn (stores[i]);
2264
2265 return TRUE;
2266 }
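/* Worked example (illustrative only): an 8-byte copy with 4-byte
   alignment makes two SImode passes through the loop above, roughly

	tmp1 <- @(src,0);   tmp2 <- @(src,4)
	@(dest,0) <- tmp1;  @(dest,4) <- tmp2

   with both stores queued in STORES[] and emitted after the loads,
   since MAX_MOVE_REG (4) is not reached.  */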
2267
2268 \f
2269 /* Expand a block clear operation, and return 1 if successful. Return 0
2270 if we should let the compiler generate normal code.
2271
2272 operands[0] is the destination
2273 operands[1] is the length
2274 operands[3] is the alignment */
2275
2276 int
2277 frv_expand_block_clear (rtx operands[])
2278 {
2279 rtx orig_dest = operands[0];
2280 rtx bytes_rtx = operands[1];
2281 rtx align_rtx = operands[3];
2282 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2283 int align;
2284 int bytes;
2285 int offset;
2286 rtx dest_reg;
2287 rtx dest_addr;
2288 rtx dest_mem;
2289 int clear_bytes;
2290 machine_mode mode;
2291
2292 /* If this is not a fixed size clear, just call memset. */
2293 if (! constp)
2294 return FALSE;
2295
2296 /* This should be a fixed size alignment. */
2297 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2298
2299 align = INTVAL (align_rtx);
2300
2301 /* Anything to move? */
2302 bytes = INTVAL (bytes_rtx);
2303 if (bytes <= 0)
2304 return TRUE;
2305
2306 /* Don't support really large clears. */
2307 if (bytes > TOTAL_MOVE_REG*align)
2308 return FALSE;
2309
2310 /* Move the address into a scratch register. */
2311 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2312
2313 offset = 0;
2314 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2315 {
2316 /* Calculate the correct offset for src/dest. */
2317 dest_addr = ((offset == 0)
2318 ? dest_reg
2319 : plus_constant (Pmode, dest_reg, offset));
2320
2321 /* Generate the appropriate store of gr0. */
2322 if (bytes >= 4 && align >= 4)
2323 mode = SImode;
2324 else if (bytes >= 2 && align >= 2)
2325 mode = HImode;
2326 else
2327 mode = QImode;
2328
2329 clear_bytes = GET_MODE_SIZE (mode);
2330 dest_mem = change_address (orig_dest, mode, dest_addr);
2331 emit_insn (gen_rtx_SET (dest_mem, const0_rtx));
2332 }
2333
2334 return TRUE;
2335 }
2336
2337 \f
2338 /* The operands of the insn currently being output; used when emitting
2339 modifiers in the insn's assembler code. */
2340
2341 static rtx *frv_insn_operands;
2342
2343 /* Append the assembler suffix .p to the current insn's mnemonic when
2344 frv_insn_packing_flag indicates that it is needed. */
2345
2346 const char *
2347 frv_asm_output_opcode (FILE *f, const char *ptr)
2348 {
2349 int c;
2350
2351 if (frv_insn_packing_flag <= 0)
2352 return ptr;
2353
2354 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2355 {
2356 c = *ptr++;
2357 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2358 || (*ptr >= 'A' && *ptr <= 'Z')))
2359 {
2360 int letter = *ptr++;
2361
2362 c = atoi (ptr);
2363 frv_print_operand (f, frv_insn_operands [c], letter);
2364 while ((c = *ptr) >= '0' && c <= '9')
2365 ptr++;
2366 }
2367 else
2368 fputc (c, f);
2369 }
2370
2371 fprintf (f, ".p");
2372
2373 return ptr;
2374 }
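/* For example (illustrative only), if the current insn is packed with its
   successor, a mnemonic such as "sethi" in the output template is emitted
   as "sethi.p"; any %-operands embedded in the mnemonic itself are printed
   via frv_print_operand before the suffix is appended.  */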
2375
2376 /* Set up the packing bit for the current output insn. Note that this
2377 function is not called for asm insns. */
2378
2379 void
2380 frv_final_prescan_insn (rtx_insn *insn, rtx *opvec,
2381 int noperands ATTRIBUTE_UNUSED)
2382 {
2383 if (INSN_P (insn))
2384 {
2385 if (frv_insn_packing_flag >= 0)
2386 {
2387 frv_insn_operands = opvec;
2388 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2389 }
2390 else if (recog_memoized (insn) >= 0
2391 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2392 /* Packing optimizations have been disabled, but INSN can only
2393 be issued in M1. Insert an mnop in M0. */
2394 fprintf (asm_out_file, "\tmnop.p\n");
2395 }
2396 }
2397
2398
2399 \f
2400 /* A C expression whose value is RTL representing the address in a stack frame
2401 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2402 an RTL expression for the address of the stack frame itself.
2403
2404 If you don't define this macro, the default is to return the value of
2405 FRAMEADDR--that is, the stack frame address is also the address of the stack
2406 word that points to the previous frame. */
2407
2408 /* The default is correct, but we need to make sure the frame gets created. */
2409 rtx
2410 frv_dynamic_chain_address (rtx frame)
2411 {
2412 cfun->machine->frame_needed = 1;
2413 return frame;
2414 }
2415
2416
2417 /* A C expression whose value is RTL representing the value of the return
2418 address for the frame COUNT steps up from the current frame, after the
2419 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2420 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2421 defined.
2422
2423 The value of the expression must always be the correct address when COUNT is
2424 zero, but may be `NULL_RTX' if there is no way to determine the return
2425 address of other frames. */
2426
2427 rtx
2428 frv_return_addr_rtx (int count, rtx frame)
2429 {
2430 if (count != 0)
2431 return const0_rtx;
2432 cfun->machine->frame_needed = 1;
2433 return gen_rtx_MEM (Pmode, plus_constant (Pmode, frame, 8));
2434 }
2435
2436 /* Given a memory reference MEMREF, interpret the referenced memory as
2437 an array of MODE values, and return a reference to the element
2438 specified by INDEX. Assume that any pre-modification implicit in
2439 MEMREF has already happened.
2440
2441 MEMREF must be a legitimate operand for modes larger than SImode.
2442 frv_legitimate_address_p forbids register+register addresses, which
2443 this function cannot handle. */
2444 rtx
2445 frv_index_memory (rtx memref, machine_mode mode, int index)
2446 {
2447 rtx base = XEXP (memref, 0);
2448 if (GET_CODE (base) == PRE_MODIFY)
2449 base = XEXP (base, 0);
2450 return change_address (memref, mode,
2451 plus_constant (Pmode, base,
2452 index * GET_MODE_SIZE (mode)));
2453 }
2454
2455 \f
2456 /* Print a memory address as an operand to reference that memory location. */
2457 static void
2458 frv_print_operand_address (FILE * stream, machine_mode /* mode */, rtx x)
2459 {
2460 if (GET_CODE (x) == MEM)
2461 x = XEXP (x, 0);
2462
2463 switch (GET_CODE (x))
2464 {
2465 case REG:
2466 fputs (reg_names [ REGNO (x)], stream);
2467 return;
2468
2469 case CONST_INT:
2470 fprintf (stream, "%ld", (long) INTVAL (x));
2471 return;
2472
2473 case SYMBOL_REF:
2474 assemble_name (stream, XSTR (x, 0));
2475 return;
2476
2477 case LABEL_REF:
2478 case CONST:
2479 output_addr_const (stream, x);
2480 return;
2481
2482 case PLUS:
2483 /* Poorly constructed asm statements can trigger this alternative.
2484 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2485 frv_print_operand_memory_reference (stream, x, 0);
2486 return;
2487
2488 default:
2489 break;
2490 }
2491
2492 fatal_insn ("bad insn to frv_print_operand_address:", x);
2493 }
2494
2495 \f
2496 static void
2497 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2498 {
2499 int regno = true_regnum (x);
2500 if (GPR_P (regno))
2501 fputs (reg_names[regno], stream);
2502 else
2503 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2504 }
2505
2506 /* Print a memory reference suitable for the ld/st instructions. */
2507
2508 static void
2509 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2510 {
2511 struct frv_unspec unspec;
2512 rtx x0 = NULL_RTX;
2513 rtx x1 = NULL_RTX;
2514
2515 switch (GET_CODE (x))
2516 {
2517 case SUBREG:
2518 case REG:
2519 x0 = x;
2520 break;
2521
2522 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2523 x0 = XEXP (x, 0);
2524 x1 = XEXP (XEXP (x, 1), 1);
2525 break;
2526
2527 case CONST_INT:
2528 x1 = x;
2529 break;
2530
2531 case PLUS:
2532 x0 = XEXP (x, 0);
2533 x1 = XEXP (x, 1);
2534 if (GET_CODE (x0) == CONST_INT)
2535 {
2536 x0 = XEXP (x, 1);
2537 x1 = XEXP (x, 0);
2538 }
2539 break;
2540
2541 default:
2542 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2543 break;
2544
2545 }
2546
2547 if (addr_offset)
2548 {
2549 if (!x1)
2550 x1 = const0_rtx;
2551 else if (GET_CODE (x1) != CONST_INT)
2552 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2553 }
2554
2555 fputs ("@(", stream);
2556 if (!x0)
2557 fputs (reg_names[GPR_R0], stream);
2558 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2559 frv_print_operand_memory_reference_reg (stream, x0);
2560 else
2561 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2562
2563 fputs (",", stream);
2564 if (!x1)
2565 fputs (reg_names [GPR_R0], stream);
2566
2567 else
2568 {
2569 switch (GET_CODE (x1))
2570 {
2571 case SUBREG:
2572 case REG:
2573 frv_print_operand_memory_reference_reg (stream, x1);
2574 break;
2575
2576 case CONST_INT:
2577 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2578 break;
2579
2580 case CONST:
2581 if (!frv_const_unspec_p (x1, &unspec))
2582 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2583 frv_output_const_unspec (stream, &unspec);
2584 break;
2585
2586 default:
2587 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2588 }
2589 }
2590
2591 fputs (")", stream);
2592 }
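/* Example outputs (illustrative only):

	(reg:SI gr4)                       ->  @(gr4,gr0)
	(plus (reg:SI gr4) (const_int 8))  ->  @(gr4,8)
	(plus (reg:SI gr4) (reg:SI gr5))   ->  @(gr4,gr5)

   and with ADDR_OFFSET = 4 the constant form above prints @(gr4,12).  */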
2593
2594 \f
2595 /* Return 2 for likely branches and 0 for non-likely branches. */
2596
2597 #define FRV_JUMP_LIKELY 2
2598 #define FRV_JUMP_NOT_LIKELY 0
2599
2600 static int
2601 frv_print_operand_jump_hint (rtx_insn *insn)
2602 {
2603 rtx note;
2604 rtx labelref;
2605 int ret;
2606 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2607
2608 gcc_assert (JUMP_P (insn));
2609
2610 /* Assume any non-conditional jump is likely. */
2611 if (! any_condjump_p (insn))
2612 ret = FRV_JUMP_LIKELY;
2613
2614 else
2615 {
2616 labelref = condjump_label (insn);
2617 if (labelref)
2618 {
2619 rtx label = XEXP (labelref, 0);
2620 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2621 ? BACKWARD
2622 : FORWARD);
2623 }
2624
2625 note = find_reg_note (insn, REG_BR_PROB, 0);
2626 if (!note)
2627 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2628
2629 else
2630 {
2631 ret = ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
2632 >= profile_probability::even ())
2633 ? FRV_JUMP_LIKELY
2634 : FRV_JUMP_NOT_LIKELY);
2635 }
2636 }
2637
2638 #if 0
2639 if (TARGET_DEBUG)
2640 {
2641 char *direction;
2642
2643 switch (jump_type)
2644 {
2645 default:
2646 case UNKNOWN: direction = "unknown jump direction"; break;
2647 case BACKWARD: direction = "jump backward"; break;
2648 case FORWARD: direction = "jump forward"; break;
2649 }
2650
2651 fprintf (stderr,
2652 "%s: uid %ld, %s, probability = %d, max prob. = %d, hint = %d\n",
2653 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2654 (long)INSN_UID (insn), direction, prob,
2655 REG_BR_PROB_BASE, ret);
2656 }
2657 #endif
2658
2659 return ret;
2660 }
2661
2662 \f
2663 /* Return the comparison operator to use for CODE given that the ICC
2664 register is OP0. */
2665
2666 static const char *
2667 comparison_string (enum rtx_code code, rtx op0)
2668 {
2669 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2670 switch (code)
2671 {
2672 default: output_operand_lossage ("bad condition code"); return "";
2673 case EQ: return "eq";
2674 case NE: return "ne";
2675 case LT: return is_nz_p ? "n" : "lt";
2676 case LE: return "le";
2677 case GT: return "gt";
2678 case GE: return is_nz_p ? "p" : "ge";
2679 case LTU: return is_nz_p ? "no" : "c";
2680 case LEU: return is_nz_p ? "eq" : "ls";
2681 case GTU: return is_nz_p ? "ne" : "hi";
2682 case GEU: return is_nz_p ? "ra" : "nc";
2683 }
2684 }
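/* For instance (illustrative only), GE prints "ge" for an ordinary ICC
   comparison but "p" when the mode is CC_NZmode, and GEU against a
   CC_NZmode value prints "ra" (branch always), since any unsigned value
   compares greater than or equal to zero.  */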
2685
2686 /* Print an operand to an assembler instruction.
2687
2688 `%' followed by a letter and a digit says to output an operand in an
2689 alternate fashion. Four letters have standard, built-in meanings
2690 described below. The hook `TARGET_PRINT_OPERAND' can define
2691 additional letters with nonstandard meanings.
2692
2693 `%cDIGIT' can be used to substitute an operand that is a constant value
2694 without the syntax that normally indicates an immediate operand.
2695
2696 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2697 before printing.
2698
2699 `%aDIGIT' can be used to substitute an operand as if it were a memory
2700 reference, with the actual operand treated as the address. This may be
2701 useful when outputting a "load address" instruction, because often the
2702 assembler syntax for such an instruction requires you to write the operand
2703 as if it were a memory reference.
2704
2705 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2706
2707 `%=' outputs a number which is unique to each instruction in the entire
2708 compilation. This is useful for making local labels to be referred to more
2709 than once in a single template that generates multiple assembler
2710 instructions.
2711
2712 `%' followed by a punctuation character specifies a substitution that
2713 does not use an operand. Only one case is standard: `%%' outputs a
2714 `%' into the assembler code. Other nonstandard cases can be defined
2715 in the `TARGET_PRINT_OPERAND' hook. You must also define which
2716 punctuation characters are valid with the
2717 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */
2718
2719 static void
2720 frv_print_operand (FILE * file, rtx x, int code)
2721 {
2722 struct frv_unspec unspec;
2723 HOST_WIDE_INT value;
2724 int offset;
2725
2726 if (code != 0 && !ISALPHA (code))
2727 value = 0;
2728
2729 else if (GET_CODE (x) == CONST_INT)
2730 value = INTVAL (x);
2731
2732 else if (GET_CODE (x) == CONST_DOUBLE)
2733 {
2734 if (GET_MODE (x) == SFmode)
2735 {
2736 long l;
2737
2738 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2739 value = l;
2740 }
2741
2742 else if (GET_MODE (x) == VOIDmode)
2743 value = CONST_DOUBLE_LOW (x);
2744
2745 else
2746 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2747 }
2748
2749 else
2750 value = 0;
2751
2752 switch (code)
2753 {
2754
2755 case '.':
2756 /* Output r0. */
2757 fputs (reg_names[GPR_R0], file);
2758 break;
2759
2760 case '#':
2761 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2762 break;
2763
2764 case '@':
2765 /* Output small data area base register (gr16). */
2766 fputs (reg_names[SDA_BASE_REG], file);
2767 break;
2768
2769 case '~':
2770 /* Output pic register (gr17). */
2771 fputs (reg_names[PIC_REGNO], file);
2772 break;
2773
2774 case '*':
2775 /* Output the temporary integer CCR register. */
2776 fputs (reg_names[ICR_TEMP], file);
2777 break;
2778
2779 case '&':
2780 /* Output the temporary integer CC register. */
2781 fputs (reg_names[ICC_TEMP], file);
2782 break;
2783
2784 /* case 'a': print an address. */
2785
2786 case 'C':
2787 /* Print appropriate test for integer branch false operation. */
2788 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2789 XEXP (x, 0)), file);
2790 break;
2791
2792 case 'c':
2793 /* Print appropriate test for integer branch true operation. */
2794 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2795 break;
2796
2797 case 'e':
2798 /* Print 1 for a NE and 0 for an EQ to give the final argument
2799 for a conditional instruction. */
2800 if (GET_CODE (x) == NE)
2801 fputs ("1", file);
2802
2803 else if (GET_CODE (x) == EQ)
2804 fputs ("0", file);
2805
2806 else
2807 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2808 break;
2809
2810 case 'F':
2811 /* Print appropriate test for floating point branch false operation. */
2812 switch (GET_CODE (x))
2813 {
2814 default:
2815 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2816
2817 case EQ: fputs ("ne", file); break;
2818 case NE: fputs ("eq", file); break;
2819 case LT: fputs ("uge", file); break;
2820 case LE: fputs ("ug", file); break;
2821 case GT: fputs ("ule", file); break;
2822 case GE: fputs ("ul", file); break;
2823 }
2824 break;
2825
2826 case 'f':
2827 /* Print appropriate test for floating point branch true operation. */
2828 switch (GET_CODE (x))
2829 {
2830 default:
2831 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2832
2833 case EQ: fputs ("eq", file); break;
2834 case NE: fputs ("ne", file); break;
2835 case LT: fputs ("lt", file); break;
2836 case LE: fputs ("le", file); break;
2837 case GT: fputs ("gt", file); break;
2838 case GE: fputs ("ge", file); break;
2839 }
2840 break;
2841
2842 case 'g':
2843 /* Print appropriate GOT function. */
2844 if (GET_CODE (x) != CONST_INT)
2845 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2846 fputs (unspec_got_name (INTVAL (x)), file);
2847 break;
2848
2849 case 'I':
2850 /* Print 'i' if the operand is a constant, or is a memory reference that
2851 adds a constant. */
2852 if (GET_CODE (x) == MEM)
2853 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2854 ? XEXP (XEXP (x, 0), 1)
2855 : XEXP (x, 0));
2856 else if (GET_CODE (x) == PLUS)
2857 x = XEXP (x, 1);
2858
2859 switch (GET_CODE (x))
2860 {
2861 default:
2862 break;
2863
2864 case CONST_INT:
2865 case SYMBOL_REF:
2866 case CONST:
2867 fputs ("i", file);
2868 break;
2869 }
2870 break;
2871
2872 case 'i':
2873 /* For jump instructions, print 'i' if the operand is a constant or
2874 is an expression that adds a constant. */
2875 if (GET_CODE (x) == CONST_INT)
2876 fputs ("i", file);
2877
2878 else
2879 {
2880 if (GET_CODE (x) == CONST_INT
2881 || (GET_CODE (x) == PLUS
2882 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2883 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2884 fputs ("i", file);
2885 }
2886 break;
2887
2888 case 'L':
2889 /* Print the lower register of a double-word register pair. */
2890 if (GET_CODE (x) == REG)
2891 fputs (reg_names[ REGNO (x)+1 ], file);
2892 else
2893 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2894 break;
2895
2896 /* case 'l': print a LABEL_REF. */
2897
2898 case 'M':
2899 case 'N':
2900 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
2901 for the second word of double memory operations. */
2902 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
2903 switch (GET_CODE (x))
2904 {
2905 default:
2906 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
2907
2908 case MEM:
2909 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
2910 break;
2911
2912 case REG:
2913 case SUBREG:
2914 case CONST_INT:
2915 case PLUS:
2916 case SYMBOL_REF:
2917 frv_print_operand_memory_reference (file, x, offset);
2918 break;
2919 }
2920 break;
2921
2922 case 'O':
2923 /* Print the assembler mnemonic for an arithmetic or logical operator. */
2924 switch (GET_CODE (x))
2925 {
2926 default:
2927 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
2928
2929 case PLUS: fputs ("add", file); break;
2930 case MINUS: fputs ("sub", file); break;
2931 case AND: fputs ("and", file); break;
2932 case IOR: fputs ("or", file); break;
2933 case XOR: fputs ("xor", file); break;
2934 case ASHIFT: fputs ("sll", file); break;
2935 case ASHIFTRT: fputs ("sra", file); break;
2936 case LSHIFTRT: fputs ("srl", file); break;
2937 }
2938 break;
2939
2940 /* case 'n': negate and print a constant int. */
2941
2942 case 'P':
2943 /* Print PIC label using operand as the number. */
2944 if (GET_CODE (x) != CONST_INT)
2945 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
2946
2947 fprintf (file, ".LCF%ld", (long)INTVAL (x));
2948 break;
2949
2950 case 'U':
2951 /* Print 'u' if the operand is an update load/store. */
2952 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
2953 fputs ("u", file);
2954 break;
2955
2956 case 'z':
2957 /* If value is 0, print gr0, otherwise it must be a register. */
2958 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
2959 fputs (reg_names[GPR_R0], file);
2960
2961 else if (GET_CODE (x) == REG)
2962 fputs (reg_names [REGNO (x)], file);
2963
2964 else
2965 fatal_insn ("bad insn in frv_print_operand, z case", x);
2966 break;
2967
2968 case 'x':
2969 /* Print constant in hex. */
2970 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2971 {
2972 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
2973 break;
2974 }
2975
2976 /* Fall through. */
2977
2978 case '\0':
2979 if (GET_CODE (x) == REG)
2980 fputs (reg_names [REGNO (x)], file);
2981
2982 else if (GET_CODE (x) == CONST_INT
2983 || GET_CODE (x) == CONST_DOUBLE)
2984 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
2985
2986 else if (frv_const_unspec_p (x, &unspec))
2987 frv_output_const_unspec (file, &unspec);
2988
2989 else if (GET_CODE (x) == MEM)
2990 frv_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
2991
2992 else if (CONSTANT_ADDRESS_P (x))
2993 frv_print_operand_address (file, VOIDmode, x);
2994
2995 else
2996 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
2997
2998 break;
2999
3000 default:
3001 fatal_insn ("frv_print_operand: unknown code", x);
3002 break;
3003 }
3004
3005 return;
3006 }
3007
3008 static bool
3009 frv_print_operand_punct_valid_p (unsigned char code)
3010 {
3011 return (code == '.' || code == '#' || code == '@' || code == '~'
3012 || code == '*' || code == '&');
3013 }
3014
3015 \f
3016 /* A C statement (sans semicolon) for initializing the variable CUM for the
3017 state at the beginning of the argument list. The variable has type
3018 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3019 of the function which will receive the args, or 0 if the args are to a
3020 compiler support library function. The value of INDIRECT is nonzero when
3021 processing an indirect call, for example a call through a function pointer.
3022 The value of INDIRECT is zero for a call to an explicitly named function, a
3023 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3024 arguments for the function being compiled.
3025
3026 When processing a call to a compiler support library function, LIBNAME
3027 identifies which one. It is a `symbol_ref' rtx which contains the name of
3028 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3029 being processed. Thus, each time this macro is called, either LIBNAME or
3030 FNTYPE is nonzero, but never both of them at once. */
3031
3032 void
3033 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3034 tree fntype,
3035 rtx libname,
3036 tree fndecl,
3037 int incoming)
3038 {
3039 *cum = FIRST_ARG_REGNUM;
3040
3041 if (TARGET_DEBUG_ARG)
3042 {
3043 fprintf (stderr, "\ninit_cumulative_args:");
3044 if (!fndecl && fntype)
3045 fputs (" indirect", stderr);
3046
3047 if (incoming)
3048 fputs (" incoming", stderr);
3049
3050 if (fntype)
3051 {
3052 tree ret_type = TREE_TYPE (fntype);
3053 fprintf (stderr, " return=%s,",
3054 get_tree_code_name (TREE_CODE (ret_type)));
3055 }
3056
3057 if (libname && GET_CODE (libname) == SYMBOL_REF)
3058 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3059
3060 if (cfun->returns_struct)
3061 fprintf (stderr, " return-struct");
3062
3063 putc ('\n', stderr);
3064 }
3065 }
3066
3067 \f
3068 /* Return true if we should pass an argument on the stack rather than
3069 in registers. */
3070
3071 static bool
3072 frv_must_pass_in_stack (const function_arg_info &arg)
3073 {
3074 return arg.mode == BLKmode || arg.aggregate_type_p ();
3075 }
3076
3077 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3078 argument with the specified mode and type. If it is not defined,
3079 `PARM_BOUNDARY' is used for all arguments. */
3080
3081 static unsigned int
3082 frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
3083 const_tree type ATTRIBUTE_UNUSED)
3084 {
3085 return BITS_PER_WORD;
3086 }
3087
3088 static rtx
3089 frv_function_arg_1 (cumulative_args_t cum_v, const function_arg_info &arg,
3090 bool incoming ATTRIBUTE_UNUSED)
3091 {
3092 const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3093
3094 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3095 int arg_num = *cum;
3096 rtx ret;
3097 const char *debstr;
3098
3099 /* Return a marker for use in the call instruction. */
3100 if (xmode == VOIDmode)
3101 {
3102 ret = const0_rtx;
3103 debstr = "<0>";
3104 }
3105
3106 else if (arg_num <= LAST_ARG_REGNUM)
3107 {
3108 ret = gen_rtx_REG (xmode, arg_num);
3109 debstr = reg_names[arg_num];
3110 }
3111
3112 else
3113 {
3114 ret = NULL_RTX;
3115 debstr = "memory";
3116 }
3117
3118 if (TARGET_DEBUG_ARG)
3119 fprintf (stderr,
3120 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3121 arg_num, GET_MODE_NAME (arg.mode), arg.named,
3122 GET_MODE_SIZE (arg.mode), debstr);
3123
3124 return ret;
3125 }
3126
3127 static rtx
3128 frv_function_arg (cumulative_args_t cum, const function_arg_info &arg)
3129 {
3130 return frv_function_arg_1 (cum, arg, false);
3131 }
3132
3133 static rtx
3134 frv_function_incoming_arg (cumulative_args_t cum, const function_arg_info &arg)
3135 {
3136 return frv_function_arg_1 (cum, arg, true);
3137 }
3138
3139 \f
3140 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
3141
3142 static void
3143 frv_function_arg_advance (cumulative_args_t cum_v,
3144 const function_arg_info &arg)
3145 {
3146 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3147
3148 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3149 int bytes = GET_MODE_SIZE (xmode);
3150 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3151 int arg_num = *cum;
3152
3153 *cum = arg_num + words;
3154
3155 if (TARGET_DEBUG_ARG)
3156 fprintf (stderr,
3157 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3158 arg_num, GET_MODE_NAME (arg.mode), arg.named,
3159 words * UNITS_PER_WORD);
3160 }
3161
3162 \f
3163 /* Implement TARGET_ARG_PARTIAL_BYTES. */
3164
3165 static int
3166 frv_arg_partial_bytes (cumulative_args_t cum, const function_arg_info &arg)
3167 {
3168 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3169 int bytes = GET_MODE_SIZE (xmode);
3170 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3171 int arg_num = *get_cumulative_args (cum);
3172 int ret;
3173
3174 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3175 ? LAST_ARG_REGNUM - arg_num + 1
3176 : 0);
3177 ret *= UNITS_PER_WORD;
3178
3179 if (TARGET_DEBUG_ARG && ret)
3180 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3181
3182 return ret;
3183 }
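/* Illustrative example: a DImode argument whose first word lands in
   LAST_ARG_REGNUM is split, so this hook reports UNITS_PER_WORD bytes
   (4 on FRV) passed in the register, with the second word going on the
   stack.  */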
3184
3185 \f
3186 /* Implements TARGET_FUNCTION_VALUE. */
3187
3188 static rtx
3189 frv_function_value (const_tree valtype,
3190 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3191 bool outgoing ATTRIBUTE_UNUSED)
3192 {
3193 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM);
3194 }
3195
3196 \f
3197 /* Implements TARGET_LIBCALL_VALUE. */
3198
3199 static rtx
3200 frv_libcall_value (machine_mode mode,
3201 const_rtx fun ATTRIBUTE_UNUSED)
3202 {
3203 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3204 }
3205
3206 \f
3207 /* Implements FUNCTION_VALUE_REGNO_P. */
3208
3209 bool
3210 frv_function_value_regno_p (const unsigned int regno)
3211 {
3212 return (regno == RETURN_VALUE_REGNUM);
3213 }
3214 \f
3215 /* Return true if a register is ok to use as a base or index register. */
3216
3217 static FRV_INLINE int
3218 frv_regno_ok_for_base_p (int regno, int strict_p)
3219 {
3220 if (GPR_P (regno))
3221 return TRUE;
3222
3223 if (strict_p)
3224 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3225
3226 if (regno == ARG_POINTER_REGNUM)
3227 return TRUE;
3228
3229 return (regno >= FIRST_PSEUDO_REGISTER);
3230 }
3231
3232 \f
3233 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3234 RTX) is a legitimate memory address on the target machine for a memory
3235 operand of mode MODE.
3236
3237 It usually pays to define several simpler macros to serve as subroutines for
3238 this one. Otherwise it may be too complicated to understand.
3239
3240 This macro must exist in two variants: a strict variant and a non-strict
3241 one. The strict variant is used in the reload pass. It must be defined so
3242 that any pseudo-register that has not been allocated a hard register is
3243 considered a memory reference. In contexts where some kind of register is
3244 required, a pseudo-register with no hard register must be rejected.
3245
3246 The non-strict variant is used in other passes. It must be defined to
3247 accept all pseudo-registers in every context where some kind of register is
3248 required.
3249
3250 Compiler source files that want to use the strict variant of this macro
3251 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3252 conditional to define the strict variant in that case and the non-strict
3253 variant otherwise.
3254
3255 Normally, constant addresses which are the sum of a `symbol_ref' and an
3256 integer are stored inside a `const' RTX to mark them as constant.
3257 Therefore, there is no need to recognize such sums specifically as
3258 legitimate addresses. Normally you would simply recognize any `const' as
3259 legitimate.
3260
3261 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle
3262 constant sums that are not marked with `const'. It assumes that a
3263 naked `plus' indicates indexing. If so, then you *must* reject such
3264 naked constant sums as illegitimate addresses, so that none of them
3265 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
3266
3267 int
3268 frv_legitimate_address_p_1 (machine_mode mode,
3269 rtx x,
3270 int strict_p,
3271 int condexec_p,
3272 int allow_double_reg_p)
3273 {
3274 rtx x0, x1;
3275 int ret = 0;
3276 HOST_WIDE_INT value;
3277 unsigned regno0;
3278
3279 if (FRV_SYMBOL_REF_TLS_P (x))
3280 return 0;
3281
3282 switch (GET_CODE (x))
3283 {
3284 default:
3285 break;
3286
3287 case SUBREG:
3288 x = SUBREG_REG (x);
3289 if (GET_CODE (x) != REG)
3290 break;
3291
3292 /* Fall through. */
3293
3294 case REG:
3295 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3296 break;
3297
3298 case PRE_MODIFY:
3299 x0 = XEXP (x, 0);
3300 x1 = XEXP (x, 1);
3301 if (GET_CODE (x0) != REG
3302 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3303 || GET_CODE (x1) != PLUS
3304 || ! rtx_equal_p (x0, XEXP (x1, 0))
3305 || GET_CODE (XEXP (x1, 1)) != REG
3306 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3307 break;
3308
3309 ret = 1;
3310 break;
3311
3312 case CONST_INT:
3313 /* 12-bit immediate */
3314 if (condexec_p)
3315 ret = FALSE;
3316 else
3317 {
3318 ret = IN_RANGE (INTVAL (x), -2048, 2047);
3319
3320 /* If we can't use load/store double operations, make sure we can
3321 address the second word. */
3322 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3323 ret = IN_RANGE (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3324 -2048, 2047);
3325 }
3326 break;
3327
3328 case PLUS:
3329 x0 = XEXP (x, 0);
3330 x1 = XEXP (x, 1);
3331
3332 if (GET_CODE (x0) == SUBREG)
3333 x0 = SUBREG_REG (x0);
3334
3335 if (GET_CODE (x0) != REG)
3336 break;
3337
3338 regno0 = REGNO (x0);
3339 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3340 break;
3341
3342 switch (GET_CODE (x1))
3343 {
3344 default:
3345 break;
3346
3347 case SUBREG:
3348 x1 = SUBREG_REG (x1);
3349 if (GET_CODE (x1) != REG)
3350 break;
3351
3352 /* Fall through. */
3353
3354 case REG:
3355 /* Do not allow reg+reg addressing for modes > 1 word if we
3356 can't depend on having move double instructions. */
3357 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3358 ret = FALSE;
3359 else
3360 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3361 break;
3362
3363 case CONST_INT:
3364 /* 12-bit immediate */
3365 if (condexec_p)
3366 ret = FALSE;
3367 else
3368 {
3369 value = INTVAL (x1);
3370 ret = IN_RANGE (value, -2048, 2047);
3371
3372 /* If we can't use load/store double operations, make sure we can
3373 address the second word. */
3374 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3375 ret = IN_RANGE (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3376 }
3377 break;
3378
3379 case CONST:
3380 if (!condexec_p && got12_operand (x1, VOIDmode))
3381 ret = TRUE;
3382 break;
3383
3384 }
3385 break;
3386 }
3387
3388 if (TARGET_DEBUG_ADDR)
3389 {
3390 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n",
3391 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3392 (condexec_p) ? ", inside conditional code" : "");
3393 debug_rtx (x);
3394 }
3395
3396 return ret;
3397 }
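/* Illustrative examples of what the checks above accept and reject:

	(plus (reg gr4) (const_int 100))   accepted (fits in 12 bits)
	(plus (reg gr4) (const_int 4000))  rejected (outside -2048..2047)
	(plus (reg gr4) (reg gr5))         accepted for SImode, rejected for
					   DImode unless ALLOW_DOUBLE_REG_P

   and when CONDEXEC_P is set, the constant-displacement forms are
   rejected as well.  */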
3398
3399 bool
3400 frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p, code_helper)
3401 {
3402 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
3403 }
3404
3405 /* Given an ADDR, generate code to inline the PLT. */
3406 static rtx
3407 gen_inlined_tls_plt (rtx addr)
3408 {
3409 rtx retval, dest;
3410 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3411
3413 dest = gen_reg_rtx (DImode);
3414
3415 if (flag_pic == 1)
3416 {
3417 /*
3418 -fpic version:
3419
3420 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3421 calll #gettlsoff(ADDR)@(gr8, gr0)
3422 */
3423 emit_insn (gen_tls_lddi (dest, addr, picreg));
3424 }
3425 else
3426 {
3427 /*
3428 -fPIC version:
3429
3430 sethi.p #gottlsdeschi(ADDR), gr8
3431 setlo #gottlsdesclo(ADDR), gr8
3432 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3433 calll #gettlsoff(ADDR)@(gr8, gr0)
3434 */
3435 rtx reguse = gen_reg_rtx (Pmode);
3436 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3437 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3438 }
3439
3440 retval = gen_reg_rtx (Pmode);
3441 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3442 return retval;
3443 }
3444
3445 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3446 the destination address. */
3447 static rtx
3448 gen_tlsmoff (rtx addr, rtx reg)
3449 {
3450 rtx dest = gen_reg_rtx (Pmode);
3451
3452 if (TARGET_BIG_TLS)
3453 {
3454 /* sethi.p #tlsmoffhi(x), grA
3455 setlo #tlsmofflo(x), grA
3456 */
3457 dest = gen_reg_rtx (Pmode);
3458 emit_insn (gen_tlsoff_hilo (dest, addr,
3459 GEN_INT (R_FRV_TLSMOFFHI)));
3460 dest = gen_rtx_PLUS (Pmode, dest, reg);
3461 }
3462 else
3463 {
3464 /* addi grB, #tlsmoff12(x), grC
3465 -or-
3466 ld/st @(grB, #tlsmoff12(x)), grC
3467 */
3468 dest = gen_reg_rtx (Pmode);
3469 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3470 GEN_INT (R_FRV_TLSMOFF12)));
3471 }
3472 return dest;
3473 }
3474
3475 /* Generate code for a TLS address. */
3476 static rtx
3477 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3478 {
3479 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3480 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3481
3482 switch (model)
3483 {
3484 case TLS_MODEL_INITIAL_EXEC:
3485 if (flag_pic == 1)
3486 {
3487 /* -fpic version.
3488 ldi @(gr15, #gottlsoff12(x)), gr5
3489 */
3490 dest = gen_reg_rtx (Pmode);
3491 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3492 dest = gen_rtx_PLUS (Pmode, tp, dest);
3493 }
3494 else
3495 {
3496 /* -fPIC or anything else.
3497
3498 sethi.p #gottlsoffhi(x), gr14
3499 setlo #gottlsofflo(x), gr14
3500 ld #tlsoff(x)@(gr15, gr14), gr9
3501 */
3502 rtx tmp = gen_reg_rtx (Pmode);
3503 dest = gen_reg_rtx (Pmode);
3504 emit_insn (gen_tlsoff_hilo (tmp, addr,
3505 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3506
3507 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3508 dest = gen_rtx_PLUS (Pmode, tp, dest);
3509 }
3510 break;
3511 case TLS_MODEL_LOCAL_DYNAMIC:
3512 {
3513 rtx reg, retval;
3514
3515 if (TARGET_INLINE_PLT)
3516 retval = gen_inlined_tls_plt (GEN_INT (0));
3517 else
3518 {
3519 /* call #gettlsoff(0) */
3520 retval = gen_reg_rtx (Pmode);
3521 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3522 }
3523
3524 reg = gen_reg_rtx (Pmode);
3525 emit_insn (gen_rtx_SET (reg, gen_rtx_PLUS (Pmode, retval, tp)));
3526
3527 dest = gen_tlsmoff (addr, reg);
3528
3529 /*
3530 dest = gen_reg_rtx (Pmode);
3531 emit_insn (gen_tlsoff_hilo (dest, addr,
3532 GEN_INT (R_FRV_TLSMOFFHI)));
3533 dest = gen_rtx_PLUS (Pmode, dest, reg);
3534 */
3535 break;
3536 }
3537 case TLS_MODEL_LOCAL_EXEC:
3538 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3539 break;
3540 case TLS_MODEL_GLOBAL_DYNAMIC:
3541 {
3542 rtx retval;
3543
3544 if (TARGET_INLINE_PLT)
3545 retval = gen_inlined_tls_plt (addr);
3546 else
3547 {
3548 /* call #gettlsoff(x) */
3549 retval = gen_reg_rtx (Pmode);
3550 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3551 }
3552 dest = gen_rtx_PLUS (Pmode, retval, tp);
3553 break;
3554 }
3555 default:
3556 gcc_unreachable ();
3557 }
3558
3559 return dest;
3560 }
3561
3562 rtx
3563 frv_legitimize_address (rtx x,
3564 rtx oldx ATTRIBUTE_UNUSED,
3565 machine_mode mode ATTRIBUTE_UNUSED)
3566 {
3567 if (GET_CODE (x) == SYMBOL_REF)
3568 {
3569 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3570 if (model != 0)
3571 return frv_legitimize_tls_address (x, model);
3572 }
3573
3574 return x;
3575 }
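
/* Illustrative sketch of what the TLS handling above amounts to in the
   simplest case.  For a local-exec symbol X without TARGET_BIG_TLS, the
   address comes from gen_tlsmoff and is roughly

	addi gr29, #tlsmoff12(X), grC

   (or the #tlsmoff12 operand is folded straight into a load/store), with
   gr29 being the thread pointer used above and grC a placeholder for the
   pseudo holding the result.  The other models go through the GOT or a
   call to #gettlsoff, as shown in the comments earlier in this file.  */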
3576 \f
3577 /* Test whether a local function descriptor is canonical, i.e.,
3578 whether we can use FUNCDESC_GOTOFF to compute the address of the
3579 function. */
3580
3581 static bool
3582 frv_local_funcdesc_p (rtx fnx)
3583 {
3584 tree fn;
3585 enum symbol_visibility vis;
3586 bool ret;
3587
3588 if (! SYMBOL_REF_LOCAL_P (fnx))
3589 return FALSE;
3590
3591 fn = SYMBOL_REF_DECL (fnx);
3592
3593 if (! fn)
3594 return FALSE;
3595
3596 vis = DECL_VISIBILITY (fn);
3597
3598 if (vis == VISIBILITY_PROTECTED)
3599 /* Private function descriptors for protected functions are not
3600 canonical. Temporarily change the visibility to global. */
3601 vis = VISIBILITY_DEFAULT;
3602 else if (flag_shlib)
3603 /* If we're already compiling for a shared library (that, unlike
3604 executables, can't assume that the existence of a definition
3605 implies local binding), we can skip the re-testing. */
3606 return TRUE;
3607
3608 ret = default_binds_local_p_1 (fn, flag_pic);
3609
3610 DECL_VISIBILITY (fn) = vis;
3611
3612 return ret;
3613 }
3614
3615 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3616 register. */
3617
3618 rtx
3619 frv_gen_GPsym2reg (rtx dest, rtx src)
3620 {
3621 tree gp = get_identifier ("_gp");
3622 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3623
3624 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3625 }
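
/* As a concrete illustration, with SRC being the usual FDPIC register gr15,
   the insn generated above is a single GOT-relative load along the lines of

	ld @(gr15, #got12(_gp)), grD

   where grD stands for whatever register DEST happens to be.  */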
3626
3627 static const char *
3628 unspec_got_name (int i)
3629 {
3630 switch (i)
3631 {
3632 case R_FRV_GOT12: return "got12";
3633 case R_FRV_GOTHI: return "gothi";
3634 case R_FRV_GOTLO: return "gotlo";
3635 case R_FRV_FUNCDESC: return "funcdesc";
3636 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3637 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3638 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3639 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3640 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3641 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3642 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3643 case R_FRV_GOTOFF12: return "gotoff12";
3644 case R_FRV_GOTOFFHI: return "gotoffhi";
3645 case R_FRV_GOTOFFLO: return "gotofflo";
3646 case R_FRV_GPREL12: return "gprel12";
3647 case R_FRV_GPRELHI: return "gprelhi";
3648 case R_FRV_GPRELLO: return "gprello";
3649 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3650 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3651 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3652 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3653 case R_FRV_TLSMOFF12: return "tlsmoff12";
3654 case R_FRV_TLSDESCHI: return "tlsdeschi";
3655 case R_FRV_TLSDESCLO: return "tlsdesclo";
3656 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3657 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3658 default: gcc_unreachable ();
3659 }
3660 }
3661
3662 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3663 is added inside the relocation operator. */
3664
3665 static void
3666 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3667 {
3668 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3669 output_addr_const (stream, plus_constant (Pmode, unspec->symbol,
3670 unspec->offset));
3671 fputs (")", stream);
3672 }
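
/* For example, a GPREL12 unspec for symbol foo with offset 4 comes out as

	#gprel12(foo+4)

   with the offset folded inside the relocation operator, as noted above.  */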
3673
3674 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3675 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3676 otherwise return ORIG_X. */
3677
3678 rtx
3679 frv_find_base_term (rtx x)
3680 {
3681 struct frv_unspec unspec;
3682
3683 if (frv_const_unspec_p (x, &unspec)
3684 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3685 return plus_constant (Pmode, unspec.symbol, unspec.offset);
3686
3687 return x;
3688 }
3689
3690 /* Return 1 if the operand is a valid FRV memory operand. CONDEXEC_P is true if
3691 the operand is used by a predicated instruction. */
3692
3693 int
3694 frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p)
3695 {
3696 return ((GET_MODE (op) == mode || mode == VOIDmode)
3697 && GET_CODE (op) == MEM
3698 && frv_legitimate_address_p_1 (mode, XEXP (op, 0),
3699 reload_completed, condexec_p, FALSE));
3700 }
3701
3702 void
3703 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3704 {
3705 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3706 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3707 rtx c, rvrtx=0;
3708 rtx addr;
3709
3710 if (ret_value)
3711 {
3712 rvrtx = operands[0];
3713 operands ++;
3714 }
3715
3716 addr = XEXP (operands[0], 0);
3717
3718 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3719 any calls that would involve a PLT, but can't tell, since we
3720 don't know whether an extern function is going to be provided by
3721 a separate translation unit or imported from a separate module.
3722 When compiling for shared libraries, if the function has default
3723 visibility, we assume it's overridable, so we inline the PLT, but
3724 for executables, we don't really have a way to make a good
3725 decision: a function is as likely to be imported from a shared
3726 library as it is to be defined in the executable itself. We
3727 assume executables will get global functions defined locally,
3728 whereas shared libraries will have them potentially overridden,
3729 so we only inline PLTs when compiling for shared libraries.
3730
3731 In order to mark a function as local to a shared library, any
3732 non-default visibility attribute suffices. Unfortunately,
3733 there's no simple way to tag a function declaration as ``in a
3734 different module'', which we could then use to trigger PLT
3735 inlining on executables. There's -minline-plt, but it affects
3736 all external functions, so one would have to also mark function
3737 declarations available in the same module with non-default
3738 visibility, which is advantageous in itself. */
3739 if (GET_CODE (addr) == SYMBOL_REF
3740 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3741 || sibcall))
3742 {
3743 rtx x, dest;
3744 dest = gen_reg_rtx (SImode);
3745 if (flag_pic != 1)
3746 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3747 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3748 else
3749 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3750 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3751 emit_insn (x);
3752 crtl->uses_pic_offset_table = TRUE;
3753 addr = dest;
3754 }
3755 else if (GET_CODE (addr) == SYMBOL_REF)
3756 {
3757 /* These are always either local, or handled through a local
3758 PLT. */
3759 if (ret_value)
3760 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3761 operands[2], picreg, lr);
3762 else
3763 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3764 emit_call_insn (c);
3765 return;
3766 }
3767 else if (! ldd_address_operand (addr, Pmode))
3768 addr = force_reg (Pmode, addr);
3769
3770 picreg = gen_reg_rtx (DImode);
3771 emit_insn (gen_movdi_ldd (picreg, addr));
3772
3773 if (sibcall && ret_value)
3774 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3775 else if (sibcall)
3776 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3777 else if (ret_value)
3778 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3779 else
3780 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3781 emit_call_insn (c);
3782 }
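
/* A rough summary of the call strategies chosen above:

     - a sibcall, or a possibly-PLT-bound SYMBOL_REF under -minline-plt,
       first loads the address of its function descriptor via
       #gotofffuncdesc12 (using the hi/lo form unless flag_pic == 1) and
       then takes the indirect path below;
     - any other SYMBOL_REF is called directly through the *_fdpicsi
       patterns, relying on a local PLT when needed;
     - everything else loads the two-word function descriptor with a single
       ldd and calls through it with the *_fdpicdi patterns.  */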
3783 \f
3784 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3785 process these separately from any offsets, such that we add any
3786 offsets to the function descriptor (the actual pointer), not to the
3787 function address. */
3788
3789 static bool
3790 frv_function_symbol_referenced_p (rtx x)
3791 {
3792 const char *format;
3793 int length;
3794 int j;
3795
3796 if (GET_CODE (x) == SYMBOL_REF)
3797 return SYMBOL_REF_FUNCTION_P (x);
3798
3799 length = GET_RTX_LENGTH (GET_CODE (x));
3800 format = GET_RTX_FORMAT (GET_CODE (x));
3801
3802 for (j = 0; j < length; ++j)
3803 {
3804 switch (format[j])
3805 {
3806 case 'e':
3807 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3808 return TRUE;
3809 break;
3810
3811 case 'V':
3812 case 'E':
3813 if (XVEC (x, j) != 0)
3814 {
3815 int k;
3816 for (k = 0; k < XVECLEN (x, j); ++k)
3817 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3818 return TRUE;
3819 }
3820 break;
3821
3822 default:
3823 /* Nothing to do. */
3824 break;
3825 }
3826 }
3827
3828 return FALSE;
3829 }
3830
3831 /* Return true if the memory operand is one that can be conditionally
3832 executed. */
3833
3834 int
3835 condexec_memory_operand (rtx op, machine_mode mode)
3836 {
3837 machine_mode op_mode = GET_MODE (op);
3838 rtx addr;
3839
3840 if (mode != VOIDmode && op_mode != mode)
3841 return FALSE;
3842
3843 switch (op_mode)
3844 {
3845 default:
3846 return FALSE;
3847
3848 case E_QImode:
3849 case E_HImode:
3850 case E_SImode:
3851 case E_SFmode:
3852 break;
3853 }
3854
3855 if (GET_CODE (op) != MEM)
3856 return FALSE;
3857
3858 addr = XEXP (op, 0);
3859 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE);
3860 }
3861 \f
3862 /* Return true if the bare return instruction can be used outside of the
3863 epilogue code. For frv, we only do it if there was no stack allocation. */
3864
3865 int
3866 direct_return_p (void)
3867 {
3868 frv_stack_t *info;
3869
3870 if (!reload_completed)
3871 return FALSE;
3872
3873 info = frv_stack_info ();
3874 return (info->total_size == 0);
3875 }
3876
3877 \f
3878 void
3879 frv_emit_move (machine_mode mode, rtx dest, rtx src)
3880 {
3881 if (GET_CODE (src) == SYMBOL_REF)
3882 {
3883 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3884 if (model != 0)
3885 src = frv_legitimize_tls_address (src, model);
3886 }
3887
3888 switch (mode)
3889 {
3890 case E_SImode:
3891 if (frv_emit_movsi (dest, src))
3892 return;
3893 break;
3894
3895 case E_QImode:
3896 case E_HImode:
3897 case E_DImode:
3898 case E_SFmode:
3899 case E_DFmode:
3900 if (!reload_in_progress
3901 && !reload_completed
3902 && !register_operand (dest, mode)
3903 && !reg_or_0_operand (src, mode))
3904 src = copy_to_mode_reg (mode, src);
3905 break;
3906
3907 default:
3908 gcc_unreachable ();
3909 }
3910
3911 emit_insn (gen_rtx_SET (dest, src));
3912 }
3913
3914 /* Emit code to handle a MOVSI, adding in the small data register or pic
3915 register if needed to load up addresses. Return TRUE if the appropriate
3916 instructions are emitted. */
3917
3918 int
3919 frv_emit_movsi (rtx dest, rtx src)
3920 {
3921 int base_regno = -1;
3922 int unspec = 0;
3923 rtx sym = src;
3924 struct frv_unspec old_unspec;
3925
3926 if (!reload_in_progress
3927 && !reload_completed
3928 && !register_operand (dest, SImode)
3929 && (!reg_or_0_operand (src, SImode)
3930 /* Virtual registers will almost always be replaced by an
3931 add instruction, so expose this to CSE by copying to
3932 an intermediate register. */
3933 || (GET_CODE (src) == REG
3934 && IN_RANGE (REGNO (src),
3935 FIRST_VIRTUAL_REGISTER,
3936 LAST_VIRTUAL_POINTER_REGISTER))))
3937 {
3938 emit_insn (gen_rtx_SET (dest, copy_to_mode_reg (SImode, src)));
3939 return TRUE;
3940 }
3941
3942 /* Explicitly add in the PIC or small data register if needed. */
3943 switch (GET_CODE (src))
3944 {
3945 default:
3946 break;
3947
3948 case LABEL_REF:
3949 handle_label:
3950 if (TARGET_FDPIC)
3951 {
3952 /* Using GPREL12, we use a single GOT entry for all symbols
3953 in read-only sections, but trade sequences such as:
3954
3955 sethi #gothi(label), gr#
3956 setlo #gotlo(label), gr#
3957 ld @(gr15,gr#), gr#
3958
3959 for
3960
3961 ld @(gr15,#got12(_gp)), gr#
3962 sethi #gprelhi(label), gr##
3963 setlo #gprello(label), gr##
3964 add gr#, gr##, gr##
3965
3966 We may often be able to share gr# for multiple
3967 computations of GPREL addresses, and we may often fold
3968 the final add into the pair of registers of a load or
3969 store instruction, so it's often profitable. Even when
3970 optimizing for size, we're trading a GOT entry for an
3971 additional instruction, which trades GOT space
3972 (read-write) for code size (read-only, shareable), as
3973 long as the symbol is not used in more than two different
3974 locations.
3975
3976 With -fpie/-fpic, we'd be trading a single load for a
3977 sequence of 4 instructions, because the offset of the
3978 label can't be assumed to be addressable with 12 bits, so
3979 we don't do this. */
3980 if (TARGET_GPREL_RO)
3981 unspec = R_FRV_GPREL12;
3982 else
3983 unspec = R_FRV_GOT12;
3984 }
3985 else if (flag_pic)
3986 base_regno = PIC_REGNO;
3987
3988 break;
3989
3990 case CONST:
3991 if (frv_const_unspec_p (src, &old_unspec))
3992 break;
3993
3994 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
3995 {
3996 handle_whatever:
3997 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
3998 emit_move_insn (dest, src);
3999 return TRUE;
4000 }
4001 else
4002 {
4003 sym = XEXP (sym, 0);
4004 if (GET_CODE (sym) == PLUS
4005 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4006 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4007 sym = XEXP (sym, 0);
4008 if (GET_CODE (sym) == SYMBOL_REF)
4009 goto handle_sym;
4010 else if (GET_CODE (sym) == LABEL_REF)
4011 goto handle_label;
4012 else
4013 goto handle_whatever;
4014 }
4015 break;
4016
4017 case SYMBOL_REF:
4018 handle_sym:
4019 if (TARGET_FDPIC)
4020 {
4021 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4022
4023 if (model != 0)
4024 {
4025 src = frv_legitimize_tls_address (src, model);
4026 emit_move_insn (dest, src);
4027 return TRUE;
4028 }
4029
4030 if (SYMBOL_REF_FUNCTION_P (sym))
4031 {
4032 if (frv_local_funcdesc_p (sym))
4033 unspec = R_FRV_FUNCDESC_GOTOFF12;
4034 else
4035 unspec = R_FRV_FUNCDESC_GOT12;
4036 }
4037 else
4038 {
4039 if (CONSTANT_POOL_ADDRESS_P (sym))
4040 switch (GET_CODE (get_pool_constant (sym)))
4041 {
4042 case CONST:
4043 case SYMBOL_REF:
4044 case LABEL_REF:
4045 if (flag_pic)
4046 {
4047 unspec = R_FRV_GOTOFF12;
4048 break;
4049 }
4050 /* Fall through. */
4051 default:
4052 if (TARGET_GPREL_RO)
4053 unspec = R_FRV_GPREL12;
4054 else
4055 unspec = R_FRV_GOT12;
4056 break;
4057 }
4058 else if (SYMBOL_REF_LOCAL_P (sym)
4059 && !SYMBOL_REF_EXTERNAL_P (sym)
4060 && SYMBOL_REF_DECL (sym)
4061 && (!DECL_P (SYMBOL_REF_DECL (sym))
4062 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4063 {
4064 tree decl = SYMBOL_REF_DECL (sym);
4065 tree init = VAR_P (decl)
4066 ? DECL_INITIAL (decl)
4067 : TREE_CODE (decl) == CONSTRUCTOR
4068 ? decl : 0;
4069 int reloc = 0;
4070 bool named_section, readonly;
4071
4072 if (init && init != error_mark_node)
4073 reloc = compute_reloc_for_constant (init);
4074
4075 named_section = VAR_P (decl)
4076 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4077 readonly = decl_readonly_section (decl, reloc);
4078
4079 if (named_section)
4080 unspec = R_FRV_GOT12;
4081 else if (!readonly)
4082 unspec = R_FRV_GOTOFF12;
4083 else if (readonly && TARGET_GPREL_RO)
4084 unspec = R_FRV_GPREL12;
4085 else
4086 unspec = R_FRV_GOT12;
4087 }
4088 else
4089 unspec = R_FRV_GOT12;
4090 }
4091 }
4092
4093 else if (SYMBOL_REF_SMALL_P (sym))
4094 base_regno = SDA_BASE_REG;
4095
4096 else if (flag_pic)
4097 base_regno = PIC_REGNO;
4098
4099 break;
4100 }
4101
4102 if (base_regno >= 0)
4103 {
4104 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4105 emit_insn (gen_symGOTOFF2reg (dest, src,
4106 gen_rtx_REG (Pmode, base_regno),
4107 GEN_INT (R_FRV_GPREL12)));
4108 else
4109 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4110 gen_rtx_REG (Pmode, base_regno),
4111 GEN_INT (R_FRV_GPREL12)));
4112 if (base_regno == PIC_REGNO)
4113 crtl->uses_pic_offset_table = TRUE;
4114 return TRUE;
4115 }
4116
4117 if (unspec)
4118 {
4119 rtx x;
4120
4121 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4122 new uses of it once reload has begun. */
4123 gcc_assert (!reload_in_progress && !reload_completed);
4124
4125 switch (unspec)
4126 {
4127 case R_FRV_GOTOFF12:
4128 if (!frv_small_data_reloc_p (sym, unspec))
4129 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4130 GEN_INT (unspec));
4131 else
4132 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4133 break;
4134 case R_FRV_GPREL12:
4135 if (!frv_small_data_reloc_p (sym, unspec))
4136 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4137 GEN_INT (unspec));
4138 else
4139 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4140 break;
4141 case R_FRV_FUNCDESC_GOTOFF12:
4142 if (flag_pic != 1)
4143 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4144 GEN_INT (unspec));
4145 else
4146 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4147 break;
4148 default:
4149 if (flag_pic != 1)
4150 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4151 GEN_INT (unspec));
4152 else
4153 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4154 break;
4155 }
4156 emit_insn (x);
4157 crtl->uses_pic_offset_table = TRUE;
4158 return TRUE;
4159 }
4160
4161
4162 return FALSE;
4163 }
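
/* A small example of the FDPIC path above: moving the address of an
   ordinary global object X into a register typically ends up as the GOT
   load

	ld @(gr15, #got12(X)), grD

   (grD standing for DEST), while symbols the compiler can prove local use
   the cheaper #gotoff12/#gprel12 forms selected above.  */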
4164
4165 \f
4166 /* Return a string to output a single word move. */
4167
4168 const char *
4169 output_move_single (rtx operands[], rtx insn)
4170 {
4171 rtx dest = operands[0];
4172 rtx src = operands[1];
4173
4174 if (GET_CODE (dest) == REG)
4175 {
4176 int dest_regno = REGNO (dest);
4177 machine_mode mode = GET_MODE (dest);
4178
4179 if (GPR_P (dest_regno))
4180 {
4181 if (GET_CODE (src) == REG)
4182 {
4183 /* gpr <- some sort of register */
4184 int src_regno = REGNO (src);
4185
4186 if (GPR_P (src_regno))
4187 return "mov %1, %0";
4188
4189 else if (FPR_P (src_regno))
4190 return "movfg %1, %0";
4191
4192 else if (SPR_P (src_regno))
4193 return "movsg %1, %0";
4194 }
4195
4196 else if (GET_CODE (src) == MEM)
4197 {
4198 /* gpr <- memory */
4199 switch (mode)
4200 {
4201 default:
4202 break;
4203
4204 case E_QImode:
4205 return "ldsb%I1%U1 %M1,%0";
4206
4207 case E_HImode:
4208 return "ldsh%I1%U1 %M1,%0";
4209
4210 case E_SImode:
4211 case E_SFmode:
4212 return "ld%I1%U1 %M1, %0";
4213 }
4214 }
4215
4216 else if (GET_CODE (src) == CONST_INT
4217 || GET_CODE (src) == CONST_DOUBLE)
4218 {
4219 /* gpr <- integer/floating constant */
4220 HOST_WIDE_INT value;
4221
4222 if (GET_CODE (src) == CONST_INT)
4223 value = INTVAL (src);
4224
4225 else if (mode == SFmode)
4226 {
4227 long l;
4228
4229 REAL_VALUE_TO_TARGET_SINGLE
4230 (*CONST_DOUBLE_REAL_VALUE (src), l);
4231 value = l;
4232 }
4233
4234 else
4235 value = CONST_DOUBLE_LOW (src);
4236
4237 if (IN_RANGE (value, -32768, 32767))
4238 return "setlos %1, %0";
4239
4240 return "#";
4241 }
4242
4243 else if (GET_CODE (src) == SYMBOL_REF
4244 || GET_CODE (src) == LABEL_REF
4245 || GET_CODE (src) == CONST)
4246 {
4247 return "#";
4248 }
4249 }
4250
4251 else if (FPR_P (dest_regno))
4252 {
4253 if (GET_CODE (src) == REG)
4254 {
4255 /* fpr <- some sort of register */
4256 int src_regno = REGNO (src);
4257
4258 if (GPR_P (src_regno))
4259 return "movgf %1, %0";
4260
4261 else if (FPR_P (src_regno))
4262 {
4263 if (TARGET_HARD_FLOAT)
4264 return "fmovs %1, %0";
4265 else
4266 return "mor %1, %1, %0";
4267 }
4268 }
4269
4270 else if (GET_CODE (src) == MEM)
4271 {
4272 /* fpr <- memory */
4273 switch (mode)
4274 {
4275 default:
4276 break;
4277
4278 case E_QImode:
4279 return "ldbf%I1%U1 %M1,%0";
4280
4281 case E_HImode:
4282 return "ldhf%I1%U1 %M1,%0";
4283
4284 case E_SImode:
4285 case E_SFmode:
4286 return "ldf%I1%U1 %M1, %0";
4287 }
4288 }
4289
4290 else if (ZERO_P (src))
4291 return "movgf %., %0";
4292 }
4293
4294 else if (SPR_P (dest_regno))
4295 {
4296 if (GET_CODE (src) == REG)
4297 {
4298 /* spr <- some sort of register */
4299 int src_regno = REGNO (src);
4300
4301 if (GPR_P (src_regno))
4302 return "movgs %1, %0";
4303 }
4304 else if (ZERO_P (src))
4305 return "movgs %., %0";
4306 }
4307 }
4308
4309 else if (GET_CODE (dest) == MEM)
4310 {
4311 if (GET_CODE (src) == REG)
4312 {
4313 int src_regno = REGNO (src);
4314 machine_mode mode = GET_MODE (dest);
4315
4316 if (GPR_P (src_regno))
4317 {
4318 switch (mode)
4319 {
4320 default:
4321 break;
4322
4323 case E_QImode:
4324 return "stb%I0%U0 %1, %M0";
4325
4326 case E_HImode:
4327 return "sth%I0%U0 %1, %M0";
4328
4329 case E_SImode:
4330 case E_SFmode:
4331 return "st%I0%U0 %1, %M0";
4332 }
4333 }
4334
4335 else if (FPR_P (src_regno))
4336 {
4337 switch (mode)
4338 {
4339 default:
4340 break;
4341
4342 case E_QImode:
4343 return "stbf%I0%U0 %1, %M0";
4344
4345 case E_HImode:
4346 return "sthf%I0%U0 %1, %M0";
4347
4348 case E_SImode:
4349 case E_SFmode:
4350 return "stf%I0%U0 %1, %M0";
4351 }
4352 }
4353 }
4354
4355 else if (ZERO_P (src))
4356 {
4357 switch (GET_MODE (dest))
4358 {
4359 default:
4360 break;
4361
4362 case E_QImode:
4363 return "stb%I0%U0 %., %M0";
4364
4365 case E_HImode:
4366 return "sth%I0%U0 %., %M0";
4367
4368 case E_SImode:
4369 case E_SFmode:
4370 return "st%I0%U0 %., %M0";
4371 }
4372 }
4373 }
4374
4375 fatal_insn ("bad output_move_single operand", insn);
4376 return "";
4377 }
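
/* Two quick examples of the constant case above: moving 1000 into a GPR
   fits the 16-bit signed range and is emitted as

	setlos 1000, grD

   whereas a constant such as 100000 falls outside that range, so "#" is
   returned and the move is split later (on FR-V typically into a
   sethi/setlo pair).  */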
4378
4379 \f
4380 /* Return a string to output a double word move. */
4381
4382 const char *
4383 output_move_double (rtx operands[], rtx insn)
4384 {
4385 rtx dest = operands[0];
4386 rtx src = operands[1];
4387 machine_mode mode = GET_MODE (dest);
4388
4389 if (GET_CODE (dest) == REG)
4390 {
4391 int dest_regno = REGNO (dest);
4392
4393 if (GPR_P (dest_regno))
4394 {
4395 if (GET_CODE (src) == REG)
4396 {
4397 /* gpr <- some sort of register */
4398 int src_regno = REGNO (src);
4399
4400 if (GPR_P (src_regno))
4401 return "#";
4402
4403 else if (FPR_P (src_regno))
4404 {
4405 if (((dest_regno - GPR_FIRST) & 1) == 0
4406 && ((src_regno - FPR_FIRST) & 1) == 0)
4407 return "movfgd %1, %0";
4408
4409 return "#";
4410 }
4411 }
4412
4413 else if (GET_CODE (src) == MEM)
4414 {
4415 /* gpr <- memory */
4416 if (dbl_memory_one_insn_operand (src, mode))
4417 return "ldd%I1%U1 %M1, %0";
4418
4419 return "#";
4420 }
4421
4422 else if (GET_CODE (src) == CONST_INT
4423 || GET_CODE (src) == CONST_DOUBLE)
4424 return "#";
4425 }
4426
4427 else if (FPR_P (dest_regno))
4428 {
4429 if (GET_CODE (src) == REG)
4430 {
4431 /* fpr <- some sort of register */
4432 int src_regno = REGNO (src);
4433
4434 if (GPR_P (src_regno))
4435 {
4436 if (((dest_regno - FPR_FIRST) & 1) == 0
4437 && ((src_regno - GPR_FIRST) & 1) == 0)
4438 return "movgfd %1, %0";
4439
4440 return "#";
4441 }
4442
4443 else if (FPR_P (src_regno))
4444 {
4445 if (TARGET_DOUBLE
4446 && ((dest_regno - FPR_FIRST) & 1) == 0
4447 && ((src_regno - FPR_FIRST) & 1) == 0)
4448 return "fmovd %1, %0";
4449
4450 return "#";
4451 }
4452 }
4453
4454 else if (GET_CODE (src) == MEM)
4455 {
4456 /* fpr <- memory */
4457 if (dbl_memory_one_insn_operand (src, mode))
4458 return "lddf%I1%U1 %M1, %0";
4459
4460 return "#";
4461 }
4462
4463 else if (ZERO_P (src))
4464 return "#";
4465 }
4466 }
4467
4468 else if (GET_CODE (dest) == MEM)
4469 {
4470 if (GET_CODE (src) == REG)
4471 {
4472 int src_regno = REGNO (src);
4473
4474 if (GPR_P (src_regno))
4475 {
4476 if (((src_regno - GPR_FIRST) & 1) == 0
4477 && dbl_memory_one_insn_operand (dest, mode))
4478 return "std%I0%U0 %1, %M0";
4479
4480 return "#";
4481 }
4482
4483 if (FPR_P (src_regno))
4484 {
4485 if (((src_regno - FPR_FIRST) & 1) == 0
4486 && dbl_memory_one_insn_operand (dest, mode))
4487 return "stdf%I0%U0 %1, %M0";
4488
4489 return "#";
4490 }
4491 }
4492
4493 else if (ZERO_P (src))
4494 {
4495 if (dbl_memory_one_insn_operand (dest, mode))
4496 return "std%I0%U0 %., %M0";
4497
4498 return "#";
4499 }
4500 }
4501
4502 fatal_insn ("bad output_move_double operand", insn);
4503 return "";
4504 }
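
/* Note that the single-insn forms above (movfgd, movgfd, fmovd, ldd, std,
   lddf, stdf) are only offered when the register pair is even-aligned and,
   for memory, when dbl_memory_one_insn_operand accepts the address;
   otherwise "#" makes the splitter break the move into two word moves.  */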
4505
4506 \f
4507 /* Return a string to output a single word conditional move.
4508 Operand0 -- EQ/NE of ccr register and 0
4509 Operand1 -- CCR register
4510 Operand2 -- destination
4511 Operand3 -- source */
4512
4513 const char *
4514 output_condmove_single (rtx operands[], rtx insn)
4515 {
4516 rtx dest = operands[2];
4517 rtx src = operands[3];
4518
4519 if (GET_CODE (dest) == REG)
4520 {
4521 int dest_regno = REGNO (dest);
4522 machine_mode mode = GET_MODE (dest);
4523
4524 if (GPR_P (dest_regno))
4525 {
4526 if (GET_CODE (src) == REG)
4527 {
4528 /* gpr <- some sort of register */
4529 int src_regno = REGNO (src);
4530
4531 if (GPR_P (src_regno))
4532 return "cmov %z3, %2, %1, %e0";
4533
4534 else if (FPR_P (src_regno))
4535 return "cmovfg %3, %2, %1, %e0";
4536 }
4537
4538 else if (GET_CODE (src) == MEM)
4539 {
4540 /* gpr <- memory */
4541 switch (mode)
4542 {
4543 default:
4544 break;
4545
4546 case E_QImode:
4547 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4548
4549 case E_HImode:
4550 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4551
4552 case E_SImode:
4553 case E_SFmode:
4554 return "cld%I3%U3 %M3, %2, %1, %e0";
4555 }
4556 }
4557
4558 else if (ZERO_P (src))
4559 return "cmov %., %2, %1, %e0";
4560 }
4561
4562 else if (FPR_P (dest_regno))
4563 {
4564 if (GET_CODE (src) == REG)
4565 {
4566 /* fpr <- some sort of register */
4567 int src_regno = REGNO (src);
4568
4569 if (GPR_P (src_regno))
4570 return "cmovgf %3, %2, %1, %e0";
4571
4572 else if (FPR_P (src_regno))
4573 {
4574 if (TARGET_HARD_FLOAT)
4575 return "cfmovs %3,%2,%1,%e0";
4576 else
4577 return "cmor %3, %3, %2, %1, %e0";
4578 }
4579 }
4580
4581 else if (GET_CODE (src) == MEM)
4582 {
4583 /* fpr <- memory */
4584 if (mode == SImode || mode == SFmode)
4585 return "cldf%I3%U3 %M3, %2, %1, %e0";
4586 }
4587
4588 else if (ZERO_P (src))
4589 return "cmovgf %., %2, %1, %e0";
4590 }
4591 }
4592
4593 else if (GET_CODE (dest) == MEM)
4594 {
4595 if (GET_CODE (src) == REG)
4596 {
4597 int src_regno = REGNO (src);
4598 machine_mode mode = GET_MODE (dest);
4599
4600 if (GPR_P (src_regno))
4601 {
4602 switch (mode)
4603 {
4604 default:
4605 break;
4606
4607 case E_QImode:
4608 return "cstb%I2%U2 %3, %M2, %1, %e0";
4609
4610 case E_HImode:
4611 return "csth%I2%U2 %3, %M2, %1, %e0";
4612
4613 case E_SImode:
4614 case E_SFmode:
4615 return "cst%I2%U2 %3, %M2, %1, %e0";
4616 }
4617 }
4618
4619 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4620 return "cstf%I2%U2 %3, %M2, %1, %e0";
4621 }
4622
4623 else if (ZERO_P (src))
4624 {
4625 machine_mode mode = GET_MODE (dest);
4626 switch (mode)
4627 {
4628 default:
4629 break;
4630
4631 case E_QImode:
4632 return "cstb%I2%U2 %., %M2, %1, %e0";
4633
4634 case E_HImode:
4635 return "csth%I2%U2 %., %M2, %1, %e0";
4636
4637 case E_SImode:
4638 case E_SFmode:
4639 return "cst%I2%U2 %., %M2, %1, %e0";
4640 }
4641 }
4642 }
4643
4644 fatal_insn ("bad output_condmove_single operand", insn);
4645 return "";
4646 }
4647
4648 \f
4649 /* Emit the appropriate code to do a comparison, returning the register the
4650 comparison was done in. */
4651
4652 static rtx
4653 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4654 {
4655 machine_mode cc_mode;
4656 rtx cc_reg;
4657
4658 /* Floating point doesn't have comparison against a constant. */
4659 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4660 op1 = force_reg (GET_MODE (op0), op1);
4661
4662 /* Possibly disable using anything but a fixed register in order to work
4663 around cse moving comparisons past function calls. */
4664 cc_mode = SELECT_CC_MODE (test, op0, op1);
4665 cc_reg = ((TARGET_ALLOC_CC)
4666 ? gen_reg_rtx (cc_mode)
4667 : gen_rtx_REG (cc_mode,
4668 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4669
4670 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (cc_mode, op0, op1)));
4671
4672 return cc_reg;
4673 }
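
/* Schematically, comparing two SImode registers A and B therefore emits

     (set (reg cc) (compare (reg A) (reg B)))

   with the CC mode chosen by SELECT_CC_MODE and cc being either a fresh
   pseudo (TARGET_ALLOC_CC) or the first ICC/FCC hard register.  */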
4674
4675 \f
4676 /* Emit code for a conditional branch.
4677 XXX: I originally wanted to add a clobber of a CCR register to use in
4678 conditional execution, but that confuses the rest of the compiler. */
4679
4680 int
4681 frv_emit_cond_branch (rtx operands[])
4682 {
4683 rtx test_rtx;
4684 rtx label_ref;
4685 rtx if_else;
4686 enum rtx_code test = GET_CODE (operands[0]);
4687 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
4688 machine_mode cc_mode = GET_MODE (cc_reg);
4689
4690 /* Branches generate:
4691 (set (pc)
4692 (if_then_else (<test>, <cc_reg>, (const_int 0))
4693 (label_ref <branch_label>)
4694 (pc))) */
4695 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
4696 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4697 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4698 emit_jump_insn (gen_rtx_SET (pc_rtx, if_else));
4699 return TRUE;
4700 }
4701
4702 \f
4703 /* Emit code to set a gpr to 1/0 based on a comparison. */
4704
4705 int
4706 frv_emit_scc (rtx operands[])
4707 {
4708 rtx set;
4709 rtx test_rtx;
4710 rtx clobber;
4711 rtx cr_reg;
4712 enum rtx_code test = GET_CODE (operands[1]);
4713 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]);
4714
4715 /* SCC instructions generate:
4716 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
4717 (clobber (<ccr_reg>))]) */
4718 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4719 set = gen_rtx_SET (operands[0], test_rtx);
4720
4721 cr_reg = ((TARGET_ALLOC_CC)
4722 ? gen_reg_rtx (CC_CCRmode)
4723 : gen_rtx_REG (CC_CCRmode,
4724 ((GET_MODE (cc_reg) == CC_FPmode)
4725 ? FCR_FIRST
4726 : ICR_FIRST)));
4727
4728 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4729 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4730 return TRUE;
4731 }
4732
4733 \f
4734 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4735 the separate insns. */
4736
4737 rtx
4738 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4739 {
4740 rtx ret;
4741
4742 start_sequence ();
4743
4744 /* Set the appropriate CCR bit. */
4745 emit_insn (gen_rtx_SET (cr_reg,
4746 gen_rtx_fmt_ee (GET_CODE (test),
4747 GET_MODE (cr_reg),
4748 cc_reg,
4749 const0_rtx)));
4750
4751 /* Move the value into the destination. */
4752 emit_move_insn (dest, GEN_INT (value));
4753
4754 /* Move 0 into the destination if the test failed. */
4755 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4756 gen_rtx_EQ (GET_MODE (cr_reg),
4757 cr_reg,
4758 const0_rtx),
4759 gen_rtx_SET (dest, const0_rtx)));
4760
4761 /* Finish up, return sequence. */
4762 ret = get_insns ();
4763 end_sequence ();
4764 return ret;
4765 }
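
/* Schematically, for dest = (cc <test> 0) with VALUE == 1 the sequence
   built above is

     (set cr (<test> cc 0))
     (set dest (const_int 1))
     (cond_exec (eq cr 0) (set dest (const_int 0)))

   i.e. the value is loaded unconditionally and then zeroed again if the
   CCR bit says the test failed.  */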
4766
4767 \f
4768 /* Emit the code for a conditional move, return TRUE if we could do the
4769 move. */
4770
4771 int
4772 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4773 {
4774 rtx set;
4775 rtx clobber_cc;
4776 rtx test2;
4777 rtx cr_reg;
4778 rtx if_rtx;
4779 enum rtx_code test = GET_CODE (test_rtx);
4780 rtx cc_reg = frv_emit_comparison (test,
4781 XEXP (test_rtx, 0), XEXP (test_rtx, 1));
4782 machine_mode cc_mode = GET_MODE (cc_reg);
4783
4784 /* Conditional move instructions generate:
4785 (parallel [(set <target>
4786 (if_then_else (<test> <cc_reg> (const_int 0))
4787 <src1>
4788 <src2>))
4789 (clobber (<ccr_reg>))]) */
4790
4791 /* Handle various cases of conditional move involving two constants. */
4792 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4793 {
4794 HOST_WIDE_INT value1 = INTVAL (src1);
4795 HOST_WIDE_INT value2 = INTVAL (src2);
4796
4797 /* Having 0 as one of the constants can be done by loading the other
4798 constant, and optionally moving in gr0. */
4799 if (value1 == 0 || value2 == 0)
4800 ;
4801
4802 /* If the first value is within an addi range and also the difference
4803 between the two fits in an addi's range, load up the difference, then
4804 conditionally move in 0, and then unconditionally add the first
4805 value. */
4806 else if (IN_RANGE (value1, -2048, 2047)
4807 && IN_RANGE (value2 - value1, -2048, 2047))
4808 ;
4809
4810 /* If neither condition holds, just force the constant into a
4811 register. */
4812 else
4813 {
4814 src1 = force_reg (GET_MODE (dest), src1);
4815 src2 = force_reg (GET_MODE (dest), src2);
4816 }
4817 }
4818
4819 /* If one value is a register, ensure the other value is either 0 or a
4820 register. */
4821 else
4822 {
4823 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4824 src1 = force_reg (GET_MODE (dest), src1);
4825
4826 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4827 src2 = force_reg (GET_MODE (dest), src2);
4828 }
4829
4830 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4831 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4832
4833 set = gen_rtx_SET (dest, if_rtx);
4834
4835 cr_reg = ((TARGET_ALLOC_CC)
4836 ? gen_reg_rtx (CC_CCRmode)
4837 : gen_rtx_REG (CC_CCRmode,
4838 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4839
4840 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4841 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4842 return TRUE;
4843 }
4844
4845 \f
4846 /* Split a conditional move into constituent parts, returning a SEQUENCE
4847 containing all of the insns. */
4848
4849 rtx
4850 frv_split_cond_move (rtx operands[])
4851 {
4852 rtx dest = operands[0];
4853 rtx test = operands[1];
4854 rtx cc_reg = operands[2];
4855 rtx src1 = operands[3];
4856 rtx src2 = operands[4];
4857 rtx cr_reg = operands[5];
4858 rtx ret;
4859 machine_mode cr_mode = GET_MODE (cr_reg);
4860
4861 start_sequence ();
4862
4863 /* Set the appropriate CCR bit. */
4864 emit_insn (gen_rtx_SET (cr_reg,
4865 gen_rtx_fmt_ee (GET_CODE (test),
4866 GET_MODE (cr_reg),
4867 cc_reg,
4868 const0_rtx)));
4869
4870 /* Handle various cases of conditional move involving two constants. */
4871 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4872 {
4873 HOST_WIDE_INT value1 = INTVAL (src1);
4874 HOST_WIDE_INT value2 = INTVAL (src2);
4875
4876 /* Having 0 as one of the constants can be done by loading the other
4877 constant, and optionally moving in gr0. */
4878 if (value1 == 0)
4879 {
4880 emit_move_insn (dest, src2);
4881 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4882 gen_rtx_NE (cr_mode, cr_reg,
4883 const0_rtx),
4884 gen_rtx_SET (dest, src1)));
4885 }
4886
4887 else if (value2 == 0)
4888 {
4889 emit_move_insn (dest, src1);
4890 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4891 gen_rtx_EQ (cr_mode, cr_reg,
4892 const0_rtx),
4893 gen_rtx_SET (dest, src2)));
4894 }
4895
4896 /* If the first value is within an addi range and also the difference
4897 between the two fits in an addi's range, load up the difference, then
4898 conditionally move in 0, and then unconditionally add the first
4899 value. */
4900 else if (IN_RANGE (value1, -2048, 2047)
4901 && IN_RANGE (value2 - value1, -2048, 2047))
4902 {
4903 rtx dest_si = ((GET_MODE (dest) == SImode)
4904 ? dest
4905 : gen_rtx_SUBREG (SImode, dest, 0));
4906
4907 emit_move_insn (dest_si, GEN_INT (value2 - value1));
4908 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4909 gen_rtx_NE (cr_mode, cr_reg,
4910 const0_rtx),
4911 gen_rtx_SET (dest_si, const0_rtx)));
4912 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
4913 }
4914
4915 else
4916 gcc_unreachable ();
4917 }
4918 else
4919 {
4920 /* Emit the conditional move for the test being true if needed. */
4921 if (! rtx_equal_p (dest, src1))
4922 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4923 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
4924 gen_rtx_SET (dest, src1)));
4925
4926 /* Emit the conditional move for the test being false if needed. */
4927 if (! rtx_equal_p (dest, src2))
4928 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4929 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
4930 gen_rtx_SET (dest, src2)));
4931 }
4932
4933 /* Finish up, return sequence. */
4934 ret = get_insns ();
4935 end_sequence ();
4936 return ret;
4937 }
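
/* A worked example of the two-constant case above: for
   dest = (test ? 5 : 7) the split produces, in effect,

     dest = 2;                  (value2 - value1)
     if (cr set)  dest = 0;     (conditional zero)
     dest += 5;                 (add value1 back unconditionally)

   which yields 5 when the test holds and 7 otherwise.  */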
4938
4939 \f
4940 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
4941 memory location that is not known to be dword-aligned. */
4942 void
4943 frv_split_double_load (rtx dest, rtx source)
4944 {
4945 int regno = REGNO (dest);
4946 rtx dest1 = gen_highpart (SImode, dest);
4947 rtx dest2 = gen_lowpart (SImode, dest);
4948 rtx address = XEXP (source, 0);
4949
4950 /* If the address is pre-modified, load the lower-numbered register
4951 first, then load the other register using an integer offset from
4952 the modified base register. This order should always be safe,
4953 since the pre-modification cannot affect the same registers as the
4954 load does.
4955
4956 The situation for other loads is more complicated. Loading one
4957 of the registers could affect the value of ADDRESS, so we must
4958 be careful which order we do them in. */
4959 if (GET_CODE (address) == PRE_MODIFY
4960 || ! refers_to_regno_p (regno, address))
4961 {
4962 /* It is safe to load the lower-numbered register first. */
4963 emit_move_insn (dest1, change_address (source, SImode, NULL));
4964 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4965 }
4966 else
4967 {
4968 /* ADDRESS is not pre-modified and the address depends on the
4969 lower-numbered register. Load the higher-numbered register
4970 first. */
4971 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4972 emit_move_insn (dest1, change_address (source, SImode, NULL));
4973 }
4974 }
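
/* Concrete illustration: loading a DImode value into gr8/gr9 from an
   address that uses gr8 as its base loads gr9 (the higher-numbered half,
   at the word offset) first, since loading gr8 first would clobber the
   base; with an unrelated base register or a pre-modified address, the
   lower-numbered register is loaded first instead.  */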
4975
4976 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
4977 and SOURCE is either a double register or the constant zero. */
4978 void
4979 frv_split_double_store (rtx dest, rtx source)
4980 {
4981 rtx dest1 = change_address (dest, SImode, NULL);
4982 rtx dest2 = frv_index_memory (dest, SImode, 1);
4983 if (ZERO_P (source))
4984 {
4985 emit_move_insn (dest1, CONST0_RTX (SImode));
4986 emit_move_insn (dest2, CONST0_RTX (SImode));
4987 }
4988 else
4989 {
4990 emit_move_insn (dest1, gen_highpart (SImode, source));
4991 emit_move_insn (dest2, gen_lowpart (SImode, source));
4992 }
4993 }
4994
4995 \f
4996 /* Split a min/max operation returning a SEQUENCE containing all of the
4997 insns. */
4998
4999 rtx
5000 frv_split_minmax (rtx operands[])
5001 {
5002 rtx dest = operands[0];
5003 rtx minmax = operands[1];
5004 rtx src1 = operands[2];
5005 rtx src2 = operands[3];
5006 rtx cc_reg = operands[4];
5007 rtx cr_reg = operands[5];
5008 rtx ret;
5009 enum rtx_code test_code;
5010 machine_mode cr_mode = GET_MODE (cr_reg);
5011
5012 start_sequence ();
5013
5014 /* Figure out which test to use. */
5015 switch (GET_CODE (minmax))
5016 {
5017 default:
5018 gcc_unreachable ();
5019
5020 case SMIN: test_code = LT; break;
5021 case SMAX: test_code = GT; break;
5022 case UMIN: test_code = LTU; break;
5023 case UMAX: test_code = GTU; break;
5024 }
5025
5026 /* Issue the compare instruction. */
5027 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (GET_MODE (cc_reg),
5028 src1, src2)));
5029
5030 /* Set the appropriate CCR bit. */
5031 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (test_code,
5032 GET_MODE (cr_reg),
5033 cc_reg,
5034 const0_rtx)));
5035
5036 /* If we are taking the min/max of a nonzero constant, load that first, and
5037 then do a conditional move of the other value. */
5038 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5039 {
5040 gcc_assert (!rtx_equal_p (dest, src1));
5041
5042 emit_move_insn (dest, src2);
5043 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5044 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5045 gen_rtx_SET (dest, src1)));
5046 }
5047
5048 /* Otherwise, do each half of the move. */
5049 else
5050 {
5051 /* Emit the conditional move for the test being true if needed. */
5052 if (! rtx_equal_p (dest, src1))
5053 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5054 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5055 gen_rtx_SET (dest, src1)));
5056
5057 /* Emit the conditional move for the test being false if needed. */
5058 if (! rtx_equal_p (dest, src2))
5059 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5060 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5061 gen_rtx_SET (dest, src2)));
5062 }
5063
5064 /* Finish up, return sequence. */
5065 ret = get_insns ();
5066 end_sequence ();
5067 return ret;
5068 }
5069
5070 \f
5071 /* Split an integer abs operation returning a SEQUENCE containing all of the
5072 insns. */
5073
5074 rtx
5075 frv_split_abs (rtx operands[])
5076 {
5077 rtx dest = operands[0];
5078 rtx src = operands[1];
5079 rtx cc_reg = operands[2];
5080 rtx cr_reg = operands[3];
5081 rtx ret;
5082
5083 start_sequence ();
5084
5085 /* Issue the compare < 0 instruction. */
5086 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5087
5088 /* Set the appropriate CCR bit. */
5089 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (LT, CC_CCRmode,
5090 cc_reg, const0_rtx)));
5091
5092 /* Emit the conditional negate if the value is negative. */
5093 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5094 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5095 gen_negsi2 (dest, src)));
5096
5097 /* Emit the conditional move for the test being false if needed. */
5098 if (! rtx_equal_p (dest, src))
5099 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5100 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5101 gen_rtx_SET (dest, src)));
5102
5103 /* Finish up, return sequence. */
5104 ret = get_insns ();
5105 end_sequence ();
5106 return ret;
5107 }
5108
5109 \f
5110 /* Initialize machine-specific if-conversion data.
5111 On the FR-V, we don't have any extra fields per se, but it is a useful hook to
5112 initialize the static storage. */
5113 void
5114 frv_ifcvt_machdep_init (void *ce_info ATTRIBUTE_UNUSED)
5115 {
5116 frv_ifcvt.added_insns_list = NULL_RTX;
5117 frv_ifcvt.cur_scratch_regs = 0;
5118 frv_ifcvt.num_nested_cond_exec = 0;
5119 frv_ifcvt.cr_reg = NULL_RTX;
5120 frv_ifcvt.nested_cc_reg = NULL_RTX;
5121 frv_ifcvt.extra_int_cr = NULL_RTX;
5122 frv_ifcvt.extra_fp_cr = NULL_RTX;
5123 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5124 }
5125
5126 \f
5127 /* Internal function to add a potential insn to the list of insns to be inserted
5128 if the conditional execution conversion is successful. */
5129
5130 static void
5131 frv_ifcvt_add_insn (rtx pattern, rtx_insn *insn, int before_p)
5132 {
5133 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5134
5135 link->jump = before_p; /* Mark to add this before or after insn. */
5136 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5137 frv_ifcvt.added_insns_list);
5138
5139 if (TARGET_DEBUG_COND_EXEC)
5140 {
5141 fprintf (stderr,
5142 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5143 (before_p) ? "before" : "after",
5144 (int)INSN_UID (insn));
5145
5146 debug_rtx (pattern);
5147 }
5148 }
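
/* Because each entry is pushed onto the head of added_insns_list, the list
   behaves as a LIFO stack: an insn recorded first ends up being inserted
   after one recorded later.  frv_ifcvt_modify_multiple_tests below relies
   on this when it records the andcr/orcr insn before the corresponding
   check insn.  */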
5149
5150 \f
5151 /* A C expression to modify the code described by the conditional if
5152 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5153 FALSE_EXPR for converting if-then and if-then-else code to conditional
5154 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5155 tests cannot be converted. */
5156
5157 void
5158 frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false)
5159 {
5160 basic_block test_bb = ce_info->test_bb; /* test basic block */
5161 basic_block then_bb = ce_info->then_bb; /* THEN */
5162 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5163 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5164 rtx true_expr = *p_true;
5165 rtx cr;
5166 rtx cc;
5167 rtx nested_cc;
5168 machine_mode mode = GET_MODE (true_expr);
5169 int j;
5170 basic_block *bb;
5171 int num_bb;
5172 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5173 rtx check_insn;
5174 rtx sub_cond_exec_reg;
5175 enum rtx_code code;
5176 enum rtx_code code_true;
5177 enum rtx_code code_false;
5178 enum reg_class cc_class;
5179 enum reg_class cr_class;
5180 int cc_first;
5181 int cc_last;
5182 reg_set_iterator rsi;
5183
5184 /* Make sure we are only dealing with hard registers. Also honor the
5185 -mno-cond-exec switch, and -mno-nested-cond-exec switches if
5186 applicable. */
5187 if (!reload_completed || !TARGET_COND_EXEC
5188 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5189 goto fail;
5190
5191 /* Figure out which registers we can allocate for our own purposes. Only
5192 consider registers that are not preserved across function calls and are
5193 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5194 if we did not need to use them in reloading other registers. */
5195 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5196 tmp_reg->regs = regs_invalidated_by_call & ~fixed_reg_set;
5197 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5198 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5199
5200 /* If this is a nested IF, we need to discover whether the CC registers that
5201 are set/used inside of the block are used anywhere else. If not, we can
5202 change them to be the CC register that is paired with the CR register that
5203 controls the outermost IF block. */
5204 if (ce_info->pass > 1)
5205 {
5206 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5207 for (j = CC_FIRST; j <= CC_LAST; j++)
5208 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5209 {
5210 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5211 continue;
5212
5213 if (else_bb
5214 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5215 continue;
5216
5217 if (join_bb
5218 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5219 continue;
5220
5221 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5222 }
5223 }
5224
5225 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5226 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5227
5228 frv_ifcvt.added_insns_list = NULL_RTX;
5229 frv_ifcvt.cur_scratch_regs = 0;
5230
5231 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5232 * sizeof (basic_block));
5233
5234 if (join_bb)
5235 {
5236 unsigned int regno;
5237
5238 /* Remove anything live at the beginning of the join block from being
5239 available for allocation. */
5240 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5241 {
5242 if (regno < FIRST_PSEUDO_REGISTER)
5243 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5244 }
5245 }
5246
5247 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5248 num_bb = 0;
5249 if (ce_info->num_multiple_test_blocks)
5250 {
5251 basic_block multiple_test_bb = ce_info->last_test_bb;
5252
5253 while (multiple_test_bb != test_bb)
5254 {
5255 bb[num_bb++] = multiple_test_bb;
5256 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5257 }
5258 }
5259
5260 /* Add in the THEN and ELSE blocks to be scanned. */
5261 bb[num_bb++] = then_bb;
5262 if (else_bb)
5263 bb[num_bb++] = else_bb;
5264
5265 sub_cond_exec_reg = NULL_RTX;
5266 frv_ifcvt.num_nested_cond_exec = 0;
5267
5268 /* Scan all of the blocks for registers that must not be allocated. */
5269 for (j = 0; j < num_bb; j++)
5270 {
5271 rtx_insn *last_insn = BB_END (bb[j]);
5272 rtx_insn *insn = BB_HEAD (bb[j]);
5273 unsigned int regno;
5274
5275 if (dump_file)
5276 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5277 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5278 (int) bb[j]->index,
5279 (int) INSN_UID (BB_HEAD (bb[j])),
5280 (int) INSN_UID (BB_END (bb[j])));
5281
5282 /* Anything live at the beginning of the block is obviously unavailable
5283 for allocation. */
5284 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5285 {
5286 if (regno < FIRST_PSEUDO_REGISTER)
5287 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5288 }
5289
5290 /* Loop through the insns in the block. */
5291 for (;;)
5292 {
5293 /* Mark any new registers that are created as being unavailable for
5294 allocation. Also see if the CC register used in nested IFs can be
5295 reallocated. */
5296 if (INSN_P (insn))
5297 {
5298 rtx pattern;
5299 rtx set;
5300 int skip_nested_if = FALSE;
5301 HARD_REG_SET mentioned_regs;
5302
5303 CLEAR_HARD_REG_SET (mentioned_regs);
5304 find_all_hard_regs (PATTERN (insn), &mentioned_regs);
5305 tmp_reg->regs &= ~mentioned_regs;
5306
5307 pattern = PATTERN (insn);
5308 if (GET_CODE (pattern) == COND_EXEC)
5309 {
5310 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5311
5312 if (reg != sub_cond_exec_reg)
5313 {
5314 sub_cond_exec_reg = reg;
5315 frv_ifcvt.num_nested_cond_exec++;
5316 }
5317 }
5318
5319 set = single_set_pattern (pattern);
5320 if (set)
5321 {
5322 rtx dest = SET_DEST (set);
5323 rtx src = SET_SRC (set);
5324
5325 if (GET_CODE (dest) == REG)
5326 {
5327 int regno = REGNO (dest);
5328 enum rtx_code src_code = GET_CODE (src);
5329
5330 if (CC_P (regno) && src_code == COMPARE)
5331 skip_nested_if = TRUE;
5332
5333 else if (CR_P (regno)
5334 && (src_code == IF_THEN_ELSE
5335 || COMPARISON_P (src)))
5336 skip_nested_if = TRUE;
5337 }
5338 }
5339
5340 if (! skip_nested_if)
5341 frv_ifcvt.nested_cc_ok_rewrite &= ~mentioned_regs;
5342 }
5343
5344 if (insn == last_insn)
5345 break;
5346
5347 insn = NEXT_INSN (insn);
5348 }
5349 }
5350
5351 /* If this is a nested if, rewrite the CC registers that are available to
5352 include the ones that can be rewritten, to increase the chance of being
5353 able to allocate a paired CC/CR register combination. */
5354 if (ce_info->pass > 1)
5355 {
5356 for (j = CC_FIRST; j <= CC_LAST; j++)
5357 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5358 SET_HARD_REG_BIT (tmp_reg->regs, j);
5359 else
5360 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5361 }
5362
5363 if (dump_file)
5364 {
5365 int num_gprs = 0;
5366 fprintf (dump_file, "Available GPRs: ");
5367
5368 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5369 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5370 {
5371 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5372 if (++num_gprs > GPR_TEMP_NUM+2)
5373 break;
5374 }
5375
5376 fprintf (dump_file, "%s\nAvailable CRs: ",
5377 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5378
5379 for (j = CR_FIRST; j <= CR_LAST; j++)
5380 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5381 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5382
5383 fputs ("\n", dump_file);
5384
5385 if (ce_info->pass > 1)
5386 {
5387 fprintf (dump_file, "Modifiable CCs: ");
5388 for (j = CC_FIRST; j <= CC_LAST; j++)
5389 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5390 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5391
5392 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5393 frv_ifcvt.num_nested_cond_exec);
5394 }
5395 }
5396
5397 /* Allocate the appropriate temporary condition code register. Try to
5398 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5399 that conditional cmp's can be done. */
5400 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5401 {
5402 cr_class = ICR_REGS;
5403 cc_class = ICC_REGS;
5404 cc_first = ICC_FIRST;
5405 cc_last = ICC_LAST;
5406 }
5407 else if (mode == CC_FPmode)
5408 {
5409 cr_class = FCR_REGS;
5410 cc_class = FCC_REGS;
5411 cc_first = FCC_FIRST;
5412 cc_last = FCC_LAST;
5413 }
5414 else
5415 {
5416 cc_first = cc_last = 0;
5417 cr_class = cc_class = NO_REGS;
5418 }
5419
5420 cc = XEXP (true_expr, 0);
5421 nested_cc = cr = NULL_RTX;
5422 if (cc_class != NO_REGS)
5423 {
5424 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5425 so we can execute a csubcc/caddcc/cfcmps instruction. */
5426 int cc_regno;
5427
5428 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5429 {
5430 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5431
5432 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5433 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5434 {
5435 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5436 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5437 TRUE);
5438
5439 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5440 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5441 TRUE, TRUE);
5442 break;
5443 }
5444 }
5445 }
5446
5447 if (! cr)
5448 {
5449 if (dump_file)
5450 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5451
5452 goto fail;
5453 }
5454
5455 if (dump_file)
5456 fprintf (dump_file,
5457 "Will use %s for conditional execution, %s for nested comparisons\n",
5458 reg_names[ REGNO (cr)],
5459 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5460
5461 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5462 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5463 bit being true. We don't do this for floating point, because of NaNs. */
5464 code = GET_CODE (true_expr);
5465 if (GET_MODE (cc) != CC_FPmode)
5466 {
5467 code = reverse_condition (code);
5468 code_true = EQ;
5469 code_false = NE;
5470 }
5471 else
5472 {
5473 code_true = NE;
5474 code_false = EQ;
5475 }
5476
5477 check_insn = gen_rtx_SET (cr, gen_rtx_fmt_ee (code, CC_CCRmode,
5478 cc, const0_rtx));
5479
5480 /* Record the check insn to be inserted later. */
5481 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5482
5483 /* Update the tests. */
5484 frv_ifcvt.cr_reg = cr;
5485 frv_ifcvt.nested_cc_reg = nested_cc;
5486 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5487 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5488 return;
5489
5490 /* Fail, don't do this conditional execution. */
5491 fail:
5492 *p_true = NULL_RTX;
5493 *p_false = NULL_RTX;
5494 if (dump_file)
5495 fprintf (dump_file, "Disabling this conditional execution.\n");
5496
5497 return;
5498 }
5499
5500 \f
5501 /* A C expression to modify the code described by the conditional if
5502 information CE_INFO, for the basic block BB, possibly updating the tests in
5503 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5504 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5505 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5506
5507 /* p_true and p_false are given expressions of the form:
5508
5509 (and (eq:CC_CCR (reg:CC_CCR)
5510 (const_int 0))
5511 (eq:CC (reg:CC)
5512 (const_int 0))) */
5513
5514 void
5515 frv_ifcvt_modify_multiple_tests (ce_if_block *ce_info,
5516 basic_block bb,
5517 rtx *p_true,
5518 rtx *p_false)
5519 {
5520 rtx old_true = XEXP (*p_true, 0);
5521 rtx old_false = XEXP (*p_false, 0);
5522 rtx true_expr = XEXP (*p_true, 1);
5523 rtx false_expr = XEXP (*p_false, 1);
5524 rtx test_expr;
5525 rtx old_test;
5526 rtx cr = XEXP (old_true, 0);
5527 rtx check_insn;
5528 rtx new_cr = NULL_RTX;
5529 rtx *p_new_cr = (rtx *)0;
5530 rtx if_else;
5531 rtx compare;
5532 rtx cc;
5533 enum reg_class cr_class;
5534 machine_mode mode = GET_MODE (true_expr);
5535 rtx (*logical_func)(rtx, rtx, rtx);
5536
5537 if (TARGET_DEBUG_COND_EXEC)
5538 {
5539 fprintf (stderr,
5540 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5541 ce_info->and_and_p ? "&&" : "||");
5542
5543 debug_rtx (*p_true);
5544
5545 fputs ("\nfalse insn:\n", stderr);
5546 debug_rtx (*p_false);
5547 }
5548
5549 if (!TARGET_MULTI_CE)
5550 goto fail;
5551
5552 if (GET_CODE (cr) != REG)
5553 goto fail;
5554
5555 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5556 {
5557 cr_class = ICR_REGS;
5558 p_new_cr = &frv_ifcvt.extra_int_cr;
5559 }
5560 else if (mode == CC_FPmode)
5561 {
5562 cr_class = FCR_REGS;
5563 p_new_cr = &frv_ifcvt.extra_fp_cr;
5564 }
5565 else
5566 goto fail;
5567
5568 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5569 more &&/|| tests. */
5570 new_cr = *p_new_cr;
5571 if (! new_cr)
5572 {
5573 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5574 CC_CCRmode, TRUE, TRUE);
5575 if (! new_cr)
5576 goto fail;
5577 }
5578
5579 if (ce_info->and_and_p)
5580 {
5581 old_test = old_false;
5582 test_expr = true_expr;
5583 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5584 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5585 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5586 }
5587 else
5588 {
5589 old_test = old_false;
5590 test_expr = false_expr;
5591 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5592 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5593 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5594 }
5595
5596 /* First add the andcr/andncr/orcr/orncr, which will be added after the
5597 conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
5598 stack. */
5599 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5600
5601 /* Now add the conditional check insn. */
5602 cc = XEXP (test_expr, 0);
5603 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5604 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5605
5606 check_insn = gen_rtx_SET (new_cr, if_else);
5607
5608 /* Add the new check insn to the list of check insns that need to be
5609 inserted. */
5610 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5611
5612 if (TARGET_DEBUG_COND_EXEC)
5613 {
5614 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5615 stderr);
5616
5617 debug_rtx (*p_true);
5618
5619 fputs ("\nfalse insn:\n", stderr);
5620 debug_rtx (*p_false);
5621 }
5622
5623 return;
5624
5625 fail:
5626 *p_true = *p_false = NULL_RTX;
5627
5628 /* If we allocated a CR register, release it. */
5629 if (new_cr)
5630 {
5631 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5632 *p_new_cr = NULL_RTX;
5633 }
5634
5635 if (TARGET_DEBUG_COND_EXEC)
5636 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5637
5638 return;
5639 }
5640
5641 \f
5642 /* Return a register which will be loaded with a value if an IF block is
5643 converted to conditional execution. This is used to rewrite instructions
5644 that use constants to ones that just use registers. */
5645
5646 static rtx
5647 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5648 {
5649 int num_alloc = frv_ifcvt.cur_scratch_regs;
5650 int i;
5651 rtx reg;
5652
5653 /* We know gr0 == 0, so replace any errant uses. */
5654 if (value == const0_rtx)
5655 return gen_rtx_REG (SImode, GPR_FIRST);
5656
5657 /* First search all registers currently loaded to see if we have an
5658 applicable constant. */
5659 if (CONSTANT_P (value)
5660 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5661 {
5662 for (i = 0; i < num_alloc; i++)
5663 {
5664 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5665 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5666 }
5667 }
5668
5669 /* Have we exhausted the number of registers available? */
5670 if (num_alloc >= GPR_TEMP_NUM)
5671 {
5672 if (dump_file)
5673 fprintf (dump_file, "Too many temporary registers allocated\n");
5674
5675 return NULL_RTX;
5676 }
5677
5678 /* Allocate the new register. */
5679 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5680 if (! reg)
5681 {
5682 if (dump_file)
5683 fputs ("Could not find a scratch register\n", dump_file);
5684
5685 return NULL_RTX;
5686 }
5687
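 /* Record the (set reg value); frv_ifcvt_modify_final later emits these
 sets unconditionally, just before the first test of the converted
 block.  */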
5688 frv_ifcvt.cur_scratch_regs++;
5689 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (reg, value);
5690
5691 if (dump_file)
5692 {
5693 if (GET_CODE (value) == CONST_INT)
5694 fprintf (dump_file, "Register %s will hold %ld\n",
5695 reg_names[ REGNO (reg)], (long)INTVAL (value));
5696
5697 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5698 fprintf (dump_file, "Register %s will hold LR\n",
5699 reg_names[ REGNO (reg)]);
5700
5701 else
5702 fprintf (dump_file, "Register %s will hold a saved value\n",
5703 reg_names[ REGNO (reg)]);
5704 }
5705
5706 return reg;
5707 }
5708
5709 \f
5710 /* Update a MEM used in conditional code that might contain an offset to put
5711 the offset into a scratch register, so that the conditional load/store
5712 operations can be used. This function returns the original pointer if the
5713 MEM is valid to use in conditional code, NULL if we can't load up the offset
5714 into a temporary register, or the new MEM if we were successful. */
5715
5716 static rtx
5717 frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn)
5718 {
5719 rtx addr = XEXP (mem, 0);
5720
5721 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE))
5722 {
5723 if (GET_CODE (addr) == PLUS)
5724 {
5725 rtx addr_op0 = XEXP (addr, 0);
5726 rtx addr_op1 = XEXP (addr, 1);
5727
5728 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5729 {
5730 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5731 if (!reg)
5732 return NULL_RTX;
5733
5734 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5735 }
5736
5737 else
5738 return NULL_RTX;
5739 }
5740
5741 else if (CONSTANT_P (addr))
5742 addr = frv_ifcvt_load_value (addr, insn);
5743
5744 else
5745 return NULL_RTX;
5746
5747 if (addr == NULL_RTX)
5748 return NULL_RTX;
5749
5750 else if (XEXP (mem, 0) != addr)
5751 return change_address (mem, mode, addr);
5752 }
5753
5754 return mem;
5755 }
5756
5757 \f
5758 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5759 SET, possibly conditionally executed. It may also have CLOBBERs and USEs. */
5760
5761 static rtx
5762 single_set_pattern (rtx pattern)
5763 {
5764 rtx set;
5765 int i;
5766
5767 if (GET_CODE (pattern) == COND_EXEC)
5768 pattern = COND_EXEC_CODE (pattern);
5769
5770 if (GET_CODE (pattern) == SET)
5771 return pattern;
5772
5773 else if (GET_CODE (pattern) == PARALLEL)
5774 {
5775 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5776 {
5777 rtx sub = XVECEXP (pattern, 0, i);
5778
5779 switch (GET_CODE (sub))
5780 {
5781 case USE:
5782 case CLOBBER:
5783 break;
5784
5785 case SET:
5786 if (set)
5787 return 0;
5788 else
5789 set = sub;
5790 break;
5791
5792 default:
5793 return 0;
5794 }
5795 }
5796 return set;
5797 }
5798
5799 return 0;
5800 }
5801
5802 \f
5803 /* A C expression to modify the code described by the conditional if
5804 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5805 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5806 insn cannot be converted to be executed conditionally. */
5807
5808 rtx
5809 frv_ifcvt_modify_insn (ce_if_block *ce_info,
5810 rtx pattern,
5811 rtx_insn *insn)
5812 {
5813 rtx orig_ce_pattern = pattern;
5814 rtx set;
5815 rtx op0;
5816 rtx op1;
5817 rtx test;
5818
5819 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5820
5821 test = COND_EXEC_TEST (pattern);
5822 if (GET_CODE (test) == AND)
5823 {
5824 rtx cr = frv_ifcvt.cr_reg;
5825 rtx test_reg;
5826
5827 op0 = XEXP (test, 0);
5828 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5829 goto fail;
5830
5831 op1 = XEXP (test, 1);
5832 test_reg = XEXP (op1, 0);
5833 if (GET_CODE (test_reg) != REG)
5834 goto fail;
5835
5836 /* Is this the first nested if block in this sequence? If so, generate
5837 an andcr or andncr. */
5838 if (! frv_ifcvt.last_nested_if_cr)
5839 {
5840 rtx and_op;
5841
5842 frv_ifcvt.last_nested_if_cr = test_reg;
5843 if (GET_CODE (op0) == NE)
5844 and_op = gen_andcr (test_reg, cr, test_reg);
5845 else
5846 and_op = gen_andncr (test_reg, cr, test_reg);
5847
5848 frv_ifcvt_add_insn (and_op, insn, TRUE);
5849 }
5850
5851 /* If this isn't the first statement in the nested if sequence, see if we
5852 are dealing with the same register. */
5853 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5854 goto fail;
5855
5856 COND_EXEC_TEST (pattern) = test = op1;
5857 }
5858
5859 /* If this isn't a nested if, reset state variables. */
5860 else
5861 {
5862 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5863 }
5864
5865 set = single_set_pattern (pattern);
5866 if (set)
5867 {
5868 rtx dest = SET_DEST (set);
5869 rtx src = SET_SRC (set);
5870 machine_mode mode = GET_MODE (dest);
5871
5872 /* Check for normal binary operators. */
5873 if (mode == SImode && ARITHMETIC_P (src))
5874 {
5875 op0 = XEXP (src, 0);
5876 op1 = XEXP (src, 1);
5877
5878 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
5879 {
5880 op1 = frv_ifcvt_load_value (op1, insn);
5881 if (op1)
5882 COND_EXEC_CODE (pattern)
5883 = gen_rtx_SET (dest, gen_rtx_fmt_ee (GET_CODE (src),
5884 GET_MODE (src),
5885 op0, op1));
5886 else
5887 goto fail;
5888 }
5889 }
5890
5891 /* For a multiply by a constant, we need to handle the sign extension
5892 correctly. Add a USE of the value after the multiply to prevent flow
5893 from cratering because only one of the two registers is used. */
5894 else if (mode == DImode && GET_CODE (src) == MULT)
5895 {
5896 op0 = XEXP (src, 0);
5897 op1 = XEXP (src, 1);
5898 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
5899 {
5900 op1 = frv_ifcvt_load_value (op1, insn);
5901 if (op1)
5902 {
5903 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
5904 COND_EXEC_CODE (pattern)
5905 = gen_rtx_SET (dest, gen_rtx_MULT (DImode, op0, op1));
5906 }
5907 else
5908 goto fail;
5909 }
5910
5911 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
5912 }
5913
5914 /* If we are just loading a constant created for a nested conditional
5915 execution statement, just load the constant without any conditional
5916 execution, since we know that the constant will not interfere with any
5917 other registers. */
5918 else if (frv_ifcvt.scratch_insns_bitmap
5919 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
5920 INSN_UID (insn))
5921 && REG_P (SET_DEST (set))
5922 /* We must not unconditionally set a scratch reg chosen
5923 for a nested if-converted block if its incoming
5924 value from the TEST block (or the result of the THEN
5925 branch) could/should propagate to the JOIN block.
5926 It suffices to test whether the register is live at
5927 the JOIN point: if it's live there, we can infer
5928 that we set it in the former JOIN block of the
5929 nested if-converted block (otherwise it wouldn't
5930 have been available as a scratch register), and it
5931 is either propagated through or set in the other
5932 conditional block. It's probably not worth trying
5933 to catch the latter case, and it could actually
5934 limit scheduling of the combined block quite
5935 severely. */
5936 && ce_info->join_bb
5937 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
5938 REGNO (SET_DEST (set))))
5939 /* Similarly, we must not unconditionally set a reg
5940 used as scratch in the THEN branch if the same reg
5941 is live in the ELSE branch. */
5942 && (! ce_info->else_bb
5943 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
5944 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
5945 REGNO (SET_DEST (set))))))
5946 pattern = set;
5947
5948 else if (mode == QImode || mode == HImode || mode == SImode
5949 || mode == SFmode)
5950 {
5951 int changed_p = FALSE;
5952
5953 /* Check for just loading up a constant */
5954 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
5955 {
5956 src = frv_ifcvt_load_value (src, insn);
5957 if (!src)
5958 goto fail;
5959
5960 changed_p = TRUE;
5961 }
5962
5963 /* See if we need to fix up stores */
5964 if (GET_CODE (dest) == MEM)
5965 {
5966 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
5967
5968 if (!new_mem)
5969 goto fail;
5970
5971 else if (new_mem != dest)
5972 {
5973 changed_p = TRUE;
5974 dest = new_mem;
5975 }
5976 }
5977
5978 /* See if we need to fix up loads */
5979 if (GET_CODE (src) == MEM)
5980 {
5981 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
5982
5983 if (!new_mem)
5984 goto fail;
5985
5986 else if (new_mem != src)
5987 {
5988 changed_p = TRUE;
5989 src = new_mem;
5990 }
5991 }
5992
5993 /* If either src or destination changed, redo SET. */
5994 if (changed_p)
5995 COND_EXEC_CODE (pattern) = gen_rtx_SET (dest, src);
5996 }
5997
5998 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
5999 rewriting the CC register to be the same as the paired CC/CR register
6000 for nested ifs. */
6001 else if (mode == CC_CCRmode && COMPARISON_P (src))
6002 {
6003 int regno = REGNO (XEXP (src, 0));
6004 rtx if_else;
6005
6006 if (ce_info->pass > 1
6007 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6008 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6009 {
6010 src = gen_rtx_fmt_ee (GET_CODE (src),
6011 CC_CCRmode,
6012 frv_ifcvt.nested_cc_reg,
6013 XEXP (src, 1));
6014 }
6015
6016 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6017 pattern = gen_rtx_SET (dest, if_else);
6018 }
6019
6020 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6021 else if (ce_info->pass > 1
6022 && GET_CODE (dest) == REG
6023 && CC_P (REGNO (dest))
6024 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6025 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6026 REGNO (dest))
6027 && GET_CODE (src) == COMPARE)
6028 {
6029 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6030 COND_EXEC_CODE (pattern)
6031 = gen_rtx_SET (frv_ifcvt.nested_cc_reg, copy_rtx (src));
6032 }
6033 }
6034
6035 if (TARGET_DEBUG_COND_EXEC)
6036 {
6037 rtx orig_pattern = PATTERN (insn);
6038
6039 PATTERN (insn) = pattern;
6040 fprintf (stderr,
6041 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6042 ce_info->pass);
6043
6044 debug_rtx (insn);
6045 PATTERN (insn) = orig_pattern;
6046 }
6047
6048 return pattern;
6049
6050 fail:
6051 if (TARGET_DEBUG_COND_EXEC)
6052 {
6053 rtx orig_pattern = PATTERN (insn);
6054
6055 PATTERN (insn) = orig_ce_pattern;
6056 fprintf (stderr,
6057 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6058 ce_info->pass);
6059
6060 debug_rtx (insn);
6061 PATTERN (insn) = orig_pattern;
6062 }
6063
6064 return NULL_RTX;
6065 }
6066
6067 \f
6068 /* A C expression to perform any final machine dependent modifications in
6069 converting code to conditional execution in the code described by the
6070 conditional if information CE_INFO. */
6071
6072 void
6073 frv_ifcvt_modify_final (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6074 {
6075 rtx_insn *existing_insn;
6076 rtx check_insn;
6077 rtx p = frv_ifcvt.added_insns_list;
6078 int i;
6079
6080 /* Loop inserting the check insns. The last check insn is the first test,
6081 and is the appropriate place to insert constants. */
6082 gcc_assert (p);
6083
6084 do
6085 {
6086 rtx check_and_insert_insns = XEXP (p, 0);
6087 rtx old_p = p;
6088
6089 check_insn = XEXP (check_and_insert_insns, 0);
6090 existing_insn = as_a <rtx_insn *> (XEXP (check_and_insert_insns, 1));
6091 p = XEXP (p, 1);
6092
6093 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6094 the existing insn, otherwise it is to be inserted AFTER. */
6095 if (check_and_insert_insns->jump)
6096 {
6097 emit_insn_before (check_insn, existing_insn);
6098 check_and_insert_insns->jump = 0;
6099 }
6100 else
6101 emit_insn_after (check_insn, existing_insn);
6102
6103 free_EXPR_LIST_node (check_and_insert_insns);
6104 free_EXPR_LIST_node (old_p);
6105 }
6106 while (p != NULL_RTX);
6107
6108 /* Load up any constants needed into temp gprs. */
6109 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6110 {
6111 rtx_insn *insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6112 if (! frv_ifcvt.scratch_insns_bitmap)
6113 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6114 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6115 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6116 }
6117
6118 frv_ifcvt.added_insns_list = NULL_RTX;
6119 frv_ifcvt.cur_scratch_regs = 0;
6120 }
6121
6122 \f
6123 /* A C expression to cancel any machine dependent modifications in converting
6124 code to conditional execution in the code described by the conditional if
6125 information CE_INFO. */
6126
6127 void
6128 frv_ifcvt_modify_cancel (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6129 {
6130 int i;
6131 rtx p = frv_ifcvt.added_insns_list;
6132
6133 /* Loop freeing the EXPR_LISTs that were allocated. */
6134 while (p != NULL_RTX)
6135 {
6136 rtx check_and_jump = XEXP (p, 0);
6137 rtx old_p = p;
6138
6139 p = XEXP (p, 1);
6140 free_EXPR_LIST_node (check_and_jump);
6141 free_EXPR_LIST_node (old_p);
6142 }
6143
6144 /* Release any temporary gprs allocated. */
6145 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6146 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6147
6148 frv_ifcvt.added_insns_list = NULL_RTX;
6149 frv_ifcvt.cur_scratch_regs = 0;
6150 return;
6151 }
6152 \f
6153 /* A C expression for the size in bytes of the trampoline, as an integer.
6154 The template is:
6155
6156 setlo #0, <jmp_reg>
6157 setlo #0, <static_chain>
6158 sethi #0, <jmp_reg>
6159 sethi #0, <static_chain>
6160 jmpl @(gr0,<jmp_reg>) */
6161
6162 int
6163 frv_trampoline_size (void)
6164 {
6165 if (TARGET_FDPIC)
6166 /* Allocate room for the function descriptor and the lddi
6167 instruction. */
6168 return 8 + 6 * 4;
6169 return 5 /* instructions */ * 4 /* instruction size. */;
6170 }
6171
6172 \f
6173 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6174 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6175 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6176 should be passed to the function when it is called.
6177
6178 The template is:
6179
6180 setlo #0, <jmp_reg>
6181 setlo #0, <static_chain>
6182 sethi #0, <jmp_reg>
6183 sethi #0, <static_chain>
6184 jmpl @(gr0,<jmp_reg>) */
6185
6186 static void
6187 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
6188 {
6189 rtx addr = XEXP (m_tramp, 0);
6190 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6191 rtx sc_reg = force_reg (Pmode, static_chain);
6192
6193 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6194 LCT_NORMAL, VOIDmode,
6195 addr, Pmode,
6196 GEN_INT (frv_trampoline_size ()), SImode,
6197 fnaddr, Pmode,
6198 sc_reg, Pmode);
6199 }
6200
6201 \f
6202 /* Many machines have some registers that cannot be copied directly to or from
6203 memory or even from other types of registers. An example is the `MQ'
6204 register, which on most machines, can only be copied to or from general
6205 registers, but not memory. Some machines allow copying all registers to and
6206 from memory, but require a scratch register for stores to some memory
6207 locations (e.g., those with symbolic address on the RT, and those with
6208 certain symbolic address on the SPARC when compiling PIC). In some cases,
6209 both an intermediate and a scratch register are required.
6210
6211 You should define these macros to indicate to the reload phase that it may
6212 need to allocate at least one register for a reload in addition to the
6213 register to contain the data. Specifically, if copying X to a register
6214 RCLASS in MODE requires an intermediate register, you should define
6215 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6216 whose registers can be used as intermediate registers or scratch registers.
6217
6218 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6219 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6220 largest register class required. If the requirements for input and output
6221 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6222 instead of defining both macros identically.
6223
6224 The values returned by these macros are often `GENERAL_REGS'. Return
6225 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6226 to or from a register of RCLASS in MODE without requiring a scratch register.
6227 Do not define this macro if it would always return `NO_REGS'.
6228
6229 If a scratch register is required (either with or without an intermediate
6230 register), you should define patterns for `reload_inM' or `reload_outM', as
6231 required. These patterns, which will normally be implemented with a
6232 `define_expand', should be similar to the `movM' patterns, except that
6233 operand 2 is the scratch register.
6234
6235 Define constraints for the reload register and scratch register that contain
6236 a single register class. If the original reload register (whose class is
6237 RCLASS) can meet the constraint given in the pattern, the value returned by
6238 these macros is used for the class of the scratch register. Otherwise, two
6239 additional reload registers are required. Their classes are obtained from
6240 the constraints in the insn pattern.
6241
6242 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6243 either be in a hard register or in memory. Use `true_regnum' to find out;
6244 it will return -1 if the pseudo is in memory and the hard register number if
6245 it is in a register.
6246
6247 These macros should not be used in the case where a particular class of
6248 registers can only be copied to memory and not to another class of
6249 registers. In that case, secondary reload registers are not needed and
6250 would not be helpful. Instead, a stack location must be used to perform the
6251 copy and the `movM' pattern should use memory as an intermediate storage.
6252 This case often occurs between floating-point and general registers. */
6253
6254 enum reg_class
6255 frv_secondary_reload_class (enum reg_class rclass,
6256 machine_mode mode ATTRIBUTE_UNUSED,
6257 rtx x)
6258 {
6259 enum reg_class ret;
6260
6261 switch (rclass)
6262 {
6263 default:
6264 ret = NO_REGS;
6265 break;
6266
6267 /* Accumulators/Accumulator guard registers need to go through floating
6268 point registers. */
6269 case QUAD_REGS:
6270 case GPR_REGS:
6271 ret = NO_REGS;
6272 if (x && GET_CODE (x) == REG)
6273 {
6274 int regno = REGNO (x);
6275
6276 if (ACC_P (regno) || ACCG_P (regno))
6277 ret = FPR_REGS;
6278 }
6279 break;
6280
6281 /* Nonzero constants should be loaded into an FPR through a GPR. */
6282 case QUAD_FPR_REGS:
6283 if (x && CONSTANT_P (x) && !ZERO_P (x))
6284 ret = GPR_REGS;
6285 else
6286 ret = NO_REGS;
6287 break;
6288
6289 /* All of these types need gpr registers. */
6290 case ICC_REGS:
6291 case FCC_REGS:
6292 case CC_REGS:
6293 case ICR_REGS:
6294 case FCR_REGS:
6295 case CR_REGS:
6296 case LCR_REG:
6297 case LR_REG:
6298 ret = GPR_REGS;
6299 break;
6300
6301 /* The accumulators need fpr registers. */
6302 case QUAD_ACC_REGS:
6303 case ACCG_REGS:
6304 ret = FPR_REGS;
6305 break;
6306 }
6307
6308 return ret;
6309 }
6310
6311 /* This hook exists to catch the case where secondary_reload_class() is
6312 called from init_reg_autoinc() in regclass.c - before the reload optabs
6313 have been initialised. */
6314
6315 static reg_class_t
6316 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
6317 machine_mode reload_mode,
6318 secondary_reload_info * sri)
6319 {
6320 enum reg_class rclass = NO_REGS;
6321 enum reg_class reload_class = (enum reg_class) reload_class_i;
6322
6323 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6324 {
6325 sri->icode = sri->prev_sri->t_icode;
6326 return NO_REGS;
6327 }
6328
6329 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6330
6331 if (rclass != NO_REGS)
6332 {
6333 enum insn_code icode
6334 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
6335 reload_mode);
6336 if (icode == 0)
6337 {
6338 /* This happens when the reload_[in|out]_optabs have
6339 not been initialised. */
6340 sri->t_icode = CODE_FOR_nothing;
6341 return rclass;
6342 }
6343 }
6344
6345 /* Fall back to the default secondary reload handler. */
6346 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6347
6348 }
6349 \f
6350 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */
6351
6352 static bool
6353 frv_class_likely_spilled_p (reg_class_t rclass)
6354 {
6355 switch (rclass)
6356 {
6357 default:
6358 break;
6359
6360 case GR8_REGS:
6361 case GR9_REGS:
6362 case GR89_REGS:
6363 case FDPIC_FPTR_REGS:
6364 case FDPIC_REGS:
6365 case ICC_REGS:
6366 case FCC_REGS:
6367 case CC_REGS:
6368 case ICR_REGS:
6369 case FCR_REGS:
6370 case CR_REGS:
6371 case LCR_REG:
6372 case LR_REG:
6373 case SPR_REGS:
6374 case QUAD_ACC_REGS:
6375 case ACCG_REGS:
6376 return true;
6377 }
6378
6379 return false;
6380 }
6381
6382 \f
6383 /* An expression for the alignment of a structure field FIELD if the
6384 alignment computed in the usual way is COMPUTED. GCC uses this
6385 value instead of the value in `BIGGEST_ALIGNMENT' or
6386 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6387
6388 /* The declared type of a bit-field is char, short, long or long long.
6389 Its maximum width is the number of bits in that type.
6390
6391 A bit-field is assigned to a storage unit that is large enough to hold
6392 it and that is located at the lowest available address.
6393
6394 Consecutive bit-fields are packed into consecutive bits of the same
6395 storage unit, with regard to their type, beginning with the MSB and
6396 continuing toward the LSB.
6397
6398 If a bit-field would straddle a boundary of its type, it is
6399 instead aligned to the next boundary that is suitable for the
6400 type.
6401
6402 When a bit-field with a declared width of 0 appears, it is forcibly
6403 assigned to the next storage unit.
6404
6405 e.g.)
6406 struct {
6407 int a:2;
6408 int b:6;
6409 char c:4;
6410 int d:10;
6411 int :0;
6412 int f:2;
6413 } x;
6414
6415 +0 +1 +2 +3
6416 &x 00000000 00000000 00000000 00000000
6417 MLM----L
6418 a b
6419 &x+4 00000000 00000000 00000000 00000000
6420 M--L
6421 c
6422 &x+8 00000000 00000000 00000000 00000000
6423 M----------L
6424 d
6425 &x+12 00000000 00000000 00000000 00000000
6426 ML
6427 f
6428 */
6429
6430 int
6431 frv_adjust_field_align (tree field, int computed)
6432 {
6433 /* Make sure that the bitfield is not wider than the type. */
6434 if (field
6435 && DECL_BIT_FIELD (field)
6436 && !DECL_ARTIFICIAL (field))
6437 {
6438 tree parent = DECL_CONTEXT (field);
6439 tree prev = NULL_TREE;
6440 tree cur;
6441
6442 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur))
6443 {
6444 if (TREE_CODE (cur) != FIELD_DECL)
6445 continue;
6446
6447 prev = cur;
6448 }
6449
6450 gcc_assert (cur);
6451
6452 /* If this isn't a :0 field and if the previous element is a bitfield
6453 also, see if the type is different, if so, we will need to align the
6454 bit-field to the next boundary. */
6455 if (prev
6456 && ! DECL_PACKED (field)
6457 && ! integer_zerop (DECL_SIZE (field))
6458 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6459 {
6460 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6461 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6462 computed = (prev_align > cur_align) ? prev_align : cur_align;
6463 }
6464 }
6465
6466 return computed;
6467 }
6468
6469 \f
6470 /* Implement TARGET_HARD_REGNO_MODE_OK. */
6471
6472 static bool
6473 frv_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
6474 {
6475 int base;
6476 int mask;
6477
6478 switch (mode)
6479 {
6480 case E_CCmode:
6481 case E_CC_UNSmode:
6482 case E_CC_NZmode:
6483 return ICC_P (regno) || GPR_P (regno);
6484
6485 case E_CC_CCRmode:
6486 return CR_P (regno) || GPR_P (regno);
6487
6488 case E_CC_FPmode:
6489 return FCC_P (regno) || GPR_P (regno);
6490
6491 default:
6492 break;
6493 }
6494
6495 /* Set BASE to the first register in REGNO's class. Set MASK to the
6496 bits that must be clear in (REGNO - BASE) for the register to be
6497 well-aligned. */
6498 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6499 {
6500 if (ACCG_P (regno))
6501 {
6502 /* ACCGs store one byte. Two-byte quantities must start in
6503 even-numbered registers, four-byte ones in registers whose
6504 numbers are divisible by four, and so on. */
6505 base = ACCG_FIRST;
6506 mask = GET_MODE_SIZE (mode) - 1;
6507 }
6508 else
6509 {
6510 /* The other registers store one word. */
6511 if (GPR_P (regno) || regno == AP_FIRST)
6512 base = GPR_FIRST;
6513
6514 else if (FPR_P (regno))
6515 base = FPR_FIRST;
6516
6517 else if (ACC_P (regno))
6518 base = ACC_FIRST;
6519
6520 else if (SPR_P (regno))
6521 return mode == SImode;
6522
6523 /* No other registers can hold these modes. */
6524 else
6525 return false;
6526
6527 /* Anything smaller than an SI is OK in any word-sized register. */
6528 if (GET_MODE_SIZE (mode) < 4)
6529 return true;
6530
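 /* For example, an 8-byte DImode value occupies two word-sized
 registers and must start on an even register number, so MASK
 is 1.  */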
6531 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6532 }
6533 return (((regno - base) & mask) == 0);
6534 }
6535
6536 return false;
6537 }
6538
6539 /* Implement TARGET_MODES_TIEABLE_P. */
6540
6541 static bool
6542 frv_modes_tieable_p (machine_mode mode1, machine_mode mode2)
6543 {
6544 return mode1 == mode2;
6545 }
6546
6547 \f
6548 /* Implement TARGET_HARD_REGNO_NREGS.
6549
6550 On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6551 that we can build the appropriate instructions to properly reload the
6552 values. Also, make the byte-sized accumulator guards use one guard
6553 for each byte. */
6554
6555 static unsigned int
6556 frv_hard_regno_nregs (unsigned int regno, machine_mode mode)
6557 {
6558 if (ACCG_P (regno))
6559 return GET_MODE_SIZE (mode);
6560 else
6561 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6562 }
6563
6564 \f
6565 /* Implement CLASS_MAX_NREGS. */
6566
6567 int
6568 frv_class_max_nregs (enum reg_class rclass, machine_mode mode)
6569 {
6570 if (rclass == ACCG_REGS)
6571 /* An N-byte value requires N accumulator guards. */
6572 return GET_MODE_SIZE (mode);
6573 else
6574 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6575 }
6576
6577 \f
6578 /* A C expression that is nonzero if X is a legitimate constant for an
6579 immediate operand on the target machine. You can assume that X satisfies
6580 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6581 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6582
6583 static bool
6584 frv_legitimate_constant_p (machine_mode mode, rtx x)
6585 {
6586 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6587 means that the move expanders will be expected to deal with most
6588 kinds of constant, regardless of what we return here.
6589
6590 However, among its other duties, frv_legitimate_constant_p decides whether
6591 a constant can be entered into reg_equiv_constant[]. If we return true,
6592 reload can create new instances of the constant whenever it likes.
6593
6594 The idea is therefore to accept as many constants as possible (to give
6595 reload more freedom) while rejecting constants that can only be created
6596 at certain times. In particular, anything with a symbolic component will
6597 require use of the pseudo FDPIC register, which is only available before
6598 reload. */
6599 if (TARGET_FDPIC)
6600 return LEGITIMATE_PIC_OPERAND_P (x);
6601
6602 /* All of the integer constants are ok. */
6603 if (GET_CODE (x) != CONST_DOUBLE)
6604 return TRUE;
6605
6606 /* Double-word integer constants are ok. */
6607 if (GET_MODE (x) == VOIDmode || mode == DImode)
6608 return TRUE;
6609
6610 /* 0 is always ok. */
6611 if (x == CONST0_RTX (mode))
6612 return TRUE;
6613
6614 /* If floating point is just emulated, allow any constant, since it will be
6615 constructed in the GPRs. */
6616 if (!TARGET_HAS_FPRS)
6617 return TRUE;
6618
6619 if (mode == DFmode && !TARGET_DOUBLE)
6620 return TRUE;
6621
6622 /* Otherwise store the constant away and do a load. */
6623 return FALSE;
6624 }
6625
6626 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6627 CC_NZ for comparisons against zero in which a single Z or N flag test
6628 is enough, CC_UNS for other unsigned comparisons, and CC for other
6629 signed comparisons. */
6630
6631 machine_mode
6632 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6633 {
6634 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6635 return CC_FPmode;
6636
6637 switch (code)
6638 {
6639 case EQ:
6640 case NE:
6641 case LT:
6642 case GE:
6643 return y == const0_rtx ? CC_NZmode : CCmode;
6644
6645 case GTU:
6646 case GEU:
6647 case LTU:
6648 case LEU:
6649 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6650
6651 default:
6652 return CCmode;
6653 }
6654 }
6655 \f
6656
6657 /* Worker function for TARGET_REGISTER_MOVE_COST. */
6658
6659 #define HIGH_COST 40
6660 #define MEDIUM_COST 3
6661 #define LOW_COST 1
6662
6663 static int
6664 frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6665 reg_class_t from, reg_class_t to)
6666 {
6667 switch (from)
6668 {
6669 default:
6670 break;
6671
6672 case QUAD_REGS:
6673 case GPR_REGS:
6674 case GR8_REGS:
6675 case GR9_REGS:
6676 case GR89_REGS:
6677 case FDPIC_REGS:
6678 case FDPIC_FPTR_REGS:
6679 case FDPIC_CALL_REGS:
6680 switch (to)
6681 {
6682 default:
6683 break;
6684
6685 case QUAD_REGS:
6686 case GPR_REGS:
6687 case GR8_REGS:
6688 case GR9_REGS:
6689 case GR89_REGS:
6690 case FDPIC_REGS:
6691 case FDPIC_FPTR_REGS:
6692 case FDPIC_CALL_REGS:
6693 return LOW_COST;
6694
6695 case FPR_REGS:
6696 return LOW_COST;
6697
6698 case LCR_REG:
6699 case LR_REG:
6700 case SPR_REGS:
6701 return LOW_COST;
6702 }
6703 break;
6704
6705 case QUAD_FPR_REGS:
6706 switch (to)
6707 {
6708 default:
6709 break;
6710
6711 case QUAD_REGS:
6712 case GPR_REGS:
6713 case GR8_REGS:
6714 case GR9_REGS:
6715 case GR89_REGS:
6716 case FDPIC_REGS:
6717 case FDPIC_FPTR_REGS:
6718 case FDPIC_CALL_REGS:
6719
6720 case QUAD_ACC_REGS:
6721 case ACCG_REGS:
6722 return MEDIUM_COST;
6723
6724 case QUAD_FPR_REGS:
6725 return LOW_COST;
6726 }
6727 break;
6728
6729 case LCR_REG:
6730 case LR_REG:
6731 case SPR_REGS:
6732 switch (to)
6733 {
6734 default:
6735 break;
6736
6737 case QUAD_REGS:
6738 case GPR_REGS:
6739 case GR8_REGS:
6740 case GR9_REGS:
6741 case GR89_REGS:
6742 case FDPIC_REGS:
6743 case FDPIC_FPTR_REGS:
6744 case FDPIC_CALL_REGS:
6745 return MEDIUM_COST;
6746 }
6747 break;
6748
6749 case QUAD_ACC_REGS:
6750 case ACCG_REGS:
6751 switch (to)
6752 {
6753 default:
6754 break;
6755
6756 case QUAD_FPR_REGS:
6757 return MEDIUM_COST;
6758 }
6759 break;
6760 }
6761
6762 return HIGH_COST;
6763 }
6764
6765 /* Worker function for TARGET_MEMORY_MOVE_COST. */
6766
6767 static int
6768 frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6769 reg_class_t rclass ATTRIBUTE_UNUSED,
6770 bool in ATTRIBUTE_UNUSED)
6771 {
6772 return 4;
6773 }
6774
6775 \f
6776 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6777 use ".picptr" to generate safe relocations for PIC code. We also
6778 need a fixup entry for aligned (non-debugging) code. */
6779
6780 static bool
6781 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6782 {
6783 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6784 {
6785 if (GET_CODE (value) == CONST
6786 || GET_CODE (value) == SYMBOL_REF
6787 || GET_CODE (value) == LABEL_REF)
6788 {
6789 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6790 && SYMBOL_REF_FUNCTION_P (value))
6791 {
6792 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6793 output_addr_const (asm_out_file, value);
6794 fputs (")\n", asm_out_file);
6795 return true;
6796 }
6797 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6798 && frv_function_symbol_referenced_p (value))
6799 return false;
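 /* For aligned (non-debugging) pointers, also emit an internal label
 here and record it in the fixup section with a .picptr entry, so
 that the pointer itself can be found later.  */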
6800 if (aligned_p && !TARGET_FDPIC)
6801 {
6802 static int label_num = 0;
6803 char buf[256];
6804 const char *p;
6805
6806 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
6807 p = (* targetm.strip_name_encoding) (buf);
6808
6809 fprintf (asm_out_file, "%s:\n", p);
6810 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
6811 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
6812 fprintf (asm_out_file, "\t.previous\n");
6813 }
6814 assemble_integer_with_op ("\t.picptr\t", value);
6815 return true;
6816 }
6817 if (!aligned_p)
6818 {
6819 /* We've set the unaligned SI op to NULL, so we always have to
6820 handle the unaligned case here. */
6821 assemble_integer_with_op ("\t.4byte\t", value);
6822 return true;
6823 }
6824 }
6825 return default_assemble_integer (value, size, aligned_p);
6826 }
6827
6828 /* Function to set up the backend function structure. */
6829
6830 static struct machine_function *
6831 frv_init_machine_status (void)
6832 {
6833 return ggc_cleared_alloc<machine_function> ();
6834 }
6835 \f
6836 /* Implement TARGET_SCHED_ISSUE_RATE. */
6837
6838 int
6839 frv_issue_rate (void)
6840 {
6841 if (!TARGET_PACK)
6842 return 1;
6843
6844 switch (frv_cpu_type)
6845 {
6846 default:
6847 case FRV_CPU_FR300:
6848 case FRV_CPU_SIMPLE:
6849 return 1;
6850
6851 case FRV_CPU_FR400:
6852 case FRV_CPU_FR405:
6853 case FRV_CPU_FR450:
6854 return 2;
6855
6856 case FRV_CPU_GENERIC:
6857 case FRV_CPU_FR500:
6858 case FRV_CPU_TOMCAT:
6859 return 4;
6860
6861 case FRV_CPU_FR550:
6862 return 8;
6863 }
6864 }
6865 \f
6866 /* Return the value of INSN's acc_group attribute. */
6867
6868 int
6869 frv_acc_group (rtx insn)
6870 {
6871 /* This distinction only applies to the FR550 packing constraints. */
6872 if (frv_cpu_type == FRV_CPU_FR550)
6873 {
6874 subrtx_iterator::array_type array;
6875 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
6876 if (REG_P (*iter))
6877 {
6878 unsigned int regno = REGNO (*iter);
6879 /* If REGNO refers to an accumulator, return ACC_GROUP_ODD if
6880 bit 2 of the register number is set and ACC_GROUP_EVEN if
6881 it is clear. */
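 /* For example, acc0-acc3 fall in the even group and acc4-acc7 in
 the odd group; the pattern repeats from acc8 onwards.  */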
6882 if (ACC_P (regno))
6883 return (regno - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6884 if (ACCG_P (regno))
6885 return (regno - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6886 }
6887 }
6888 return ACC_GROUP_NONE;
6889 }
6890
6891 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
6892 INSN will try to claim first. Since this value depends only on the
6893 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
6894
6895 static unsigned int
6896 frv_insn_unit (rtx_insn *insn)
6897 {
6898 enum attr_type type;
6899
6900 type = get_attr_type (insn);
6901 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
6902 {
6903 /* We haven't seen this type of instruction before. */
6904 state_t state;
6905 unsigned int unit;
6906
6907 /* Issue the instruction on its own to see which unit it prefers. */
6908 state = alloca (state_size ());
6909 state_reset (state);
6910 state_transition (state, insn);
6911
6912 /* Find out which unit was taken. */
6913 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
6914 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
6915 break;
6916
6917 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
6918
6919 frv_type_to_unit[type] = unit;
6920 }
6921 return frv_type_to_unit[type];
6922 }
6923
6924 /* Return true if INSN issues to a branch unit. */
6925
6926 static bool
6927 frv_issues_to_branch_unit_p (rtx_insn *insn)
6928 {
6929 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
6930 }
6931 \f
6932 /* The instructions in the packet, partitioned into groups. */
6933 struct frv_packet_group {
6934 /* How many instructions in the packet belong to this group. */
6935 unsigned int num_insns;
6936
6937 /* A list of the instructions that belong to this group, in the order
6938 they appear in the rtl stream. */
6939 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
6940
6941 /* The contents of INSNS after they have been sorted into the correct
6942 assembly-language order. Element X issues to unit X. The list may
6943 contain extra nops. */
6944 rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)];
6945
6946 /* The member of frv_nops[] to use in sorted[]. */
6947 rtx_insn *nop;
6948 };
6949
6950 /* The current state of the packing pass, implemented by frv_pack_insns. */
6951 static struct {
6952 /* The state of the pipeline DFA. */
6953 state_t dfa_state;
6954
6955 /* Which hardware registers are set within the current packet,
6956 and the conditions under which they are set. */
6957 regstate_t regstate[FIRST_PSEUDO_REGISTER];
6958
6959 /* The memory locations that have been modified so far in this
6960 packet. MEM is the memref and COND is the regstate_t condition
6961 under which it is set. */
6962 struct {
6963 rtx mem;
6964 regstate_t cond;
6965 } mems[2];
6966
6967 /* The number of valid entries in MEMS. The value is larger than
6968 ARRAY_SIZE (mems) if there were too many mems to record. */
6969 unsigned int num_mems;
6970
6971 /* The maximum number of instructions that can be packed together. */
6972 unsigned int issue_rate;
6973
6974 /* The instructions in the packet, partitioned into groups. */
6975 struct frv_packet_group groups[NUM_GROUPS];
6976
6977 /* The instructions that make up the current packet. */
6978 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
6979 unsigned int num_insns;
6980 } frv_packet;
6981
6982 /* Return the regstate_t flags for the given COND_EXEC condition.
6983 Abort if the condition isn't in the right form. */
6984
6985 static int
6986 frv_cond_flags (rtx cond)
6987 {
6988 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
6989 && GET_CODE (XEXP (cond, 0)) == REG
6990 && CR_P (REGNO (XEXP (cond, 0)))
6991 && XEXP (cond, 1) == const0_rtx);
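 /* Encode the CR number (relative to CR_FIRST) together with a flag
 that records whether the instruction executes when the CR is true
 (NE) or false (EQ).  */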
6992 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
6993 | (GET_CODE (cond) == NE
6994 ? REGSTATE_IF_TRUE
6995 : REGSTATE_IF_FALSE));
6996 }
6997
6998
6999 /* Return true if something accessed under condition COND2 can
7000 conflict with something written under condition COND1. */
7001
7002 static bool
7003 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7004 {
7005 /* If either reference was unconditional, we have a conflict. */
7006 if ((cond1 & REGSTATE_IF_EITHER) == 0
7007 || (cond2 & REGSTATE_IF_EITHER) == 0)
7008 return true;
7009
7010 /* The references might conflict if they were controlled by
7011 different CRs. */
7012 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7013 return true;
7014
7015 /* They definitely conflict if they are controlled by the
7016 same condition. */
7017 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7018 return true;
7019
7020 return false;
7021 }
7022
7023
7024 /* Return true if an instruction with pattern PAT depends on an
7025 instruction in the current packet. COND describes the condition
7026 under which PAT might be set or used. */
7027
7028 static bool
7029 frv_registers_conflict_p_1 (rtx pat, regstate_t cond)
7030 {
7031 subrtx_var_iterator::array_type array;
7032 FOR_EACH_SUBRTX_VAR (iter, array, pat, NONCONST)
7033 {
7034 rtx x = *iter;
7035 if (GET_CODE (x) == REG)
7036 {
7037 unsigned int regno;
7038 FOR_EACH_REGNO (regno, x)
7039 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7040 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7041 return true;
7042 }
7043 else if (GET_CODE (x) == MEM)
7044 {
7045 /* If we ran out of memory slots, assume a conflict. */
7046 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7047 return true;
7048
7049 /* Check for output or true dependencies with earlier MEMs. */
7050 for (unsigned int i = 0; i < frv_packet.num_mems; i++)
7051 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7052 {
7053 if (true_dependence (frv_packet.mems[i].mem, VOIDmode, x))
7054 return true;
7055
7056 if (output_dependence (frv_packet.mems[i].mem, x))
7057 return true;
7058 }
7059 }
7060
7061 /* The return values of calls aren't significant: they describe
7062 the effect of the call as a whole, not of the insn itself. */
7063 else if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
7064 iter.substitute (SET_SRC (x));
7065 }
7066 return false;
7067 }
7068
7069
7070 /* Return true if something in X might depend on an instruction
7071 in the current packet. */
7072
7073 static bool
7074 frv_registers_conflict_p (rtx x)
7075 {
7076 regstate_t flags;
7077
7078 flags = 0;
7079 if (GET_CODE (x) == COND_EXEC)
7080 {
7081 if (frv_registers_conflict_p_1 (XEXP (x, 0), flags))
7082 return true;
7083
7084 flags |= frv_cond_flags (XEXP (x, 0));
7085 x = XEXP (x, 1);
7086 }
7087 return frv_registers_conflict_p_1 (x, flags);
7088 }
7089
7090
7091 /* A note_stores callback. DATA points to the regstate_t condition
7092 under which X is modified. Update FRV_PACKET accordingly. */
7093
7094 static void
7095 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7096 {
7097 unsigned int regno;
7098
7099 if (GET_CODE (x) == REG)
7100 FOR_EACH_REGNO (regno, x)
7101 frv_packet.regstate[regno] |= *(regstate_t *) data;
7102
7103 if (GET_CODE (x) == MEM)
7104 {
7105 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7106 {
7107 frv_packet.mems[frv_packet.num_mems].mem = x;
7108 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7109 }
7110 frv_packet.num_mems++;
7111 }
7112 }
7113
7114
7115 /* Update the register state information for an instruction whose
7116 body is X. */
7117
7118 static void
7119 frv_registers_update (rtx x)
7120 {
7121 regstate_t flags;
7122
7123 flags = REGSTATE_MODIFIED;
7124 if (GET_CODE (x) == COND_EXEC)
7125 {
7126 flags |= frv_cond_flags (XEXP (x, 0));
7127 x = XEXP (x, 1);
7128 }
7129 note_pattern_stores (x, frv_registers_update_1, &flags);
7130 }
7131
7132
7133 /* Initialize frv_packet for the start of a new packet. */
7134
7135 static void
7136 frv_start_packet (void)
7137 {
7138 enum frv_insn_group group;
7139
7140 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7141 frv_packet.num_mems = 0;
7142 frv_packet.num_insns = 0;
7143 for (group = GROUP_I; group < NUM_GROUPS;
7144 group = (enum frv_insn_group) (group + 1))
7145 frv_packet.groups[group].num_insns = 0;
7146 }
7147
7148
7149 /* Likewise for the start of a new basic block. */
7150
7151 static void
7152 frv_start_packet_block (void)
7153 {
7154 state_reset (frv_packet.dfa_state);
7155 frv_start_packet ();
7156 }
7157
7158
7159 /* Finish the current packet, if any, and start a new one. Call
7160 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7161
7162 static void
7163 frv_finish_packet (void (*handle_packet) (void))
7164 {
7165 if (frv_packet.num_insns > 0)
7166 {
7167 handle_packet ();
7168 state_transition (frv_packet.dfa_state, 0);
7169 frv_start_packet ();
7170 }
7171 }
7172
7173
7174 /* Return true if INSN can be added to the current packet. Update
7175 the DFA state on success. */
7176
7177 static bool
7178 frv_pack_insn_p (rtx_insn *insn)
7179 {
7180 /* See if the packet is already as long as it can be. */
7181 if (frv_packet.num_insns == frv_packet.issue_rate)
7182 return false;
7183
7184 /* If the scheduler thought that an instruction should start a packet,
7185 it's usually a good idea to believe it. It knows much more about
7186 the latencies than we do.
7187
7188 There are some exceptions though:
7189
7190 - Conditional instructions are scheduled on the assumption that
7191 they will be executed. This is usually a good thing, since it
7192 tends to avoid unnecessary stalls in the conditional code.
7193 But we want to pack conditional instructions as tightly as
7194 possible, in order to optimize the case where they aren't
7195 executed.
7196
7197 - The scheduler will always put branches on their own, even
7198 if there's no real dependency.
7199
7200 - There's no point putting a call in its own packet unless
7201 we have to. */
7202 if (frv_packet.num_insns > 0
7203 && NONJUMP_INSN_P (insn)
7204 && GET_MODE (insn) == TImode
7205 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7206 return false;
7207
7208 /* Check for register conflicts. Don't do this for setlo since any
7209 conflict will be with the partnering sethi, with which it can
7210 be packed. */
7211 if (get_attr_type (insn) != TYPE_SETLO)
7212 if (frv_registers_conflict_p (PATTERN (insn)))
7213 return false;
7214
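 /* state_transition returns a negative value when INSN can be issued in
 the current cycle, so a negative result means INSN fits into this
 packet.  */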
7215 return state_transition (frv_packet.dfa_state, insn) < 0;
7216 }
7217
7218
7219 /* Add instruction INSN to the current packet. */
7220
7221 static void
7222 frv_add_insn_to_packet (rtx_insn *insn)
7223 {
7224 struct frv_packet_group *packet_group;
7225
7226 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7227 packet_group->insns[packet_group->num_insns++] = insn;
7228 frv_packet.insns[frv_packet.num_insns++] = insn;
7229
7230 frv_registers_update (PATTERN (insn));
7231 }
7232
7233
7234 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7235 packet ends in a branch or call, insert the nop before it, otherwise
7236 add to the end. */
7237
7238 static void
7239 frv_insert_nop_in_packet (rtx_insn *insn)
7240 {
7241 struct frv_packet_group *packet_group;
7242 rtx_insn *last;
7243
7244 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7245 last = frv_packet.insns[frv_packet.num_insns - 1];
7246 if (! NONJUMP_INSN_P (last))
7247 {
7248 insn = emit_insn_before (PATTERN (insn), last);
7249 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7250 frv_packet.insns[frv_packet.num_insns++] = last;
7251 }
7252 else
7253 {
7254 insn = emit_insn_after (PATTERN (insn), last);
7255 frv_packet.insns[frv_packet.num_insns++] = insn;
7256 }
7257 packet_group->insns[packet_group->num_insns++] = insn;
7258 }
7259
7260
7261 /* If packing is enabled, divide the instructions into packets and
7262 return true. Call HANDLE_PACKET for each complete packet. */
7263
7264 static bool
7265 frv_for_each_packet (void (*handle_packet) (void))
7266 {
7267 rtx_insn *insn, *next_insn;
7268
7269 frv_packet.issue_rate = frv_issue_rate ();
7270
7271 /* Early exit if we don't want to pack insns. */
7272 if (!optimize
7273 || !flag_schedule_insns_after_reload
7274 || !TARGET_VLIW_BRANCH
7275 || frv_packet.issue_rate == 1)
7276 return false;
7277
7278 /* Set up the initial packing state. */
7279 dfa_start ();
7280 frv_packet.dfa_state = alloca (state_size ());
7281
7282 frv_start_packet_block ();
7283 for (insn = get_insns (); insn != 0; insn = next_insn)
7284 {
7285 enum rtx_code code;
7286 bool eh_insn_p;
7287
7288 code = GET_CODE (insn);
7289 next_insn = NEXT_INSN (insn);
7290
7291 if (code == CODE_LABEL)
7292 {
7293 frv_finish_packet (handle_packet);
7294 frv_start_packet_block ();
7295 }
7296
7297 if (INSN_P (insn))
7298 switch (GET_CODE (PATTERN (insn)))
7299 {
7300 case USE:
7301 case CLOBBER:
7302 break;
7303
7304 default:
7305 /* Calls mustn't be packed on a TOMCAT. */
7306 if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
7307 frv_finish_packet (handle_packet);
7308
7309 /* Since the last instruction in a packet determines the EH
7310 region, any exception-throwing instruction must come at
7311 the end of the reordered packet. Insns that issue to a
7312 branch unit are bound to come last; for others it's
7313 too hard to predict. */
7314 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7315 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7316 frv_finish_packet (handle_packet);
7317
7318 /* Finish the current packet if we can't add INSN to it.
7319 Simulate cycles until INSN is ready to issue. */
7320 if (!frv_pack_insn_p (insn))
7321 {
7322 frv_finish_packet (handle_packet);
7323 while (!frv_pack_insn_p (insn))
7324 state_transition (frv_packet.dfa_state, 0);
7325 }
7326
7327 /* Add the instruction to the packet. */
7328 frv_add_insn_to_packet (insn);
7329
7330 /* Calls and jumps end a packet, as do insns that throw
7331 an exception. */
7332 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7333 frv_finish_packet (handle_packet);
7334 break;
7335 }
7336 }
7337 frv_finish_packet (handle_packet);
7338 dfa_finish ();
7339 return true;
7340 }
7341 \f
7342 /* Subroutine of frv_sort_insn_group. We are trying to sort
7343 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7344 language order. We have already picked a new position for
7345 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7346 These instructions will occupy elements [0, LOWER_SLOT) and
7347 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7348 the DFA state after issuing these instructions.
7349
7350 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7351 of the unused instructions. Return true if one such permutation gives
7352 a valid ordering, leaving the successful permutation in sorted[].
7353 Do not modify sorted[] until a valid permutation is found. */
7354
7355 static bool
7356 frv_sort_insn_group_1 (enum frv_insn_group group,
7357 unsigned int lower_slot, unsigned int upper_slot,
7358 unsigned int issued, unsigned int num_insns,
7359 state_t state)
7360 {
7361 struct frv_packet_group *packet_group;
7362 unsigned int i;
7363 state_t test_state;
7364 size_t dfa_size;
7365 rtx_insn *insn;
7366
7367 /* Early success if we've filled all the slots. */
7368 if (lower_slot == upper_slot)
7369 return true;
7370
7371 packet_group = &frv_packet.groups[group];
7372 dfa_size = state_size ();
7373 test_state = alloca (dfa_size);
7374
7375 /* Try issuing each unused instruction. */
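 /* I counts down from NUM_INSNS - 1; since I is unsigned, the test
 I + 1 != 0 stops the loop once I has wrapped past zero.  */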
7376 for (i = num_insns - 1; i + 1 != 0; i--)
7377 if (~issued & (1 << i))
7378 {
7379 insn = packet_group->sorted[i];
7380 memcpy (test_state, state, dfa_size);
7381 if (state_transition (test_state, insn) < 0
7382 && cpu_unit_reservation_p (test_state,
7383 NTH_UNIT (group, upper_slot - 1))
7384 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7385 issued | (1 << i), num_insns,
7386 test_state))
7387 {
7388 packet_group->sorted[upper_slot - 1] = insn;
7389 return true;
7390 }
7391 }
7392
7393 return false;
7394 }
7395
7396 /* Compare two instructions by their frv_insn_unit. */
7397
7398 static int
7399 frv_compare_insns (const void *first, const void *second)
7400 {
7401 rtx_insn * const *insn1 = (rtx_insn * const *) first;
7402 rtx_insn * const *insn2 = (rtx_insn * const *) second;
7403 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7404 }
7405
7406 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7407 and sort it into assembly language order. See frv.md for a description of
7408 the algorithm. */
7409
7410 static void
7411 frv_sort_insn_group (enum frv_insn_group group)
7412 {
7413 struct frv_packet_group *packet_group;
7414 unsigned int first, i, nop, max_unit, num_slots;
7415 state_t state, test_state;
7416 size_t dfa_size;
7417
7418 packet_group = &frv_packet.groups[group];
7419
7420 /* Assume no nop is needed. */
7421 packet_group->nop = 0;
7422
7423 if (packet_group->num_insns == 0)
7424 return;
7425
7426 /* Copy insns[] to sorted[]. */
7427 memcpy (packet_group->sorted, packet_group->insns,
7428 sizeof (rtx) * packet_group->num_insns);
7429
7430 /* Sort sorted[] by the unit that each insn tries to take first. */
7431 if (packet_group->num_insns > 1)
7432 qsort (packet_group->sorted, packet_group->num_insns,
7433 sizeof (rtx), frv_compare_insns);
7434
7435 /* That's always enough for branch and control insns. */
7436 if (group == GROUP_B || group == GROUP_C)
7437 return;
7438
7439 dfa_size = state_size ();
7440 state = alloca (dfa_size);
7441 test_state = alloca (dfa_size);
7442
7443 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7444 consecutively and such that the DFA takes unit X when sorted[X]
7445 is added. Set STATE to the new DFA state. */
7446 state_reset (test_state);
7447 for (first = 0; first < packet_group->num_insns; first++)
7448 {
7449 memcpy (state, test_state, dfa_size);
7450 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7451 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7452 break;
7453 }
7454
7455 /* If all the instructions issued in ascending order, we're done. */
7456 if (first == packet_group->num_insns)
7457 return;
7458
7459 /* Add nops to the end of sorted[] and try each permutation until
7460 we find one that works. */
7461 for (nop = 0; nop < frv_num_nops; nop++)
7462 {
7463 max_unit = frv_insn_unit (frv_nops[nop]);
7464 if (frv_unit_groups[max_unit] == group)
7465 {
7466 packet_group->nop = frv_nops[nop];
7467 num_slots = UNIT_NUMBER (max_unit) + 1;
7468 for (i = packet_group->num_insns; i < num_slots; i++)
7469 packet_group->sorted[i] = frv_nops[nop];
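 /* The first FIRST instructions already issue in order, so pass an
 ISSUED mask with bits [0, FIRST) set.  */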
7470 if (frv_sort_insn_group_1 (group, first, num_slots,
7471 (1 << first) - 1, num_slots, state))
7472 return;
7473 }
7474 }
7475 gcc_unreachable ();
7476 }
7477 \f
7478 /* Sort the current packet into assembly-language order. Set packing
7479 flags as appropriate. */
7480
7481 static void
7482 frv_reorder_packet (void)
7483 {
7484 unsigned int cursor[NUM_GROUPS];
7485 rtx_insn *insns[ARRAY_SIZE (frv_unit_groups)];
7486 unsigned int unit, to, from;
7487 enum frv_insn_group group;
7488 struct frv_packet_group *packet_group;
7489
7490 /* First sort each group individually. */
7491 for (group = GROUP_I; group < NUM_GROUPS;
7492 group = (enum frv_insn_group) (group + 1))
7493 {
7494 cursor[group] = 0;
7495 frv_sort_insn_group (group);
7496 }
7497
7498 /* Go through the unit template and try to add an instruction from
7499 that unit's group. */
7500 to = 0;
7501 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7502 {
7503 group = frv_unit_groups[unit];
7504 packet_group = &frv_packet.groups[group];
7505 if (cursor[group] < packet_group->num_insns)
7506 {
7507 /* frv_reorg should have added nops for us. */
7508 gcc_assert (packet_group->sorted[cursor[group]]
7509 != packet_group->nop);
7510 insns[to++] = packet_group->sorted[cursor[group]++];
7511 }
7512 }
7513
7514 gcc_assert (to == frv_packet.num_insns);
7515
7516 /* Clear the last instruction's packing flag, thus marking the end of
7517 a packet. Reorder the other instructions relative to it. */
7518 CLEAR_PACKING_FLAG (insns[to - 1]);
7519 for (from = 0; from < to - 1; from++)
7520 {
7521 remove_insn (insns[from]);
7522 add_insn_before (insns[from], insns[to - 1], NULL);
7523 SET_PACKING_FLAG (insns[from]);
7524 }
7525 }
7526
7527
7528 /* Divide instructions into packets. Reorder the contents of each
7529 packet so that they are in the correct assembly-language order.
7530
7531 Since this pass can change the raw meaning of the rtl stream, it must
7532 only be called at the last minute, just before the instructions are
7533 written out. */
7534
7535 static void
7536 frv_pack_insns (void)
7537 {
7538 if (frv_for_each_packet (frv_reorder_packet))
7539 frv_insn_packing_flag = 0;
7540 else
7541 frv_insn_packing_flag = -1;
7542 }
7543 \f
7544 /* See whether we need to add nops to group GROUP in order to
7545 make a valid packet. */
7546
7547 static void
7548 frv_fill_unused_units (enum frv_insn_group group)
7549 {
7550 unsigned int non_nops, nops, i;
7551 struct frv_packet_group *packet_group;
7552
7553 packet_group = &frv_packet.groups[group];
7554
7555 /* Sort the instructions into assembly-language order.
7556 Use nops to fill slots that are otherwise unused. */
7557 frv_sort_insn_group (group);
7558
7559 /* See how many nops are needed before the final useful instruction. */
7560 i = nops = 0;
7561 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7562 while (packet_group->sorted[i++] == packet_group->nop)
7563 nops++;
7564
7565 /* Insert that many nops into the instruction stream. */
7566 while (nops-- > 0)
7567 frv_insert_nop_in_packet (packet_group->nop);
7568 }
7569
7570 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7571
7572 static bool
7573 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7574 {
7575 if (io1->const_address != 0 && io2->const_address != 0)
7576 return io1->const_address == io2->const_address;
7577
7578 if (io1->var_address != 0 && io2->var_address != 0)
7579 return rtx_equal_p (io1->var_address, io2->var_address);
7580
7581 return false;
7582 }
7583
7584 /* Return true if operations IO1 and IO2 are guaranteed to complete
7585 in order. */
7586
7587 static bool
7588 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7589 {
7590 /* The order of writes is always preserved. */
7591 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7592 return true;
7593
7594 /* The order of reads isn't preserved. */
7595 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7596 return false;
7597
7598 /* One operation is a write and the other is (or could be) a read.
7599 The order is only guaranteed if the accesses are to the same
7600 doubleword. */
7601 return frv_same_doubleword_p (io1, io2);
7602 }
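
/* Two illustrative cases (the addresses are made up): a
   __builtin_write32 to 0x1000 followed by a __builtin_write32 to 0x2000
   is always ordered, because both operations are writes.  A write to
   0x1000 followed by a read from 0x1004 is ordered only because both
   accesses fall in the doubleword at 0x1000; a read from 0x2000 instead
   would carry no guarantee.  */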
7603
7604 /* Generalize I/O operation X so that it covers both X and Y. */
7605
7606 static void
7607 frv_io_union (struct frv_io *x, const struct frv_io *y)
7608 {
7609 if (x->type != y->type)
7610 x->type = FRV_IO_UNKNOWN;
7611 if (!frv_same_doubleword_p (x, y))
7612 {
7613 x->const_address = 0;
7614 x->var_address = 0;
7615 }
7616 }
7617
7618 /* Fill IO with information about the load or store associated with
7619 membar instruction INSN. */
7620
7621 static void
7622 frv_extract_membar (struct frv_io *io, rtx_insn *insn)
7623 {
7624 extract_insn (insn);
7625 io->type = (enum frv_io_type) INTVAL (recog_data.operand[2]);
7626 io->const_address = INTVAL (recog_data.operand[1]);
7627 io->var_address = XEXP (recog_data.operand[0], 0);
7628 }
7629
7630 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7631 if X is a register and *DATA depends on X. */
7632
7633 static void
7634 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7635 {
7636 rtx *other = (rtx *) data;
7637
7638 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7639 *other = 0;
7640 }
7641
7642 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7643 Remove every modified register from the set. */
7644
7645 static void
7646 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7647 {
7648 HARD_REG_SET *set = (HARD_REG_SET *) data;
7649 unsigned int regno;
7650
7651 if (REG_P (x))
7652 FOR_EACH_REGNO (regno, x)
7653 CLEAR_HARD_REG_BIT (*set, regno);
7654 }
7655
7656 /* A note_uses callback that adds all registers in *X to hard register
7657 set *DATA. */
7658
7659 static void
7660 frv_io_handle_use (rtx *x, void *data)
7661 {
7662 find_all_hard_regs (*x, (HARD_REG_SET *) data);
7663 }
7664
7665 /* Go through block BB looking for membars to remove. There are two
7666 cases where intra-block analysis is enough:
7667
7668 - a membar is redundant if it occurs between two consecutive I/O
7669 operations and if those operations are guaranteed to complete
7670 in order.
7671
7672 - a membar for a __builtin_read is redundant if the result is
7673 used before the next I/O operation is issued.
7674
7675 If the last membar in the block could not be removed, and there
7676 are guaranteed to be no I/O operations between that membar and
7677 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7678 store null.
7679
7680 Describe the block's first I/O operation in *NEXT_IO. Describe
7681 an unknown operation if the block doesn't do any I/O. */
7682
7683 static void
7684 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7685 rtx_insn **last_membar)
7686 {
7687 HARD_REG_SET used_regs;
7688 rtx set;
7689 rtx_insn *insn, *next_membar;
7690 bool next_is_end_p;
7691
7692 /* NEXT_IO is the next I/O operation to be performed after the current
7693 instruction. It starts off as being an unknown operation. */
7694 memset (next_io, 0, sizeof (*next_io));
7695
7696 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7697 next_is_end_p = true;
7698
7699 /* If the current instruction is a __builtin_read or __builtin_write,
7700 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7701 is null if the membar has already been deleted.
7702
7703 Note that the initialization here should only be needed to
7704 suppress warnings. */
7705 next_membar = 0;
7706
7707 /* USED_REGS is the set of registers that are used before the
7708 next I/O instruction. */
7709 CLEAR_HARD_REG_SET (used_regs);
7710
7711 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7712 if (CALL_P (insn))
7713 {
7714 /* We can't predict what a call will do to volatile memory. */
7715 memset (next_io, 0, sizeof (struct frv_io));
7716 next_is_end_p = false;
7717 CLEAR_HARD_REG_SET (used_regs);
7718 }
7719 else if (INSN_P (insn))
7720 switch (recog_memoized (insn))
7721 {
7722 case CODE_FOR_optional_membar_qi:
7723 case CODE_FOR_optional_membar_hi:
7724 case CODE_FOR_optional_membar_si:
7725 case CODE_FOR_optional_membar_di:
7726 next_membar = insn;
7727 if (next_is_end_p)
7728 {
7729 /* Local information isn't enough to decide whether this
7730 membar is needed. Stash it away for later. */
7731 *last_membar = insn;
7732 frv_extract_membar (next_io, insn);
7733 next_is_end_p = false;
7734 }
7735 else
7736 {
7737 /* Check whether the I/O operation before INSN could be
7738 reordered with one described by NEXT_IO. If it can't,
7739 INSN will not be needed. */
7740 struct frv_io prev_io;
7741
7742 frv_extract_membar (&prev_io, insn);
7743 if (frv_io_fixed_order_p (&prev_io, next_io))
7744 {
7745 if (dump_file)
7746 fprintf (dump_file,
7747 ";; [Local] Removing membar %d since order"
7748 " of accesses is guaranteed\n",
7749 INSN_UID (next_membar));
7750
7751 insn = NEXT_INSN (insn);
7752 delete_insn (next_membar);
7753 next_membar = 0;
7754 }
7755 *next_io = prev_io;
7756 }
7757 break;
7758
7759 default:
7760 /* Invalidate NEXT_IO's address if it depends on something that
7761 is clobbered by INSN. */
7762 if (next_io->var_address)
7763 note_stores (insn, frv_io_check_address, &next_io->var_address);
7764
7765 /* If the next membar is associated with a __builtin_read,
7766 see if INSN reads from that address. If it does, and if
7767 the destination register is used before the next I/O access,
7768 there is no need for the membar. */
7769 set = PATTERN (insn);
7770 if (next_io->type == FRV_IO_READ
7771 && next_io->var_address != 0
7772 && next_membar != 0
7773 && GET_CODE (set) == SET
7774 && GET_CODE (SET_DEST (set)) == REG
7775 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7776 {
7777 rtx src;
7778
7779 src = SET_SRC (set);
7780 if (GET_CODE (src) == ZERO_EXTEND)
7781 src = XEXP (src, 0);
7782
7783 if (GET_CODE (src) == MEM
7784 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
7785 {
7786 if (dump_file)
7787 fprintf (dump_file,
7788 ";; [Local] Removing membar %d since the target"
7789 " of %d is used before the I/O operation\n",
7790 INSN_UID (next_membar), INSN_UID (insn));
7791
7792 if (next_membar == *last_membar)
7793 *last_membar = 0;
7794
7795 delete_insn (next_membar);
7796 next_membar = 0;
7797 }
7798 }
7799
7800 /* If INSN has volatile references, forget about any registers
7801 that are used after it. Otherwise forget about uses that
7802 are (or might be) defined by INSN. */
7803 if (volatile_refs_p (PATTERN (insn)))
7804 CLEAR_HARD_REG_SET (used_regs);
7805 else
7806 note_stores (insn, frv_io_handle_set, &used_regs);
7807
7808 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
7809 break;
7810 }
7811 }
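
/* A sketch of the first intra-block case above (hypothetical user code
   with made-up pointers P and Q):

       __builtin_write32 (p, x);     membar A follows this store
       __builtin_write32 (q, y);     membar B follows this store

   Scanning backwards, membar B is recorded in *LAST_MEMBAR and *NEXT_IO.
   When membar A is reached, both operations are writes, so
   frv_io_fixed_order_p returns true and membar A is deleted; membar B is
   left for frv_optimize_membar_global to examine.  */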
7812
7813 /* See if MEMBAR, the last membar instruction in BB, can be removed.
7814 FIRST_IO[X] describes the first operation performed by basic block X. */
7815
7816 static void
7817 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
7818 rtx_insn *membar)
7819 {
7820 struct frv_io this_io, next_io;
7821 edge succ;
7822 edge_iterator ei;
7823
7824 /* We need to keep the membar if there is an edge to the exit block. */
7825 FOR_EACH_EDGE (succ, ei, bb->succs)
7826 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
7827 if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7828 return;
7829
7830 /* Work out the union of all successor blocks. */
7831 ei = ei_start (bb->succs);
7832 ei_cond (ei, &succ);
7833 /* next_io = first_io[bb->succ->dest->index]; */
7834 next_io = first_io[succ->dest->index];
7835 ei = ei_start (bb->succs);
7836 if (ei_cond (ei, &succ))
7837 {
7838 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
7839 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
7840 frv_io_union (&next_io, &first_io[succ->dest->index]);
7841 }
7842 else
7843 gcc_unreachable ();
7844
7845 frv_extract_membar (&this_io, membar);
7846 if (frv_io_fixed_order_p (&this_io, &next_io))
7847 {
7848 if (dump_file)
7849 fprintf (dump_file,
7850 ";; [Global] Removing membar %d since order of accesses"
7851 " is guaranteed\n", INSN_UID (membar));
7852
7853 delete_insn (membar);
7854 }
7855 }
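
/* For example (hypothetical CFG): if MEMBAR guards a __builtin_write and
   the first I/O operation of every successor block is also a write,
   frv_io_union keeps the type FRV_IO_WRITE even when the addresses
   differ, frv_io_fixed_order_p returns true and the membar is deleted.
   If some successor might read first, the membar survives unless the
   read is known to hit the same doubleword.  */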
7856
7857 /* Remove redundant membars from the current function. */
7858
7859 static void
7860 frv_optimize_membar (void)
7861 {
7862 basic_block bb;
7863 struct frv_io *first_io;
7864 rtx_insn **last_membar;
7865
7866 compute_bb_for_insn ();
7867 first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
7868 last_membar = XCNEWVEC (rtx_insn *, last_basic_block_for_fn (cfun));
7869
7870 FOR_EACH_BB_FN (bb, cfun)
7871 frv_optimize_membar_local (bb, &first_io[bb->index],
7872 &last_membar[bb->index]);
7873
7874 FOR_EACH_BB_FN (bb, cfun)
7875 if (last_membar[bb->index] != 0)
7876 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
7877
7878 free (first_io);
7879 free (last_membar);
7880 }
7881 \f
7882 /* Used by frv_reorg to keep track of the current packet's address. */
7883 static unsigned int frv_packet_address;
7884
7885 /* If the current packet falls through to a label, try to pad the packet
7886 with nops in order to fit the label's alignment requirements. */
7887
7888 static void
7889 frv_align_label (void)
7890 {
7891 unsigned int alignment, target, nop;
7892 rtx_insn *x, *last, *barrier, *label;
7893
7894 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
7895 maximum alignment of that packet, LABEL to the last label between
7896 the packets, and BARRIER to the last barrier. */
7897 last = frv_packet.insns[frv_packet.num_insns - 1];
7898 label = barrier = 0;
7899 alignment = 4;
7900 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
7901 {
7902 if (LABEL_P (x))
7903 {
7904 unsigned int subalign = 1 << label_to_alignment (x).levels[0].log;
7905 alignment = MAX (alignment, subalign);
7906 label = x;
7907 }
7908 if (BARRIER_P (x))
7909 barrier = x;
7910 }
7911
7912 /* If -malign-labels, and the packet falls through to an unaligned
7913 label, try introducing a nop to align that label to 8 bytes. */
7914 if (TARGET_ALIGN_LABELS
7915 && label != 0
7916 && barrier == 0
7917 && frv_packet.num_insns < frv_packet.issue_rate)
7918 alignment = MAX (alignment, 8);
7919
7920 /* Advance the address to the end of the current packet. */
7921 frv_packet_address += frv_packet.num_insns * 4;
7922
7923 /* Work out the target address, after alignment. */
7924 target = (frv_packet_address + alignment - 1) & -alignment;
7925
7926 /* If the packet falls through to the label, try to find an efficient
7927 padding sequence. */
7928 if (barrier == 0)
7929 {
7930 /* First try adding nops to the current packet. */
7931 for (nop = 0; nop < frv_num_nops; nop++)
7932 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
7933 {
7934 frv_insert_nop_in_packet (frv_nops[nop]);
7935 frv_packet_address += 4;
7936 }
7937
7938 /* If we still haven't reached the target, add some new packets that
7939 contain only nops. If there are two types of nop, insert an
7940 alternating sequence of frv_nops[0] and frv_nops[1], which will
7941 lead to packets like:
7942
7943 nop.p
7944 mnop.p/fnop.p
7945 nop.p
7946 mnop/fnop
7947
7948 etc. Just emit frv_nops[0] if that's the only nop we have. */
7949 last = frv_packet.insns[frv_packet.num_insns - 1];
7950 nop = 0;
7951 while (frv_packet_address < target)
7952 {
7953 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
7954 frv_packet_address += 4;
7955 if (frv_num_nops > 1)
7956 nop ^= 1;
7957 }
7958 }
7959
7960 frv_packet_address = target;
7961 }
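
/* A worked example of the padding arithmetic (numbers invented): with
   FRV_PACKET_ADDRESS at 20 after the current packet and an 8-byte
   alignment requirement, TARGET is (20 + 8 - 1) & -8 == 24, so a single
   4-byte nop suffices; it joins the current packet when frv_pack_insn_p
   allows it and otherwise becomes a nop-only packet.  */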
7962
7963 /* Subroutine of frv_reorg, called after each packet has been constructed
7964 in frv_packet. */
7965
7966 static void
7967 frv_reorg_packet (void)
7968 {
7969 frv_fill_unused_units (GROUP_I);
7970 frv_fill_unused_units (GROUP_FM);
7971 frv_align_label ();
7972 }
7973
7974 /* Add an instruction with pattern NOP to frv_nops[]. */
7975
7976 static void
7977 frv_register_nop (rtx nop)
7978 {
7979 rtx_insn *nop_insn = make_insn_raw (nop);
7980 SET_NEXT_INSN (nop_insn) = 0;
7981 SET_PREV_INSN (nop_insn) = 0;
7982 frv_nops[frv_num_nops++] = nop_insn;
7983 }
7984
7985 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
7986 into packets and check whether we need to insert nops in order to
7987 fulfill the processor's issue requirements. Also, if the user has
7988 requested a certain alignment for a label, try to meet that alignment
7989 by inserting nops in the previous packet. */
7990
7991 static void
7992 frv_reorg (void)
7993 {
7994 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
7995 frv_optimize_membar ();
7996
7997 frv_num_nops = 0;
7998 frv_register_nop (gen_nop ());
7999 if (TARGET_MEDIA)
8000 frv_register_nop (gen_mnop ());
8001 if (TARGET_HARD_FLOAT)
8002 frv_register_nop (gen_fnop ());
8003
8004 /* Estimate the length of each branch. Although this may change after
8005 we've inserted nops, it will only do so in big functions. */
8006 shorten_branches (get_insns ());
8007
8008 frv_packet_address = 0;
8009 frv_for_each_packet (frv_reorg_packet);
8010 }
8011 \f
8012 #define def_builtin(name, type, code) \
8013 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
8014
8015 struct builtin_description
8016 {
8017 enum insn_code icode;
8018 const char *name;
8019 enum frv_builtins code;
8020 enum rtx_code comparison;
8021 unsigned int flag;
8022 };
8023
8024 /* Media intrinsics that take a single, constant argument. */
8025
8026 static struct builtin_description bdesc_set[] =
8027 {
8028 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, UNKNOWN, 0 }
8029 };
8030
8031 /* Media intrinsics that take just one argument. */
8032
8033 static struct builtin_description bdesc_1arg[] =
8034 {
8035 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, UNKNOWN, 0 },
8036 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, UNKNOWN, 0 },
8037 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, UNKNOWN, 0 },
8038 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, UNKNOWN, 0},
8039 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, UNKNOWN, 0 },
8040 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, UNKNOWN, 0 }
8041 };
8042
8043 /* Media intrinsics that take two arguments. */
8044
8045 static struct builtin_description bdesc_2arg[] =
8046 {
8047 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, UNKNOWN, 0},
8048 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, UNKNOWN, 0},
8049 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, UNKNOWN, 0},
8050 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, UNKNOWN, 0},
8051 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, UNKNOWN, 0},
8052 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, UNKNOWN, 0},
8053 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, UNKNOWN, 0},
8054 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, UNKNOWN, 0},
8055 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, UNKNOWN, 0},
8056 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, UNKNOWN, 0},
8057 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, UNKNOWN, 0},
8058 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, UNKNOWN, 0},
8059 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, UNKNOWN, 0},
8060 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, UNKNOWN, 0},
8061 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, UNKNOWN, 0},
8062 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, UNKNOWN, 0},
8063 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, UNKNOWN, 0},
8064 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, UNKNOWN, 0},
8065 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, UNKNOWN, 0},
8066 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, UNKNOWN, 0},
8067 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, UNKNOWN, 0},
8068 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, UNKNOWN, 0},
8069 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, UNKNOWN, 0},
8070 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, UNKNOWN, 0},
8071 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, UNKNOWN, 0},
8072 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, UNKNOWN, 0},
8073 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, UNKNOWN, 0}
8074 };
8075
8076 /* Integer intrinsics that take two arguments and have no return value. */
8077
8078 static struct builtin_description bdesc_int_void2arg[] =
8079 {
8080 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, UNKNOWN, 0},
8081 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, UNKNOWN, 0},
8082 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, UNKNOWN, 0}
8083 };
8084
8085 static struct builtin_description bdesc_prefetches[] =
8086 {
8087 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, UNKNOWN,
8088 0},
8089 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, UNKNOWN, 0}
8090 };
8091
8092 /* Media intrinsics that take two arguments, the first being an ACC number. */
8093
8094 static struct builtin_description bdesc_cut[] =
8095 {
8096 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, UNKNOWN, 0},
8097 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, UNKNOWN, 0},
8098 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, UNKNOWN, 0}
8099 };
8100
8101 /* Two-argument media intrinsics with an immediate second argument. */
8102
8103 static struct builtin_description bdesc_2argimm[] =
8104 {
8105 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, UNKNOWN, 0},
8106 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, UNKNOWN, 0},
8107 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, UNKNOWN, 0},
8108 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, UNKNOWN, 0},
8109 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, UNKNOWN, 0},
8110 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, UNKNOWN, 0},
8111 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, UNKNOWN, 0},
8112 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, UNKNOWN, 0},
8113 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, UNKNOWN, 0},
8114 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, UNKNOWN, 0},
8115 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, UNKNOWN, 0},
8116 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, UNKNOWN, 0},
8117 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, UNKNOWN, 0},
8118 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, UNKNOWN, 0},
8119 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, UNKNOWN, 0},
8120 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, UNKNOWN, 0},
8121 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, UNKNOWN, 0}
8122 };
8123
8124 /* Media intrinsics that take two arguments and return void, the first argument
8125 being a pointer to 4 words in memory. */
8126
8127 static struct builtin_description bdesc_void2arg[] =
8128 {
8129 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, UNKNOWN, 0},
8130 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, UNKNOWN, 0},
8131 };
8132
8133 /* Media intrinsics that take three arguments, the first being a const_int that
8134 denotes an accumulator, and that return void. */
8135
8136 static struct builtin_description bdesc_void3arg[] =
8137 {
8138 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, UNKNOWN, 0},
8139 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, UNKNOWN, 0},
8140 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, UNKNOWN, 0},
8141 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, UNKNOWN, 0},
8142 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, UNKNOWN, 0},
8143 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, UNKNOWN, 0},
8144 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, UNKNOWN, 0},
8145 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, UNKNOWN, 0},
8146 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, UNKNOWN, 0},
8147 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, UNKNOWN, 0},
8148 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, UNKNOWN, 0},
8149 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, UNKNOWN, 0},
8150 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, UNKNOWN, 0},
8151 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, UNKNOWN, 0},
8152 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, UNKNOWN, 0},
8153 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, UNKNOWN, 0},
8154 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, UNKNOWN, 0},
8155 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, UNKNOWN, 0},
8156 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, UNKNOWN, 0},
8157 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, UNKNOWN, 0},
8158 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, UNKNOWN, 0},
8159 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, UNKNOWN, 0},
8160 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, UNKNOWN, 0},
8161 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, UNKNOWN, 0},
8162 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, UNKNOWN, 0}
8163 };
8164
8165 /* Media intrinsics that take two accumulator numbers as arguments and
8166 return void. */
8167
8168 static struct builtin_description bdesc_voidacc[] =
8169 {
8170 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, UNKNOWN, 0},
8171 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, UNKNOWN, 0},
8172 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, UNKNOWN, 0},
8173 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, UNKNOWN, 0},
8174 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, UNKNOWN, 0},
8175 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, UNKNOWN, 0}
8176 };
8177
8178 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8179 a normal move and the ICODE is for the membar. */
8180
8181 static struct builtin_description bdesc_loads[] =
8182 {
8183 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8184 FRV_BUILTIN_READ8, UNKNOWN, 0},
8185 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8186 FRV_BUILTIN_READ16, UNKNOWN, 0},
8187 { CODE_FOR_optional_membar_si, "__builtin_read32",
8188 FRV_BUILTIN_READ32, UNKNOWN, 0},
8189 { CODE_FOR_optional_membar_di, "__builtin_read64",
8190 FRV_BUILTIN_READ64, UNKNOWN, 0}
8191 };
8192
8193 /* Likewise stores. */
8194
8195 static struct builtin_description bdesc_stores[] =
8196 {
8197 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8198 FRV_BUILTIN_WRITE8, UNKNOWN, 0},
8199 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8200 FRV_BUILTIN_WRITE16, UNKNOWN, 0},
8201 { CODE_FOR_optional_membar_si, "__builtin_write32",
8202 FRV_BUILTIN_WRITE32, UNKNOWN, 0},
8203 { CODE_FOR_optional_membar_di, "__builtin_write64",
8204 FRV_BUILTIN_WRITE64, UNKNOWN, 0},
8205 };
8206
8207 /* Initialize media builtins. */
8208
8209 static void
8210 frv_init_builtins (void)
8211 {
8212 tree accumulator = integer_type_node;
8213 tree integer = integer_type_node;
8214 tree voidt = void_type_node;
8215 tree uhalf = short_unsigned_type_node;
8216 tree sword1 = long_integer_type_node;
8217 tree uword1 = long_unsigned_type_node;
8218 tree sword2 = long_long_integer_type_node;
8219 tree uword2 = long_long_unsigned_type_node;
8220 tree uword4 = build_pointer_type (uword1);
8221 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8222 tree ubyte = unsigned_char_type_node;
8223 tree iacc = integer_type_node;
8224
8225 #define UNARY(RET, T1) \
8226 build_function_type_list (RET, T1, NULL_TREE)
8227
8228 #define BINARY(RET, T1, T2) \
8229 build_function_type_list (RET, T1, T2, NULL_TREE)
8230
8231 #define TRINARY(RET, T1, T2, T3) \
8232 build_function_type_list (RET, T1, T2, T3, NULL_TREE)
8233
8234 #define QUAD(RET, T1, T2, T3, T4) \
8235 build_function_type_list (RET, T1, T2, T3, T4, NULL_TREE)
8236
8237 tree void_ftype_void = build_function_type_list (voidt, NULL_TREE);
8238
8239 tree void_ftype_acc = UNARY (voidt, accumulator);
8240 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8241 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8242 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8243 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8244 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8245 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8246 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8247 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8248
8249 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8250 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8251 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8252 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8253 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8254 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8255 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8256 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8257 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8258 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8259 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8260
8261 tree sw1_ftype_int = UNARY (sword1, integer);
8262 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8263 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8264
8265 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8266 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8267 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8268 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8269 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8270 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8271
8272 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8273 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8274 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8275 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8276 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8277 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8278 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8279 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8280 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8281 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8282 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8283 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8284 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8285 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8286 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8287 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8288 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8289
8290 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8291 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8292 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8293 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8294 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8295 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8296 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8297 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8298 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8299 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8300 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8301 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8302 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8303 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8304 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8305 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8306 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8307 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8308 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8309 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8310 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8311 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8312 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8313 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8314 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8315 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8316 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8317 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8318 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8319 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8320 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8321 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8322 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8323 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8324 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8325 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8326 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8327 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8328 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8329 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8330 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8331 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8332 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8333 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8334 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8335 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8336 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8337 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8338 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8339 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8340 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8341 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8342 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8343 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8344 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8345 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8346 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8347 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8348 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8349 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8350 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8351 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8352 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8353 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8354 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8355 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8356 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8357 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8358 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8359 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8360 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8361 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8362 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8363 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8364 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8365 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8366 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8367 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8368 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8369 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8370 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8371 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8372 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8373 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8374 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8375 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8376 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8377 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8378 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8379 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8380 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8381 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8382 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8383 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8384 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8385 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8386 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8387 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8388 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8389 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8390 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8391 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8392 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8393 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8394 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8395 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8396 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8397 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8398
8399 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8400 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8401 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8402 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8403
8404 #undef UNARY
8405 #undef BINARY
8406 #undef TRINARY
8407 #undef QUAD
8408 }
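
/* Illustrative (hypothetical) user-level calls matching the prototypes
   registered above; the variable names are invented:

       unsigned long a, b, r;
       long s1, s2, s3;
       unsigned short h1, h2;

       r  = __MAND (a, b);        uw1 <- uw1, uw1
       s3 = __MADDHSS (s1, s2);   sw1 <- sw1, sw1
       r  = __MPACKH (h1, h2);    uw1 <- uhalf, uhalf
       __MMACHS (1, s1, s2);      void; the first argument is a constant
                                  accumulator number

   Here uw1 and sw1 correspond to unsigned long and long, as declared at
   the top of frv_init_builtins.  */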
8409
8410 /* Set the names for various arithmetic operations according to the
8411 FRV ABI. */
8412 static void
8413 frv_init_libfuncs (void)
8414 {
8415 set_optab_libfunc (smod_optab, SImode, "__modi");
8416 set_optab_libfunc (umod_optab, SImode, "__umodi");
8417
8418 set_optab_libfunc (add_optab, DImode, "__addll");
8419 set_optab_libfunc (sub_optab, DImode, "__subll");
8420 set_optab_libfunc (smul_optab, DImode, "__mulll");
8421 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8422 set_optab_libfunc (smod_optab, DImode, "__modll");
8423 set_optab_libfunc (umod_optab, DImode, "__umodll");
8424 set_optab_libfunc (and_optab, DImode, "__andll");
8425 set_optab_libfunc (ior_optab, DImode, "__orll");
8426 set_optab_libfunc (xor_optab, DImode, "__xorll");
8427 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8428
8429 set_optab_libfunc (add_optab, SFmode, "__addf");
8430 set_optab_libfunc (sub_optab, SFmode, "__subf");
8431 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8432 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8433
8434 set_optab_libfunc (add_optab, DFmode, "__addd");
8435 set_optab_libfunc (sub_optab, DFmode, "__subd");
8436 set_optab_libfunc (smul_optab, DFmode, "__muld");
8437 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8438
8439 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8440 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8441
8442 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8443 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8444 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8445 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8446
8447 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8448 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8449 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8450 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8451
8452 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8453 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8454 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8455 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8456 }
8457
8458 /* Convert an integer constant to an accumulator register. ICODE is the
8459 code of the target instruction, OPNUM is the number of the
8460 accumulator operand and OPVAL is the constant integer. Try both
8461 ACC and ACCG registers; only report an error if neither fit the
8462 instruction. */
8463
8464 static rtx
8465 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8466 {
8467 rtx reg;
8468 int i;
8469
8470 /* ACCs and ACCGs are implicit global registers if media intrinsics
8471 are being used. We set up this lazily to avoid creating lots of
8472 unnecessary call_insn rtl in non-media code. */
8473 for (i = 0; i <= ACC_MASK; i++)
8474 if ((i & ACC_MASK) == i)
8475 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8476
8477 if (GET_CODE (opval) != CONST_INT)
8478 {
8479 error ("accumulator is not a constant integer");
8480 return NULL_RTX;
8481 }
8482 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8483 {
8484 error ("accumulator number is out of bounds");
8485 return NULL_RTX;
8486 }
8487
8488 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8489 ACC_FIRST + INTVAL (opval));
8490 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8491 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8492
8493 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8494 {
8495 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8496 return NULL_RTX;
8497 }
8498 return reg;
8499 }
8500
8501 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8502 should have. */
8503
8504 static machine_mode
8505 frv_matching_accg_mode (machine_mode mode)
8506 {
8507 switch (mode)
8508 {
8509 case E_V4SImode:
8510 return V4QImode;
8511
8512 case E_DImode:
8513 return HImode;
8514
8515 case E_SImode:
8516 return QImode;
8517
8518 default:
8519 gcc_unreachable ();
8520 }
8521 }
8522
8523 /* Given that a __builtin_read or __builtin_write function is accessing
8524 address ADDRESS, return the value that should be used as operand 1
8525 of the membar. */
8526
8527 static rtx
8528 frv_io_address_cookie (rtx address)
8529 {
8530 return (GET_CODE (address) == CONST_INT
8531 ? GEN_INT (INTVAL (address) / 8 * 8)
8532 : const0_rtx);
8533 }
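
/* For example, a constant address of 0x1006 gives the cookie 0x1000,
   the start of the doubleword containing the access, which is what
   frv_same_doubleword_p later compares; a non-constant address gets the
   catch-all cookie 0.  */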
8534
8535 /* Return the accumulator guard that should be paired with accumulator
8536 register ACC. The mode of the returned register is in the same
8537 class as ACC, but is four times smaller. */
8538
8539 rtx
8540 frv_matching_accg_for_acc (rtx acc)
8541 {
8542 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8543 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8544 }
8545
8546 /* Read the argument of call EXP that is given by INDEX.
8547 Return the value as an rtx. */
8548
8549 static rtx
8550 frv_read_argument (tree exp, unsigned int index)
8551 {
8552 return expand_normal (CALL_EXPR_ARG (exp, index));
8553 }
8554
8555 /* Like frv_read_argument, but interpret the argument as the number
8556 of an IACC register and return a (reg:MODE ...) rtx for it. */
8557
8558 static rtx
8559 frv_read_iacc_argument (machine_mode mode, tree call,
8560 unsigned int index)
8561 {
8562 int i, regno;
8563 rtx op;
8564
8565 op = frv_read_argument (call, index);
8566 if (GET_CODE (op) != CONST_INT
8567 || INTVAL (op) < 0
8568 || INTVAL (op) > IACC_LAST - IACC_FIRST
8569 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8570 {
8571 error ("invalid IACC argument");
8572 op = const0_rtx;
8573 }
8574
8575 /* IACCs are implicit global registers. We set up this lazily to
8576 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8577 being used. */
8578 regno = INTVAL (op) + IACC_FIRST;
8579 for (i = 0; i < hard_regno_nregs (regno, mode); i++)
8580 global_regs[regno + i] = 1;
8581
8582 return gen_rtx_REG (mode, regno);
8583 }
8584
8585 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8586 The instruction should require a constant operand of some sort. The
8587 function prints an error if OPVAL is not valid. */
8588
8589 static int
8590 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8591 {
8592 if (GET_CODE (opval) != CONST_INT)
8593 {
8594 error ("%qs expects a constant argument", insn_data[icode].name);
8595 return FALSE;
8596 }
8597 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8598 {
8599 error ("constant argument out of range for %qs", insn_data[icode].name);
8600 return FALSE;
8601 }
8602 return TRUE;
8603 }
8604
8605 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8606 if it's not null, has the right mode, and satisfies operand 0's
8607 predicate. */
8608
8609 static rtx
8610 frv_legitimize_target (enum insn_code icode, rtx target)
8611 {
8612 machine_mode mode = insn_data[icode].operand[0].mode;
8613
8614 if (! target
8615 || GET_MODE (target) != mode
8616 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8617 return gen_reg_rtx (mode);
8618 else
8619 return target;
8620 }
8621
8622 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8623 check whether ARG satisfies the operand's constraints. If it doesn't,
8624 copy ARG to a temporary register and return that. Otherwise return ARG
8625 itself. */
8626
8627 static rtx
8628 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8629 {
8630 machine_mode mode = insn_data[icode].operand[opnum].mode;
8631
8632 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8633 return arg;
8634 else
8635 return copy_to_mode_reg (mode, arg);
8636 }
8637
8638 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8639
8640 static rtx
8641 frv_volatile_memref (machine_mode mode, rtx arg)
8642 {
8643 rtx mem;
8644
8645 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8646 MEM_VOLATILE_P (mem) = 1;
8647 return mem;
8648 }
8649
8650 /* Expand builtins that take a single, constant argument. At the moment,
8651 only MHDSETS falls into this category. */
8652
8653 static rtx
8654 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8655 {
8656 rtx pat;
8657 rtx op0 = frv_read_argument (call, 0);
8658
8659 if (! frv_check_constant_argument (icode, 1, op0))
8660 return NULL_RTX;
8661
8662 target = frv_legitimize_target (icode, target);
8663 pat = GEN_FCN (icode) (target, op0);
8664 if (! pat)
8665 return NULL_RTX;
8666
8667 emit_insn (pat);
8668 return target;
8669 }
8670
8671 /* Expand builtins that take one operand. */
8672
8673 static rtx
8674 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8675 {
8676 rtx pat;
8677 rtx op0 = frv_read_argument (call, 0);
8678
8679 target = frv_legitimize_target (icode, target);
8680 op0 = frv_legitimize_argument (icode, 1, op0);
8681 pat = GEN_FCN (icode) (target, op0);
8682 if (! pat)
8683 return NULL_RTX;
8684
8685 emit_insn (pat);
8686 return target;
8687 }
8688
8689 /* Expand builtins that take two operands. */
8690
8691 static rtx
8692 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8693 {
8694 rtx pat;
8695 rtx op0 = frv_read_argument (call, 0);
8696 rtx op1 = frv_read_argument (call, 1);
8697
8698 target = frv_legitimize_target (icode, target);
8699 op0 = frv_legitimize_argument (icode, 1, op0);
8700 op1 = frv_legitimize_argument (icode, 2, op1);
8701 pat = GEN_FCN (icode) (target, op0, op1);
8702 if (! pat)
8703 return NULL_RTX;
8704
8705 emit_insn (pat);
8706 return target;
8707 }
8708
8709 /* Expand cut-style builtins, which take two operands and an implicit ACCG
8710 operand. */
8711
8712 static rtx
8713 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8714 {
8715 rtx pat;
8716 rtx op0 = frv_read_argument (call, 0);
8717 rtx op1 = frv_read_argument (call, 1);
8718 rtx op2;
8719
8720 target = frv_legitimize_target (icode, target);
8721 op0 = frv_int_to_acc (icode, 1, op0);
8722 if (! op0)
8723 return NULL_RTX;
8724
8725 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8726 {
8727 if (! frv_check_constant_argument (icode, 2, op1))
8728 return NULL_RTX;
8729 }
8730 else
8731 op1 = frv_legitimize_argument (icode, 2, op1);
8732
8733 op2 = frv_matching_accg_for_acc (op0);
8734 pat = GEN_FCN (icode) (target, op0, op1, op2);
8735 if (! pat)
8736 return NULL_RTX;
8737
8738 emit_insn (pat);
8739 return target;
8740 }
8741
8742 /* Expand builtins that take two operands, the second of which must be an immediate. */
8743
8744 static rtx
8745 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8746 {
8747 rtx pat;
8748 rtx op0 = frv_read_argument (call, 0);
8749 rtx op1 = frv_read_argument (call, 1);
8750
8751 if (! frv_check_constant_argument (icode, 2, op1))
8752 return NULL_RTX;
8753
8754 target = frv_legitimize_target (icode, target);
8755 op0 = frv_legitimize_argument (icode, 1, op0);
8756 pat = GEN_FCN (icode) (target, op0, op1);
8757 if (! pat)
8758 return NULL_RTX;
8759
8760 emit_insn (pat);
8761 return target;
8762 }
8763
8764 /* Expand builtins that take two operands, the first being a pointer to
8765 ints, and that return void. */
8766
8767 static rtx
8768 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8769 {
8770 rtx pat;
8771 rtx op0 = frv_read_argument (call, 0);
8772 rtx op1 = frv_read_argument (call, 1);
8773 machine_mode mode0 = insn_data[icode].operand[0].mode;
8774 rtx addr;
8775
8776 if (GET_CODE (op0) != MEM)
8777 {
8778 rtx reg = op0;
8779
8780 if (! offsettable_address_p (0, mode0, op0))
8781 {
8782 reg = gen_reg_rtx (Pmode);
8783 emit_insn (gen_rtx_SET (reg, op0));
8784 }
8785
8786 op0 = gen_rtx_MEM (SImode, reg);
8787 }
8788
8789 addr = XEXP (op0, 0);
8790 if (! offsettable_address_p (0, mode0, addr))
8791 addr = copy_to_mode_reg (Pmode, op0);
8792
8793 op0 = change_address (op0, V4SImode, addr);
8794 op1 = frv_legitimize_argument (icode, 1, op1);
8795 pat = GEN_FCN (icode) (op0, op1);
8796 if (! pat)
8797 return 0;
8798
8799 emit_insn (pat);
8800 return 0;
8801 }
8802
8803 /* Expand builtins that take two long operands and return void. */
8804
8805 static rtx
8806 frv_expand_int_void2arg (enum insn_code icode, tree call)
8807 {
8808 rtx pat;
8809 rtx op0 = frv_read_argument (call, 0);
8810 rtx op1 = frv_read_argument (call, 1);
8811
8812 op0 = frv_legitimize_argument (icode, 1, op0);
8813 op1 = frv_legitimize_argument (icode, 1, op1);
8814 pat = GEN_FCN (icode) (op0, op1);
8815 if (! pat)
8816 return NULL_RTX;
8817
8818 emit_insn (pat);
8819 return NULL_RTX;
8820 }
8821
8822 /* Expand prefetch builtins. These take a single address as argument. */
8823
8824 static rtx
8825 frv_expand_prefetches (enum insn_code icode, tree call)
8826 {
8827 rtx pat;
8828 rtx op0 = frv_read_argument (call, 0);
8829
8830 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
8831 if (! pat)
8832 return 0;
8833
8834 emit_insn (pat);
8835 return 0;
8836 }
8837
8838 /* Expand builtins that take three operands and return void. The first
8839 argument must be a constant that describes a pair or quad of accumulators. A
8840 fourth argument is created that is the accumulator guard register that
8841 corresponds to the accumulator. */
8842
8843 static rtx
8844 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
8845 {
8846 rtx pat;
8847 rtx op0 = frv_read_argument (call, 0);
8848 rtx op1 = frv_read_argument (call, 1);
8849 rtx op2 = frv_read_argument (call, 2);
8850 rtx op3;
8851
8852 op0 = frv_int_to_acc (icode, 0, op0);
8853 if (! op0)
8854 return NULL_RTX;
8855
8856 op1 = frv_legitimize_argument (icode, 1, op1);
8857 op2 = frv_legitimize_argument (icode, 2, op2);
8858 op3 = frv_matching_accg_for_acc (op0);
8859 pat = GEN_FCN (icode) (op0, op1, op2, op3);
8860 if (! pat)
8861 return NULL_RTX;
8862
8863 emit_insn (pat);
8864 return NULL_RTX;
8865 }
8866
8867 /* Expand builtins that perform accumulator-to-accumulator operations.
8868 These builtins take two accumulator numbers as arguments and return
8869 void. */
8870
8871 static rtx
8872 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
8873 {
8874 rtx pat;
8875 rtx op0 = frv_read_argument (call, 0);
8876 rtx op1 = frv_read_argument (call, 1);
8877 rtx op2;
8878 rtx op3;
8879
8880 op0 = frv_int_to_acc (icode, 0, op0);
8881 if (! op0)
8882 return NULL_RTX;
8883
8884 op1 = frv_int_to_acc (icode, 1, op1);
8885 if (! op1)
8886 return NULL_RTX;
8887
8888 op2 = frv_matching_accg_for_acc (op0);
8889 op3 = frv_matching_accg_for_acc (op1);
8890 pat = GEN_FCN (icode) (op0, op1, op2, op3);
8891 if (! pat)
8892 return NULL_RTX;
8893
8894 emit_insn (pat);
8895 return NULL_RTX;
8896 }
8897
8898 /* Expand a __builtin_read* function. ICODE is the instruction code for the
8899 membar and TARGET_MODE is the mode that the loaded value should have. */
8900
8901 static rtx
8902 frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode,
8903 tree call, rtx target)
8904 {
8905 rtx op0 = frv_read_argument (call, 0);
8906 rtx cookie = frv_io_address_cookie (op0);
8907
8908 if (target == 0 || !REG_P (target))
8909 target = gen_reg_rtx (target_mode);
8910 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
8911 convert_move (target, op0, 1);
8912 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
8913 cfun->machine->has_membar_p = 1;
8914 return target;
8915 }
8916
8917 /* Likewise __builtin_write* functions. */
8918
8919 static rtx
8920 frv_expand_store_builtin (enum insn_code icode, tree call)
8921 {
8922 rtx op0 = frv_read_argument (call, 0);
8923 rtx op1 = frv_read_argument (call, 1);
8924 rtx cookie = frv_io_address_cookie (op0);
8925
8926 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
8927 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
8928 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
8929 cfun->machine->has_membar_p = 1;
8930 return NULL_RTX;
8931 }
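
/* As an illustration (hypothetical user code, PORT invented), a call
   such as

       unsigned int v = __builtin_read32 (port);

   expands to a volatile SImode load from *PORT, a conversion of the
   loaded value into the target register, and an optional_membar_si insn
   carrying the address cookie and FRV_IO_READ; the write built-ins emit
   the volatile store followed by the same kind of membar with
   FRV_IO_WRITE.  */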
8932
8933 /* Expand the MDPACKH builtin. It takes four unsigned short arguments and
8934 each argument forms one word of the two double-word input registers.
8935 CALL is the tree for the call and TARGET, if nonnull, suggests a good place
8936 to put the return value. */
8937
8938 static rtx
8939 frv_expand_mdpackh_builtin (tree call, rtx target)
8940 {
8941 enum insn_code icode = CODE_FOR_mdpackh;
8942 rtx pat, op0, op1;
8943 rtx arg1 = frv_read_argument (call, 0);
8944 rtx arg2 = frv_read_argument (call, 1);
8945 rtx arg3 = frv_read_argument (call, 2);
8946 rtx arg4 = frv_read_argument (call, 3);
8947
8948 target = frv_legitimize_target (icode, target);
8949 op0 = gen_reg_rtx (DImode);
8950 op1 = gen_reg_rtx (DImode);
8951
8952 /* The high half of each word is not explicitly initialized, so indicate
8953 that the input operands are not live before this point. */
8954 emit_clobber (op0);
8955 emit_clobber (op1);
8956
8957 /* Move each argument into the low half of its associated input word. */
8958 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
8959 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
8960 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
8961 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
8962
8963 pat = GEN_FCN (icode) (target, op0, op1);
8964 if (! pat)
8965 return NULL_RTX;
8966
8967 emit_insn (pat);
8968 return target;
8969 }
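
/* Sketch of the data flow above, using the names from the function:
   ARG1 and ARG2 become the low halves of the two words of OP0, ARG3 and
   ARG4 the low halves of OP1, and the mdpackh pattern packs those four
   halfwords into the double-word TARGET.  The subreg byte offsets 2 and
   6 select the low half of each word because this is a big-endian
   target.  */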
8970
8971 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
8972 number as argument. */
8973
8974 static rtx
8975 frv_expand_mclracc_builtin (tree call)
8976 {
8977 enum insn_code icode = CODE_FOR_mclracc;
8978 rtx pat;
8979 rtx op0 = frv_read_argument (call, 0);
8980
8981 op0 = frv_int_to_acc (icode, 0, op0);
8982 if (! op0)
8983 return NULL_RTX;
8984
8985 pat = GEN_FCN (icode) (op0);
8986 if (pat)
8987 emit_insn (pat);
8988
8989 return NULL_RTX;
8990 }
8991
8992 /* Expand builtins that take no arguments. */
8993
8994 static rtx
8995 frv_expand_noargs_builtin (enum insn_code icode)
8996 {
8997 rtx pat = GEN_FCN (icode) (const0_rtx);
8998 if (pat)
8999 emit_insn (pat);
9000
9001 return NULL_RTX;
9002 }
9003
9004 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9005 number or accumulator guard number as argument and return an SI integer. */
9006
9007 static rtx
9008 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9009 {
9010 rtx pat;
9011 rtx target = gen_reg_rtx (SImode);
9012 rtx op0 = frv_read_argument (call, 0);
9013
9014 op0 = frv_int_to_acc (icode, 1, op0);
9015 if (! op0)
9016 return NULL_RTX;
9017
9018 pat = GEN_FCN (icode) (target, op0);
9019 if (! pat)
9020 return NULL_RTX;
9021
9022 emit_insn (pat);
9023 return target;
9024 }
9025
9026 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9027 accumulator guard as their first argument and an SImode value as their
9028 second. */
9029
9030 static rtx
9031 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9032 {
9033 rtx pat;
9034 rtx op0 = frv_read_argument (call, 0);
9035 rtx op1 = frv_read_argument (call, 1);
9036
9037 op0 = frv_int_to_acc (icode, 0, op0);
9038 if (! op0)
9039 return NULL_RTX;
9040
9041 op1 = frv_legitimize_argument (icode, 1, op1);
9042 pat = GEN_FCN (icode) (op0, op1);
9043 if (pat)
9044 emit_insn (pat);
9045
9046 return NULL_RTX;
9047 }
9048
9049 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9050 to move DImode values into and out of IACC0. */
9051
9052 static void
9053 frv_split_iacc_move (rtx dest, rtx src)
9054 {
9055 machine_mode inner;
9056 int i;
9057
9058 inner = GET_MODE (dest);
9059 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
9060 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
9061 simplify_gen_subreg (SImode, src, inner, i));
9062 }
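
/* For a DImode value this emits two SImode moves, covering bytes 0-3
   and 4-7 of the register; the IACCreadll and IACCsetll cases below use
   it to transfer DImode values into and out of IACC0 one word at a
   time.  */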
9063
9064 /* Expand builtins. */
9065
9066 static rtx
9067 frv_expand_builtin (tree exp,
9068 rtx target,
9069 rtx subtarget ATTRIBUTE_UNUSED,
9070 machine_mode mode ATTRIBUTE_UNUSED,
9071 int ignore ATTRIBUTE_UNUSED)
9072 {
9073 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9074 unsigned fcode = DECL_MD_FUNCTION_CODE (fndecl);
9075 unsigned i;
9076 struct builtin_description *d;
9077
9078 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
9079 {
9080 error ("media functions are not available unless %<-mmedia%> is used");
9081 return NULL_RTX;
9082 }
9083
9084 switch (fcode)
9085 {
9086 case FRV_BUILTIN_MCOP1:
9087 case FRV_BUILTIN_MCOP2:
9088 case FRV_BUILTIN_MDUNPACKH:
9089 case FRV_BUILTIN_MBTOHE:
9090 if (! TARGET_MEDIA_REV1)
9091 {
9092 error ("this media function is only available on the fr500");
9093 return NULL_RTX;
9094 }
9095 break;
9096
9097 case FRV_BUILTIN_MQXMACHS:
9098 case FRV_BUILTIN_MQXMACXHS:
9099 case FRV_BUILTIN_MQMACXHS:
9100 case FRV_BUILTIN_MADDACCS:
9101 case FRV_BUILTIN_MSUBACCS:
9102 case FRV_BUILTIN_MASACCS:
9103 case FRV_BUILTIN_MDADDACCS:
9104 case FRV_BUILTIN_MDSUBACCS:
9105 case FRV_BUILTIN_MDASACCS:
9106 case FRV_BUILTIN_MABSHS:
9107 case FRV_BUILTIN_MDROTLI:
9108 case FRV_BUILTIN_MCPLHI:
9109 case FRV_BUILTIN_MCPLI:
9110 case FRV_BUILTIN_MDCUTSSI:
9111 case FRV_BUILTIN_MQSATHS:
9112 case FRV_BUILTIN_MHSETLOS:
9113 case FRV_BUILTIN_MHSETLOH:
9114 case FRV_BUILTIN_MHSETHIS:
9115 case FRV_BUILTIN_MHSETHIH:
9116 case FRV_BUILTIN_MHDSETS:
9117 case FRV_BUILTIN_MHDSETH:
9118 if (! TARGET_MEDIA_REV2)
9119 {
9120 error ("this media function is only available on the fr400"
9121 " and fr550");
9122 return NULL_RTX;
9123 }
9124 break;
9125
9126 case FRV_BUILTIN_SMASS:
9127 case FRV_BUILTIN_SMSSS:
9128 case FRV_BUILTIN_SMU:
9129 case FRV_BUILTIN_ADDSS:
9130 case FRV_BUILTIN_SUBSS:
9131 case FRV_BUILTIN_SLASS:
9132 case FRV_BUILTIN_SCUTSS:
9133 case FRV_BUILTIN_IACCreadll:
9134 case FRV_BUILTIN_IACCreadl:
9135 case FRV_BUILTIN_IACCsetll:
9136 case FRV_BUILTIN_IACCsetl:
9137 if (!TARGET_FR405_BUILTINS)
9138 {
9139 error ("this built-in function is only available"
9140 " on the fr405 and fr450");
9141 return NULL_RTX;
9142 }
9143 break;
9144
9145 case FRV_BUILTIN_PREFETCH:
9146 if (!TARGET_FR500_FR550_BUILTINS)
9147 {
9148 error ("this built-in function is only available on the fr500"
9149 " and fr550");
9150 return NULL_RTX;
9151 }
9152 break;
9153
9154 case FRV_BUILTIN_MQLCLRHS:
9155 case FRV_BUILTIN_MQLMTHS:
9156 case FRV_BUILTIN_MQSLLHI:
9157 case FRV_BUILTIN_MQSRAHI:
9158 if (!TARGET_MEDIA_FR450)
9159 {
9160 error ("this built-in function is only available on the fr450");
9161 return NULL_RTX;
9162 }
9163 break;
9164
9165 default:
9166 break;
9167 }
9168
9169 /* Expand unique builtins. */
9170
9171 switch (fcode)
9172 {
9173 case FRV_BUILTIN_MTRAP:
9174 return frv_expand_noargs_builtin (CODE_FOR_mtrap);
9175
9176 case FRV_BUILTIN_MCLRACC:
9177 return frv_expand_mclracc_builtin (exp);
9178
9179 case FRV_BUILTIN_MCLRACCA:
9180 if (TARGET_ACC_8)
9181 return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
9182 else
9183 return frv_expand_noargs_builtin (CODE_FOR_mclracca4);
9184
9185 case FRV_BUILTIN_MRDACC:
9186 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);
9187
9188 case FRV_BUILTIN_MRDACCG:
9189 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);
9190
9191 case FRV_BUILTIN_MWTACC:
9192 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);
9193
9194 case FRV_BUILTIN_MWTACCG:
9195 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);
9196
9197 case FRV_BUILTIN_MDPACKH:
9198 return frv_expand_mdpackh_builtin (exp, target);
9199
9200 case FRV_BUILTIN_IACCreadll:
9201 {
9202 rtx src = frv_read_iacc_argument (DImode, exp, 0);
9203 if (target == 0 || !REG_P (target))
9204 target = gen_reg_rtx (DImode);
9205 frv_split_iacc_move (target, src);
9206 return target;
9207 }
9208
9209 case FRV_BUILTIN_IACCreadl:
9210 return frv_read_iacc_argument (SImode, exp, 0);
9211
9212 case FRV_BUILTIN_IACCsetll:
9213 {
9214 rtx dest = frv_read_iacc_argument (DImode, exp, 0);
9215 rtx src = frv_read_argument (exp, 1);
9216 frv_split_iacc_move (dest, force_reg (DImode, src));
9217 return 0;
9218 }
9219
9220 case FRV_BUILTIN_IACCsetl:
9221 {
9222 rtx dest = frv_read_iacc_argument (SImode, exp, 0);
9223 rtx src = frv_read_argument (exp, 1);
9224 emit_move_insn (dest, force_reg (SImode, src));
9225 return 0;
9226 }
9227
9228 default:
9229 break;
9230 }
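
/* A usage sketch for the IACC cases handled just above, assuming the
   user-level spellings follow the FRV_BUILTIN_IACC* names and that the
   first argument selects the IACC register:

       long long t = __IACCreadll (0);
       __IACCsetll (0, t + 1);

   The 64-bit accesses are split into SImode halves by
   frv_split_iacc_move; the 32-bit variants are plain SImode moves.  */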
9231
9232 /* Expand groups of builtins. */
9233
9234 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
9235 if (d->code == fcode)
9236 return frv_expand_set_builtin (d->icode, exp, target);
9237
9238 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9239 if (d->code == fcode)
9240 return frv_expand_unop_builtin (d->icode, exp, target);
9241
9242 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9243 if (d->code == fcode)
9244 return frv_expand_binop_builtin (d->icode, exp, target);
9245
9246 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
9247 if (d->code == fcode)
9248 return frv_expand_cut_builtin (d->icode, exp, target);
9249
9250 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
9251 if (d->code == fcode)
9252 return frv_expand_binopimm_builtin (d->icode, exp, target);
9253
9254 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
9255 if (d->code == fcode)
9256 return frv_expand_voidbinop_builtin (d->icode, exp);
9257
9258 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
9259 if (d->code == fcode)
9260 return frv_expand_voidtriop_builtin (d->icode, exp);
9261
9262 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
9263 if (d->code == fcode)
9264 return frv_expand_voidaccop_builtin (d->icode, exp);
9265
9266 for (i = 0, d = bdesc_int_void2arg;
9267 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
9268 if (d->code == fcode)
9269 return frv_expand_int_void2arg (d->icode, exp);
9270
9271 for (i = 0, d = bdesc_prefetches;
9272 i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
9273 if (d->code == fcode)
9274 return frv_expand_prefetches (d->icode, exp);
9275
9276 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
9277 if (d->code == fcode)
9278 return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
9279 exp, target);
9280
9281 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
9282 if (d->code == fcode)
9283 return frv_expand_store_builtin (d->icode, exp);
9284
9285 return 0;
9286 }
9287
9288 static bool
9289 frv_in_small_data_p (const_tree decl)
9290 {
9291 HOST_WIDE_INT size;
9292 const char *section_name;
9293
9294 /* Don't apply the -G flag to internal compiler structures. We
9295 should leave such structures in the main data section, partly
9296 for efficiency and partly because the size of some of them
9297 (such as C++ typeinfos) is not known until later. */
9298 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
9299 return false;
9300
9301 /* If we already know which section the decl should be in, see if
9302 it's a small data section. */
9303 section_name = DECL_SECTION_NAME (decl);
9304 if (section_name)
9305 {
9306 if (startswith (section_name, ".sdata"))
9307 return true;
9308 if (startswith (section_name, ".sbss"))
9309 return true;
9310 return false;
9311 }
9312
9313 size = int_size_in_bytes (TREE_TYPE (decl));
9314 if (size > 0 && size <= g_switch_value)
9315 return true;
9316
9317 return false;
9318 }
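
/* For example, with a small-data threshold of 8 bytes (-G 8), a 4-byte
   definition such as

       int counter;

   is classified as small data by the size test against g_switch_value,
   a 64-byte array is not, and a variable given an explicit
   __attribute__ ((section (".sdata"))) is accepted purely because of
   its section name.  */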
9319 \f
9320 static bool
9321 frv_rtx_costs (rtx x,
9322 machine_mode mode,
9323 int outer_code,
9324 int opno ATTRIBUTE_UNUSED,
9325 int *total,
9326 bool speed ATTRIBUTE_UNUSED)
9327 {
9328 int code = GET_CODE (x);
9329
9330 if (outer_code == MEM)
9331 {
9332 /* Don't differentiate between memory addresses. All the ones
9333 we accept have equal cost. */
9334 *total = COSTS_N_INSNS (0);
9335 return true;
9336 }
9337
9338 switch (code)
9339 {
9340 case CONST_INT:
9341 /* Make signed 12-bit integers really cheap. */
9342 if (IN_RANGE (INTVAL (x), -2048, 2047))
9343 {
9344 *total = 0;
9345 return true;
9346 }
9347 /* Fall through. */
9348
9349 case CONST:
9350 case LABEL_REF:
9351 case SYMBOL_REF:
9352 case CONST_DOUBLE:
9353 *total = COSTS_N_INSNS (2);
9354 return true;
9355
9356 case PLUS:
9357 case MINUS:
9358 case AND:
9359 case IOR:
9360 case XOR:
9361 case ASHIFT:
9362 case ASHIFTRT:
9363 case LSHIFTRT:
9364 case NOT:
9365 case NEG:
9366 case COMPARE:
9367 if (mode == SImode)
9368 *total = COSTS_N_INSNS (1);
9369 else if (mode == DImode)
9370 *total = COSTS_N_INSNS (2);
9371 else
9372 *total = COSTS_N_INSNS (3);
9373 return true;
9374
9375 case MULT:
9376 if (mode == SImode)
9377 *total = COSTS_N_INSNS (2);
9378 else
9379 *total = COSTS_N_INSNS (6); /* guess */
9380 return true;
9381
9382 case DIV:
9383 case UDIV:
9384 case MOD:
9385 case UMOD:
9386 *total = COSTS_N_INSNS (18);
9387 return true;
9388
9389 case MEM:
9390 *total = COSTS_N_INSNS (3);
9391 return true;
9392
9393 default:
9394 return false;
9395 }
9396 }
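
/* Worked examples of the cost model above: an SImode PLUS is
   COSTS_N_INSNS (1) and a DImode PLUS COSTS_N_INSNS (2); a CONST_INT in
   [-2048, 2047] is free while other constants cost COSTS_N_INSNS (2);
   every division or modulus is a flat COSTS_N_INSNS (18); and addresses
   appearing inside a MEM are deliberately costed at 0 so that no
   accepted addressing mode is preferred over another.  */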
9397 \f
9398 static void
9399 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9400 {
9401 switch_to_section (ctors_section);
9402 assemble_align (POINTER_SIZE);
9403 if (TARGET_FDPIC)
9404 {
9405 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9406
9407 gcc_assert (ok);
9408 return;
9409 }
9410 assemble_integer_with_op ("\t.picptr\t", symbol);
9411 }
9412
9413 static void
9414 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9415 {
9416 switch_to_section (dtors_section);
9417 assemble_align (POINTER_SIZE);
9418 if (TARGET_FDPIC)
9419 {
9420 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9421
9422 gcc_assert (ok);
9423 return;
9424 }
9425 assemble_integer_with_op ("\t.picptr\t", symbol);
9426 }
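
/* For a non-FDPIC build the two routines above emit constructor and
   destructor entries of roughly this shape (directive spellings are
   approximate):

       .section  .ctors
       .align    4
       .picptr   symbol

   Under -mfdpic the entry is instead emitted through
   frv_assemble_integer, and that call is asserted to succeed.  */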
9427
9428 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9429
9430 static rtx
9431 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9432 int incoming ATTRIBUTE_UNUSED)
9433 {
9434 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
9435 }
9436
9437 #define TLS_BIAS (2048 - 16)
9438
9439 /* This is called from dwarf2out.cc via TARGET_ASM_OUTPUT_DWARF_DTPREL.
9440 We need to emit DTP-relative relocations. */
9441
9442 static void
9443 frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
9444 {
9445 gcc_assert (size == 4);
9446 fputs ("\t.picptr\ttlsmoff(", file);
9447 /* We want the unbiased TLS offset, so add the bias to the
9448 expression, such that the implicit biasing cancels out. */
9449 output_addr_const (file, plus_constant (Pmode, x, TLS_BIAS));
9450 fputs (")", file);
9451 }
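
/* Since TLS_BIAS is 2048 - 16 = 2032, a DTP-relative reference to the
   symbol X comes out as

       .picptr  tlsmoff(X+2032)

   and the explicit +2032 cancels the implicit bias described above, so
   the DWARF consumer sees the unbiased DTP offset.  */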
9452
9453 #include "gt-frv.h"