1 /* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007
2 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "expr.h"
39 #include "obstack.h"
40 #include "except.h"
41 #include "function.h"
42 #include "optabs.h"
43 #include "toplev.h"
44 #include "basic-block.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include <ctype.h>
48 #include "target.h"
49 #include "target-def.h"
50 #include "targhooks.h"
51 #include "integrate.h"
52 #include "langhooks.h"
53 #include "df.h"
54
55 #ifndef FRV_INLINE
56 #define FRV_INLINE inline
57 #endif
58
59 /* The maximum number of distinct NOP patterns. There are three:
60 nop, fnop and mnop. */
61 #define NUM_NOP_PATTERNS 3
62
63 /* Classification of instructions and units: integer, floating-point/media,
64 branch and control. */
65 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
66
67 /* The DFA names of the units, in packet order. */
68 static const char *const frv_unit_names[] =
69 {
70 "c",
71 "i0", "f0",
72 "i1", "f1",
73 "i2", "f2",
74 "i3", "f3",
75 "b0", "b1"
76 };
77
78 /* The classification of each unit in frv_unit_names[]. */
79 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
80 {
81 GROUP_C,
82 GROUP_I, GROUP_FM,
83 GROUP_I, GROUP_FM,
84 GROUP_I, GROUP_FM,
85 GROUP_I, GROUP_FM,
86 GROUP_B, GROUP_B
87 };
88
89 /* Return the DFA unit code associated with the Nth unit of integer
90 or floating-point group GROUP. */
91 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
92
93 /* Return the number of integer or floating-point unit UNIT
94 (1 for I1, 2 for F2, etc.). */
95 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
96
97 /* The DFA unit number for each unit in frv_unit_names[]. */
98 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
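/* Editor's illustration (inferred only from the definitions above, not part
   of the original file): with GROUP_I == 0 and GROUP_FM == 1,
   NTH_UNIT (GROUP_I, 0) expands to frv_unit_codes[1] (the code for "i0"),
   and NTH_UNIT (GROUP_FM, 1) expands to frv_unit_codes[4] (the code for
   "f1").  Going the other way, UNIT_NUMBER on the index of "i1" (3) gives
   1 and on the index of "f2" (6) gives 2, matching the comment above.  */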
99
100 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
101 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
102 no instruction of type T has been seen. */
103 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
104
105 /* An array of dummy nop INSNs, one for each type of nop that the
106 target supports. */
107 static GTY(()) rtx frv_nops[NUM_NOP_PATTERNS];
108
109 /* The number of nop instructions in frv_nops[]. */
110 static unsigned int frv_num_nops;
111
112 /* Information about one __builtin_read or __builtin_write access, or
113 the combination of several such accesses. The most general value
114 is all-zeros (an unknown access to an unknown address). */
115 struct frv_io {
116 /* The type of access. FRV_IO_UNKNOWN means the access can be either
117 a read or a write. */
118 enum { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE } type;
119
120 /* The constant address being accessed, or zero if not known. */
121 HOST_WIDE_INT const_address;
122
123 /* The run-time address, as used in operand 0 of the membar pattern. */
124 rtx var_address;
125 };
126
127 /* Return true if instruction INSN should be packed with the following
128 instruction. */
129 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
130
131 /* Set the value of PACKING_FLAG_P(INSN). */
132 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
133 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
134
135 /* Loop with REG set to each hard register in rtx X. */
136 #define FOR_EACH_REGNO(REG, X) \
137 for (REG = REGNO (X); \
138 REG < REGNO (X) + HARD_REGNO_NREGS (REGNO (X), GET_MODE (X)); \
139 REG++)
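#if 0
/* Editor's sketch of typical FOR_EACH_REGNO usage (hypothetical code, not
   from the original file): walk every hard register occupied by a REG rtx X,
   which may span several words.  mark_regno_used is a made-up helper.  */
{
  int regno;

  FOR_EACH_REGNO (regno, x)
    mark_regno_used (regno);
}
#endif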
140
141 /* This structure contains machine specific function data. */
142 struct machine_function GTY(())
143 {
144 /* True if we have created an rtx that relies on the stack frame. */
145 int frame_needed;
146
147 /* True if this function contains at least one __builtin_{read,write}*. */
148 bool has_membar_p;
149 };
150
151 /* Temporary register allocation support structure. */
152 typedef struct frv_tmp_reg_struct
153 {
154 HARD_REG_SET regs; /* possible registers to allocate */
155 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
156 }
157 frv_tmp_reg_t;
158
159 /* Register state information for VLIW re-packing phase. */
160 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
161 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
162 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
163 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
164
165 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
166
167 typedef unsigned char regstate_t;
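/* For example (editor's note, derived from the mask definitions above): a
   register written in the "true" arm of a conditional sequence predicated
   on CC3 would carry the state (REGSTATE_MODIFIED | REGSTATE_IF_TRUE | 3);
   REGSTATE_CC_MASK recovers the CC number from such a value.  */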
168
169 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
170 memory move. */
171 enum frv_stack_op
172 {
173 FRV_LOAD,
174 FRV_STORE
175 };
176
177 /* Information required by frv_frame_access. */
178 typedef struct
179 {
180 /* This field is FRV_LOAD if registers are to be loaded from the stack and
181 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
182 the move is being done by the prologue code while FRV_LOAD implies it
183 is being done by the epilogue. */
184 enum frv_stack_op op;
185
186 /* The base register to use when accessing the stack. This may be the
187 frame pointer, stack pointer, or a temporary. The choice of register
188 depends on which part of the frame is being accessed and how big the
189 frame is. */
190 rtx base;
191
192 /* The offset of BASE from the bottom of the current frame, in bytes. */
193 int base_offset;
194 } frv_frame_accessor_t;
195
196 /* Define the information needed to generate branch and scc insns. This is
197 stored from the compare operation. */
198 rtx frv_compare_op0;
199 rtx frv_compare_op1;
200
201 /* Conditional execution support gathered together in one structure. */
202 typedef struct
203 {
204 /* Linked list of insns to add if the conditional execution conversion was
205 successful. Each link points to an EXPR_LIST which points to the pattern
206 of the insn to add, and the insn to be inserted before. */
207 rtx added_insns_list;
208
209 /* Identify which registers are safe to allocate for if conversions to
210 conditional execution. We keep the last allocated register in the
211 register classes between COND_EXEC statements. This will mean we allocate
212 different registers for each different COND_EXEC group if we can. This
213 might allow the scheduler to intermix two different COND_EXEC sections. */
214 frv_tmp_reg_t tmp_reg;
215
216 /* For nested IFs, identify which CC registers are used outside of setting
217 via a compare insn, and using via a check insn. This will allow us to
218 know if we can rewrite the register to use a different register that will
219 be paired with the CR register controlling the nested IF-THEN blocks. */
220 HARD_REG_SET nested_cc_ok_rewrite;
221
222 /* Temporary registers allocated to hold constants during conditional
223 execution. */
224 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
225
226 /* Current number of temp registers available. */
227 int cur_scratch_regs;
228
229 /* Number of nested conditional execution blocks. */
230 int num_nested_cond_exec;
231
232 /* Map of insns that set up constants in scratch registers. */
233 bitmap scratch_insns_bitmap;
234
235 /* Conditional execution test register (CC0..CC7). */
236 rtx cr_reg;
237
238 /* Conditional execution compare register that is paired with cr_reg, so that
239 nested compares can be done. The csubcc and caddcc instructions don't
240 have enough bits to specify both a CC register to be set and a CR register
241 to do the test on, so the same bit number is used for both. Needless to
242 say, this is rather inconvenient for GCC. */
243 rtx nested_cc_reg;
244
245 /* Extra CR registers used for &&, ||. */
246 rtx extra_int_cr;
247 rtx extra_fp_cr;
248
249 /* Previous CR used in nested if, to make sure we are dealing with the same
250 nested if as the previous statement. */
251 rtx last_nested_if_cr;
252 }
253 frv_ifcvt_t;
254
255 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
256
257 /* Map register number to smallest register class. */
258 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
259
260 /* Map class letter into register class. */
261 enum reg_class reg_class_from_letter[256];
262
263 /* Cached value of frv_stack_info. */
264 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
265
266 /* -mcpu= support */
267 frv_cpu_t frv_cpu_type = CPU_TYPE; /* value of -mcpu= */
268
269 /* Forward references */
270
271 static bool frv_handle_option (size_t, const char *, int);
272 static int frv_default_flags_for_cpu (void);
273 static int frv_string_begins_with (tree, const char *);
274 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
275 static void frv_print_operand_memory_reference_reg
276 (FILE *, rtx);
277 static void frv_print_operand_memory_reference (FILE *, rtx, int);
278 static int frv_print_operand_jump_hint (rtx);
279 static const char *comparison_string (enum rtx_code, rtx);
280 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
281 static rtx single_set_pattern (rtx);
282 static int frv_function_contains_far_jump (void);
283 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
284 enum reg_class,
285 enum machine_mode,
286 int, int);
287 static rtx frv_frame_offset_rtx (int);
288 static rtx frv_frame_mem (enum machine_mode, rtx, int);
289 static rtx frv_dwarf_store (rtx, int);
290 static void frv_frame_insn (rtx, rtx);
291 static void frv_frame_access (frv_frame_accessor_t*,
292 rtx, int);
293 static void frv_frame_access_multi (frv_frame_accessor_t*,
294 frv_stack_t *, int);
295 static void frv_frame_access_standard_regs (enum frv_stack_op,
296 frv_stack_t *);
297 static struct machine_function *frv_init_machine_status (void);
298 static rtx frv_int_to_acc (enum insn_code, int, rtx);
299 static enum machine_mode frv_matching_accg_mode (enum machine_mode);
300 static rtx frv_read_argument (tree, unsigned int);
301 static rtx frv_read_iacc_argument (enum machine_mode, tree, unsigned int);
302 static int frv_check_constant_argument (enum insn_code, int, rtx);
303 static rtx frv_legitimize_target (enum insn_code, rtx);
304 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
305 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
306 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
307 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
308 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
309 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
310 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
311 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
312 static rtx frv_expand_int_void2arg (enum insn_code, tree);
313 static rtx frv_expand_prefetches (enum insn_code, tree);
314 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
315 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
316 static rtx frv_expand_mclracc_builtin (tree);
317 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
318 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
319 static rtx frv_expand_noargs_builtin (enum insn_code);
320 static void frv_split_iacc_move (rtx, rtx);
321 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
322 static int frv_clear_registers_used (rtx *, void *);
323 static void frv_ifcvt_add_insn (rtx, rtx, int);
324 static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
325 static rtx frv_ifcvt_load_value (rtx, rtx);
326 static int frv_acc_group_1 (rtx *, void *);
327 static unsigned int frv_insn_unit (rtx);
328 static bool frv_issues_to_branch_unit_p (rtx);
329 static int frv_cond_flags (rtx);
330 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
331 static int frv_registers_conflict_p_1 (rtx *, void *);
332 static bool frv_registers_conflict_p (rtx);
333 static void frv_registers_update_1 (rtx, const_rtx, void *);
334 static void frv_registers_update (rtx);
335 static void frv_start_packet (void);
336 static void frv_start_packet_block (void);
337 static void frv_finish_packet (void (*) (void));
338 static bool frv_pack_insn_p (rtx);
339 static void frv_add_insn_to_packet (rtx);
340 static void frv_insert_nop_in_packet (rtx);
341 static bool frv_for_each_packet (void (*) (void));
342 static bool frv_sort_insn_group_1 (enum frv_insn_group,
343 unsigned int, unsigned int,
344 unsigned int, unsigned int,
345 state_t);
346 static int frv_compare_insns (const void *, const void *);
347 static void frv_sort_insn_group (enum frv_insn_group);
348 static void frv_reorder_packet (void);
349 static void frv_fill_unused_units (enum frv_insn_group);
350 static void frv_align_label (void);
351 static void frv_reorg_packet (void);
352 static void frv_register_nop (rtx);
353 static void frv_reorg (void);
354 static void frv_pack_insns (void);
355 static void frv_function_prologue (FILE *, HOST_WIDE_INT);
356 static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
357 static bool frv_assemble_integer (rtx, unsigned, int);
358 static void frv_init_builtins (void);
359 static rtx frv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
360 static void frv_init_libfuncs (void);
361 static bool frv_in_small_data_p (tree);
362 static void frv_asm_output_mi_thunk
363 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
364 static void frv_setup_incoming_varargs (CUMULATIVE_ARGS *,
365 enum machine_mode,
366 tree, int *, int);
367 static rtx frv_expand_builtin_saveregs (void);
368 static bool frv_rtx_costs (rtx, int, int, int*);
369 static void frv_asm_out_constructor (rtx, int);
370 static void frv_asm_out_destructor (rtx, int);
371 static bool frv_function_symbol_referenced_p (rtx);
372 static bool frv_cannot_force_const_mem (rtx);
373 static const char *unspec_got_name (int);
374 static void frv_output_const_unspec (FILE *,
375 const struct frv_unspec *);
376 static bool frv_function_ok_for_sibcall (tree, tree);
377 static rtx frv_struct_value_rtx (tree, int);
378 static bool frv_must_pass_in_stack (enum machine_mode mode, const_tree type);
379 static int frv_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
380 tree, bool);
381 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
382 ATTRIBUTE_UNUSED;
383 \f
384 /* Allow us to easily change the default for -malloc-cc. */
385 #ifndef DEFAULT_NO_ALLOC_CC
386 #define MASK_DEFAULT_ALLOC_CC MASK_ALLOC_CC
387 #else
388 #define MASK_DEFAULT_ALLOC_CC 0
389 #endif
390 \f
391 /* Initialize the GCC target structure. */
392 #undef TARGET_ASM_FUNCTION_PROLOGUE
393 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
394 #undef TARGET_ASM_FUNCTION_EPILOGUE
395 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
396 #undef TARGET_ASM_INTEGER
397 #define TARGET_ASM_INTEGER frv_assemble_integer
398 #undef TARGET_DEFAULT_TARGET_FLAGS
399 #define TARGET_DEFAULT_TARGET_FLAGS \
400 (MASK_DEFAULT_ALLOC_CC \
401 | MASK_COND_MOVE \
402 | MASK_SCC \
403 | MASK_COND_EXEC \
404 | MASK_VLIW_BRANCH \
405 | MASK_MULTI_CE \
406 | MASK_NESTED_CE)
407 #undef TARGET_HANDLE_OPTION
408 #define TARGET_HANDLE_OPTION frv_handle_option
409 #undef TARGET_INIT_BUILTINS
410 #define TARGET_INIT_BUILTINS frv_init_builtins
411 #undef TARGET_EXPAND_BUILTIN
412 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
413 #undef TARGET_INIT_LIBFUNCS
414 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
415 #undef TARGET_IN_SMALL_DATA_P
416 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
417 #undef TARGET_RTX_COSTS
418 #define TARGET_RTX_COSTS frv_rtx_costs
419 #undef TARGET_ASM_CONSTRUCTOR
420 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
421 #undef TARGET_ASM_DESTRUCTOR
422 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
423
424 #undef TARGET_ASM_OUTPUT_MI_THUNK
425 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
426 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
427 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
428
429 #undef TARGET_SCHED_ISSUE_RATE
430 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
431
432 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
433 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
434 #undef TARGET_CANNOT_FORCE_CONST_MEM
435 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
436
437 #undef TARGET_HAVE_TLS
438 #define TARGET_HAVE_TLS HAVE_AS_TLS
439
440 #undef TARGET_STRUCT_VALUE_RTX
441 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
442 #undef TARGET_MUST_PASS_IN_STACK
443 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
444 #undef TARGET_PASS_BY_REFERENCE
445 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
446 #undef TARGET_ARG_PARTIAL_BYTES
447 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
448
449 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
450 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
451 #undef TARGET_SETUP_INCOMING_VARARGS
452 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
453 #undef TARGET_MACHINE_DEPENDENT_REORG
454 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
455
456 #if HAVE_AS_TLS
457 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
458 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
459 #endif
460
461 struct gcc_target targetm = TARGET_INITIALIZER;
462
463 #define FRV_SYMBOL_REF_TLS_P(RTX) \
464 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
465
466 \f
467 /* Any function call that satisfies the machine-independent
468 requirements is eligible on FR-V. */
469
470 static bool
471 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
472 tree exp ATTRIBUTE_UNUSED)
473 {
474 return true;
475 }
476
477 /* Return true if SYMBOL is a small data symbol and relocation RELOC
478 can be used to access it directly in a load or store. */
479
480 static FRV_INLINE bool
481 frv_small_data_reloc_p (rtx symbol, int reloc)
482 {
483 return (GET_CODE (symbol) == SYMBOL_REF
484 && SYMBOL_REF_SMALL_P (symbol)
485 && (!TARGET_FDPIC || flag_pic == 1)
486 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
487 }
488
489 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
490 appropriately. */
491
492 bool
493 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
494 {
495 if (GET_CODE (x) == CONST)
496 {
497 unspec->offset = 0;
498 x = XEXP (x, 0);
499 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
500 {
501 unspec->offset += INTVAL (XEXP (x, 1));
502 x = XEXP (x, 0);
503 }
504 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
505 {
506 unspec->symbol = XVECEXP (x, 0, 0);
507 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
508
509 if (unspec->offset == 0)
510 return true;
511
512 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
513 && unspec->offset > 0
514 && (unsigned HOST_WIDE_INT) unspec->offset < g_switch_value)
515 return true;
516 }
517 }
518 return false;
519 }
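/* Editor's illustration of the rtx shapes accepted above (not part of the
   original file):

       (const (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT))
       (const (plus (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT)
                    (const_int OFFSET)))

   A nonzero OFFSET is accepted only when it is positive, smaller than
   g_switch_value, and RELOC is a small-data relocation.  */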
520
521 /* Decide whether we can force certain constants to memory. If we
522 decide we can't, the caller should be able to cope with it in
523 another way.
524
525 We never allow constants to be forced into memory for TARGET_FDPIC.
526 This is necessary for several reasons:
527
528 1. Since LEGITIMATE_CONSTANT_P rejects constant pool addresses, the
529 target-independent code will try to force them into the constant
530 pool, thus leading to infinite recursion.
531
532 2. We can never introduce new constant pool references during reload.
533 Any such reference would require use of the pseudo FDPIC register.
534
535 3. We can't represent a constant added to a function pointer (which is
536 not the same as a pointer to a function+constant).
537
538 4. In many cases, it's more efficient to calculate the constant in-line. */
539
540 static bool
541 frv_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
542 {
543 return TARGET_FDPIC;
544 }
545 \f
546 /* Implement TARGET_HANDLE_OPTION. */
547
548 static bool
549 frv_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
550 {
551 switch (code)
552 {
553 case OPT_mcpu_:
554 if (strcmp (arg, "simple") == 0)
555 frv_cpu_type = FRV_CPU_SIMPLE;
556 else if (strcmp (arg, "tomcat") == 0)
557 frv_cpu_type = FRV_CPU_TOMCAT;
558 else if (strcmp (arg, "fr550") == 0)
559 frv_cpu_type = FRV_CPU_FR550;
560 else if (strcmp (arg, "fr500") == 0)
561 frv_cpu_type = FRV_CPU_FR500;
562 else if (strcmp (arg, "fr450") == 0)
563 frv_cpu_type = FRV_CPU_FR450;
564 else if (strcmp (arg, "fr405") == 0)
565 frv_cpu_type = FRV_CPU_FR405;
566 else if (strcmp (arg, "fr400") == 0)
567 frv_cpu_type = FRV_CPU_FR400;
568 else if (strcmp (arg, "fr300") == 0)
569 frv_cpu_type = FRV_CPU_FR300;
570 else if (strcmp (arg, "frv") == 0)
571 frv_cpu_type = FRV_CPU_GENERIC;
572 else
573 return false;
574 return true;
575
576 default:
577 return true;
578 }
579 }
580
581 static int
582 frv_default_flags_for_cpu (void)
583 {
584 switch (frv_cpu_type)
585 {
586 case FRV_CPU_GENERIC:
587 return MASK_DEFAULT_FRV;
588
589 case FRV_CPU_FR550:
590 return MASK_DEFAULT_FR550;
591
592 case FRV_CPU_FR500:
593 case FRV_CPU_TOMCAT:
594 return MASK_DEFAULT_FR500;
595
596 case FRV_CPU_FR450:
597 return MASK_DEFAULT_FR450;
598
599 case FRV_CPU_FR405:
600 case FRV_CPU_FR400:
601 return MASK_DEFAULT_FR400;
602
603 case FRV_CPU_FR300:
604 case FRV_CPU_SIMPLE:
605 return MASK_DEFAULT_SIMPLE;
606
607 default:
608 gcc_unreachable ();
609 }
610 }
611
612 /* Sometimes certain combinations of command options do not make
613 sense on a particular target machine. You can define a macro
614 `OVERRIDE_OPTIONS' to take account of this. This macro, if
615 defined, is executed once just after all the command options have
616 been parsed.
617
618 Don't use this macro to turn on various extra optimizations for
619 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
620
621 void
622 frv_override_options (void)
623 {
624 int regno;
625 unsigned int i;
626
627 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
628
629 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
630 linker about linking pic and non-pic code. */
631 if (TARGET_LIBPIC)
632 {
633 if (!flag_pic) /* -fPIC */
634 flag_pic = 2;
635
636 if (! g_switch_set) /* -G0 */
637 {
638 g_switch_set = 1;
639 g_switch_value = 0;
640 }
641 }
642
643 /* A C expression whose value is a register class containing hard
644 register REGNO. In general there is more than one such class;
645 choose a class which is "minimal", meaning that no smaller class
646 also contains the register. */
647
648 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
649 {
650 enum reg_class class;
651
652 if (GPR_P (regno))
653 {
654 int gpr_reg = regno - GPR_FIRST;
655
656 if (gpr_reg == GR8_REG)
657 class = GR8_REGS;
658
659 else if (gpr_reg == GR9_REG)
660 class = GR9_REGS;
661
662 else if (gpr_reg == GR14_REG)
663 class = FDPIC_FPTR_REGS;
664
665 else if (gpr_reg == FDPIC_REGNO)
666 class = FDPIC_REGS;
667
668 else if ((gpr_reg & 3) == 0)
669 class = QUAD_REGS;
670
671 else if ((gpr_reg & 1) == 0)
672 class = EVEN_REGS;
673
674 else
675 class = GPR_REGS;
676 }
677
678 else if (FPR_P (regno))
679 {
680 int fpr_reg = regno - GPR_FIRST;
681 if ((fpr_reg & 3) == 0)
682 class = QUAD_FPR_REGS;
683
684 else if ((fpr_reg & 1) == 0)
685 class = FEVEN_REGS;
686
687 else
688 class = FPR_REGS;
689 }
690
691 else if (regno == LR_REGNO)
692 class = LR_REG;
693
694 else if (regno == LCR_REGNO)
695 class = LCR_REG;
696
697 else if (ICC_P (regno))
698 class = ICC_REGS;
699
700 else if (FCC_P (regno))
701 class = FCC_REGS;
702
703 else if (ICR_P (regno))
704 class = ICR_REGS;
705
706 else if (FCR_P (regno))
707 class = FCR_REGS;
708
709 else if (ACC_P (regno))
710 {
711 int r = regno - ACC_FIRST;
712 if ((r & 3) == 0)
713 class = QUAD_ACC_REGS;
714 else if ((r & 1) == 0)
715 class = EVEN_ACC_REGS;
716 else
717 class = ACC_REGS;
718 }
719
720 else if (ACCG_P (regno))
721 class = ACCG_REGS;
722
723 else
724 class = NO_REGS;
725
726 regno_reg_class[regno] = class;
727 }
728
729 /* Check for small data option */
730 if (!g_switch_set)
731 g_switch_value = SDATA_DEFAULT_SIZE;
732
733 /* A C expression which defines the machine-dependent operand
734 constraint letters for register classes. If CHAR is such a
735 letter, the value should be the register class corresponding to
736 it. Otherwise, the value should be `NO_REGS'. The register
737 letter `r', corresponding to class `GENERAL_REGS', will not be
738 passed to this macro; you do not need to handle it.
739
740 The following letters are unavailable, due to being used as
741 constraints:
742 '0'..'9'
743 '<', '>'
744 'E', 'F', 'G', 'H'
745 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P'
746 'Q', 'R', 'S', 'T', 'U'
747 'V', 'X'
748 'g', 'i', 'm', 'n', 'o', 'p', 'r', 's' */
749
750 for (i = 0; i < 256; i++)
751 reg_class_from_letter[i] = NO_REGS;
752
753 reg_class_from_letter['a'] = ACC_REGS;
754 reg_class_from_letter['b'] = EVEN_ACC_REGS;
755 reg_class_from_letter['c'] = CC_REGS;
756 reg_class_from_letter['d'] = GPR_REGS;
757 reg_class_from_letter['e'] = EVEN_REGS;
758 reg_class_from_letter['f'] = FPR_REGS;
759 reg_class_from_letter['h'] = FEVEN_REGS;
760 reg_class_from_letter['l'] = LR_REG;
761 reg_class_from_letter['q'] = QUAD_REGS;
762 reg_class_from_letter['t'] = ICC_REGS;
763 reg_class_from_letter['u'] = FCC_REGS;
764 reg_class_from_letter['v'] = ICR_REGS;
765 reg_class_from_letter['w'] = FCR_REGS;
766 reg_class_from_letter['x'] = QUAD_FPR_REGS;
767 reg_class_from_letter['y'] = LCR_REG;
768 reg_class_from_letter['z'] = SPR_REGS;
769 reg_class_from_letter['A'] = QUAD_ACC_REGS;
770 reg_class_from_letter['B'] = ACCG_REGS;
771 reg_class_from_letter['C'] = CR_REGS;
772 reg_class_from_letter['W'] = FDPIC_CALL_REGS; /* gp14+15 */
773 reg_class_from_letter['Z'] = FDPIC_REGS; /* gp15 */
774
775 /* There is no single unaligned SI op for PIC code. Sometimes we
776 need to use ".4byte" and sometimes we need to use ".picptr".
777 See frv_assemble_integer for details. */
778 if (flag_pic || TARGET_FDPIC)
779 targetm.asm_out.unaligned_op.si = 0;
780
781 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
782 target_flags |= MASK_LINKED_FP;
783
784 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
785 target_flags |= MASK_OPTIMIZE_MEMBAR;
786
787 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
788 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
789
790 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
791 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
792
793 init_machine_status = frv_init_machine_status;
794 }
795
796 \f
797 /* Some machines may desire to change what optimizations are performed for
798 various optimization levels. This macro, if defined, is executed once just
799 after the optimization level is determined and before the remainder of the
800 command options have been parsed. Values set in this macro are used as the
801 default values for the other command line options.
802
803 LEVEL is the optimization level specified; 2 if `-O2' is specified, 1 if
804 `-O' is specified, and 0 if neither is specified.
805
806 SIZE is nonzero if `-Os' is specified, 0 otherwise.
807
808 You should not use this macro to change options that are not
809 machine-specific. These should be uniformly selected by the same optimization
810 level on all supported machines. Use this macro to enable machine-specific
811 optimizations.
812
813 *Do not examine `write_symbols' in this macro!* The debugging options are
814 *not supposed to alter the generated code. */
815
816 /* On the FRV, possibly disable VLIW packing which is done by the 2nd
817 scheduling pass at the current time. */
818 void
819 frv_optimization_options (int level, int size ATTRIBUTE_UNUSED)
820 {
821 if (level >= 2)
822 {
823 #ifdef DISABLE_SCHED2
824 flag_schedule_insns_after_reload = 0;
825 #endif
826 #ifdef ENABLE_RCSP
827 flag_rcsp = 1;
828 #endif
829 }
830 }
831
832 \f
833 /* Return true if NAME (a STRING_CST node) begins with PREFIX. */
834
835 static int
836 frv_string_begins_with (tree name, const char *prefix)
837 {
838 int prefix_len = strlen (prefix);
839
840 /* Remember: NAME's length includes the null terminator. */
841 return (TREE_STRING_LENGTH (name) > prefix_len
842 && strncmp (TREE_STRING_POINTER (name), prefix, prefix_len) == 0);
843 }
844 \f
845 /* Zero or more C statements that may conditionally modify two variables
846 `fixed_regs' and `call_used_regs' (both of type `char []') after they have
847 been initialized from the two preceding macros.
848
849 This is necessary in case the fixed or call-clobbered registers depend on
850 target flags.
851
852 You need not define this macro if it has no work to do.
853
854 If the usage of an entire class of registers depends on the target flags,
855 you may indicate this to GCC by using this macro to modify `fixed_regs' and
856 `call_used_regs' to 1 for each of the registers in the classes which should
857 not be used by GCC. Also define the macro `REG_CLASS_FROM_LETTER' to return
858 `NO_REGS' if it is called with a letter for a class that shouldn't be used.
859
860 (However, if this class is not included in `GENERAL_REGS' and all of the
861 insn patterns whose constraints permit this class are controlled by target
862 switches, then GCC will automatically avoid using these registers when the
863 target switches are opposed to them.) */
864
865 void
866 frv_conditional_register_usage (void)
867 {
868 int i;
869
870 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
871 fixed_regs[i] = call_used_regs[i] = 1;
872
873 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
874 fixed_regs[i] = call_used_regs[i] = 1;
875
876 /* Reserve the registers used for conditional execution. At present, we need
877 1 ICC and 1 ICR register. */
878 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
879 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
880
881 if (TARGET_FIXED_CC)
882 {
883 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
884 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
885 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
886 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
887 }
888
889 if (TARGET_FDPIC)
890 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
891 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
892
893 #if 0
894 /* If -fpic, SDA_BASE_REG is the PIC register. */
895 if (g_switch_value == 0 && !flag_pic)
896 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
897
898 if (!flag_pic)
899 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
900 #endif
901 }
902
903 \f
904 /*
905 * Compute the stack frame layout
906 *
907 * Register setup:
908 * +---------------+-----------------------+-----------------------+
909 * |Register |type |caller-save/callee-save|
910 * +---------------+-----------------------+-----------------------+
911 * |GR0 |Zero register | - |
912 * |GR1 |Stack pointer(SP) | - |
913 * |GR2 |Frame pointer(FP) | - |
914 * |GR3 |Hidden parameter | caller save |
915 * |GR4-GR7 | - | caller save |
916 * |GR8-GR13 |Argument register | caller save |
917 * |GR14-GR15 | - | caller save |
918 * |GR16-GR31 | - | callee save |
919 * |GR32-GR47 | - | caller save |
920 * |GR48-GR63 | - | callee save |
921 * |FR0-FR15 | - | caller save |
922 * |FR16-FR31 | - | callee save |
923 * |FR32-FR47 | - | caller save |
924 * |FR48-FR63 | - | callee save |
925 * +---------------+-----------------------+-----------------------+
926 *
927 * Stack frame setup:
928 * Low
929 * SP-> |-----------------------------------|
930 * | Argument area |
931 * |-----------------------------------|
932 * | Register save area |
933 * |-----------------------------------|
934 * | Local variable save area |
935 * FP-> |-----------------------------------|
936 * | Old FP |
937 * |-----------------------------------|
938 * | Hidden parameter save area |
939 * |-----------------------------------|
940 * | Return address(LR) storage area |
941 * |-----------------------------------|
942 * | Padding for alignment |
943 * |-----------------------------------|
944 * | Register argument area |
945 * OLD SP-> |-----------------------------------|
946 * | Parameter area |
947 * |-----------------------------------|
948 * High
949 *
950 * Argument area/Parameter area:
951 *
952 * When a function is called, this area is used for argument transfer. When
953 * the argument is set up by the caller function, this area is referred to as
954 * the argument area. When the argument is referenced by the callee function,
955 * this area is referred to as the parameter area. The area is allocated when
956 * all arguments cannot be placed on the argument register at the time of
957 * argument transfer.
958 *
959 * Register save area:
960 *
961 * This is a register save area that must be guaranteed for the caller
962 * function. This area is not secured when the register save operation is not
963 * needed.
964 *
965 * Local variable save area:
966 *
967 * This is the area for local variables and temporary variables.
968 *
969 * Old FP:
970 *
971 * This area stores the FP value of the caller function.
972 *
973 * Hidden parameter save area:
974 *
975 * This area stores the start address of the return value storage
976 * area for a struct/union return function.
977 * When a struct/union is used as the return value, the caller
978 * function stores the return value storage area start address in
979 * register GR3 and passes it to the callee function.
980 * The callee function interprets the address stored in the GR3
981 * as the return value storage area start address.
982 * When register GR3 needs to be saved into memory, the callee
983 * function saves it in the hidden parameter save area. This
984 * area is not secured when the save operation is not needed.
985 *
986 * Return address(LR) storage area:
987 *
988 * This area saves the LR. The LR stores the address of a return to the caller
989 * function for the purpose of function calling.
990 *
991 * Argument register area:
992 *
993 * This area saves the argument register. This area is not secured when the
994 * save operation is not needed.
995 *
996 * Argument:
997 *
998 * Arguments, the count of which equals the count of argument registers (6
999 * words), are positioned in registers GR8 to GR13 and delivered to the callee
1000 * function. When a struct/union return function is called, the return value
1001 * area address is stored in register GR3. Arguments not placed in the
1002 * argument registers will be stored in the stack argument area for transfer
1003 * purposes. When an 8-byte type argument is to be delivered using registers,
1004 * it is divided into two and placed in two registers for transfer. When
1005 * argument registers must be saved to memory, the callee function secures an
1006 * argument register save area in the stack. In this case, a continuous
1007 * argument register save area must be established in the parameter area. The
1008 * argument register save area must be allocated as needed to cover the size of
1009 * the argument register to be saved. If the function has a variable count of
1010 * arguments, it saves all argument registers in the argument register save
1011 * area.
1012 *
1013 * Argument Extension Format:
1014 *
1015 * When an argument is to be stored in the stack, its type is converted to an
1016 * extended type in accordance with the individual argument type. The argument
1017 * is freed by the caller function after the return from the callee function is
1018 * made.
1019 *
1020 * +-----------------------+---------------+------------------------+
1021 * | Argument Type |Extended Type |Stack Storage Size(byte)|
1022 * +-----------------------+---------------+------------------------+
1023 * |char |int | 4 |
1024 * |signed char |int | 4 |
1025 * |unsigned char |int | 4 |
1026 * |[signed] short int |int | 4 |
1027 * |unsigned short int |int | 4 |
1028 * |[signed] int |No extension | 4 |
1029 * |unsigned int |No extension | 4 |
1030 * |[signed] long int |No extension | 4 |
1031 * |unsigned long int |No extension | 4 |
1032 * |[signed] long long int |No extension | 8 |
1033 * |unsigned long long int |No extension | 8 |
1034 * |float |double | 8 |
1035 * |double |No extension | 8 |
1036 * |long double |No extension | 8 |
1037 * |pointer |No extension | 4 |
1038 * |struct/union |- | 4 (*1) |
1039 * +-----------------------+---------------+------------------------+
1040 *
1041 * When a struct/union is to be delivered as an argument, the caller copies it
1042 * to the local variable area and delivers the address of that area.
1043 *
1044 * Return Value:
1045 *
1046 * +-------------------------------+----------------------+
1047 * |Return Value Type |Return Value Interface|
1048 * +-------------------------------+----------------------+
1049 * |void |None |
1050 * |[signed|unsigned] char |GR8 |
1051 * |[signed|unsigned] short int |GR8 |
1052 * |[signed|unsigned] int |GR8 |
1053 * |[signed|unsigned] long int |GR8 |
1054 * |pointer |GR8 |
1055 * |[signed|unsigned] long long int|GR8 & GR9 |
1056 * |float |GR8 |
1057 * |double |GR8 & GR9 |
1058 * |long double |GR8 & GR9 |
1059 * |struct/union |(*1) |
1060 * +-------------------------------+----------------------+
1061 *
1062 * When a struct/union is used as the return value, the caller function stores
1063 * the start address of the return value storage area into GR3 and then passes
1064 * it to the callee function. The callee function interprets GR3 as the start
1065 * address of the return value storage area. When this address needs to be
1066 * saved in memory, the callee function secures the hidden parameter save area
1067 * and saves the address in that area.
1068 */
1069
1070 frv_stack_t *
1071 frv_stack_info (void)
1072 {
1073 static frv_stack_t info, zero_info;
1074 frv_stack_t *info_ptr = &info;
1075 tree fndecl = current_function_decl;
1076 int varargs_p = 0;
1077 tree cur_arg;
1078 tree next_arg;
1079 int range;
1080 int alignment;
1081 int offset;
1082
1083 /* If we've already calculated the values and reload is complete,
1084 just return now. */
1085 if (frv_stack_cache)
1086 return frv_stack_cache;
1087
1088 /* Zero all fields. */
1089 info = zero_info;
1090
1091 /* Set up the register range information. */
1092 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1093 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1094 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1095 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1096
1097 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1098 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1099 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1100 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1101
1102 info_ptr->regs[STACK_REGS_LR].name = "lr";
1103 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1104 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1105 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1106
1107 info_ptr->regs[STACK_REGS_CC].name = "cc";
1108 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1109 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1110 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1111
1112 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1113 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1114 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1115
1116 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1117 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1118 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1119 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1120 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1121
1122 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1123 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1124 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1125 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1126
1127 info_ptr->regs[STACK_REGS_FP].name = "fp";
1128 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1129 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1130 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1131
1132 /* Determine if this is a stdarg function. If so, allocate space to store
1133 the 6 arguments. */
1134 if (cfun->stdarg)
1135 varargs_p = 1;
1136
1137 else
1138 {
1139 /* Find the last argument, and see if it is __builtin_va_alist. */
1140 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1141 {
1142 next_arg = TREE_CHAIN (cur_arg);
1143 if (next_arg == (tree)0)
1144 {
1145 if (DECL_NAME (cur_arg)
1146 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1147 varargs_p = 1;
1148
1149 break;
1150 }
1151 }
1152 }
1153
1154 /* Iterate over all of the register ranges. */
1155 for (range = 0; range < STACK_REGS_MAX; range++)
1156 {
1157 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1158 int first = reg_ptr->first;
1159 int last = reg_ptr->last;
1160 int size_1word = 0;
1161 int size_2words = 0;
1162 int regno;
1163
1164 /* Calculate which registers need to be saved & save area size. */
1165 switch (range)
1166 {
1167 default:
1168 for (regno = first; regno <= last; regno++)
1169 {
1170 if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
1171 || (current_function_calls_eh_return
1172 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1173 || (!TARGET_FDPIC && flag_pic
1174 && cfun->uses_pic_offset_table && regno == PIC_REGNO))
1175 {
1176 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1177 size_1word += UNITS_PER_WORD;
1178 }
1179 }
1180 break;
1181
1182 /* Calculate whether we need to create a frame after everything else
1183 has been processed. */
1184 case STACK_REGS_FP:
1185 break;
1186
1187 case STACK_REGS_LR:
1188 if (df_regs_ever_live_p (LR_REGNO)
1189 || profile_flag
1190 /* This is set for __builtin_return_address, etc. */
1191 || cfun->machine->frame_needed
1192 || (TARGET_LINKED_FP && frame_pointer_needed)
1193 || (!TARGET_FDPIC && flag_pic
1194 && cfun->uses_pic_offset_table))
1195 {
1196 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1197 size_1word += UNITS_PER_WORD;
1198 }
1199 break;
1200
1201 case STACK_REGS_STDARG:
1202 if (varargs_p)
1203 {
1204 /* If this is a stdarg function with a non-variadic
1205 argument split between registers and the stack,
1206 adjust the saved registers downward. */
1207 last -= (ADDR_ALIGN (cfun->pretend_args_size, UNITS_PER_WORD)
1208 / UNITS_PER_WORD);
1209
1210 for (regno = first; regno <= last; regno++)
1211 {
1212 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1213 size_1word += UNITS_PER_WORD;
1214 }
1215
1216 info_ptr->stdarg_size = size_1word;
1217 }
1218 break;
1219
1220 case STACK_REGS_STRUCT:
1221 if (cfun->returns_struct)
1222 {
1223 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1224 size_1word += UNITS_PER_WORD;
1225 }
1226 break;
1227 }
1228
1229
1230 if (size_1word)
1231 {
1232 /* If this is a field, it only takes one word. */
1233 if (reg_ptr->field_p)
1234 size_1word = UNITS_PER_WORD;
1235
1236 /* Determine which register pairs can be saved together. */
1237 else if (reg_ptr->dword_p && TARGET_DWORD)
1238 {
1239 for (regno = first; regno < last; regno += 2)
1240 {
1241 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1242 {
1243 size_2words += 2 * UNITS_PER_WORD;
1244 size_1word -= 2 * UNITS_PER_WORD;
1245 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1246 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1247 }
1248 }
1249 }
1250
1251 reg_ptr->size_1word = size_1word;
1252 reg_ptr->size_2words = size_2words;
1253
1254 if (! reg_ptr->special_p)
1255 {
1256 info_ptr->regs_size_1word += size_1word;
1257 info_ptr->regs_size_2words += size_2words;
1258 }
1259 }
1260 }
1261
1262 /* Set up the sizes of each field in the frame body, making the sizes
1263 of each be divisible by the size of a dword if dword operations might
1264 be used, or the size of a word otherwise. */
1265 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1266
1267 info_ptr->parameter_size = ADDR_ALIGN (cfun->outgoing_args_size, alignment);
1268 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1269 + info_ptr->regs_size_1word,
1270 alignment);
1271 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1272
1273 info_ptr->pretend_size = cfun->pretend_args_size;
1274
1275 /* Work out the size of the frame, excluding the header. Both the frame
1276 body and register parameter area will be dword-aligned. */
1277 info_ptr->total_size
1278 = (ADDR_ALIGN (info_ptr->parameter_size
1279 + info_ptr->regs_size
1280 + info_ptr->vars_size,
1281 2 * UNITS_PER_WORD)
1282 + ADDR_ALIGN (info_ptr->pretend_size
1283 + info_ptr->stdarg_size,
1284 2 * UNITS_PER_WORD));
1285
1286 /* See if we need to create a frame at all, if so add header area. */
1287 if (info_ptr->total_size > 0
1288 || frame_pointer_needed
1289 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1290 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1291 {
1292 offset = info_ptr->parameter_size;
1293 info_ptr->header_size = 4 * UNITS_PER_WORD;
1294 info_ptr->total_size += 4 * UNITS_PER_WORD;
1295
1296 /* Calculate the offsets to save normal register pairs. */
1297 for (range = 0; range < STACK_REGS_MAX; range++)
1298 {
1299 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1300 if (! reg_ptr->special_p)
1301 {
1302 int first = reg_ptr->first;
1303 int last = reg_ptr->last;
1304 int regno;
1305
1306 for (regno = first; regno <= last; regno++)
1307 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1308 && regno != FRAME_POINTER_REGNUM
1309 && (regno < FIRST_ARG_REGNUM
1310 || regno > LAST_ARG_REGNUM))
1311 {
1312 info_ptr->reg_offset[regno] = offset;
1313 offset += 2 * UNITS_PER_WORD;
1314 }
1315 }
1316 }
1317
1318 /* Calculate the offsets to save normal single registers. */
1319 for (range = 0; range < STACK_REGS_MAX; range++)
1320 {
1321 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1322 if (! reg_ptr->special_p)
1323 {
1324 int first = reg_ptr->first;
1325 int last = reg_ptr->last;
1326 int regno;
1327
1328 for (regno = first; regno <= last; regno++)
1329 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1330 && regno != FRAME_POINTER_REGNUM
1331 && (regno < FIRST_ARG_REGNUM
1332 || regno > LAST_ARG_REGNUM))
1333 {
1334 info_ptr->reg_offset[regno] = offset;
1335 offset += UNITS_PER_WORD;
1336 }
1337 }
1338 }
1339
1340 /* Calculate the offset to save the local variables at. */
1341 offset = ADDR_ALIGN (offset, alignment);
1342 if (info_ptr->vars_size)
1343 {
1344 info_ptr->vars_offset = offset;
1345 offset += info_ptr->vars_size;
1346 }
1347
1348 /* Align header to a dword-boundary. */
1349 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1350
1351 /* Calculate the offsets in the fixed frame. */
1352 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1353 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1354 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1355
1356 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1357 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1358 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1359
1360 if (cfun->returns_struct)
1361 {
1362 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1363 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1364 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1365 }
1366
1367 /* Calculate the offsets to store the arguments passed in registers
1368 for stdarg functions. The register pairs come first and the single
1369 register, if any, comes last. The register save area starts on a
1370 dword-boundary. */
1371 if (info_ptr->stdarg_size)
1372 {
1373 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1374 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1375 int regno;
1376
1377 /* Skip the header. */
1378 offset += 4 * UNITS_PER_WORD;
1379 for (regno = first; regno <= last; regno++)
1380 {
1381 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1382 {
1383 info_ptr->reg_offset[regno] = offset;
1384 offset += 2 * UNITS_PER_WORD;
1385 }
1386 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1387 {
1388 info_ptr->reg_offset[regno] = offset;
1389 offset += UNITS_PER_WORD;
1390 }
1391 }
1392 }
1393 }
1394
1395 if (reload_completed)
1396 frv_stack_cache = info_ptr;
1397
1398 return info_ptr;
1399 }
1400
1401 \f
1402 /* Print the information about the frv stack offsets, etc. when debugging. */
1403
1404 void
1405 frv_debug_stack (frv_stack_t *info)
1406 {
1407 int range;
1408
1409 if (!info)
1410 info = frv_stack_info ();
1411
1412 fprintf (stderr, "\nStack information for function %s:\n",
1413 ((current_function_decl && DECL_NAME (current_function_decl))
1414 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1415 : "<unknown>"));
1416
1417 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1418 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1419 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1420 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1421 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1422
1423 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1424 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1425 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1426 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1427
1428 for (range = 0; range < STACK_REGS_MAX; range++)
1429 {
1430 frv_stack_regs_t *regs = &(info->regs[range]);
1431 if ((regs->size_1word + regs->size_2words) > 0)
1432 {
1433 int first = regs->first;
1434 int last = regs->last;
1435 int regno;
1436
1437 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1438 regs->name, regs->size_1word + regs->size_2words,
1439 regs->size_1word, regs->size_2words);
1440
1441 for (regno = first; regno <= last; regno++)
1442 {
1443 if (info->save_p[regno] == REG_SAVE_1WORD)
1444 fprintf (stderr, " %s (%d)", reg_names[regno],
1445 info->reg_offset[regno]);
1446
1447 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1448 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1449 reg_names[regno+1], info->reg_offset[regno]);
1450 }
1451
1452 fputc ('\n', stderr);
1453 }
1454 }
1455
1456 fflush (stderr);
1457 }
1458
1459
1460 \f
1461
1462 /* Used during final to control the packing of insns. The value is
1463 1 if the current instruction should be packed with the next one,
1464 0 if it shouldn't or -1 if packing is disabled altogether. */
1465
1466 static int frv_insn_packing_flag;
1467
1468 /* True if the current function contains a far jump. */
1469
1470 static int
1471 frv_function_contains_far_jump (void)
1472 {
1473 rtx insn = get_insns ();
1474 while (insn != NULL
1475 && !(GET_CODE (insn) == JUMP_INSN
1476 /* Ignore tablejump patterns. */
1477 && GET_CODE (PATTERN (insn)) != ADDR_VEC
1478 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
1479 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1480 insn = NEXT_INSN (insn);
1481 return (insn != NULL);
1482 }
1483
1484 /* For the FRV, this function makes sure that a function with far jumps
1485 will return correctly. It also does the VLIW packing. */
1486
1487 static void
1488 frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1489 {
1490 /* If no frame was created, check whether the function uses a call
1491 instruction to implement a far jump. If so, save the link in gr3 and
1492 replace all returns to LR with returns to GR3. GR3 is used because it
1493 is call-clobbered, because it is not available to the register allocator,
1494 and because all functions that take a hidden argument pointer will have
1495 a stack frame. */
1496 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1497 {
1498 rtx insn;
1499
1500 /* Just to check that the above comment is true. */
1501 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1502
1503 /* Generate the instruction that saves the link register. */
1504 fprintf (file, "\tmovsg lr,gr3\n");
1505
1506 /* Replace the LR with GR3 in *return_internal patterns. The insn
1507 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1508 simply emit a different assembly directive because bralr and jmpl
1509 execute in different units. */
1510 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1511 if (GET_CODE (insn) == JUMP_INSN)
1512 {
1513 rtx pattern = PATTERN (insn);
1514 if (GET_CODE (pattern) == PARALLEL
1515 && XVECLEN (pattern, 0) >= 2
1516 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1517 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1518 {
1519 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1520 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1521 SET_REGNO (address, GPR_FIRST + 3);
1522 }
1523 }
1524 }
1525
1526 frv_pack_insns ();
1527
1528 /* Allow the garbage collector to free the nops created by frv_reorg. */
1529 memset (frv_nops, 0, sizeof (frv_nops));
1530 }
1531
1532 \f
1533 /* Return the next available temporary register in a given class. */
1534
1535 static rtx
1536 frv_alloc_temp_reg (
1537 frv_tmp_reg_t *info, /* which registers are available */
1538 enum reg_class class, /* register class desired */
1539 enum machine_mode mode, /* mode to allocate register with */
1540 int mark_as_used, /* register not available after allocation */
1541 int no_abort) /* return NULL instead of aborting */
1542 {
1543 int regno = info->next_reg[ (int)class ];
1544 int orig_regno = regno;
1545 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)class ];
1546 int i, nr;
1547
1548 for (;;)
1549 {
1550 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1551 && TEST_HARD_REG_BIT (info->regs, regno))
1552 break;
1553
1554 if (++regno >= FIRST_PSEUDO_REGISTER)
1555 regno = 0;
1556 if (regno == orig_regno)
1557 {
1558 gcc_assert (no_abort);
1559 return NULL_RTX;
1560 }
1561 }
1562
1563 nr = HARD_REGNO_NREGS (regno, mode);
1564 info->next_reg[ (int)class ] = regno + nr;
1565
1566 if (mark_as_used)
1567 for (i = 0; i < nr; i++)
1568 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1569
1570 return gen_rtx_REG (mode, regno);
1571 }
1572
1573 \f
1574 /* Return an rtx with the value OFFSET, which will either be a register or a
1575 signed 12-bit integer. It can be used as the second operand in an "add"
1576 instruction, or as the index in a load or store.
1577
1578 The function returns a constant rtx if OFFSET is small enough, otherwise
1579 it loads the constant into register OFFSET_REGNO and returns that. */
1580 static rtx
1581 frv_frame_offset_rtx (int offset)
1582 {
1583 rtx offset_rtx = GEN_INT (offset);
1584 if (IN_RANGE_P (offset, -2048, 2047))
1585 return offset_rtx;
1586 else
1587 {
1588 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1589 if (IN_RANGE_P (offset, -32768, 32767))
1590 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1591 else
1592 {
1593 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1594 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1595 }
1596 return reg_rtx;
1597 }
1598 }
1599
600 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))). The
601 prologue and epilogue use such expressions to access the stack. */
1602 static rtx
1603 frv_frame_mem (enum machine_mode mode, rtx base, int offset)
1604 {
1605 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1606 base,
1607 frv_frame_offset_rtx (offset)));
1608 }
1609
1610 /* Generate a frame-related expression:
1611
1612 (set REG (mem (plus (sp) (const_int OFFSET)))).
1613
1614 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1615 instructions. Marking the expressions as frame-related is superfluous if
1616 the note contains just a single set. But if the note contains a PARALLEL
1617 or SEQUENCE that has several sets, each set must be individually marked
1618 as frame-related. */
1619 static rtx
1620 frv_dwarf_store (rtx reg, int offset)
1621 {
1622 rtx set = gen_rtx_SET (VOIDmode,
1623 gen_rtx_MEM (GET_MODE (reg),
1624 plus_constant (stack_pointer_rtx,
1625 offset)),
1626 reg);
1627 RTX_FRAME_RELATED_P (set) = 1;
1628 return set;
1629 }
1630
1631 /* Emit a frame-related instruction whose pattern is PATTERN. The
1632 instruction is the last in a sequence that cumulatively performs the
1633 operation described by DWARF_PATTERN. The instruction is marked as
1634 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1635 DWARF_PATTERN. */
1636 static void
1637 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1638 {
1639 rtx insn = emit_insn (pattern);
1640 RTX_FRAME_RELATED_P (insn) = 1;
1641 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1642 dwarf_pattern,
1643 REG_NOTES (insn));
1644 }
1645
1646 /* Emit instructions that transfer REG to or from the memory location (sp +
1647 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1648 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1649 function to store registers and only the epilogue uses it to load them.
1650
1651 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1652 The generated instruction will use BASE as its base register. BASE may
1653 simply be the stack pointer, but if several accesses are being made to a
1654 region far away from the stack pointer, it may be more efficient to set
1655 up a temporary instead.
1656
1657 Store instructions will be frame-related and will be annotated with the
1658 overall effect of the store. Load instructions will be followed by a
1659 (use) to prevent later optimizations from zapping them.
1660
1661 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1662 as a temporary in such cases. */
1663 static void
1664 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1665 {
1666 enum machine_mode mode = GET_MODE (reg);
1667 rtx mem = frv_frame_mem (mode,
1668 accessor->base,
1669 stack_offset - accessor->base_offset);
1670
1671 if (accessor->op == FRV_LOAD)
1672 {
1673 if (SPR_P (REGNO (reg)))
1674 {
1675 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1676 emit_insn (gen_rtx_SET (VOIDmode, temp, mem));
1677 emit_insn (gen_rtx_SET (VOIDmode, reg, temp));
1678 }
1679 else
1680 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1681 emit_insn (gen_rtx_USE (VOIDmode, reg));
1682 }
1683 else
1684 {
1685 if (SPR_P (REGNO (reg)))
1686 {
1687 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1688 emit_insn (gen_rtx_SET (VOIDmode, temp, reg));
1689 frv_frame_insn (gen_rtx_SET (Pmode, mem, temp),
1690 frv_dwarf_store (reg, stack_offset));
1691 }
1692 else if (GET_MODE (reg) == DImode)
1693 {
1694 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1695 with a separate save for each register. */
1696 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1697 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1698 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1699 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1700 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1701 gen_rtx_PARALLEL (VOIDmode,
1702 gen_rtvec (2, set1, set2)));
1703 }
1704 else
1705 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1706 frv_dwarf_store (reg, stack_offset));
1707 }
1708 }
1709
1710 /* A function that uses frv_frame_access to transfer a group of registers to
1711 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1712 is the stack information generated by frv_stack_info, and REG_SET is the
1713 number of the register set to transfer. */
1714 static void
1715 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1716 frv_stack_t *info,
1717 int reg_set)
1718 {
1719 frv_stack_regs_t *regs_info;
1720 int regno;
1721
1722 regs_info = &info->regs[reg_set];
1723 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1724 if (info->save_p[regno])
1725 frv_frame_access (accessor,
1726 info->save_p[regno] == REG_SAVE_2WORDS
1727 ? gen_rtx_REG (DImode, regno)
1728 : gen_rtx_REG (SImode, regno),
1729 info->reg_offset[regno]);
1730 }
1731
1732 /* Save or restore callee-saved registers that are kept outside the frame
1733 header. The function saves the registers if OP is FRV_STORE and restores
1734 them if OP is FRV_LOAD. INFO is the stack information generated by
1735 frv_stack_info. */
1736 static void
1737 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1738 {
1739 frv_frame_accessor_t accessor;
1740
1741 accessor.op = op;
1742 accessor.base = stack_pointer_rtx;
1743 accessor.base_offset = 0;
1744 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1745 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1746 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1747 }
1748
1749
1750 /* Called after register allocation to add any instructions needed for the
1751 prologue. Using a prologue insn is favored compared to putting all of the
1752 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1753 it allows the scheduler to intermix instructions with the saves of
1754 the callee-saved registers. In some cases, it might be necessary
1755 to emit a barrier instruction as the last insn to prevent such
1756 scheduling.
1757
1758 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1759 so that the debug info generation code can handle them properly. */
1760 void
1761 frv_expand_prologue (void)
1762 {
1763 frv_stack_t *info = frv_stack_info ();
1764 rtx sp = stack_pointer_rtx;
1765 rtx fp = frame_pointer_rtx;
1766 frv_frame_accessor_t accessor;
1767
1768 if (TARGET_DEBUG_STACK)
1769 frv_debug_stack (info);
1770
1771 if (info->total_size == 0)
1772 return;
1773
1774 /* We're interested in three areas of the frame here:
1775
1776 A: the register save area
1777 B: the old FP
1778 C: the header after B
1779
1780 If the frame pointer isn't used, we'll have to set up A, B and C
1781 using the stack pointer. If the frame pointer is used, we'll access
1782 them as follows:
1783
1784 A: set up using sp
1785 B: set up using sp or a temporary (see below)
1786 C: set up using fp
1787
1788 We set up B using the stack pointer if the frame is small enough.
1789 Otherwise, it's more efficient to copy the old stack pointer into a
1790 temporary and use that.
1791
1792 Note that it's important to make sure the prologue and epilogue use the
1793 same registers to access A and C, since doing otherwise will confuse
1794 the aliasing code. */
1795
1796 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1797 isn't used, the same method will serve for C. */
1798 accessor.op = FRV_STORE;
1799 if (frame_pointer_needed && info->total_size > 2048)
1800 {
1801 rtx insn;
1802
1803 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1804 accessor.base_offset = info->total_size;
1805 insn = emit_insn (gen_movsi (accessor.base, sp));
1806 }
1807 else
1808 {
1809 accessor.base = stack_pointer_rtx;
1810 accessor.base_offset = 0;
1811 }
1812
1813 /* Allocate the stack space. */
1814 {
1815 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1816 rtx dwarf_offset = GEN_INT (-info->total_size);
1817
1818 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1819 gen_rtx_SET (Pmode,
1820 sp,
1821 gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1822 }
1823
1824 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1825 and point the new one to that location. */
1826 if (frame_pointer_needed)
1827 {
1828 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1829
1830 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1831 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1832 pointer. */
1833 rtx asm_src = plus_constant (accessor.base,
1834 fp_offset - accessor.base_offset);
1835 rtx dwarf_src = plus_constant (sp, fp_offset);
1836
1837 /* Store the old frame pointer at (sp + FP_OFFSET). */
1838 frv_frame_access (&accessor, fp, fp_offset);
1839
1840 /* Set up the new frame pointer. */
1841 frv_frame_insn (gen_rtx_SET (VOIDmode, fp, asm_src),
1842 gen_rtx_SET (VOIDmode, fp, dwarf_src));
1843
1844 /* Access region C from the frame pointer. */
1845 accessor.base = fp;
1846 accessor.base_offset = fp_offset;
1847 }
1848
1849 /* Set up region C. */
1850 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1851 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1852 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1853
1854 /* Set up region A. */
1855 frv_frame_access_standard_regs (FRV_STORE, info);
1856
1857 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1858 scheduler from moving loads before the stores saving the registers. */
1859 if (info->stdarg_size > 0)
1860 emit_insn (gen_blockage ());
1861
1862 /* Set up pic register/small data register for this function. */
1863 if (!TARGET_FDPIC && flag_pic && cfun->uses_pic_offset_table)
1864 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1865 gen_rtx_REG (Pmode, LR_REGNO),
1866 gen_rtx_REG (SImode, OFFSET_REGNO)));
1867 }
1868
1869 \f
1870 /* Under frv, all of the work is done via frv_expand_epilogue, but
1871 this function provides a convenient place to do cleanup. */
1872
1873 static void
1874 frv_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
1875 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1876 {
1877 frv_stack_cache = (frv_stack_t *)0;
1878
1879 /* Zap last used registers for conditional execution. */
1880 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1881
1882 /* Release the bitmap of created insns. */
1883 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1884 }
1885
1886 \f
1887 /* Called after register allocation to add any instructions needed for the
1888 epilogue. Using an epilogue insn is favored compared to putting all of the
1889 instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1890 it allows the scheduler to intermix instructions with the restores of
1891 the callee-saved registers. In some cases, it might be necessary
1892 to emit a barrier instruction as the last insn to prevent such
1893 scheduling. */
1894
1895 void
1896 frv_expand_epilogue (bool emit_return)
1897 {
1898 frv_stack_t *info = frv_stack_info ();
1899 rtx fp = frame_pointer_rtx;
1900 rtx sp = stack_pointer_rtx;
1901 rtx return_addr;
1902 int fp_offset;
1903
1904 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1905
1906 /* Restore the stack pointer to its original value if alloca or the like
1907 is used. */
1908 if (! current_function_sp_is_unchanging)
1909 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1910
1911 /* Restore the callee-saved registers that were used in this function. */
1912 frv_frame_access_standard_regs (FRV_LOAD, info);
1913
1914 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1915 no return instruction should be emitted. */
1916 if (info->save_p[LR_REGNO])
1917 {
1918 int lr_offset;
1919 rtx mem;
1920
1921 /* Use the same method to access the link register's slot as we did in
1922 the prologue. In other words, use the frame pointer if available,
1923 otherwise use the stack pointer.
1924
1925 LR_OFFSET is the offset of the link register's slot from the start
1926 of the frame and MEM is a memory rtx for it. */
1927 lr_offset = info->reg_offset[LR_REGNO];
1928 if (frame_pointer_needed)
1929 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1930 else
1931 mem = frv_frame_mem (Pmode, sp, lr_offset);
1932
1933 /* Load the old link register into a GPR. */
1934 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1935 emit_insn (gen_rtx_SET (VOIDmode, return_addr, mem));
1936 }
1937 else
1938 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1939
1940 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1941 the load is preserved. */
1942 if (frame_pointer_needed)
1943 {
1944 emit_insn (gen_rtx_SET (VOIDmode, fp, gen_rtx_MEM (Pmode, fp)));
1945 emit_insn (gen_rtx_USE (VOIDmode, fp));
1946 }
1947
1948 /* Deallocate the stack frame. */
1949 if (info->total_size != 0)
1950 {
1951 rtx offset = frv_frame_offset_rtx (info->total_size);
1952 emit_insn (gen_stack_adjust (sp, sp, offset));
1953 }
1954
1955 /* If this function uses eh_return, add the final stack adjustment now. */
1956 if (current_function_calls_eh_return)
1957 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1958
1959 if (emit_return)
1960 emit_jump_insn (gen_epilogue_return (return_addr));
1961 else
1962 {
1963 rtx lr = return_addr;
1964
1965 if (REGNO (return_addr) != LR_REGNO)
1966 {
1967 lr = gen_rtx_REG (Pmode, LR_REGNO);
1968 emit_move_insn (lr, return_addr);
1969 }
1970
1971 emit_insn (gen_rtx_USE (VOIDmode, lr));
1972 }
1973 }
1974
1975 \f
1976 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
1977
1978 static void
1979 frv_asm_output_mi_thunk (FILE *file,
1980 tree thunk_fndecl ATTRIBUTE_UNUSED,
1981 HOST_WIDE_INT delta,
1982 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1983 tree function)
1984 {
1985 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
1986 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
1987 const char *name_jmp = reg_names[JUMP_REGNO];
1988 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
1989
1990 /* Do the add using an addi if possible. */
1991 if (IN_RANGE_P (delta, -2048, 2047))
1992 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
1993 else
1994 {
1995 const char *const name_add = reg_names[TEMP_REGNO];
1996 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1997 parallel, delta, name_add);
1998 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1999 delta, name_add);
2000 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
2001 }
2002
2003 if (TARGET_FDPIC)
2004 {
2005 const char *name_pic = reg_names[FDPIC_REGNO];
2006 name_jmp = reg_names[FDPIC_FPTR_REGNO];
2007
2008 if (flag_pic != 1)
2009 {
2010 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
2011 assemble_name (file, name_func);
2012 fprintf (file, "),%s\n", name_jmp);
2013
2014 fprintf (file, "\tsetlo #gotofffuncdesclo(");
2015 assemble_name (file, name_func);
2016 fprintf (file, "),%s\n", name_jmp);
2017
2018 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
2019 }
2020 else
2021 {
2022 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
2023 assemble_name (file, name_func);
2024 fprintf (file, "\t)), %s\n", name_jmp);
2025 }
2026 }
2027 else if (!flag_pic)
2028 {
2029 fprintf (file, "\tsethi%s #hi(", parallel);
2030 assemble_name (file, name_func);
2031 fprintf (file, "),%s\n", name_jmp);
2032
2033 fprintf (file, "\tsetlo #lo(");
2034 assemble_name (file, name_func);
2035 fprintf (file, "),%s\n", name_jmp);
2036 }
2037 else
2038 {
2039 /* Use JUMP_REGNO as a temporary PIC register. */
2040 const char *name_lr = reg_names[LR_REGNO];
2041 const char *name_gppic = name_jmp;
2042 const char *name_tmp = reg_names[TEMP_REGNO];
2043
2044 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2045 fprintf (file, "\tcall 1f\n");
2046 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2047 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2048 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2049 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2050 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2051
2052 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2053 assemble_name (file, name_func);
2054 fprintf (file, "),%s\n", name_tmp);
2055
2056 fprintf (file, "\tsetlo #gprello(");
2057 assemble_name (file, name_func);
2058 fprintf (file, "),%s\n", name_tmp);
2059
2060 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2061 }
2062
2063 /* Jump to the function address. */
2064 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2065 }
2066
2067 \f
2068 /* A C expression which is nonzero if a function must have and use a frame
2069 pointer. This expression is evaluated in the reload pass. If its value is
2070 nonzero the function will have a frame pointer.
2071
2072 The expression can in principle examine the current function and decide
2073 according to the facts, but on most machines the constant 0 or the constant
2074 1 suffices. Use 0 when the machine allows code to be generated with no
2075 frame pointer, and doing so saves some time or space. Use 1 when there is
2076 no possible advantage to avoiding a frame pointer.
2077
2078 In certain cases, the compiler does not know how to produce valid code
2079 without a frame pointer. The compiler recognizes those cases and
2080 automatically gives the function a frame pointer regardless of what
2081 `FRAME_POINTER_REQUIRED' says. You don't need to worry about them.
2082
2083 In a function that does not require a frame pointer, the frame pointer
2084 register can be allocated for ordinary usage, unless you mark it as a fixed
2085 register. See `FIXED_REGISTERS' for more information. */
2086
2087 /* On frv, create a frame whenever we need to allocate stack space. */
2088
2089 int
2090 frv_frame_pointer_required (void)
2091 {
2092 /* If we are forgoing the usual linkage requirements, we only need
2093 a frame pointer if the stack pointer might change. */
2094 if (!TARGET_LINKED_FP)
2095 return !current_function_sp_is_unchanging;
2096
2097 if (! current_function_is_leaf)
2098 return TRUE;
2099
2100 if (get_frame_size () != 0)
2101 return TRUE;
2102
2103 if (cfun->stdarg)
2104 return TRUE;
2105
2106 if (!current_function_sp_is_unchanging)
2107 return TRUE;
2108
2109 if (!TARGET_FDPIC && flag_pic && cfun->uses_pic_offset_table)
2110 return TRUE;
2111
2112 if (profile_flag)
2113 return TRUE;
2114
2115 if (cfun->machine->frame_needed)
2116 return TRUE;
2117
2118 return FALSE;
2119 }
2120
2121 \f
2122 /* This macro is similar to `INITIAL_FRAME_POINTER_OFFSET'. It specifies the
2123 initial difference between the specified pair of registers. This macro must
2124 be defined if `ELIMINABLE_REGS' is defined. */
2125
2126 /* See frv_stack_info for more details on the frv stack frame. */
2127
2128 int
2129 frv_initial_elimination_offset (int from, int to)
2130 {
2131 frv_stack_t *info = frv_stack_info ();
2132 int ret = 0;
2133
2134 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2135 ret = info->total_size - info->pretend_size;
2136
2137 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2138 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2139
2140 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2141 ret = (info->total_size
2142 - info->reg_offset[FRAME_POINTER_REGNUM]
2143 - info->pretend_size);
2144
2145 else
2146 gcc_unreachable ();
2147
2148 if (TARGET_DEBUG_STACK)
2149 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2150 reg_names [from], reg_names[to], ret);
2151
2152 return ret;
2153 }
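
/* A quick consistency check on the offsets above (illustrative only,
   using the field names from frv_stack_t): eliminating the argument
   pointer in two steps must agree with eliminating it directly, i.e.

     (AP -> FP) + (FP -> SP) == (AP -> SP)

     (total_size - reg_offset[FP] - pretend_size) + reg_offset[FP]
       == total_size - pretend_size

   which holds for any frame layout generated here.  */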
2154
2155 \f
2156 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2157
2158 static void
2159 frv_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
2160 enum machine_mode mode,
2161 tree type ATTRIBUTE_UNUSED,
2162 int *pretend_size,
2163 int second_time)
2164 {
2165 if (TARGET_DEBUG_ARG)
2166 fprintf (stderr,
2167 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2168 *cum, GET_MODE_NAME (mode), *pretend_size, second_time);
2169 }
2170
2171 \f
2172 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2173
2174 static rtx
2175 frv_expand_builtin_saveregs (void)
2176 {
2177 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2178
2179 if (TARGET_DEBUG_ARG)
2180 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2181 offset);
2182
2183 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2184 }
2185
2186 \f
2187 /* Expand __builtin_va_start to do the va_start macro. */
2188
2189 void
2190 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2191 {
2192 tree t;
2193 int num = cfun->args_info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2194
2195 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2196 GEN_INT (UNITS_PER_WORD * num));
2197
2198 if (TARGET_DEBUG_ARG)
2199 {
2200 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2201 cfun->args_info, num);
2202
2203 debug_rtx (nextarg);
2204 }
2205
2206 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist,
2207 fold_convert (TREE_TYPE (valist),
2208 make_tree (sizetype, nextarg)));
2209 TREE_SIDE_EFFECTS (t) = 1;
2210
2211 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2212 }
2213
2214 \f
2215 /* Expand a block move operation, and return 1 if successful. Return 0
2216 if we should let the compiler generate normal code.
2217
2218 operands[0] is the destination
2219 operands[1] is the source
2220 operands[2] is the length
2221 operands[3] is the alignment */
2222
2223 /* Maximum number of loads to do before doing the stores */
2224 #ifndef MAX_MOVE_REG
2225 #define MAX_MOVE_REG 4
2226 #endif
2227
2228 /* Maximum number of total loads to do. */
2229 #ifndef TOTAL_MOVE_REG
2230 #define TOTAL_MOVE_REG 8
2231 #endif
2232
2233 int
2234 frv_expand_block_move (rtx operands[])
2235 {
2236 rtx orig_dest = operands[0];
2237 rtx orig_src = operands[1];
2238 rtx bytes_rtx = operands[2];
2239 rtx align_rtx = operands[3];
2240 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2241 int align;
2242 int bytes;
2243 int offset;
2244 int num_reg;
2245 int i;
2246 rtx src_reg;
2247 rtx dest_reg;
2248 rtx src_addr;
2249 rtx dest_addr;
2250 rtx src_mem;
2251 rtx dest_mem;
2252 rtx tmp_reg;
2253 rtx stores[MAX_MOVE_REG];
2254 int move_bytes;
2255 enum machine_mode mode;
2256
2257 /* If this is not a fixed size move, just call memcpy. */
2258 if (! constp)
2259 return FALSE;
2260
2261 /* This should be a fixed size alignment. */
2262 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2263
2264 align = INTVAL (align_rtx);
2265
2266 /* Anything to move? */
2267 bytes = INTVAL (bytes_rtx);
2268 if (bytes <= 0)
2269 return TRUE;
2270
2271 /* Don't support really large moves. */
2272 if (bytes > TOTAL_MOVE_REG*align)
2273 return FALSE;
2274
2275 /* Move the address into scratch registers. */
2276 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2277 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2278
2279 num_reg = offset = 0;
2280 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2281 {
2282 /* Calculate the correct offset for src/dest. */
2283 if (offset == 0)
2284 {
2285 src_addr = src_reg;
2286 dest_addr = dest_reg;
2287 }
2288 else
2289 {
2290 src_addr = plus_constant (src_reg, offset);
2291 dest_addr = plus_constant (dest_reg, offset);
2292 }
2293
2294 /* Generate the appropriate load and store, saving the stores
2295 for later. */
2296 if (bytes >= 4 && align >= 4)
2297 mode = SImode;
2298 else if (bytes >= 2 && align >= 2)
2299 mode = HImode;
2300 else
2301 mode = QImode;
2302
2303 move_bytes = GET_MODE_SIZE (mode);
2304 tmp_reg = gen_reg_rtx (mode);
2305 src_mem = change_address (orig_src, mode, src_addr);
2306 dest_mem = change_address (orig_dest, mode, dest_addr);
2307 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg, src_mem));
2308 stores[num_reg++] = gen_rtx_SET (VOIDmode, dest_mem, tmp_reg);
2309
2310 if (num_reg >= MAX_MOVE_REG)
2311 {
2312 for (i = 0; i < num_reg; i++)
2313 emit_insn (stores[i]);
2314 num_reg = 0;
2315 }
2316 }
2317
2318 for (i = 0; i < num_reg; i++)
2319 emit_insn (stores[i]);
2320
2321 return TRUE;
2322 }
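
/* As a worked example (illustrative only): a 10-byte copy with 4-byte
   alignment is split into SImode, SImode and HImode accesses at offsets
   0, 4 and 8.  Each load is emitted immediately and its matching store
   is queued in stores[], which is flushed once MAX_MOVE_REG entries
   accumulate and again at the end, so at most MAX_MOVE_REG values are
   live in temporaries at any one time.  */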
2323
2324 \f
2325 /* Expand a block clear operation, and return 1 if successful. Return 0
2326 if we should let the compiler generate normal code.
2327
2328 operands[0] is the destination
2329 operands[1] is the length
2330 operands[3] is the alignment */
2331
2332 int
2333 frv_expand_block_clear (rtx operands[])
2334 {
2335 rtx orig_dest = operands[0];
2336 rtx bytes_rtx = operands[1];
2337 rtx align_rtx = operands[3];
2338 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2339 int align;
2340 int bytes;
2341 int offset;
2342 int num_reg;
2343 rtx dest_reg;
2344 rtx dest_addr;
2345 rtx dest_mem;
2346 int clear_bytes;
2347 enum machine_mode mode;
2348
2349 /* If this is not a fixed size clear, just call memset. */
2350 if (! constp)
2351 return FALSE;
2352
2353 /* This should be a fixed size alignment. */
2354 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2355
2356 align = INTVAL (align_rtx);
2357
2358 /* Anything to clear? */
2359 bytes = INTVAL (bytes_rtx);
2360 if (bytes <= 0)
2361 return TRUE;
2362
2363 /* Don't support really large clears. */
2364 if (bytes > TOTAL_MOVE_REG*align)
2365 return FALSE;
2366
2367 /* Move the address into a scratch register. */
2368 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2369
2370 num_reg = offset = 0;
2371 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2372 {
2373 /* Calculate the correct offset for the destination. */
2374 dest_addr = ((offset == 0)
2375 ? dest_reg
2376 : plus_constant (dest_reg, offset));
2377
2378 /* Generate the appropriate store of gr0. */
2379 if (bytes >= 4 && align >= 4)
2380 mode = SImode;
2381 else if (bytes >= 2 && align >= 2)
2382 mode = HImode;
2383 else
2384 mode = QImode;
2385
2386 clear_bytes = GET_MODE_SIZE (mode);
2387 dest_mem = change_address (orig_dest, mode, dest_addr);
2388 emit_insn (gen_rtx_SET (VOIDmode, dest_mem, const0_rtx));
2389 }
2390
2391 return TRUE;
2392 }
2393
2394 \f
2395 /* The following variable is used to output modifiers of assembler
2396 code of the current output insn. */
2397
2398 static rtx *frv_insn_operands;
2399
2400 /* The following function is used to add assembler insn code suffix .p
2401 if it is necessary. */
2402
2403 const char *
2404 frv_asm_output_opcode (FILE *f, const char *ptr)
2405 {
2406 int c;
2407
2408 if (frv_insn_packing_flag <= 0)
2409 return ptr;
2410
2411 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2412 {
2413 c = *ptr++;
2414 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2415 || (*ptr >= 'A' && *ptr <= 'Z')))
2416 {
2417 int letter = *ptr++;
2418
2419 c = atoi (ptr);
2420 frv_print_operand (f, frv_insn_operands [c], letter);
2421 while ((c = *ptr) >= '0' && c <= '9')
2422 ptr++;
2423 }
2424 else
2425 fputc (c, f);
2426 }
2427
2428 fprintf (f, ".p");
2429
2430 return ptr;
2431 }
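
/* For example (illustrative only, with a hypothetical template): when
   packing is enabled, "add %1,%z2,%0" is emitted as "add.p gr6,gr7,gr5".
   The opcode and any % sequences inside it are expanded here, ".p" is
   appended, and the remainder of the template (the operand list) is
   returned for final.c to output as usual.  */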
2432
2433 /* Set up the packing bit for the current output insn. Note that this
2434 function is not called for asm insns. */
2435
2436 void
2437 frv_final_prescan_insn (rtx insn, rtx *opvec,
2438 int noperands ATTRIBUTE_UNUSED)
2439 {
2440 if (INSN_P (insn))
2441 {
2442 if (frv_insn_packing_flag >= 0)
2443 {
2444 frv_insn_operands = opvec;
2445 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2446 }
2447 else if (recog_memoized (insn) >= 0
2448 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2449 /* Packing optimizations have been disabled, but INSN can only
2450 be issued in M1. Insert an mnop in M0. */
2451 fprintf (asm_out_file, "\tmnop.p\n");
2452 }
2453 }
2454
2455
2456 \f
2457 /* A C expression whose value is RTL representing the address in a stack frame
2458 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2459 an RTL expression for the address of the stack frame itself.
2460
2461 If you don't define this macro, the default is to return the value of
2462 FRAMEADDR--that is, the stack frame address is also the address of the stack
2463 word that points to the previous frame. */
2464
2465 /* The default is correct, but we need to make sure the frame gets created. */
2466 rtx
2467 frv_dynamic_chain_address (rtx frame)
2468 {
2469 cfun->machine->frame_needed = 1;
2470 return frame;
2471 }
2472
2473
2474 /* A C expression whose value is RTL representing the value of the return
2475 address for the frame COUNT steps up from the current frame, after the
2476 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2477 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2478 defined.
2479
2480 The value of the expression must always be the correct address when COUNT is
2481 zero, but may be `NULL_RTX' if there is no way to determine the return
2482 address of other frames. */
2483
2484 rtx
2485 frv_return_addr_rtx (int count, rtx frame)
2486 {
2487 if (count != 0)
2488 return const0_rtx;
2489 cfun->machine->frame_needed = 1;
2490 return gen_rtx_MEM (Pmode, plus_constant (frame, 8));
2491 }
2492
2493 /* Given a memory reference MEMREF, interpret the referenced memory as
2494 an array of MODE values, and return a reference to the element
2495 specified by INDEX. Assume that any pre-modification implicit in
2496 MEMREF has already happened.
2497
2498 MEMREF must be a legitimate operand for modes larger than SImode.
2499 GO_IF_LEGITIMATE_ADDRESS forbids register+register addresses, which
2500 this function cannot handle. */
2501 rtx
2502 frv_index_memory (rtx memref, enum machine_mode mode, int index)
2503 {
2504 rtx base = XEXP (memref, 0);
2505 if (GET_CODE (base) == PRE_MODIFY)
2506 base = XEXP (base, 0);
2507 return change_address (memref, mode,
2508 plus_constant (base, index * GET_MODE_SIZE (mode)));
2509 }
2510
2511 \f
2512 /* Print a memory address as an operand to reference that memory location. */
2513 void
2514 frv_print_operand_address (FILE * stream, rtx x)
2515 {
2516 if (GET_CODE (x) == MEM)
2517 x = XEXP (x, 0);
2518
2519 switch (GET_CODE (x))
2520 {
2521 case REG:
2522 fputs (reg_names [ REGNO (x)], stream);
2523 return;
2524
2525 case CONST_INT:
2526 fprintf (stream, "%ld", (long) INTVAL (x));
2527 return;
2528
2529 case SYMBOL_REF:
2530 assemble_name (stream, XSTR (x, 0));
2531 return;
2532
2533 case LABEL_REF:
2534 case CONST:
2535 output_addr_const (stream, x);
2536 return;
2537
2538 default:
2539 break;
2540 }
2541
2542 fatal_insn ("bad insn to frv_print_operand_address:", x);
2543 }
2544
2545 \f
2546 static void
2547 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2548 {
2549 int regno = true_regnum (x);
2550 if (GPR_P (regno))
2551 fputs (reg_names[regno], stream);
2552 else
2553 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2554 }
2555
2556 /* Print a memory reference suitable for the ld/st instructions. */
2557
2558 static void
2559 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2560 {
2561 struct frv_unspec unspec;
2562 rtx x0 = NULL_RTX;
2563 rtx x1 = NULL_RTX;
2564
2565 switch (GET_CODE (x))
2566 {
2567 case SUBREG:
2568 case REG:
2569 x0 = x;
2570 break;
2571
2572 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2573 x0 = XEXP (x, 0);
2574 x1 = XEXP (XEXP (x, 1), 1);
2575 break;
2576
2577 case CONST_INT:
2578 x1 = x;
2579 break;
2580
2581 case PLUS:
2582 x0 = XEXP (x, 0);
2583 x1 = XEXP (x, 1);
2584 if (GET_CODE (x0) == CONST_INT)
2585 {
2586 x0 = XEXP (x, 1);
2587 x1 = XEXP (x, 0);
2588 }
2589 break;
2590
2591 default:
2592 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2593 break;
2594
2595 }
2596
2597 if (addr_offset)
2598 {
2599 if (!x1)
2600 x1 = const0_rtx;
2601 else if (GET_CODE (x1) != CONST_INT)
2602 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2603 }
2604
2605 fputs ("@(", stream);
2606 if (!x0)
2607 fputs (reg_names[GPR_R0], stream);
2608 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2609 frv_print_operand_memory_reference_reg (stream, x0);
2610 else
2611 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2612
2613 fputs (",", stream);
2614 if (!x1)
2615 fputs (reg_names [GPR_R0], stream);
2616
2617 else
2618 {
2619 switch (GET_CODE (x1))
2620 {
2621 case SUBREG:
2622 case REG:
2623 frv_print_operand_memory_reference_reg (stream, x1);
2624 break;
2625
2626 case CONST_INT:
2627 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2628 break;
2629
2630 case CONST:
2631 if (!frv_const_unspec_p (x1, &unspec))
2632 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2633 frv_output_const_unspec (stream, &unspec);
2634 break;
2635
2636 default:
2637 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2638 }
2639 }
2640
2641 fputs (")", stream);
2642 }
2643
2644 \f
2645 /* Return 2 for likely branches and 0 for non-likely branches */
2646
2647 #define FRV_JUMP_LIKELY 2
2648 #define FRV_JUMP_NOT_LIKELY 0
2649
2650 static int
2651 frv_print_operand_jump_hint (rtx insn)
2652 {
2653 rtx note;
2654 rtx labelref;
2655 int ret;
2656 HOST_WIDE_INT prob = -1;
2657 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2658
2659 gcc_assert (GET_CODE (insn) == JUMP_INSN);
2660
2661 /* Assume any non-conditional jump is likely. */
2662 if (! any_condjump_p (insn))
2663 ret = FRV_JUMP_LIKELY;
2664
2665 else
2666 {
2667 labelref = condjump_label (insn);
2668 if (labelref)
2669 {
2670 rtx label = XEXP (labelref, 0);
2671 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2672 ? BACKWARD
2673 : FORWARD);
2674 }
2675
2676 note = find_reg_note (insn, REG_BR_PROB, 0);
2677 if (!note)
2678 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2679
2680 else
2681 {
2682 prob = INTVAL (XEXP (note, 0));
2683 ret = ((prob >= (REG_BR_PROB_BASE / 2))
2684 ? FRV_JUMP_LIKELY
2685 : FRV_JUMP_NOT_LIKELY);
2686 }
2687 }
2688
2689 #if 0
2690 if (TARGET_DEBUG)
2691 {
2692 char *direction;
2693
2694 switch (jump_type)
2695 {
2696 default:
2697 case UNKNOWN: direction = "unknown jump direction"; break;
2698 case BACKWARD: direction = "jump backward"; break;
2699 case FORWARD: direction = "jump forward"; break;
2700 }
2701
2702 fprintf (stderr,
2703 "%s: uid %ld, %s, probability = %ld, max prob. = %ld, hint = %d\n",
2704 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2705 (long)INSN_UID (insn), direction, (long)prob,
2706 (long)REG_BR_PROB_BASE, ret);
2707 }
2708 #endif
2709
2710 return ret;
2711 }
2712
2713 \f
2714 /* Return the comparison operator to use for CODE given that the ICC
2715 register is OP0. */
2716
2717 static const char *
2718 comparison_string (enum rtx_code code, rtx op0)
2719 {
2720 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2721 switch (code)
2722 {
2723 default: output_operand_lossage ("bad condition code");
2724 case EQ: return "eq";
2725 case NE: return "ne";
2726 case LT: return is_nz_p ? "n" : "lt";
2727 case LE: return "le";
2728 case GT: return "gt";
2729 case GE: return is_nz_p ? "p" : "ge";
2730 case LTU: return is_nz_p ? "no" : "c";
2731 case LEU: return is_nz_p ? "eq" : "ls";
2732 case GTU: return is_nz_p ? "ne" : "hi";
2733 case GEU: return is_nz_p ? "ra" : "nc";
2734 }
2735 }
2736
2737 /* Print an operand to an assembler instruction.
2738
2739 `%' followed by a letter and a digit says to output an operand in an
2740 alternate fashion. Four letters have standard, built-in meanings described
2741 below. The machine description macro `PRINT_OPERAND' can define additional
2742 letters with nonstandard meanings.
2743
2744 `%cDIGIT' can be used to substitute an operand that is a constant value
2745 without the syntax that normally indicates an immediate operand.
2746
2747 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2748 before printing.
2749
2750 `%aDIGIT' can be used to substitute an operand as if it were a memory
2751 reference, with the actual operand treated as the address. This may be
2752 useful when outputting a "load address" instruction, because often the
2753 assembler syntax for such an instruction requires you to write the operand
2754 as if it were a memory reference.
2755
2756 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2757
2758 `%=' outputs a number which is unique to each instruction in the entire
2759 compilation. This is useful for making local labels to be referred to more
2760 than once in a single template that generates multiple assembler
2761 instructions.
2762
2763 `%' followed by a punctuation character specifies a substitution that does
2764 not use an operand. Only one case is standard: `%%' outputs a `%' into the
2765 assembler code. Other nonstandard cases can be defined in the
2766 `PRINT_OPERAND' macro. You must also define which punctuation characters
2767 are valid with the `PRINT_OPERAND_PUNCT_VALID_P' macro. */
2768
2769 void
2770 frv_print_operand (FILE * file, rtx x, int code)
2771 {
2772 struct frv_unspec unspec;
2773 HOST_WIDE_INT value;
2774 int offset;
2775
2776 if (code != 0 && !isalpha (code))
2777 value = 0;
2778
2779 else if (GET_CODE (x) == CONST_INT)
2780 value = INTVAL (x);
2781
2782 else if (GET_CODE (x) == CONST_DOUBLE)
2783 {
2784 if (GET_MODE (x) == SFmode)
2785 {
2786 REAL_VALUE_TYPE rv;
2787 long l;
2788
2789 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2790 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2791 value = l;
2792 }
2793
2794 else if (GET_MODE (x) == VOIDmode)
2795 value = CONST_DOUBLE_LOW (x);
2796
2797 else
2798 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2799 }
2800
2801 else
2802 value = 0;
2803
2804 switch (code)
2805 {
2806
2807 case '.':
2808 /* Output r0. */
2809 fputs (reg_names[GPR_R0], file);
2810 break;
2811
2812 case '#':
2813 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2814 break;
2815
2816 case '@':
2817 /* Output small data area base register (gr16). */
2818 fputs (reg_names[SDA_BASE_REG], file);
2819 break;
2820
2821 case '~':
2822 /* Output pic register (gr17). */
2823 fputs (reg_names[PIC_REGNO], file);
2824 break;
2825
2826 case '*':
2827 /* Output the temporary integer CCR register. */
2828 fputs (reg_names[ICR_TEMP], file);
2829 break;
2830
2831 case '&':
2832 /* Output the temporary integer CC register. */
2833 fputs (reg_names[ICC_TEMP], file);
2834 break;
2835
2836 /* case 'a': print an address. */
2837
2838 case 'C':
2839 /* Print appropriate test for integer branch false operation. */
2840 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2841 XEXP (x, 0)), file);
2842 break;
2843
2844 case 'c':
2845 /* Print appropriate test for integer branch true operation. */
2846 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2847 break;
2848
2849 case 'e':
2850 /* Print 1 for a NE and 0 for an EQ to give the final argument
2851 for a conditional instruction. */
2852 if (GET_CODE (x) == NE)
2853 fputs ("1", file);
2854
2855 else if (GET_CODE (x) == EQ)
2856 fputs ("0", file);
2857
2858 else
2859 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2860 break;
2861
2862 case 'F':
2863 /* Print appropriate test for floating point branch false operation. */
2864 switch (GET_CODE (x))
2865 {
2866 default:
2867 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2868
2869 case EQ: fputs ("ne", file); break;
2870 case NE: fputs ("eq", file); break;
2871 case LT: fputs ("uge", file); break;
2872 case LE: fputs ("ug", file); break;
2873 case GT: fputs ("ule", file); break;
2874 case GE: fputs ("ul", file); break;
2875 }
2876 break;
2877
2878 case 'f':
2879 /* Print appropriate test for floating point branch true operation. */
2880 switch (GET_CODE (x))
2881 {
2882 default:
2883 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2884
2885 case EQ: fputs ("eq", file); break;
2886 case NE: fputs ("ne", file); break;
2887 case LT: fputs ("lt", file); break;
2888 case LE: fputs ("le", file); break;
2889 case GT: fputs ("gt", file); break;
2890 case GE: fputs ("ge", file); break;
2891 }
2892 break;
2893
2894 case 'g':
2895 /* Print appropriate GOT function. */
2896 if (GET_CODE (x) != CONST_INT)
2897 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2898 fputs (unspec_got_name (INTVAL (x)), file);
2899 break;
2900
2901 case 'I':
2902 /* Print 'i' if the operand is a constant, or is a memory reference that
2903 adds a constant. */
2904 if (GET_CODE (x) == MEM)
2905 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2906 ? XEXP (XEXP (x, 0), 1)
2907 : XEXP (x, 0));
2908 else if (GET_CODE (x) == PLUS)
2909 x = XEXP (x, 1);
2910
2911 switch (GET_CODE (x))
2912 {
2913 default:
2914 break;
2915
2916 case CONST_INT:
2917 case SYMBOL_REF:
2918 case CONST:
2919 fputs ("i", file);
2920 break;
2921 }
2922 break;
2923
2924 case 'i':
2925 /* For jump instructions, print 'i' if the operand is a constant or
2926 is an expression that adds a constant. */
2927 if (GET_CODE (x) == CONST_INT)
2928 fputs ("i", file);
2929
2930 else
2931 {
2932 if (GET_CODE (x) == CONST_INT
2933 || (GET_CODE (x) == PLUS
2934 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2935 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2936 fputs ("i", file);
2937 }
2938 break;
2939
2940 case 'L':
2941 /* Print the lower register of a double word register pair */
2942 if (GET_CODE (x) == REG)
2943 fputs (reg_names[ REGNO (x)+1 ], file);
2944 else
2945 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2946 break;
2947
2948 /* case 'l': print a LABEL_REF. */
2949
2950 case 'M':
2951 case 'N':
2952 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
2953 for the second word of double memory operations. */
2954 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
2955 switch (GET_CODE (x))
2956 {
2957 default:
2958 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
2959
2960 case MEM:
2961 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
2962 break;
2963
2964 case REG:
2965 case SUBREG:
2966 case CONST_INT:
2967 case PLUS:
2968 case SYMBOL_REF:
2969 frv_print_operand_memory_reference (file, x, offset);
2970 break;
2971 }
2972 break;
2973
2974 case 'O':
2975 /* Print the opcode of a command. */
2976 switch (GET_CODE (x))
2977 {
2978 default:
2979 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
2980
2981 case PLUS: fputs ("add", file); break;
2982 case MINUS: fputs ("sub", file); break;
2983 case AND: fputs ("and", file); break;
2984 case IOR: fputs ("or", file); break;
2985 case XOR: fputs ("xor", file); break;
2986 case ASHIFT: fputs ("sll", file); break;
2987 case ASHIFTRT: fputs ("sra", file); break;
2988 case LSHIFTRT: fputs ("srl", file); break;
2989 }
2990 break;
2991
2992 /* case 'n': negate and print a constant int. */
2993
2994 case 'P':
2995 /* Print PIC label using operand as the number. */
2996 if (GET_CODE (x) != CONST_INT)
2997 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
2998
2999 fprintf (file, ".LCF%ld", (long)INTVAL (x));
3000 break;
3001
3002 case 'U':
3003 /* Print 'u' if the operand is an update load/store. */
3004 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
3005 fputs ("u", file);
3006 break;
3007
3008 case 'z':
3009 /* If value is 0, print gr0, otherwise it must be a register. */
3010 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
3011 fputs (reg_names[GPR_R0], file);
3012
3013 else if (GET_CODE (x) == REG)
3014 fputs (reg_names [REGNO (x)], file);
3015
3016 else
3017 fatal_insn ("bad insn in frv_print_operand, z case", x);
3018 break;
3019
3020 case 'x':
3021 /* Print constant in hex. */
3022 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3023 {
3024 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
3025 break;
3026 }
3027
3028 /* Fall through. */
3029
3030 case '\0':
3031 if (GET_CODE (x) == REG)
3032 fputs (reg_names [REGNO (x)], file);
3033
3034 else if (GET_CODE (x) == CONST_INT
3035 || GET_CODE (x) == CONST_DOUBLE)
3036 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
3037
3038 else if (frv_const_unspec_p (x, &unspec))
3039 frv_output_const_unspec (file, &unspec);
3040
3041 else if (GET_CODE (x) == MEM)
3042 frv_print_operand_address (file, XEXP (x, 0));
3043
3044 else if (CONSTANT_ADDRESS_P (x))
3045 frv_print_operand_address (file, x);
3046
3047 else
3048 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3049
3050 break;
3051
3052 default:
3053 fatal_insn ("frv_print_operand: unknown code", x);
3054 break;
3055 }
3056
3057 return;
3058 }
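
/* Putting some of the modifiers together (illustrative only): in a
   hypothetical double-word store template "std%I0 %1, %M0", with
   operand 0 equal to (mem:DI (plus (reg gr5) (const_int 8))) and
   operand 1 a register pair starting at gr8, the output would be
   "stdi gr8, @(gr5,8)"; the companion "%N0" form would address the
   second word at offset 12.  */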
3059
3060 \f
3061 /* A C statement (sans semicolon) for initializing the variable CUM for the
3062 state at the beginning of the argument list. The variable has type
3063 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3064 of the function which will receive the args, or 0 if the args are to a
3065 compiler support library function. The value of INDIRECT is nonzero when
3066 processing an indirect call, for example a call through a function pointer.
3067 The value of INDIRECT is zero for a call to an explicitly named function, a
3068 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3069 arguments for the function being compiled.
3070
3071 When processing a call to a compiler support library function, LIBNAME
3072 identifies which one. It is a `symbol_ref' rtx which contains the name of
3073 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3074 being processed. Thus, each time this macro is called, either LIBNAME or
3075 FNTYPE is nonzero, but never both of them at once. */
3076
3077 void
3078 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3079 tree fntype,
3080 rtx libname,
3081 tree fndecl,
3082 int incoming)
3083 {
3084 *cum = FIRST_ARG_REGNUM;
3085
3086 if (TARGET_DEBUG_ARG)
3087 {
3088 fprintf (stderr, "\ninit_cumulative_args:");
3089 if (!fndecl && fntype)
3090 fputs (" indirect", stderr);
3091
3092 if (incoming)
3093 fputs (" incoming", stderr);
3094
3095 if (fntype)
3096 {
3097 tree ret_type = TREE_TYPE (fntype);
3098 fprintf (stderr, " return=%s,",
3099 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3100 }
3101
3102 if (libname && GET_CODE (libname) == SYMBOL_REF)
3103 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3104
3105 if (cfun->returns_struct)
3106 fprintf (stderr, " return-struct");
3107
3108 putc ('\n', stderr);
3109 }
3110 }
3111
3112 \f
3113 /* Return true if we should pass an argument on the stack rather than
3114 in registers. */
3115
3116 static bool
3117 frv_must_pass_in_stack (enum machine_mode mode, const_tree type)
3118 {
3119 if (mode == BLKmode)
3120 return true;
3121 if (type == NULL)
3122 return false;
3123 return AGGREGATE_TYPE_P (type);
3124 }
3125
3126 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3127 argument with the specified mode and type. If it is not defined,
3128 `PARM_BOUNDARY' is used for all arguments. */
3129
3130 int
3131 frv_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
3132 tree type ATTRIBUTE_UNUSED)
3133 {
3134 return BITS_PER_WORD;
3135 }
3136
3137 rtx
3138 frv_function_arg (CUMULATIVE_ARGS *cum,
3139 enum machine_mode mode,
3140 tree type ATTRIBUTE_UNUSED,
3141 int named,
3142 int incoming ATTRIBUTE_UNUSED)
3143 {
3144 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3145 int arg_num = *cum;
3146 rtx ret;
3147 const char *debstr;
3148
3149 /* Return a marker for use in the call instruction. */
3150 if (xmode == VOIDmode)
3151 {
3152 ret = const0_rtx;
3153 debstr = "<0>";
3154 }
3155
3156 else if (arg_num <= LAST_ARG_REGNUM)
3157 {
3158 ret = gen_rtx_REG (xmode, arg_num);
3159 debstr = reg_names[arg_num];
3160 }
3161
3162 else
3163 {
3164 ret = NULL_RTX;
3165 debstr = "memory";
3166 }
3167
3168 if (TARGET_DEBUG_ARG)
3169 fprintf (stderr,
3170 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3171 arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr);
3172
3173 return ret;
3174 }
3175
3176 \f
3177 /* A C statement (sans semicolon) to update the summarizer variable CUM to
3178 advance past an argument in the argument list. The values MODE, TYPE and
3179 NAMED describe that argument. Once this is done, the variable CUM is
3180 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
3181
3182 This macro need not do anything if the argument in question was passed on
3183 the stack. The compiler knows how to track the amount of stack space used
3184 for arguments without any special help. */
3185
3186 void
3187 frv_function_arg_advance (CUMULATIVE_ARGS *cum,
3188 enum machine_mode mode,
3189 tree type ATTRIBUTE_UNUSED,
3190 int named)
3191 {
3192 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3193 int bytes = GET_MODE_SIZE (xmode);
3194 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3195 int arg_num = *cum;
3196
3197 *cum = arg_num + words;
3198
3199 if (TARGET_DEBUG_ARG)
3200 fprintf (stderr,
3201 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3202 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
3203 }
3204
3205 \f
3206 /* A C expression for the number of words, at the beginning of an argument,
3207 that must be put in registers. The value must be zero for arguments that are
3208 passed entirely in registers or that are entirely pushed on the stack.
3209
3210 On some machines, certain arguments must be passed partially in registers
3211 and partially in memory. On these machines, typically the first N words of
3212 arguments are passed in registers, and the rest on the stack. If a
3213 multi-word argument (a `double' or a structure) crosses that boundary, its
3214 first few words must be passed in registers and the rest must be pushed.
3215 This macro tells the compiler when this occurs, and how many of the words
3216 should go in registers.
3217
3218 `FUNCTION_ARG' for these arguments should return the first register to be
3219 used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for
3220 the called function. */
3221
3222 static int
3223 frv_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3224 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
3225 {
3226 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3227 int bytes = GET_MODE_SIZE (xmode);
3228 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3229 int arg_num = *cum;
3230 int ret;
3231
3232 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3233 ? LAST_ARG_REGNUM - arg_num + 1
3234 : 0);
3235 ret *= UNITS_PER_WORD;
3236
3237 if (TARGET_DEBUG_ARG && ret)
3238 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3239
3240 return ret;
3241 }
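
/* A worked example (illustrative only, assuming the usual gr8..gr13
   argument registers): if five SImode arguments have already consumed
   gr8..gr12, a DImode argument needs two words starting at gr13.  Then
   arg_num + words exceeds LAST_ARG_REGNUM + 1, so the argument is split:
   4 bytes (one word) go in gr13 and the remaining 4 bytes are pushed on
   the stack.  */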
3242
3243 \f
3244 /* Return true if a register is ok to use as a base or index register. */
3245
3246 static FRV_INLINE int
3247 frv_regno_ok_for_base_p (int regno, int strict_p)
3248 {
3249 if (GPR_P (regno))
3250 return TRUE;
3251
3252 if (strict_p)
3253 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3254
3255 if (regno == ARG_POINTER_REGNUM)
3256 return TRUE;
3257
3258 return (regno >= FIRST_PSEUDO_REGISTER);
3259 }
3260
3261 \f
3262 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3263 RTX) is a legitimate memory address on the target machine for a memory
3264 operand of mode MODE.
3265
3266 It usually pays to define several simpler macros to serve as subroutines for
3267 this one. Otherwise it may be too complicated to understand.
3268
3269 This macro must exist in two variants: a strict variant and a non-strict
3270 one. The strict variant is used in the reload pass. It must be defined so
3271 that any pseudo-register that has not been allocated a hard register is
3272 considered a memory reference. In contexts where some kind of register is
3273 required, a pseudo-register with no hard register must be rejected.
3274
3275 The non-strict variant is used in other passes. It must be defined to
3276 accept all pseudo-registers in every context where some kind of register is
3277 required.
3278
3279 Compiler source files that want to use the strict variant of this macro
3280 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3281 conditional to define the strict variant in that case and the non-strict
3282 variant otherwise.
3283
3284 Subroutines to check for acceptable registers for various purposes (one for
3285 base registers, one for index registers, and so on) are typically among the
3286 subroutines used to define `GO_IF_LEGITIMATE_ADDRESS'. Then only these
3287 subroutine macros need have two variants; the higher levels of macros may be
3288 the same whether strict or not.
3289
3290 Normally, constant addresses which are the sum of a `symbol_ref' and an
3291 integer are stored inside a `const' RTX to mark them as constant.
3292 Therefore, there is no need to recognize such sums specifically as
3293 legitimate addresses. Normally you would simply recognize any `const' as
3294 legitimate.
3295
3296 Usually `PRINT_OPERAND_ADDRESS' is not prepared to handle constant sums that
3297 are not marked with `const'. It assumes that a naked `plus' indicates
3298 indexing. If so, then you *must* reject such naked constant sums as
3299 illegitimate addresses, so that none of them will be given to
3300 `PRINT_OPERAND_ADDRESS'.
3301
3302 On some machines, whether a symbolic address is legitimate depends on the
3303 section that the address refers to. On these machines, define the macro
3304 `ENCODE_SECTION_INFO' to store the information into the `symbol_ref', and
3305 then check for it here. When you see a `const', you will have to look
3306 inside it to find the `symbol_ref' in order to determine the section.
3307
3308 The best way to modify the name string is by adding text to the beginning,
3309 with suitable punctuation to prevent any ambiguity. Allocate the new name
3310 in `saveable_obstack'. You will have to modify `ASM_OUTPUT_LABELREF' to
3311 remove and decode the added text and output the name accordingly, and define
3312 `(* targetm.strip_name_encoding)' to access the original name string.
3313
3314 You can check the information stored here into the `symbol_ref' in the
3315 definitions of the macros `GO_IF_LEGITIMATE_ADDRESS' and
3316 `PRINT_OPERAND_ADDRESS'. */
3317
3318 int
3319 frv_legitimate_address_p (enum machine_mode mode,
3320 rtx x,
3321 int strict_p,
3322 int condexec_p,
3323 int allow_double_reg_p)
3324 {
3325 rtx x0, x1;
3326 int ret = 0;
3327 HOST_WIDE_INT value;
3328 unsigned regno0;
3329
3330 if (FRV_SYMBOL_REF_TLS_P (x))
3331 return 0;
3332
3333 switch (GET_CODE (x))
3334 {
3335 default:
3336 break;
3337
3338 case SUBREG:
3339 x = SUBREG_REG (x);
3340 if (GET_CODE (x) != REG)
3341 break;
3342
3343 /* Fall through. */
3344
3345 case REG:
3346 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3347 break;
3348
3349 case PRE_MODIFY:
3350 x0 = XEXP (x, 0);
3351 x1 = XEXP (x, 1);
3352 if (GET_CODE (x0) != REG
3353 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3354 || GET_CODE (x1) != PLUS
3355 || ! rtx_equal_p (x0, XEXP (x1, 0))
3356 || GET_CODE (XEXP (x1, 1)) != REG
3357 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3358 break;
3359
3360 ret = 1;
3361 break;
3362
3363 case CONST_INT:
3364 /* 12-bit immediate */
3365 if (condexec_p)
3366 ret = FALSE;
3367 else
3368 {
3369 ret = IN_RANGE_P (INTVAL (x), -2048, 2047);
3370
3371 /* If we can't use load/store double operations, make sure we can
3372 address the second word. */
3373 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3374 ret = IN_RANGE_P (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3375 -2048, 2047);
3376 }
3377 break;
3378
3379 case PLUS:
3380 x0 = XEXP (x, 0);
3381 x1 = XEXP (x, 1);
3382
3383 if (GET_CODE (x0) == SUBREG)
3384 x0 = SUBREG_REG (x0);
3385
3386 if (GET_CODE (x0) != REG)
3387 break;
3388
3389 regno0 = REGNO (x0);
3390 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3391 break;
3392
3393 switch (GET_CODE (x1))
3394 {
3395 default:
3396 break;
3397
3398 case SUBREG:
3399 x1 = SUBREG_REG (x1);
3400 if (GET_CODE (x1) != REG)
3401 break;
3402
3403 /* Fall through. */
3404
3405 case REG:
3406 /* Do not allow reg+reg addressing for modes > 1 word if we
3407 can't depend on having move double instructions. */
3408 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3409 ret = FALSE;
3410 else
3411 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3412 break;
3413
3414 case CONST_INT:
3415 /* 12-bit immediate */
3416 if (condexec_p)
3417 ret = FALSE;
3418 else
3419 {
3420 value = INTVAL (x1);
3421 ret = IN_RANGE_P (value, -2048, 2047);
3422
3423 /* If we can't use load/store double operations, make sure we can
3424 address the second word. */
3425 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3426 ret = IN_RANGE_P (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3427 }
3428 break;
3429
3430 case CONST:
3431 if (!condexec_p && got12_operand (x1, VOIDmode))
3432 ret = TRUE;
3433 break;
3434
3435 }
3436 break;
3437 }
3438
3439 if (TARGET_DEBUG_ADDR)
3440 {
3441 fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, mode = %s, result = %d, addresses are %sstrict%s\n",
3442 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3443 (condexec_p) ? ", inside conditional code" : "");
3444 debug_rtx (x);
3445 }
3446
3447 return ret;
3448 }
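
/* Summarizing the cases above with concrete examples (illustrative only):

     (reg grA)                                   single base register
     (plus (reg grA) (reg grB))                  reg+reg, single-word modes
                                                 only unless allow_double_reg_p
     (plus (reg grA) (const_int 2047))           reg + signed 12-bit offset
     (pre_modify (reg grA)
                 (plus (reg grA) (reg grB)))     load/store with update
     (const_int 100)                             absolute 12-bit address
     (plus (reg grA) (const ... #got12 ...))     small GOT offset, only
                                                 outside conditional code

   Conditionally executed accesses (condexec_p) reject all constant
   offsets, and for multi-word modes the whole access must stay inside
   the 12-bit range.  */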
3449
3450 /* Given an ADDR, generate code to inline the PLT. */
3451 static rtx
3452 gen_inlined_tls_plt (rtx addr)
3453 {
3454 rtx retval, dest;
3455 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3456
3457
3458 dest = gen_reg_rtx (DImode);
3459
3460 if (flag_pic == 1)
3461 {
3462 /*
3463 -fpic version:
3464
3465 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3466 calll #gettlsoff(ADDR)@(gr8, gr0)
3467 */
3468 emit_insn (gen_tls_lddi (dest, addr, picreg));
3469 }
3470 else
3471 {
3472 /*
3473 -fPIC version:
3474
3475 sethi.p #gottlsdeschi(ADDR), gr8
3476 setlo #gottlsdesclo(ADDR), gr8
3477 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3478 calll #gettlsoff(ADDR)@(gr8, gr0)
3479 */
3480 rtx reguse = gen_reg_rtx (Pmode);
3481 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3482 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3483 }
3484
3485 retval = gen_reg_rtx (Pmode);
3486 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3487 return retval;
3488 }
3489
3490 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3491 the destination address. */
3492 static rtx
3493 gen_tlsmoff (rtx addr, rtx reg)
3494 {
3495 rtx dest = gen_reg_rtx (Pmode);
3496
3497 if (TARGET_BIG_TLS)
3498 {
3499 /* sethi.p #tlsmoffhi(x), grA
3500 setlo #tlsmofflo(x), grA
3501 */
3502 dest = gen_reg_rtx (Pmode);
3503 emit_insn (gen_tlsoff_hilo (dest, addr,
3504 GEN_INT (R_FRV_TLSMOFFHI)));
3505 dest = gen_rtx_PLUS (Pmode, dest, reg);
3506 }
3507 else
3508 {
3509 /* addi grB, #tlsmoff12(x), grC
3510 -or-
3511 ld/st @(grB, #tlsmoff12(x)), grC
3512 */
3513 dest = gen_reg_rtx (Pmode);
3514 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3515 GEN_INT (R_FRV_TLSMOFF12)));
3516 }
3517 return dest;
3518 }
3519
3520 /* Generate code for a TLS address. */
3521 static rtx
3522 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3523 {
3524 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3525 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3526
3527 switch (model)
3528 {
3529 case TLS_MODEL_INITIAL_EXEC:
3530 if (flag_pic == 1)
3531 {
3532 /* -fpic version.
3533 ldi @(gr15, #gottlsoff12(x)), gr5
3534 */
3535 dest = gen_reg_rtx (Pmode);
3536 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3537 dest = gen_rtx_PLUS (Pmode, tp, dest);
3538 }
3539 else
3540 {
3541 /* -fPIC or anything else.
3542
3543 sethi.p #gottlsoffhi(x), gr14
3544 setlo #gottlsofflo(x), gr14
3545 ld #tlsoff(x)@(gr15, gr14), gr9
3546 */
3547 rtx tmp = gen_reg_rtx (Pmode);
3548 dest = gen_reg_rtx (Pmode);
3549 emit_insn (gen_tlsoff_hilo (tmp, addr,
3550 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3551
3552 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3553 dest = gen_rtx_PLUS (Pmode, tp, dest);
3554 }
3555 break;
3556 case TLS_MODEL_LOCAL_DYNAMIC:
3557 {
3558 rtx reg, retval;
3559
3560 if (TARGET_INLINE_PLT)
3561 retval = gen_inlined_tls_plt (GEN_INT (0));
3562 else
3563 {
3564 /* call #gettlsoff(0) */
3565 retval = gen_reg_rtx (Pmode);
3566 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3567 }
3568
3569 reg = gen_reg_rtx (Pmode);
3570 emit_insn (gen_rtx_SET (VOIDmode, reg,
3571 gen_rtx_PLUS (Pmode,
3572 retval, tp)));
3573
3574 dest = gen_tlsmoff (addr, reg);
3575
3576 /*
3577 dest = gen_reg_rtx (Pmode);
3578 emit_insn (gen_tlsoff_hilo (dest, addr,
3579 GEN_INT (R_FRV_TLSMOFFHI)));
3580 dest = gen_rtx_PLUS (Pmode, dest, reg);
3581 */
3582 break;
3583 }
3584 case TLS_MODEL_LOCAL_EXEC:
3585 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3586 break;
3587 case TLS_MODEL_GLOBAL_DYNAMIC:
3588 {
3589 rtx retval;
3590
3591 if (TARGET_INLINE_PLT)
3592 retval = gen_inlined_tls_plt (addr);
3593 else
3594 {
3595 /* call #gettlsoff(x) */
3596 retval = gen_reg_rtx (Pmode);
3597 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3598 }
3599 dest = gen_rtx_PLUS (Pmode, retval, tp);
3600 break;
3601 }
3602 default:
3603 gcc_unreachable ();
3604 }
3605
3606 return dest;
3607 }
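
/* Rough summary (illustrative only) of the expansions chosen above:

     initial-exec    load #gottlsoff(x) through gr15, add the thread
                     pointer (register 29)
     local-dynamic   call or inline #gettlsoff(0), add the thread pointer,
                     then apply a #tlsmoff offset via gen_tlsmoff
     local-exec      apply a #tlsmoff offset directly to the thread pointer
     global-dynamic  call or inline #gettlsoff(x) and add the thread pointer

   The inlined-PLT variants are only used with TARGET_INLINE_PLT.  */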
3608
3609 rtx
3610 frv_legitimize_address (rtx x,
3611 rtx oldx ATTRIBUTE_UNUSED,
3612 enum machine_mode mode ATTRIBUTE_UNUSED)
3613 {
3614 if (GET_CODE (x) == SYMBOL_REF)
3615 {
3616 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3617 if (model != 0)
3618 return frv_legitimize_tls_address (x, model);
3619 }
3620
3621 return NULL_RTX;
3622 }
3623 \f
3624 /* Test whether a local function descriptor is canonical, i.e.,
3625 whether we can use FUNCDESC_GOTOFF to compute the address of the
3626 function. */
3627
3628 static bool
3629 frv_local_funcdesc_p (rtx fnx)
3630 {
3631 tree fn;
3632 enum symbol_visibility vis;
3633 bool ret;
3634
3635 if (! SYMBOL_REF_LOCAL_P (fnx))
3636 return FALSE;
3637
3638 fn = SYMBOL_REF_DECL (fnx);
3639
3640 if (! fn)
3641 return FALSE;
3642
3643 vis = DECL_VISIBILITY (fn);
3644
3645 if (vis == VISIBILITY_PROTECTED)
3646 /* Private function descriptors for protected functions are not
3647 canonical. Temporarily change the visibility to global. */
3648 vis = VISIBILITY_DEFAULT;
3649 else if (flag_shlib)
3650 /* If we're already compiling for a shared library (that, unlike
3651 executables, can't assume that the existence of a definition
3652 implies local binding), we can skip the re-testing. */
3653 return TRUE;
3654
3655 ret = default_binds_local_p_1 (fn, flag_pic);
3656
3657 DECL_VISIBILITY (fn) = vis;
3658
3659 return ret;
3660 }
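
/* Example (illustrative): a static function compiled into an executable
   binds locally, so its descriptor is canonical and FUNCDESC_GOTOFF can be
   used for it; a default-visibility function in a shared library may be
   overridden at run time, so frv_emit_movsi falls back to FUNCDESC_GOT12
   in that case (see the SYMBOL_REF handling below).  */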
3661
3662 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3663 register. */
3664
3665 rtx
3666 frv_gen_GPsym2reg (rtx dest, rtx src)
3667 {
3668 tree gp = get_identifier ("_gp");
3669 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3670
3671 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3672 }
3673
3674 static const char *
3675 unspec_got_name (int i)
3676 {
3677 switch (i)
3678 {
3679 case R_FRV_GOT12: return "got12";
3680 case R_FRV_GOTHI: return "gothi";
3681 case R_FRV_GOTLO: return "gotlo";
3682 case R_FRV_FUNCDESC: return "funcdesc";
3683 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3684 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3685 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3686 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3687 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3688 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3689 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3690 case R_FRV_GOTOFF12: return "gotoff12";
3691 case R_FRV_GOTOFFHI: return "gotoffhi";
3692 case R_FRV_GOTOFFLO: return "gotofflo";
3693 case R_FRV_GPREL12: return "gprel12";
3694 case R_FRV_GPRELHI: return "gprelhi";
3695 case R_FRV_GPRELLO: return "gprello";
3696 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3697 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3698 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3699 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3700 case R_FRV_TLSMOFF12: return "tlsmoff12";
3701 case R_FRV_TLSDESCHI: return "tlsdeschi";
3702 case R_FRV_TLSDESCLO: return "tlsdesclo";
3703 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3704 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3705 default: gcc_unreachable ();
3706 }
3707 }
3708
3709 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3710 is added inside the relocation operator. */
3711
3712 static void
3713 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3714 {
3715 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3716 output_addr_const (stream, plus_constant (unspec->symbol, unspec->offset));
3717 fputs (")", stream);
3718 }
3719
3720 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3721 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3722 otherwise return ORIG_X. */
3723
3724 rtx
3725 frv_find_base_term (rtx x)
3726 {
3727 struct frv_unspec unspec;
3728
3729 if (frv_const_unspec_p (x, &unspec)
3730 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3731 return plus_constant (unspec.symbol, unspec.offset);
3732
3733 return x;
3734 }
3735
3736 /* Return 1 if OP is a memory operand with a valid FRV address.  CONDEXEC_P
3737 is true if the operand is used by a predicated instruction. */
3738
3739 int
3740 frv_legitimate_memory_operand (rtx op, enum machine_mode mode, int condexec_p)
3741 {
3742 return ((GET_MODE (op) == mode || mode == VOIDmode)
3743 && GET_CODE (op) == MEM
3744 && frv_legitimate_address_p (mode, XEXP (op, 0),
3745 reload_completed, condexec_p, FALSE));
3746 }
3747
3748 void
3749 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3750 {
3751 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3752 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3753 rtx c, rvrtx = 0;
3754 rtx addr;
3755
3756 if (ret_value)
3757 {
3758 rvrtx = operands[0];
3759 operands++;
3760 }
3761
3762 addr = XEXP (operands[0], 0);
3763
3764 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3765 any calls that would involve a PLT, but can't tell, since we
3766 don't know whether an extern function is going to be provided by
3767 a separate translation unit or imported from a separate module.
3768 When compiling for shared libraries, if the function has default
3769 visibility, we assume it's overridable, so we inline the PLT, but
3770 for executables, we don't really have a way to make a good
3771 decision: a function is as likely to be imported from a shared
3772 library as it is to be defined in the executable itself. We
3773 assume executables will get global functions defined locally,
3774 whereas shared libraries will have them potentially overridden,
3775 so we only inline PLTs when compiling for shared libraries.
3776
3777 In order to mark a function as local to a shared library, any
3778 non-default visibility attribute suffices. Unfortunately,
3779 there's no simple way to tag a function declaration as ``in a
3780 different module'', which we could then use to trigger PLT
3781 inlining on executables. There's -minline-plt, but it affects
3782 all external functions, so one would have to also mark function
3783 declarations available in the same module with non-default
3784 visibility, which is advantageous in itself. */
3785 if (GET_CODE (addr) == SYMBOL_REF
3786 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3787 || sibcall))
3788 {
3789 rtx x, dest;
3790 dest = gen_reg_rtx (SImode);
3791 if (flag_pic != 1)
3792 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3793 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3794 else
3795 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3796 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3797 emit_insn (x);
3798 cfun->uses_pic_offset_table = TRUE;
3799 addr = dest;
3800 }
3801 else if (GET_CODE (addr) == SYMBOL_REF)
3802 {
3803 /* These are always either local, or handled through a local
3804 PLT. */
3805 if (ret_value)
3806 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3807 operands[2], picreg, lr);
3808 else
3809 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3810 emit_call_insn (c);
3811 return;
3812 }
3813 else if (! ldd_address_operand (addr, Pmode))
3814 addr = force_reg (Pmode, addr);
3815
3816 picreg = gen_reg_rtx (DImode);
3817 emit_insn (gen_movdi_ldd (picreg, addr));
3818
3819 if (sibcall && ret_value)
3820 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3821 else if (sibcall)
3822 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3823 else if (ret_value)
3824 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3825 else
3826 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3827 emit_call_insn (c);
3828 }
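
/* Illustration (derived from the code above, not separately documented):
   calls to SYMBOL_REFs that are local, or handled through a local PLT, go
   through the *_fdpicsi call patterns with the incoming gr15 value; calls
   that may need a PLT are inlined when optimizing for speed or sibcalling
   by loading the function descriptor address via FUNCDESC_GOTOFF12; the
   fallback path loads the two-word descriptor (entry point and its GOT
   value) into a DImode register with gen_movdi_ldd and calls through it
   using the *_fdpicdi patterns.  */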
3829 \f
3830 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3831 process these separately from any offsets, such that we add any
3832 offsets to the function descriptor (the actual pointer), not to the
3833 function address. */
3834
3835 static bool
3836 frv_function_symbol_referenced_p (rtx x)
3837 {
3838 const char *format;
3839 int length;
3840 int j;
3841
3842 if (GET_CODE (x) == SYMBOL_REF)
3843 return SYMBOL_REF_FUNCTION_P (x);
3844
3845 length = GET_RTX_LENGTH (GET_CODE (x));
3846 format = GET_RTX_FORMAT (GET_CODE (x));
3847
3848 for (j = 0; j < length; ++j)
3849 {
3850 switch (format[j])
3851 {
3852 case 'e':
3853 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3854 return TRUE;
3855 break;
3856
3857 case 'V':
3858 case 'E':
3859 if (XVEC (x, j) != 0)
3860 {
3861 int k;
3862 for (k = 0; k < XVECLEN (x, j); ++k)
3863 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3864 return TRUE;
3865 }
3866 break;
3867
3868 default:
3869 /* Nothing to do. */
3870 break;
3871 }
3872 }
3873
3874 return FALSE;
3875 }
3876
3877 /* Return true if the memory operand is one that can be conditionally
3878 executed. */
3879
3880 int
3881 condexec_memory_operand (rtx op, enum machine_mode mode)
3882 {
3883 enum machine_mode op_mode = GET_MODE (op);
3884 rtx addr;
3885
3886 if (mode != VOIDmode && op_mode != mode)
3887 return FALSE;
3888
3889 switch (op_mode)
3890 {
3891 default:
3892 return FALSE;
3893
3894 case QImode:
3895 case HImode:
3896 case SImode:
3897 case SFmode:
3898 break;
3899 }
3900
3901 if (GET_CODE (op) != MEM)
3902 return FALSE;
3903
3904 addr = XEXP (op, 0);
3905 return frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE);
3906 }
3907 \f
3908 /* Return true if the bare return instruction can be used outside of the
3909 epilogue code.  For FRV, we only do it if there was no stack allocation. */
3910
3911 int
3912 direct_return_p (void)
3913 {
3914 frv_stack_t *info;
3915
3916 if (!reload_completed)
3917 return FALSE;
3918
3919 info = frv_stack_info ();
3920 return (info->total_size == 0);
3921 }
3922
3923 \f
3924 void
3925 frv_emit_move (enum machine_mode mode, rtx dest, rtx src)
3926 {
3927 if (GET_CODE (src) == SYMBOL_REF)
3928 {
3929 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3930 if (model != 0)
3931 src = frv_legitimize_tls_address (src, model);
3932 }
3933
3934 switch (mode)
3935 {
3936 case SImode:
3937 if (frv_emit_movsi (dest, src))
3938 return;
3939 break;
3940
3941 case QImode:
3942 case HImode:
3943 case DImode:
3944 case SFmode:
3945 case DFmode:
3946 if (!reload_in_progress
3947 && !reload_completed
3948 && !register_operand (dest, mode)
3949 && !reg_or_0_operand (src, mode))
3950 src = copy_to_mode_reg (mode, src);
3951 break;
3952
3953 default:
3954 gcc_unreachable ();
3955 }
3956
3957 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
3958 }
3959
3960 /* Emit code to handle a MOVSI, adding in the small data register or pic
3961 register if needed to load up addresses. Return TRUE if the appropriate
3962 instructions are emitted. */
3963
3964 int
3965 frv_emit_movsi (rtx dest, rtx src)
3966 {
3967 int base_regno = -1;
3968 int unspec = 0;
3969 rtx sym = src;
3970 struct frv_unspec old_unspec;
3971
3972 if (!reload_in_progress
3973 && !reload_completed
3974 && !register_operand (dest, SImode)
3975 && (!reg_or_0_operand (src, SImode)
3976 /* Virtual registers will almost always be replaced by an
3977 add instruction, so expose this to CSE by copying to
3978 an intermediate register. */
3979 || (GET_CODE (src) == REG
3980 && IN_RANGE_P (REGNO (src),
3981 FIRST_VIRTUAL_REGISTER,
3982 LAST_VIRTUAL_REGISTER))))
3983 {
3984 emit_insn (gen_rtx_SET (VOIDmode, dest, copy_to_mode_reg (SImode, src)));
3985 return TRUE;
3986 }
3987
3988 /* Explicitly add in the PIC or small data register if needed. */
3989 switch (GET_CODE (src))
3990 {
3991 default:
3992 break;
3993
3994 case LABEL_REF:
3995 handle_label:
3996 if (TARGET_FDPIC)
3997 {
3998 /* Using GPREL12, we use a single GOT entry for all symbols
3999 in read-only sections, but trade sequences such as:
4000
4001 sethi #gothi(label), gr#
4002 setlo #gotlo(label), gr#
4003 ld @(gr15,gr#), gr#
4004
4005 for
4006
4007 ld @(gr15,#got12(_gp)), gr#
4008 sethi #gprelhi(label), gr##
4009 setlo #gprello(label), gr##
4010 add gr#, gr##, gr##
4011
4012 We may often be able to share gr# for multiple
4013 computations of GPREL addresses, and we may often fold
4014 the final add into the pair of registers of a load or
4015 store instruction, so it's often profitable. Even when
4016 optimizing for size, we're trading a GOT entry for an
4017 additional instruction, which trades GOT space
4018 (read-write) for code size (read-only, shareable), as
4019 long as the symbol is not used in more than two different
4020 locations.
4021
4022 With -fpie/-fpic, we'd be trading a single load for a
4023 sequence of 4 instructions, because the offset of the
4024 label can't be assumed to be addressable with 12 bits, so
4025 we don't do this. */
4026 if (TARGET_GPREL_RO)
4027 unspec = R_FRV_GPREL12;
4028 else
4029 unspec = R_FRV_GOT12;
4030 }
4031 else if (flag_pic)
4032 base_regno = PIC_REGNO;
4033
4034 break;
4035
4036 case CONST:
4037 if (frv_const_unspec_p (src, &old_unspec))
4038 break;
4039
4040 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4041 {
4042 handle_whatever:
4043 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4044 emit_move_insn (dest, src);
4045 return TRUE;
4046 }
4047 else
4048 {
4049 sym = XEXP (sym, 0);
4050 if (GET_CODE (sym) == PLUS
4051 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4052 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4053 sym = XEXP (sym, 0);
4054 if (GET_CODE (sym) == SYMBOL_REF)
4055 goto handle_sym;
4056 else if (GET_CODE (sym) == LABEL_REF)
4057 goto handle_label;
4058 else
4059 goto handle_whatever;
4060 }
4061 break;
4062
4063 case SYMBOL_REF:
4064 handle_sym:
4065 if (TARGET_FDPIC)
4066 {
4067 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4068
4069 if (model != 0)
4070 {
4071 src = frv_legitimize_tls_address (src, model);
4072 emit_move_insn (dest, src);
4073 return TRUE;
4074 }
4075
4076 if (SYMBOL_REF_FUNCTION_P (sym))
4077 {
4078 if (frv_local_funcdesc_p (sym))
4079 unspec = R_FRV_FUNCDESC_GOTOFF12;
4080 else
4081 unspec = R_FRV_FUNCDESC_GOT12;
4082 }
4083 else
4084 {
4085 if (CONSTANT_POOL_ADDRESS_P (sym))
4086 switch (GET_CODE (get_pool_constant (sym)))
4087 {
4088 case CONST:
4089 case SYMBOL_REF:
4090 case LABEL_REF:
4091 if (flag_pic)
4092 {
4093 unspec = R_FRV_GOTOFF12;
4094 break;
4095 }
4096 /* Fall through. */
4097 default:
4098 if (TARGET_GPREL_RO)
4099 unspec = R_FRV_GPREL12;
4100 else
4101 unspec = R_FRV_GOT12;
4102 break;
4103 }
4104 else if (SYMBOL_REF_LOCAL_P (sym)
4105 && !SYMBOL_REF_EXTERNAL_P (sym)
4106 && SYMBOL_REF_DECL (sym)
4107 && (!DECL_P (SYMBOL_REF_DECL (sym))
4108 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4109 {
4110 tree decl = SYMBOL_REF_DECL (sym);
4111 tree init = TREE_CODE (decl) == VAR_DECL
4112 ? DECL_INITIAL (decl)
4113 : TREE_CODE (decl) == CONSTRUCTOR
4114 ? decl : 0;
4115 int reloc = 0;
4116 bool named_section, readonly;
4117
4118 if (init && init != error_mark_node)
4119 reloc = compute_reloc_for_constant (init);
4120
4121 named_section = TREE_CODE (decl) == VAR_DECL
4122 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4123 readonly = decl_readonly_section (decl, reloc);
4124
4125 if (named_section)
4126 unspec = R_FRV_GOT12;
4127 else if (!readonly)
4128 unspec = R_FRV_GOTOFF12;
4129 else if (readonly && TARGET_GPREL_RO)
4130 unspec = R_FRV_GPREL12;
4131 else
4132 unspec = R_FRV_GOT12;
4133 }
4134 else
4135 unspec = R_FRV_GOT12;
4136 }
4137 }
4138
4139 else if (SYMBOL_REF_SMALL_P (sym))
4140 base_regno = SDA_BASE_REG;
4141
4142 else if (flag_pic)
4143 base_regno = PIC_REGNO;
4144
4145 break;
4146 }
4147
4148 if (base_regno >= 0)
4149 {
4150 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4151 emit_insn (gen_symGOTOFF2reg (dest, src,
4152 gen_rtx_REG (Pmode, base_regno),
4153 GEN_INT (R_FRV_GPREL12)));
4154 else
4155 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4156 gen_rtx_REG (Pmode, base_regno),
4157 GEN_INT (R_FRV_GPREL12)));
4158 if (base_regno == PIC_REGNO)
4159 cfun->uses_pic_offset_table = TRUE;
4160 return TRUE;
4161 }
4162
4163 if (unspec)
4164 {
4165 rtx x;
4166
4167 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4168 new uses of it once reload has begun. */
4169 gcc_assert (!reload_in_progress && !reload_completed);
4170
4171 switch (unspec)
4172 {
4173 case R_FRV_GOTOFF12:
4174 if (!frv_small_data_reloc_p (sym, unspec))
4175 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4176 GEN_INT (unspec));
4177 else
4178 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4179 break;
4180 case R_FRV_GPREL12:
4181 if (!frv_small_data_reloc_p (sym, unspec))
4182 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4183 GEN_INT (unspec));
4184 else
4185 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4186 break;
4187 case R_FRV_FUNCDESC_GOTOFF12:
4188 if (flag_pic != 1)
4189 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4190 GEN_INT (unspec));
4191 else
4192 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4193 break;
4194 default:
4195 if (flag_pic != 1)
4196 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4197 GEN_INT (unspec));
4198 else
4199 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4200 break;
4201 }
4202 emit_insn (x);
4203 cfun->uses_pic_offset_table = TRUE;
4204 return TRUE;
4205 }
4206
4207
4208 return FALSE;
4209 }
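
/* Decision summary for the TARGET_FDPIC path above (illustrative,
   derived from the code itself):

     function symbol with a local descriptor     FUNCDESC_GOTOFF12
     other function symbols                      FUNCDESC_GOT12
     constant-pool address constants, -fpic      GOTOFF12
     other constant-pool entries                 GPREL12 with TARGET_GPREL_RO,
                                                 otherwise GOT12
     local non-common data: named section        GOT12
                            writable             GOTOFF12
                            read-only            GPREL12 with TARGET_GPREL_RO,
                                                 otherwise GOT12
     everything else                             GOT12

   Labels use GPREL12 with TARGET_GPREL_RO and GOT12 otherwise, as
   explained in the large comment inside the function.  */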
4210
4211 \f
4212 /* Return a string to output a single word move. */
4213
4214 const char *
4215 output_move_single (rtx operands[], rtx insn)
4216 {
4217 rtx dest = operands[0];
4218 rtx src = operands[1];
4219
4220 if (GET_CODE (dest) == REG)
4221 {
4222 int dest_regno = REGNO (dest);
4223 enum machine_mode mode = GET_MODE (dest);
4224
4225 if (GPR_P (dest_regno))
4226 {
4227 if (GET_CODE (src) == REG)
4228 {
4229 /* gpr <- some sort of register */
4230 int src_regno = REGNO (src);
4231
4232 if (GPR_P (src_regno))
4233 return "mov %1, %0";
4234
4235 else if (FPR_P (src_regno))
4236 return "movfg %1, %0";
4237
4238 else if (SPR_P (src_regno))
4239 return "movsg %1, %0";
4240 }
4241
4242 else if (GET_CODE (src) == MEM)
4243 {
4244 /* gpr <- memory */
4245 switch (mode)
4246 {
4247 default:
4248 break;
4249
4250 case QImode:
4251 return "ldsb%I1%U1 %M1,%0";
4252
4253 case HImode:
4254 return "ldsh%I1%U1 %M1,%0";
4255
4256 case SImode:
4257 case SFmode:
4258 return "ld%I1%U1 %M1, %0";
4259 }
4260 }
4261
4262 else if (GET_CODE (src) == CONST_INT
4263 || GET_CODE (src) == CONST_DOUBLE)
4264 {
4265 /* gpr <- integer/floating constant */
4266 HOST_WIDE_INT value;
4267
4268 if (GET_CODE (src) == CONST_INT)
4269 value = INTVAL (src);
4270
4271 else if (mode == SFmode)
4272 {
4273 REAL_VALUE_TYPE rv;
4274 long l;
4275
4276 REAL_VALUE_FROM_CONST_DOUBLE (rv, src);
4277 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
4278 value = l;
4279 }
4280
4281 else
4282 value = CONST_DOUBLE_LOW (src);
4283
4284 if (IN_RANGE_P (value, -32768, 32767))
4285 return "setlos %1, %0";
4286
4287 return "#";
4288 }
4289
4290 else if (GET_CODE (src) == SYMBOL_REF
4291 || GET_CODE (src) == LABEL_REF
4292 || GET_CODE (src) == CONST)
4293 {
4294 return "#";
4295 }
4296 }
4297
4298 else if (FPR_P (dest_regno))
4299 {
4300 if (GET_CODE (src) == REG)
4301 {
4302 /* fpr <- some sort of register */
4303 int src_regno = REGNO (src);
4304
4305 if (GPR_P (src_regno))
4306 return "movgf %1, %0";
4307
4308 else if (FPR_P (src_regno))
4309 {
4310 if (TARGET_HARD_FLOAT)
4311 return "fmovs %1, %0";
4312 else
4313 return "mor %1, %1, %0";
4314 }
4315 }
4316
4317 else if (GET_CODE (src) == MEM)
4318 {
4319 /* fpr <- memory */
4320 switch (mode)
4321 {
4322 default:
4323 break;
4324
4325 case QImode:
4326 return "ldbf%I1%U1 %M1,%0";
4327
4328 case HImode:
4329 return "ldhf%I1%U1 %M1,%0";
4330
4331 case SImode:
4332 case SFmode:
4333 return "ldf%I1%U1 %M1, %0";
4334 }
4335 }
4336
4337 else if (ZERO_P (src))
4338 return "movgf %., %0";
4339 }
4340
4341 else if (SPR_P (dest_regno))
4342 {
4343 if (GET_CODE (src) == REG)
4344 {
4345 /* spr <- some sort of register */
4346 int src_regno = REGNO (src);
4347
4348 if (GPR_P (src_regno))
4349 return "movgs %1, %0";
4350 }
4351 else if (ZERO_P (src))
4352 return "movgs %., %0";
4353 }
4354 }
4355
4356 else if (GET_CODE (dest) == MEM)
4357 {
4358 if (GET_CODE (src) == REG)
4359 {
4360 int src_regno = REGNO (src);
4361 enum machine_mode mode = GET_MODE (dest);
4362
4363 if (GPR_P (src_regno))
4364 {
4365 switch (mode)
4366 {
4367 default:
4368 break;
4369
4370 case QImode:
4371 return "stb%I0%U0 %1, %M0";
4372
4373 case HImode:
4374 return "sth%I0%U0 %1, %M0";
4375
4376 case SImode:
4377 case SFmode:
4378 return "st%I0%U0 %1, %M0";
4379 }
4380 }
4381
4382 else if (FPR_P (src_regno))
4383 {
4384 switch (mode)
4385 {
4386 default:
4387 break;
4388
4389 case QImode:
4390 return "stbf%I0%U0 %1, %M0";
4391
4392 case HImode:
4393 return "sthf%I0%U0 %1, %M0";
4394
4395 case SImode:
4396 case SFmode:
4397 return "stf%I0%U0 %1, %M0";
4398 }
4399 }
4400 }
4401
4402 else if (ZERO_P (src))
4403 {
4404 switch (GET_MODE (dest))
4405 {
4406 default:
4407 break;
4408
4409 case QImode:
4410 return "stb%I0%U0 %., %M0";
4411
4412 case HImode:
4413 return "sth%I0%U0 %., %M0";
4414
4415 case SImode:
4416 case SFmode:
4417 return "st%I0%U0 %., %M0";
4418 }
4419 }
4420 }
4421
4422 fatal_insn ("bad output_move_single operand", insn);
4423 return "";
4424 }
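
/* A few representative templates returned above (illustrative):

     gpr <- gpr                         "mov %1, %0"
     gpr <- constant in [-32768,32767]  "setlos %1, %0"
     gpr <- SImode/SFmode memory        "ld%I1%U1 %M1, %0"
     fpr <- SImode/SFmode memory        "ldf%I1%U1 %M1, %0"
     SImode/SFmode memory <- zero       "st%I0%U0 %., %M0"

   Anything that needs more than one instruction (large constants,
   SYMBOL_REFs, and so on) returns "#" so that the move is split later.  */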
4425
4426 \f
4427 /* Return a string to output a double word move. */
4428
4429 const char *
4430 output_move_double (rtx operands[], rtx insn)
4431 {
4432 rtx dest = operands[0];
4433 rtx src = operands[1];
4434 enum machine_mode mode = GET_MODE (dest);
4435
4436 if (GET_CODE (dest) == REG)
4437 {
4438 int dest_regno = REGNO (dest);
4439
4440 if (GPR_P (dest_regno))
4441 {
4442 if (GET_CODE (src) == REG)
4443 {
4444 /* gpr <- some sort of register */
4445 int src_regno = REGNO (src);
4446
4447 if (GPR_P (src_regno))
4448 return "#";
4449
4450 else if (FPR_P (src_regno))
4451 {
4452 if (((dest_regno - GPR_FIRST) & 1) == 0
4453 && ((src_regno - FPR_FIRST) & 1) == 0)
4454 return "movfgd %1, %0";
4455
4456 return "#";
4457 }
4458 }
4459
4460 else if (GET_CODE (src) == MEM)
4461 {
4462 /* gpr <- memory */
4463 if (dbl_memory_one_insn_operand (src, mode))
4464 return "ldd%I1%U1 %M1, %0";
4465
4466 return "#";
4467 }
4468
4469 else if (GET_CODE (src) == CONST_INT
4470 || GET_CODE (src) == CONST_DOUBLE)
4471 return "#";
4472 }
4473
4474 else if (FPR_P (dest_regno))
4475 {
4476 if (GET_CODE (src) == REG)
4477 {
4478 /* fpr <- some sort of register */
4479 int src_regno = REGNO (src);
4480
4481 if (GPR_P (src_regno))
4482 {
4483 if (((dest_regno - FPR_FIRST) & 1) == 0
4484 && ((src_regno - GPR_FIRST) & 1) == 0)
4485 return "movgfd %1, %0";
4486
4487 return "#";
4488 }
4489
4490 else if (FPR_P (src_regno))
4491 {
4492 if (TARGET_DOUBLE
4493 && ((dest_regno - FPR_FIRST) & 1) == 0
4494 && ((src_regno - FPR_FIRST) & 1) == 0)
4495 return "fmovd %1, %0";
4496
4497 return "#";
4498 }
4499 }
4500
4501 else if (GET_CODE (src) == MEM)
4502 {
4503 /* fpr <- memory */
4504 if (dbl_memory_one_insn_operand (src, mode))
4505 return "lddf%I1%U1 %M1, %0";
4506
4507 return "#";
4508 }
4509
4510 else if (ZERO_P (src))
4511 return "#";
4512 }
4513 }
4514
4515 else if (GET_CODE (dest) == MEM)
4516 {
4517 if (GET_CODE (src) == REG)
4518 {
4519 int src_regno = REGNO (src);
4520
4521 if (GPR_P (src_regno))
4522 {
4523 if (((src_regno - GPR_FIRST) & 1) == 0
4524 && dbl_memory_one_insn_operand (dest, mode))
4525 return "std%I0%U0 %1, %M0";
4526
4527 return "#";
4528 }
4529
4530 if (FPR_P (src_regno))
4531 {
4532 if (((src_regno - FPR_FIRST) & 1) == 0
4533 && dbl_memory_one_insn_operand (dest, mode))
4534 return "stdf%I0%U0 %1, %M0";
4535
4536 return "#";
4537 }
4538 }
4539
4540 else if (ZERO_P (src))
4541 {
4542 if (dbl_memory_one_insn_operand (dest, mode))
4543 return "std%I0%U0 %., %M0";
4544
4545 return "#";
4546 }
4547 }
4548
4549 fatal_insn ("bad output_move_double operand", insn);
4550 return "";
4551 }
4552
4553 \f
4554 /* Return a string to output a single word conditional move.
4555 Operand0 -- EQ/NE of ccr register and 0
4556 Operand1 -- CCR register
4557 Operand2 -- destination
4558 Operand3 -- source */
4559
4560 const char *
4561 output_condmove_single (rtx operands[], rtx insn)
4562 {
4563 rtx dest = operands[2];
4564 rtx src = operands[3];
4565
4566 if (GET_CODE (dest) == REG)
4567 {
4568 int dest_regno = REGNO (dest);
4569 enum machine_mode mode = GET_MODE (dest);
4570
4571 if (GPR_P (dest_regno))
4572 {
4573 if (GET_CODE (src) == REG)
4574 {
4575 /* gpr <- some sort of register */
4576 int src_regno = REGNO (src);
4577
4578 if (GPR_P (src_regno))
4579 return "cmov %z3, %2, %1, %e0";
4580
4581 else if (FPR_P (src_regno))
4582 return "cmovfg %3, %2, %1, %e0";
4583 }
4584
4585 else if (GET_CODE (src) == MEM)
4586 {
4587 /* gpr <- memory */
4588 switch (mode)
4589 {
4590 default:
4591 break;
4592
4593 case QImode:
4594 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4595
4596 case HImode:
4597 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4598
4599 case SImode:
4600 case SFmode:
4601 return "cld%I3%U3 %M3, %2, %1, %e0";
4602 }
4603 }
4604
4605 else if (ZERO_P (src))
4606 return "cmov %., %2, %1, %e0";
4607 }
4608
4609 else if (FPR_P (dest_regno))
4610 {
4611 if (GET_CODE (src) == REG)
4612 {
4613 /* fpr <- some sort of register */
4614 int src_regno = REGNO (src);
4615
4616 if (GPR_P (src_regno))
4617 return "cmovgf %3, %2, %1, %e0";
4618
4619 else if (FPR_P (src_regno))
4620 {
4621 if (TARGET_HARD_FLOAT)
4622 return "cfmovs %3,%2,%1,%e0";
4623 else
4624 return "cmor %3, %3, %2, %1, %e0";
4625 }
4626 }
4627
4628 else if (GET_CODE (src) == MEM)
4629 {
4630 /* fpr <- memory */
4631 if (mode == SImode || mode == SFmode)
4632 return "cldf%I3%U3 %M3, %2, %1, %e0";
4633 }
4634
4635 else if (ZERO_P (src))
4636 return "cmovgf %., %2, %1, %e0";
4637 }
4638 }
4639
4640 else if (GET_CODE (dest) == MEM)
4641 {
4642 if (GET_CODE (src) == REG)
4643 {
4644 int src_regno = REGNO (src);
4645 enum machine_mode mode = GET_MODE (dest);
4646
4647 if (GPR_P (src_regno))
4648 {
4649 switch (mode)
4650 {
4651 default:
4652 break;
4653
4654 case QImode:
4655 return "cstb%I2%U2 %3, %M2, %1, %e0";
4656
4657 case HImode:
4658 return "csth%I2%U2 %3, %M2, %1, %e0";
4659
4660 case SImode:
4661 case SFmode:
4662 return "cst%I2%U2 %3, %M2, %1, %e0";
4663 }
4664 }
4665
4666 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4667 return "cstf%I2%U2 %3, %M2, %1, %e0";
4668 }
4669
4670 else if (ZERO_P (src))
4671 {
4672 enum machine_mode mode = GET_MODE (dest);
4673 switch (mode)
4674 {
4675 default:
4676 break;
4677
4678 case QImode:
4679 return "cstb%I2%U2 %., %M2, %1, %e0";
4680
4681 case HImode:
4682 return "csth%I2%U2 %., %M2, %1, %e0";
4683
4684 case SImode:
4685 case SFmode:
4686 return "cst%I2%U2 %., %M2, %1, %e0";
4687 }
4688 }
4689 }
4690
4691 fatal_insn ("bad output_condmove_single operand", insn);
4692 return "";
4693 }
4694
4695 \f
4696 /* Emit the appropriate code to do a comparison, returning the register the
4697 comparison was done in. */
4698
4699 static rtx
4700 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4701 {
4702 enum machine_mode cc_mode;
4703 rtx cc_reg;
4704
4705 /* Floating point doesn't have comparison against a constant. */
4706 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4707 op1 = force_reg (GET_MODE (op0), op1);
4708
4709 /* Possibly disable using anything but a fixed register in order to work
4710 around cse moving comparisons past function calls. */
4711 cc_mode = SELECT_CC_MODE (test, op0, op1);
4712 cc_reg = ((TARGET_ALLOC_CC)
4713 ? gen_reg_rtx (cc_mode)
4714 : gen_rtx_REG (cc_mode,
4715 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4716
4717 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4718 gen_rtx_COMPARE (cc_mode, op0, op1)));
4719
4720 return cc_reg;
4721 }
4722
4723 \f
4724 /* Emit code for a conditional branch. The comparison operands were previously
4725 stored in frv_compare_op0 and frv_compare_op1.
4726
4727 XXX: I originally wanted to add a clobber of a CCR register to use in
4728 conditional execution, but that confuses the rest of the compiler. */
4729
4730 int
4731 frv_emit_cond_branch (enum rtx_code test, rtx label)
4732 {
4733 rtx test_rtx;
4734 rtx label_ref;
4735 rtx if_else;
4736 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4737 enum machine_mode cc_mode = GET_MODE (cc_reg);
4738
4739 /* Branches generate:
4740 (set (pc)
4741 (if_then_else (<test> <cc_reg> (const_int 0))
4742 (label_ref <branch_label>)
4743 (pc))) */
4744 label_ref = gen_rtx_LABEL_REF (VOIDmode, label);
4745 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4746 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4747 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, if_else));
4748 return TRUE;
4749 }
4750
4751 \f
4752 /* Emit code to set a gpr to 1/0 based on a comparison. The comparison
4753 operands were previously stored in frv_compare_op0 and frv_compare_op1. */
4754
4755 int
4756 frv_emit_scc (enum rtx_code test, rtx target)
4757 {
4758 rtx set;
4759 rtx test_rtx;
4760 rtx clobber;
4761 rtx cr_reg;
4762 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4763
4764 /* SCC instructions generate:
4765 (parallel [(set <target> (<test> <cc_reg> (const_int 0)))
4766 (clobber (<ccr_reg>))]) */
4767 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4768 set = gen_rtx_SET (VOIDmode, target, test_rtx);
4769
4770 cr_reg = ((TARGET_ALLOC_CC)
4771 ? gen_reg_rtx (CC_CCRmode)
4772 : gen_rtx_REG (CC_CCRmode,
4773 ((GET_MODE (cc_reg) == CC_FPmode)
4774 ? FCR_FIRST
4775 : ICR_FIRST)));
4776
4777 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4778 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4779 return TRUE;
4780 }
4781
4782 \f
4783 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4784 the separate insns. */
4785
4786 rtx
4787 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4788 {
4789 rtx ret;
4790
4791 start_sequence ();
4792
4793 /* Set the appropriate CCR bit. */
4794 emit_insn (gen_rtx_SET (VOIDmode,
4795 cr_reg,
4796 gen_rtx_fmt_ee (GET_CODE (test),
4797 GET_MODE (cr_reg),
4798 cc_reg,
4799 const0_rtx)));
4800
4801 /* Move the value into the destination. */
4802 emit_move_insn (dest, GEN_INT (value));
4803
4804 /* Move 0 into the destination if the test failed. */
4805 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4806 gen_rtx_EQ (GET_MODE (cr_reg),
4807 cr_reg,
4808 const0_rtx),
4809 gen_rtx_SET (VOIDmode, dest, const0_rtx)));
4810
4811 /* Finish up, return sequence. */
4812 ret = get_insns ();
4813 end_sequence ();
4814 return ret;
4815 }
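
/* Sketch of the three-insn sequence built above (illustrative):

     crX  <- (<test> ccY 0)           ; set the CCR bit
     dest <- VALUE                     ; unconditional move
     (crX == 0) ? dest <- 0            ; clear dest if the test failed  */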
4816
4817 \f
4818 /* Emit the code for a conditional move, return TRUE if we could do the
4819 move. */
4820
4821 int
4822 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4823 {
4824 rtx set;
4825 rtx clobber_cc;
4826 rtx test2;
4827 rtx cr_reg;
4828 rtx if_rtx;
4829 enum rtx_code test = GET_CODE (test_rtx);
4830 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4831 enum machine_mode cc_mode = GET_MODE (cc_reg);
4832
4833 /* Conditional move instructions generate:
4834 (parallel [(set <target>
4835 (if_then_else (<test> <cc_reg> (const_int 0))
4836 <src1>
4837 <src2>))
4838 (clobber (<ccr_reg>))]) */
4839
4840 /* Handle various cases of conditional move involving two constants. */
4841 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4842 {
4843 HOST_WIDE_INT value1 = INTVAL (src1);
4844 HOST_WIDE_INT value2 = INTVAL (src2);
4845
4846 /* Having 0 as one of the constants can be done by loading the other
4847 constant, and optionally moving in gr0. */
4848 if (value1 == 0 || value2 == 0)
4849 ;
4850
4851 /* If the first value is within an addi range and also the difference
4852 between the two fits in an addi's range, load up the difference, then
4853 conditionally move in 0, and then unconditionally add the first
4854 value. */
4855 else if (IN_RANGE_P (value1, -2048, 2047)
4856 && IN_RANGE_P (value2 - value1, -2048, 2047))
4857 ;
4858
4859 /* If neither condition holds, just force the constant into a
4860 register. */
4861 else
4862 {
4863 src1 = force_reg (GET_MODE (dest), src1);
4864 src2 = force_reg (GET_MODE (dest), src2);
4865 }
4866 }
4867
4868 /* If one value is a register, ensure the other value is either 0 or a
4869 register. */
4870 else
4871 {
4872 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4873 src1 = force_reg (GET_MODE (dest), src1);
4874
4875 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4876 src2 = force_reg (GET_MODE (dest), src2);
4877 }
4878
4879 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4880 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4881
4882 set = gen_rtx_SET (VOIDmode, dest, if_rtx);
4883
4884 cr_reg = ((TARGET_ALLOC_CC)
4885 ? gen_reg_rtx (CC_CCRmode)
4886 : gen_rtx_REG (CC_CCRmode,
4887 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4888
4889 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4890 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4891 return TRUE;
4892 }
4893
4894 \f
4895 /* Split a conditional move into constituent parts, returning a SEQUENCE
4896 containing all of the insns. */
4897
4898 rtx
4899 frv_split_cond_move (rtx operands[])
4900 {
4901 rtx dest = operands[0];
4902 rtx test = operands[1];
4903 rtx cc_reg = operands[2];
4904 rtx src1 = operands[3];
4905 rtx src2 = operands[4];
4906 rtx cr_reg = operands[5];
4907 rtx ret;
4908 enum machine_mode cr_mode = GET_MODE (cr_reg);
4909
4910 start_sequence ();
4911
4912 /* Set the appropriate CCR bit. */
4913 emit_insn (gen_rtx_SET (VOIDmode,
4914 cr_reg,
4915 gen_rtx_fmt_ee (GET_CODE (test),
4916 GET_MODE (cr_reg),
4917 cc_reg,
4918 const0_rtx)));
4919
4920 /* Handle various cases of conditional move involving two constants. */
4921 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4922 {
4923 HOST_WIDE_INT value1 = INTVAL (src1);
4924 HOST_WIDE_INT value2 = INTVAL (src2);
4925
4926 /* Having 0 as one of the constants can be done by loading the other
4927 constant, and optionally moving in gr0. */
4928 if (value1 == 0)
4929 {
4930 emit_move_insn (dest, src2);
4931 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4932 gen_rtx_NE (cr_mode, cr_reg,
4933 const0_rtx),
4934 gen_rtx_SET (VOIDmode, dest, src1)));
4935 }
4936
4937 else if (value2 == 0)
4938 {
4939 emit_move_insn (dest, src1);
4940 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4941 gen_rtx_EQ (cr_mode, cr_reg,
4942 const0_rtx),
4943 gen_rtx_SET (VOIDmode, dest, src2)));
4944 }
4945
4946 /* If the first value is within an addi range and also the difference
4947 between the two fits in an addi's range, load up the difference, then
4948 conditionally move in 0, and then unconditionally add the first
4949 value. */
4950 else if (IN_RANGE_P (value1, -2048, 2047)
4951 && IN_RANGE_P (value2 - value1, -2048, 2047))
4952 {
4953 rtx dest_si = ((GET_MODE (dest) == SImode)
4954 ? dest
4955 : gen_rtx_SUBREG (SImode, dest, 0));
4956
4957 emit_move_insn (dest_si, GEN_INT (value2 - value1));
4958 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4959 gen_rtx_NE (cr_mode, cr_reg,
4960 const0_rtx),
4961 gen_rtx_SET (VOIDmode, dest_si,
4962 const0_rtx)));
4963 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
4964 }
4965
4966 else
4967 gcc_unreachable ();
4968 }
4969 else
4970 {
4971 /* Emit the conditional move for the test being true if needed. */
4972 if (! rtx_equal_p (dest, src1))
4973 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4974 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
4975 gen_rtx_SET (VOIDmode, dest, src1)));
4976
4977 /* Emit the conditional move for the test being false if needed. */
4978 if (! rtx_equal_p (dest, src2))
4979 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4980 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
4981 gen_rtx_SET (VOIDmode, dest, src2)));
4982 }
4983
4984 /* Finish up, return sequence. */
4985 ret = get_insns ();
4986 end_sequence ();
4987 return ret;
4988 }
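
/* Worked example (illustrative, not from the original sources): for
   source like

       int f (int cond) { return cond ? 5 : 17; }

   value1 = 5 and value2 = 17 both satisfy the addi-range test above, so
   the split produces roughly

       dest = 12                  ; value2 - value1
       if (cond)  dest = 0        ; conditional clear via the CCR bit
       dest = dest + 5            ; unconditional add of value1

   yielding 5 when the test is true and 17 otherwise.  */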
4989
4990 \f
4991 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
4992 memory location that is not known to be dword-aligned. */
4993 void
4994 frv_split_double_load (rtx dest, rtx source)
4995 {
4996 int regno = REGNO (dest);
4997 rtx dest1 = gen_highpart (SImode, dest);
4998 rtx dest2 = gen_lowpart (SImode, dest);
4999 rtx address = XEXP (source, 0);
5000
5001 /* If the address is pre-modified, load the lower-numbered register
5002 first, then load the other register using an integer offset from
5003 the modified base register. This order should always be safe,
5004 since the pre-modification cannot affect the same registers as the
5005 load does.
5006
5007 The situation for other loads is more complicated. Loading one
5008 of the registers could affect the value of ADDRESS, so we must
5009 be careful which order we do them in. */
5010 if (GET_CODE (address) == PRE_MODIFY
5011 || ! refers_to_regno_p (regno, regno + 1, address, NULL))
5012 {
5013 /* It is safe to load the lower-numbered register first. */
5014 emit_move_insn (dest1, change_address (source, SImode, NULL));
5015 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5016 }
5017 else
5018 {
5019 /* ADDRESS is not pre-modified and the address depends on the
5020 lower-numbered register. Load the higher-numbered register
5021 first. */
5022 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5023 emit_move_insn (dest1, change_address (source, SImode, NULL));
5024 }
5025 }
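
/* Example (illustrative): when loading the pair gr8/gr9 from @(gr8, 0),
   the address depends on gr8, so the else-branch above loads the second
   register of the pair from offset 4 first and only then overwrites gr8
   from offset 0; doing it the other way round would clobber the base
   register before the second load.  */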
5026
5027 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
5028 and SOURCE is either a double register or the constant zero. */
5029 void
5030 frv_split_double_store (rtx dest, rtx source)
5031 {
5032 rtx dest1 = change_address (dest, SImode, NULL);
5033 rtx dest2 = frv_index_memory (dest, SImode, 1);
5034 if (ZERO_P (source))
5035 {
5036 emit_move_insn (dest1, CONST0_RTX (SImode));
5037 emit_move_insn (dest2, CONST0_RTX (SImode));
5038 }
5039 else
5040 {
5041 emit_move_insn (dest1, gen_highpart (SImode, source));
5042 emit_move_insn (dest2, gen_lowpart (SImode, source));
5043 }
5044 }
5045
5046 \f
5047 /* Split a min/max operation returning a SEQUENCE containing all of the
5048 insns. */
5049
5050 rtx
5051 frv_split_minmax (rtx operands[])
5052 {
5053 rtx dest = operands[0];
5054 rtx minmax = operands[1];
5055 rtx src1 = operands[2];
5056 rtx src2 = operands[3];
5057 rtx cc_reg = operands[4];
5058 rtx cr_reg = operands[5];
5059 rtx ret;
5060 enum rtx_code test_code;
5061 enum machine_mode cr_mode = GET_MODE (cr_reg);
5062
5063 start_sequence ();
5064
5065 /* Figure out which test to use. */
5066 switch (GET_CODE (minmax))
5067 {
5068 default:
5069 gcc_unreachable ();
5070
5071 case SMIN: test_code = LT; break;
5072 case SMAX: test_code = GT; break;
5073 case UMIN: test_code = LTU; break;
5074 case UMAX: test_code = GTU; break;
5075 }
5076
5077 /* Issue the compare instruction. */
5078 emit_insn (gen_rtx_SET (VOIDmode,
5079 cc_reg,
5080 gen_rtx_COMPARE (GET_MODE (cc_reg),
5081 src1, src2)));
5082
5083 /* Set the appropriate CCR bit. */
5084 emit_insn (gen_rtx_SET (VOIDmode,
5085 cr_reg,
5086 gen_rtx_fmt_ee (test_code,
5087 GET_MODE (cr_reg),
5088 cc_reg,
5089 const0_rtx)));
5090
5091 /* If we are taking the min/max of a nonzero constant, load that first, and
5092 then do a conditional move of the other value. */
5093 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5094 {
5095 gcc_assert (!rtx_equal_p (dest, src1));
5096
5097 emit_move_insn (dest, src2);
5098 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5099 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5100 gen_rtx_SET (VOIDmode, dest, src1)));
5101 }
5102
5103 /* Otherwise, do each half of the move. */
5104 else
5105 {
5106 /* Emit the conditional move for the test being true if needed. */
5107 if (! rtx_equal_p (dest, src1))
5108 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5109 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5110 gen_rtx_SET (VOIDmode, dest, src1)));
5111
5112 /* Emit the conditional move for the test being false if needed. */
5113 if (! rtx_equal_p (dest, src2))
5114 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5115 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5116 gen_rtx_SET (VOIDmode, dest, src2)));
5117 }
5118
5119 /* Finish up, return sequence. */
5120 ret = get_insns ();
5121 end_sequence ();
5122 return ret;
5123 }
5124
5125 \f
5126 /* Split an integer abs operation returning a SEQUENCE containing all of the
5127 insns. */
5128
5129 rtx
5130 frv_split_abs (rtx operands[])
5131 {
5132 rtx dest = operands[0];
5133 rtx src = operands[1];
5134 rtx cc_reg = operands[2];
5135 rtx cr_reg = operands[3];
5136 rtx ret;
5137
5138 start_sequence ();
5139
5140 /* Issue the compare < 0 instruction. */
5141 emit_insn (gen_rtx_SET (VOIDmode,
5142 cc_reg,
5143 gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5144
5145 /* Set the appropriate CCR bit. */
5146 emit_insn (gen_rtx_SET (VOIDmode,
5147 cr_reg,
5148 gen_rtx_fmt_ee (LT, CC_CCRmode, cc_reg, const0_rtx)));
5149
5150 /* Emit the conditional negate if the value is negative. */
5151 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5152 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5153 gen_negsi2 (dest, src)));
5154
5155 /* Emit the conditional move for the test being false if needed. */
5156 if (! rtx_equal_p (dest, src))
5157 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5158 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5159 gen_rtx_SET (VOIDmode, dest, src)));
5160
5161 /* Finish up, return sequence. */
5162 ret = get_insns ();
5163 end_sequence ();
5164 return ret;
5165 }
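
/* Sketch of the expansion above (illustrative):

     ccX <- compare (src, 0)
     crY <- (ccX < 0)
     (crY != 0) ? dest <- -src         ; conditional negate
     (crY == 0) ? dest <- src          ; only emitted when dest != src  */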
5166
5167 \f
5168 /* An internal function called by for_each_rtx to clear, in a HARD_REG_SET,
5169 each register used in an insn. */
5170
5171 static int
5172 frv_clear_registers_used (rtx *ptr, void *data)
5173 {
5174 if (GET_CODE (*ptr) == REG)
5175 {
5176 int regno = REGNO (*ptr);
5177 HARD_REG_SET *p_regs = (HARD_REG_SET *)data;
5178
5179 if (regno < FIRST_PSEUDO_REGISTER)
5180 {
5181 int reg_max = regno + HARD_REGNO_NREGS (regno, GET_MODE (*ptr));
5182
5183 while (regno < reg_max)
5184 {
5185 CLEAR_HARD_REG_BIT (*p_regs, regno);
5186 regno++;
5187 }
5188 }
5189 }
5190
5191 return 0;
5192 }
5193
5194 \f
5195 /* Initialize the extra fields provided by IFCVT_EXTRA_FIELDS. */
5196
5197 /* On the FR-V, we don't have any extra fields per se, but it is a useful
5198 hook to initialize the static storage. */
5199 void
5200 frv_ifcvt_init_extra_fields (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
5201 {
5202 frv_ifcvt.added_insns_list = NULL_RTX;
5203 frv_ifcvt.cur_scratch_regs = 0;
5204 frv_ifcvt.num_nested_cond_exec = 0;
5205 frv_ifcvt.cr_reg = NULL_RTX;
5206 frv_ifcvt.nested_cc_reg = NULL_RTX;
5207 frv_ifcvt.extra_int_cr = NULL_RTX;
5208 frv_ifcvt.extra_fp_cr = NULL_RTX;
5209 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5210 }
5211
5212 \f
5213 /* Internal function to add a potential insn to the list of insns to be inserted
5214 if the conditional execution conversion is successful. */
5215
5216 static void
5217 frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p)
5218 {
5219 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5220
5221 link->jump = before_p; /* Mark to add this before or after insn. */
5222 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5223 frv_ifcvt.added_insns_list);
5224
5225 if (TARGET_DEBUG_COND_EXEC)
5226 {
5227 fprintf (stderr,
5228 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5229 (before_p) ? "before" : "after",
5230 (int)INSN_UID (insn));
5231
5232 debug_rtx (pattern);
5233 }
5234 }
5235
5236 \f
5237 /* A C expression to modify the code described by the conditional if
5238 information CE_INFO, possibly updating the tests in TRUE_EXPR and
5239 FALSE_EXPR, for converting if-then and if-then-else code to conditional
5240 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5241 tests cannot be converted. */
5242
5243 void
5244 frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
5245 {
5246 basic_block test_bb = ce_info->test_bb; /* test basic block */
5247 basic_block then_bb = ce_info->then_bb; /* THEN */
5248 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5249 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5250 rtx true_expr = *p_true;
5251 rtx cr;
5252 rtx cc;
5253 rtx nested_cc;
5254 enum machine_mode mode = GET_MODE (true_expr);
5255 int j;
5256 basic_block *bb;
5257 int num_bb;
5258 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5259 rtx check_insn;
5260 rtx sub_cond_exec_reg;
5261 enum rtx_code code;
5262 enum rtx_code code_true;
5263 enum rtx_code code_false;
5264 enum reg_class cc_class;
5265 enum reg_class cr_class;
5266 int cc_first;
5267 int cc_last;
5268 reg_set_iterator rsi;
5269
5270 /* Make sure we are only dealing with hard registers. Also honor the
5271 -mno-cond-exec switch, and -mno-nested-cond-exec switches if
5272 applicable. */
5273 if (!reload_completed || !TARGET_COND_EXEC
5274 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5275 goto fail;
5276
5277 /* Figure out which registers we can allocate for our own purposes. Only
5278 consider registers that are not preserved across function calls and are
5279 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5280 if we did not need to use them in reloading other registers. */
5281 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5282 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
5283 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
5284 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5285 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5286
5287 /* If this is a nested IF, we need to discover whether the CC registers that
5288 are set/used inside of the block are used anywhere else. If not, we can
5289 change them to be the CC register that is paired with the CR register that
5290 controls the outermost IF block. */
5291 if (ce_info->pass > 1)
5292 {
5293 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5294 for (j = CC_FIRST; j <= CC_LAST; j++)
5295 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5296 {
5297 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5298 continue;
5299
5300 if (else_bb
5301 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5302 continue;
5303
5304 if (join_bb
5305 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5306 continue;
5307
5308 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5309 }
5310 }
5311
5312 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5313 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5314
5315 frv_ifcvt.added_insns_list = NULL_RTX;
5316 frv_ifcvt.cur_scratch_regs = 0;
5317
5318 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5319 * sizeof (basic_block));
5320
5321 if (join_bb)
5322 {
5323 unsigned int regno;
5324
5325 /* Remove anything live at the beginning of the join block from being
5326 available for allocation. */
5327 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5328 {
5329 if (regno < FIRST_PSEUDO_REGISTER)
5330 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5331 }
5332 }
5333
5334 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5335 num_bb = 0;
5336 if (ce_info->num_multiple_test_blocks)
5337 {
5338 basic_block multiple_test_bb = ce_info->last_test_bb;
5339
5340 while (multiple_test_bb != test_bb)
5341 {
5342 bb[num_bb++] = multiple_test_bb;
5343 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5344 }
5345 }
5346
5347 /* Add in the THEN and ELSE blocks to be scanned. */
5348 bb[num_bb++] = then_bb;
5349 if (else_bb)
5350 bb[num_bb++] = else_bb;
5351
5352 sub_cond_exec_reg = NULL_RTX;
5353 frv_ifcvt.num_nested_cond_exec = 0;
5354
5355 /* Scan all of the blocks for registers that must not be allocated. */
5356 for (j = 0; j < num_bb; j++)
5357 {
5358 rtx last_insn = BB_END (bb[j]);
5359 rtx insn = BB_HEAD (bb[j]);
5360 unsigned int regno;
5361
5362 if (dump_file)
5363 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5364 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5365 (int) bb[j]->index,
5366 (int) INSN_UID (BB_HEAD (bb[j])),
5367 (int) INSN_UID (BB_END (bb[j])));
5368
5369 /* Anything live at the beginning of the block is obviously unavailable
5370 for allocation. */
5371 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5372 {
5373 if (regno < FIRST_PSEUDO_REGISTER)
5374 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5375 }
5376
5377 /* Loop through the insns in the block. */
5378 for (;;)
5379 {
5380 /* Mark any new registers that are created as being unavailable for
5381 allocation. Also see if the CC register used in nested IFs can be
5382 reallocated. */
5383 if (INSN_P (insn))
5384 {
5385 rtx pattern;
5386 rtx set;
5387 int skip_nested_if = FALSE;
5388
5389 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5390 (void *)&tmp_reg->regs);
5391
5392 pattern = PATTERN (insn);
5393 if (GET_CODE (pattern) == COND_EXEC)
5394 {
5395 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5396
5397 if (reg != sub_cond_exec_reg)
5398 {
5399 sub_cond_exec_reg = reg;
5400 frv_ifcvt.num_nested_cond_exec++;
5401 }
5402 }
5403
5404 set = single_set_pattern (pattern);
5405 if (set)
5406 {
5407 rtx dest = SET_DEST (set);
5408 rtx src = SET_SRC (set);
5409
5410 if (GET_CODE (dest) == REG)
5411 {
5412 int regno = REGNO (dest);
5413 enum rtx_code src_code = GET_CODE (src);
5414
5415 if (CC_P (regno) && src_code == COMPARE)
5416 skip_nested_if = TRUE;
5417
5418 else if (CR_P (regno)
5419 && (src_code == IF_THEN_ELSE
5420 || COMPARISON_P (src)))
5421 skip_nested_if = TRUE;
5422 }
5423 }
5424
5425 if (! skip_nested_if)
5426 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5427 (void *)&frv_ifcvt.nested_cc_ok_rewrite);
5428 }
5429
5430 if (insn == last_insn)
5431 break;
5432
5433 insn = NEXT_INSN (insn);
5434 }
5435 }
5436
5437 /* If this is a nested if, rewrite the CC registers that are available to
5438 include the ones that can be rewritten, to increase the chance of being
5439 able to allocate a paired CC/CR register combination. */
5440 if (ce_info->pass > 1)
5441 {
5442 for (j = CC_FIRST; j <= CC_LAST; j++)
5443 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5444 SET_HARD_REG_BIT (tmp_reg->regs, j);
5445 else
5446 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5447 }
5448
5449 if (dump_file)
5450 {
5451 int num_gprs = 0;
5452 fprintf (dump_file, "Available GPRs: ");
5453
5454 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5455 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5456 {
5457 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5458 if (++num_gprs > GPR_TEMP_NUM+2)
5459 break;
5460 }
5461
5462 fprintf (dump_file, "%s\nAvailable CRs: ",
5463 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5464
5465 for (j = CR_FIRST; j <= CR_LAST; j++)
5466 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5467 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5468
5469 fputs ("\n", dump_file);
5470
5471 if (ce_info->pass > 1)
5472 {
5473 fprintf (dump_file, "Modifiable CCs: ");
5474 for (j = CC_FIRST; j <= CC_LAST; j++)
5475 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5476 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5477
5478 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5479 frv_ifcvt.num_nested_cond_exec);
5480 }
5481 }
5482
5483 /* Allocate the appropriate temporary condition code register. Try to
5484 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5485 that conditional cmp's can be done. */
5486 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5487 {
5488 cr_class = ICR_REGS;
5489 cc_class = ICC_REGS;
5490 cc_first = ICC_FIRST;
5491 cc_last = ICC_LAST;
5492 }
5493 else if (mode == CC_FPmode)
5494 {
5495 cr_class = FCR_REGS;
5496 cc_class = FCC_REGS;
5497 cc_first = FCC_FIRST;
5498 cc_last = FCC_LAST;
5499 }
5500 else
5501 {
5502 cc_first = cc_last = 0;
5503 cr_class = cc_class = NO_REGS;
5504 }
5505
5506 cc = XEXP (true_expr, 0);
5507 nested_cc = cr = NULL_RTX;
5508 if (cc_class != NO_REGS)
5509 {
5510 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5511 so we can execute a csubcc/caddcc/cfcmps instruction. */
5512 int cc_regno;
5513
5514 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5515 {
5516 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5517
5518 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5519 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5520 {
5521 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5522 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5523 TRUE);
5524
5525 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5526 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5527 TRUE, TRUE);
5528 break;
5529 }
5530 }
5531 }
5532
5533 if (! cr)
5534 {
5535 if (dump_file)
5536 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5537
5538 goto fail;
5539 }
5540
5541 if (dump_file)
5542 fprintf (dump_file,
5543 "Will use %s for conditional execution, %s for nested comparisons\n",
5544 reg_names[ REGNO (cr)],
5545 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5546
5547 /* Set the CCR bit.  Note that for integer tests, we reverse the condition so
5548 that in an IF-THEN-ELSE sequence, we are testing the TRUE case against the
5549 CCR bit being true.  We don't do this for floating point, because of NaNs. */
5550 code = GET_CODE (true_expr);
5551 if (GET_MODE (cc) != CC_FPmode)
5552 {
5553 code = reverse_condition (code);
5554 code_true = EQ;
5555 code_false = NE;
5556 }
5557 else
5558 {
5559 code_true = NE;
5560 code_false = EQ;
5561 }
5562
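/* For example, if the original integer test was (eq (reg:CC icc0) (const_int 0)),
   the check insn built here is (set (reg:CC_CCR crN) (ne:CC_CCR (reg:CC icc0)
   (const_int 0))), and *p_true below becomes (eq:CC_CCR (reg:CC_CCR crN)
   (const_int 0)), which holds exactly when the original test held.  */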
5563 check_insn = gen_rtx_SET (VOIDmode, cr,
5564 gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
5565
5566 /* Record the check insn to be inserted later. */
5567 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5568
5569 /* Update the tests. */
5570 frv_ifcvt.cr_reg = cr;
5571 frv_ifcvt.nested_cc_reg = nested_cc;
5572 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5573 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5574 return;
5575
5576 /* Fail, don't do this conditional execution. */
5577 fail:
5578 *p_true = NULL_RTX;
5579 *p_false = NULL_RTX;
5580 if (dump_file)
5581 fprintf (dump_file, "Disabling this conditional execution.\n");
5582
5583 return;
5584 }
5585
5586 \f
5587 /* A C expression to modify the code described by the conditional if
5588 information CE_INFO, for the basic block BB, possibly updating the tests in
5589 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5590 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5591 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5592
5593 /* p_true and p_false are given expressions of the form:
5594
5595 (and (eq:CC_CCR (reg:CC_CCR)
5596 (const_int 0))
5597 (eq:CC (reg:CC)
5598 (const_int 0))) */
5599
5600 void
5601 frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
5602 basic_block bb,
5603 rtx *p_true,
5604 rtx *p_false)
5605 {
5606 rtx old_true = XEXP (*p_true, 0);
5607 rtx old_false = XEXP (*p_false, 0);
5608 rtx true_expr = XEXP (*p_true, 1);
5609 rtx false_expr = XEXP (*p_false, 1);
5610 rtx test_expr;
5611 rtx old_test;
5612 rtx cr = XEXP (old_true, 0);
5613 rtx check_insn;
5614 rtx new_cr = NULL_RTX;
5615 rtx *p_new_cr = (rtx *)0;
5616 rtx if_else;
5617 rtx compare;
5618 rtx cc;
5619 enum reg_class cr_class;
5620 enum machine_mode mode = GET_MODE (true_expr);
5621 rtx (*logical_func)(rtx, rtx, rtx);
5622
5623 if (TARGET_DEBUG_COND_EXEC)
5624 {
5625 fprintf (stderr,
5626 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5627 ce_info->and_and_p ? "&&" : "||");
5628
5629 debug_rtx (*p_true);
5630
5631 fputs ("\nfalse insn:\n", stderr);
5632 debug_rtx (*p_false);
5633 }
5634
5635 if (!TARGET_MULTI_CE)
5636 goto fail;
5637
5638 if (GET_CODE (cr) != REG)
5639 goto fail;
5640
5641 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5642 {
5643 cr_class = ICR_REGS;
5644 p_new_cr = &frv_ifcvt.extra_int_cr;
5645 }
5646 else if (mode == CC_FPmode)
5647 {
5648 cr_class = FCR_REGS;
5649 p_new_cr = &frv_ifcvt.extra_fp_cr;
5650 }
5651 else
5652 goto fail;
5653
5654 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5655 more &&/|| tests. */
5656 new_cr = *p_new_cr;
5657 if (! new_cr)
5658 {
5659 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5660 CC_CCRmode, TRUE, TRUE);
5661 if (! new_cr)
5662 goto fail;
5663 }
5664
5665 if (ce_info->and_and_p)
5666 {
5667 old_test = old_false;
5668 test_expr = true_expr;
5669 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5670 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5671 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5672 }
5673 else
5674 {
5675 old_test = old_false;
5676 test_expr = false_expr;
5677 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5678 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5679 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5680 }
5681
5682 /* Add the andcr/andncr/orcr/orncr first; because frv_ifcvt_add_insn keeps
5683 a LIFO stack, it will actually be emitted after the conditional check
5684 instruction. */
5685 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5686
5687 /* Now add the conditional check insn. */
5688 cc = XEXP (test_expr, 0);
5689 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5690 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5691
5692 check_insn = gen_rtx_SET (VOIDmode, new_cr, if_else);
5693
5694 /* Add the new check insn to the list of check insns that need to be
5695 inserted. */
5696 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5697
5698 if (TARGET_DEBUG_COND_EXEC)
5699 {
5700 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5701 stderr);
5702
5703 debug_rtx (*p_true);
5704
5705 fputs ("\nfalse insn:\n", stderr);
5706 debug_rtx (*p_false);
5707 }
5708
5709 return;
5710
5711 fail:
5712 *p_true = *p_false = NULL_RTX;
5713
5714 /* If we allocated a CR register, release it. */
5715 if (new_cr)
5716 {
5717 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5718 *p_new_cr = NULL_RTX;
5719 }
5720
5721 if (TARGET_DEBUG_COND_EXEC)
5722 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5723
5724 return;
5725 }
5726
5727 \f
5728 /* Return a register which will be loaded with a value if an IF block is
5729 converted to conditional execution. This is used to rewrite instructions
5730 that use constants to ones that just use registers. */
5731
5732 static rtx
5733 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5734 {
5735 int num_alloc = frv_ifcvt.cur_scratch_regs;
5736 int i;
5737 rtx reg;
5738
5739 /* We know gr0 == 0, so replace any errant uses. */
5740 if (value == const0_rtx)
5741 return gen_rtx_REG (SImode, GPR_FIRST);
5742
5743 /* First search all registers currently loaded to see if we have an
5744 applicable constant. */
5745 if (CONSTANT_P (value)
5746 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5747 {
5748 for (i = 0; i < num_alloc; i++)
5749 {
5750 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5751 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5752 }
5753 }
5754
5755 /* Have we exhausted the number of registers available? */
5756 if (num_alloc >= GPR_TEMP_NUM)
5757 {
5758 if (dump_file)
5759 fprintf (dump_file, "Too many temporary registers allocated\n");
5760
5761 return NULL_RTX;
5762 }
5763
5764 /* Allocate the new register. */
5765 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5766 if (! reg)
5767 {
5768 if (dump_file)
5769 fputs ("Could not find a scratch register\n", dump_file);
5770
5771 return NULL_RTX;
5772 }
5773
5774 frv_ifcvt.cur_scratch_regs++;
5775 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (VOIDmode, reg, value);
5776
5777 if (dump_file)
5778 {
5779 if (GET_CODE (value) == CONST_INT)
5780 fprintf (dump_file, "Register %s will hold %ld\n",
5781 reg_names[ REGNO (reg)], (long)INTVAL (value));
5782
5783 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5784 fprintf (dump_file, "Register %s will hold LR\n",
5785 reg_names[ REGNO (reg)]);
5786
5787 else
5788 fprintf (dump_file, "Register %s will hold a saved value\n",
5789 reg_names[ REGNO (reg)]);
5790 }
5791
5792 return reg;
5793 }
5794
5795 \f
5796 /* Update a MEM used in conditional code that might contain an offset to put
5797 the offset into a scratch register, so that the conditional load/store
5798 operations can be used. This function returns the original pointer if the
5799 MEM is valid to use in conditional code, NULL if we can't load up the offset
5800 into a temporary register, or the new MEM if we were successful. */
5801
5802 static rtx
5803 frv_ifcvt_rewrite_mem (rtx mem, enum machine_mode mode, rtx insn)
5804 {
5805 rtx addr = XEXP (mem, 0);
5806
5807 if (!frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE))
5808 {
5809 if (GET_CODE (addr) == PLUS)
5810 {
5811 rtx addr_op0 = XEXP (addr, 0);
5812 rtx addr_op1 = XEXP (addr, 1);
5813
5814 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5815 {
5816 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5817 if (!reg)
5818 return NULL_RTX;
5819
5820 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5821 }
5822
5823 else
5824 return NULL_RTX;
5825 }
5826
5827 else if (CONSTANT_P (addr))
5828 addr = frv_ifcvt_load_value (addr, insn);
5829
5830 else
5831 return NULL_RTX;
5832
5833 if (addr == NULL_RTX)
5834 return NULL_RTX;
5835
5836 else if (XEXP (mem, 0) != addr)
5837 return change_address (mem, mode, addr);
5838 }
5839
5840 return mem;
5841 }
5842
5843 \f
5844 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5845 SET, possibly conditionally executed. It may also have CLOBBERs and USEs. */
5846
5847 static rtx
5848 single_set_pattern (rtx pattern)
5849 {
5850 rtx set;
5851 int i;
5852
5853 if (GET_CODE (pattern) == COND_EXEC)
5854 pattern = COND_EXEC_CODE (pattern);
5855
5856 if (GET_CODE (pattern) == SET)
5857 return pattern;
5858
5859 else if (GET_CODE (pattern) == PARALLEL)
5860 {
5861 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5862 {
5863 rtx sub = XVECEXP (pattern, 0, i);
5864
5865 switch (GET_CODE (sub))
5866 {
5867 case USE:
5868 case CLOBBER:
5869 break;
5870
5871 case SET:
5872 if (set)
5873 return 0;
5874 else
5875 set = sub;
5876 break;
5877
5878 default:
5879 return 0;
5880 }
5881 }
5882 return set;
5883 }
5884
5885 return 0;
5886 }
5887
5888 \f
5889 /* A C expression to modify the code described by the conditional if
5890 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5891 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5892 insn cannot be converted to be executed conditionally. */
5893
5894 rtx
5895 frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
5896 rtx pattern,
5897 rtx insn)
5898 {
5899 rtx orig_ce_pattern = pattern;
5900 rtx set;
5901 rtx op0;
5902 rtx op1;
5903 rtx test;
5904
5905 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5906
5907 test = COND_EXEC_TEST (pattern);
5908 if (GET_CODE (test) == AND)
5909 {
5910 rtx cr = frv_ifcvt.cr_reg;
5911 rtx test_reg;
5912
5913 op0 = XEXP (test, 0);
5914 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5915 goto fail;
5916
5917 op1 = XEXP (test, 1);
5918 test_reg = XEXP (op1, 0);
5919 if (GET_CODE (test_reg) != REG)
5920 goto fail;
5921
5922 /* Is this the first nested if block in this sequence? If so, generate
5923 an andcr or andncr. */
5924 if (! frv_ifcvt.last_nested_if_cr)
5925 {
5926 rtx and_op;
5927
5928 frv_ifcvt.last_nested_if_cr = test_reg;
5929 if (GET_CODE (op0) == NE)
5930 and_op = gen_andcr (test_reg, cr, test_reg);
5931 else
5932 and_op = gen_andncr (test_reg, cr, test_reg);
5933
5934 frv_ifcvt_add_insn (and_op, insn, TRUE);
5935 }
5936
5937 /* If this isn't the first statement in the nested if sequence, see if we
5938 are dealing with the same register. */
5939 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5940 goto fail;
5941
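/* The outer CR condition has been merged into TEST_REG by the andcr/andncr
   generated for the first insn of this nested sequence, so the COND_EXEC
   only needs to test TEST_REG from here on.  */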
5942 COND_EXEC_TEST (pattern) = test = op1;
5943 }
5944
5945 /* If this isn't a nested if, reset state variables. */
5946 else
5947 {
5948 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5949 }
5950
5951 set = single_set_pattern (pattern);
5952 if (set)
5953 {
5954 rtx dest = SET_DEST (set);
5955 rtx src = SET_SRC (set);
5956 enum machine_mode mode = GET_MODE (dest);
5957
5958 /* Check for normal binary operators. */
5959 if (mode == SImode && ARITHMETIC_P (src))
5960 {
5961 op0 = XEXP (src, 0);
5962 op1 = XEXP (src, 1);
5963
5964 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
5965 {
5966 op1 = frv_ifcvt_load_value (op1, insn);
5967 if (op1)
5968 COND_EXEC_CODE (pattern)
5969 = gen_rtx_SET (VOIDmode, dest, gen_rtx_fmt_ee (GET_CODE (src),
5970 GET_MODE (src),
5971 op0, op1));
5972 else
5973 goto fail;
5974 }
5975 }
5976
5977 /* For multiplication by a constant, we need to handle the sign extension
5978 correctly. Add a USE of the value after the multiply to prevent flow
5979 from cratering because only one of the two registers was used. */
5980 else if (mode == DImode && GET_CODE (src) == MULT)
5981 {
5982 op0 = XEXP (src, 0);
5983 op1 = XEXP (src, 1);
5984 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
5985 {
5986 op1 = frv_ifcvt_load_value (op1, insn);
5987 if (op1)
5988 {
5989 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
5990 COND_EXEC_CODE (pattern)
5991 = gen_rtx_SET (VOIDmode, dest,
5992 gen_rtx_MULT (DImode, op0, op1));
5993 }
5994 else
5995 goto fail;
5996 }
5997
5998 frv_ifcvt_add_insn (gen_rtx_USE (VOIDmode, dest), insn, FALSE);
5999 }
6000
6001 /* If we are merely loading a constant created for a nested conditional
6002 execution statement, load the constant without any conditional
6003 execution, since we know that the constant will not interfere with any
6004 other registers. */
6005 else if (frv_ifcvt.scratch_insns_bitmap
6006 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
6007 INSN_UID (insn))
6008 && REG_P (SET_DEST (set))
6009 /* We must not unconditionally set a scratch reg chosen
6010 for a nested if-converted block if its incoming
6011 value from the TEST block (or the result of the THEN
6012 branch) could/should propagate to the JOIN block.
6013 It suffices to test whether the register is live at
6014 the JOIN point: if it's live there, we can infer
6015 that we set it in the former JOIN block of the
6016 nested if-converted block (otherwise it wouldn't
6017 have been available as a scratch register), and it
6018 is either propagated through or set in the other
6019 conditional block. It's probably not worth trying
6020 to catch the latter case, and it could actually
6021 limit scheduling of the combined block quite
6022 severely. */
6023 && ce_info->join_bb
6024 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
6025 REGNO (SET_DEST (set))))
6026 /* Similarly, we must not unconditionally set a reg
6027 used as scratch in the THEN branch if the same reg
6028 is live in the ELSE branch. */
6029 && (! ce_info->else_bb
6030 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
6031 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
6032 REGNO (SET_DEST (set))))))
6033 pattern = set;
6034
6035 else if (mode == QImode || mode == HImode || mode == SImode
6036 || mode == SFmode)
6037 {
6038 int changed_p = FALSE;
6039
6040 /* Check for just loading up a constant. */
6041 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
6042 {
6043 src = frv_ifcvt_load_value (src, insn);
6044 if (!src)
6045 goto fail;
6046
6047 changed_p = TRUE;
6048 }
6049
6050 /* See if we need to fix up stores. */
6051 if (GET_CODE (dest) == MEM)
6052 {
6053 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
6054
6055 if (!new_mem)
6056 goto fail;
6057
6058 else if (new_mem != dest)
6059 {
6060 changed_p = TRUE;
6061 dest = new_mem;
6062 }
6063 }
6064
6065 /* See if we need to fix up loads. */
6066 if (GET_CODE (src) == MEM)
6067 {
6068 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
6069
6070 if (!new_mem)
6071 goto fail;
6072
6073 else if (new_mem != src)
6074 {
6075 changed_p = TRUE;
6076 src = new_mem;
6077 }
6078 }
6079
6080 /* If either src or destination changed, redo SET. */
6081 if (changed_p)
6082 COND_EXEC_CODE (pattern) = gen_rtx_SET (VOIDmode, dest, src);
6083 }
6084
6085 /* Rewrite a nested SET of a CCR register in terms of IF_THEN_ELSE. Also
6086 deal with rewriting the CC register to be the same as the paired CC/CR
6087 register for nested ifs. */
6088 else if (mode == CC_CCRmode && COMPARISON_P (src))
6089 {
6090 int regno = REGNO (XEXP (src, 0));
6091 rtx if_else;
6092
6093 if (ce_info->pass > 1
6094 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6095 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6096 {
6097 src = gen_rtx_fmt_ee (GET_CODE (src),
6098 CC_CCRmode,
6099 frv_ifcvt.nested_cc_reg,
6100 XEXP (src, 1));
6101 }
6102
6103 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6104 pattern = gen_rtx_SET (VOIDmode, dest, if_else);
6105 }
6106
6107 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6108 else if (ce_info->pass > 1
6109 && GET_CODE (dest) == REG
6110 && CC_P (REGNO (dest))
6111 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6112 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6113 REGNO (dest))
6114 && GET_CODE (src) == COMPARE)
6115 {
6116 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6117 COND_EXEC_CODE (pattern)
6118 = gen_rtx_SET (VOIDmode, frv_ifcvt.nested_cc_reg, copy_rtx (src));
6119 }
6120 }
6121
6122 if (TARGET_DEBUG_COND_EXEC)
6123 {
6124 rtx orig_pattern = PATTERN (insn);
6125
6126 PATTERN (insn) = pattern;
6127 fprintf (stderr,
6128 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6129 ce_info->pass);
6130
6131 debug_rtx (insn);
6132 PATTERN (insn) = orig_pattern;
6133 }
6134
6135 return pattern;
6136
6137 fail:
6138 if (TARGET_DEBUG_COND_EXEC)
6139 {
6140 rtx orig_pattern = PATTERN (insn);
6141
6142 PATTERN (insn) = orig_ce_pattern;
6143 fprintf (stderr,
6144 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6145 ce_info->pass);
6146
6147 debug_rtx (insn);
6148 PATTERN (insn) = orig_pattern;
6149 }
6150
6151 return NULL_RTX;
6152 }
6153
6154 \f
6155 /* A C expression to perform any final machine dependent modifications in
6156 converting code to conditional execution in the code described by the
6157 conditional if information CE_INFO. */
6158
6159 void
6160 frv_ifcvt_modify_final (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6161 {
6162 rtx existing_insn;
6163 rtx check_insn;
6164 rtx p = frv_ifcvt.added_insns_list;
6165 int i;
6166
6167 /* Loop inserting the check insns. The last check insn is the first test,
6168 and is the appropriate place to insert constants. */
6169 gcc_assert (p);
6170
6171 do
6172 {
6173 rtx check_and_insert_insns = XEXP (p, 0);
6174 rtx old_p = p;
6175
6176 check_insn = XEXP (check_and_insert_insns, 0);
6177 existing_insn = XEXP (check_and_insert_insns, 1);
6178 p = XEXP (p, 1);
6179
6180 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6181 the existing insn, otherwise it is to be inserted AFTER. */
6182 if (check_and_insert_insns->jump)
6183 {
6184 emit_insn_before (check_insn, existing_insn);
6185 check_and_insert_insns->jump = 0;
6186 }
6187 else
6188 emit_insn_after (check_insn, existing_insn);
6189
6190 free_EXPR_LIST_node (check_and_insert_insns);
6191 free_EXPR_LIST_node (old_p);
6192 }
6193 while (p != NULL_RTX);
6194
6195 /* Load up any constants needed into the temporary GPRs. */
6196 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6197 {
6198 rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6199 if (! frv_ifcvt.scratch_insns_bitmap)
6200 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6201 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6202 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6203 }
6204
6205 frv_ifcvt.added_insns_list = NULL_RTX;
6206 frv_ifcvt.cur_scratch_regs = 0;
6207 }
6208
6209 \f
6210 /* A C expression to cancel any machine dependent modifications in converting
6211 code to conditional execution in the code described by the conditional if
6212 information CE_INFO. */
6213
6214 void
6215 frv_ifcvt_modify_cancel (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6216 {
6217 int i;
6218 rtx p = frv_ifcvt.added_insns_list;
6219
6220 /* Loop freeing up the EXPR_LISTs allocated. */
6221 while (p != NULL_RTX)
6222 {
6223 rtx check_and_jump = XEXP (p, 0);
6224 rtx old_p = p;
6225
6226 p = XEXP (p, 1);
6227 free_EXPR_LIST_node (check_and_jump);
6228 free_EXPR_LIST_node (old_p);
6229 }
6230
6231 /* Release any temporary gprs allocated. */
6232 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6233 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6234
6235 frv_ifcvt.added_insns_list = NULL_RTX;
6236 frv_ifcvt.cur_scratch_regs = 0;
6237 return;
6238 }
6239 \f
6240 /* A C expression for the size in bytes of the trampoline, as an integer.
6241 The template is:
6242
6243 setlo #0, <jmp_reg>
6244 setlo #0, <static_chain>
6245 sethi #0, <jmp_reg>
6246 sethi #0, <static_chain>
6247 jmpl @(gr0,<jmp_reg>) */
6248
6249 int
6250 frv_trampoline_size (void)
6251 {
6252 if (TARGET_FDPIC)
6253 /* Allocate room for the function descriptor and the lddi
6254 instruction. */
6255 return 8 + 6 * 4;
6256 return 5 /* instructions */ * 4 /* instruction size. */;
6257 }
6258
6259 \f
6260 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6261 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6262 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6263 should be passed to the function when it is called.
6264
6265 The template is:
6266
6267 setlo #0, <jmp_reg>
6268 setlo #0, <static_chain>
6269 sethi #0, <jmp_reg>
6270 sethi #0, <static_chain>
6271 jmpl @(gr0,<jmp_reg>) */
6272
6273 void
6274 frv_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
6275 {
6276 rtx sc_reg = force_reg (Pmode, static_chain);
6277
6278 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6279 FALSE, VOIDmode, 4,
6280 addr, Pmode,
6281 GEN_INT (frv_trampoline_size ()), SImode,
6282 fnaddr, Pmode,
6283 sc_reg, Pmode);
6284 }
6285
6286 \f
6287 /* Many machines have some registers that cannot be copied directly to or from
6288 memory or even from other types of registers. An example is the `MQ'
6289 register, which on most machines, can only be copied to or from general
6290 registers, but not memory. Some machines allow copying all registers to and
6291 from memory, but require a scratch register for stores to some memory
6292 locations (e.g., those with symbolic address on the RT, and those with
6293 certain symbolic address on the SPARC when compiling PIC). In some cases,
6294 both an intermediate and a scratch register are required.
6295
6296 You should define these macros to indicate to the reload phase that it may
6297 need to allocate at least one register for a reload in addition to the
6298 register to contain the data. Specifically, if copying X to a register
6299 CLASS in MODE requires an intermediate register, you should define
6300 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6301 whose registers can be used as intermediate registers or scratch registers.
6302
6303 If copying a register CLASS in MODE to X requires an intermediate or scratch
6304 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6305 largest register class required. If the requirements for input and output
6306 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6307 instead of defining both macros identically.
6308
6309 The values returned by these macros are often `GENERAL_REGS'. Return
6310 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6311 to or from a register of CLASS in MODE without requiring a scratch register.
6312 Do not define this macro if it would always return `NO_REGS'.
6313
6314 If a scratch register is required (either with or without an intermediate
6315 register), you should define patterns for `reload_inM' or `reload_outM', as
6316 required. These patterns, which will normally be implemented with a
6317 `define_expand', should be similar to the `movM' patterns, except that
6318 operand 2 is the scratch register.
6319
6320 Define constraints for the reload register and scratch register that contain
6321 a single register class. If the original reload register (whose class is
6322 CLASS) can meet the constraint given in the pattern, the value returned by
6323 these macros is used for the class of the scratch register. Otherwise, two
6324 additional reload registers are required. Their classes are obtained from
6325 the constraints in the insn pattern.
6326
6327 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6328 either be in a hard register or in memory. Use `true_regnum' to find out;
6329 it will return -1 if the pseudo is in memory and the hard register number if
6330 it is in a register.
6331
6332 These macros should not be used in the case where a particular class of
6333 registers can only be copied to memory and not to another class of
6334 registers. In that case, secondary reload registers are not needed and
6335 would not be helpful. Instead, a stack location must be used to perform the
6336 copy and the `movM' pattern should use memory as an intermediate storage.
6337 This case often occurs between floating-point and general registers. */
6338
6339 enum reg_class
6340 frv_secondary_reload_class (enum reg_class class,
6341 enum machine_mode mode ATTRIBUTE_UNUSED,
6342 rtx x,
6343 int in_p ATTRIBUTE_UNUSED)
6344 {
6345 enum reg_class ret;
6346
6347 switch (class)
6348 {
6349 default:
6350 ret = NO_REGS;
6351 break;
6352
6353 /* Accumulators/Accumulator guard registers need to go through floating
6354 point registers. */
6355 case QUAD_REGS:
6356 case EVEN_REGS:
6357 case GPR_REGS:
6358 ret = NO_REGS;
6359 if (x && GET_CODE (x) == REG)
6360 {
6361 int regno = REGNO (x);
6362
6363 if (ACC_P (regno) || ACCG_P (regno))
6364 ret = FPR_REGS;
6365 }
6366 break;
6367
6368 /* Nonzero constants should be loaded into an FPR through a GPR. */
6369 case QUAD_FPR_REGS:
6370 case FEVEN_REGS:
6371 case FPR_REGS:
6372 if (x && CONSTANT_P (x) && !ZERO_P (x))
6373 ret = GPR_REGS;
6374 else
6375 ret = NO_REGS;
6376 break;
6377
6378 /* All of these types need gpr registers. */
6379 case ICC_REGS:
6380 case FCC_REGS:
6381 case CC_REGS:
6382 case ICR_REGS:
6383 case FCR_REGS:
6384 case CR_REGS:
6385 case LCR_REG:
6386 case LR_REG:
6387 ret = GPR_REGS;
6388 break;
6389
6390 /* The accumulators need fpr registers. */
6391 case ACC_REGS:
6392 case EVEN_ACC_REGS:
6393 case QUAD_ACC_REGS:
6394 case ACCG_REGS:
6395 ret = FPR_REGS;
6396 break;
6397 }
6398
6399 return ret;
6400 }
6401
6402 \f
6403 /* A C expression whose value is nonzero if pseudos that have been assigned to
6404 registers of class CLASS would likely be spilled because registers of CLASS
6405 are needed for spill registers.
6406
6407 The default value of this macro returns 1 if CLASS has exactly one register
6408 and zero otherwise. On most machines, this default should be used. Only
6409 define this macro to some other expression if pseudo allocated by
6410 `local-alloc.c' end up in memory because their hard registers were needed
6411 for spill registers. If this macro returns nonzero for those classes, those
6412 pseudos will only be allocated by `global.c', which knows how to reallocate
6413 the pseudo to another register. If there would not be another register
6414 available for reallocation, you should not change the definition of this
6415 macro since the only effect of such a definition would be to slow down
6416 register allocation. */
6417
6418 int
6419 frv_class_likely_spilled_p (enum reg_class class)
6420 {
6421 switch (class)
6422 {
6423 default:
6424 break;
6425
6426 case GR8_REGS:
6427 case GR9_REGS:
6428 case GR89_REGS:
6429 case FDPIC_FPTR_REGS:
6430 case FDPIC_REGS:
6431 case ICC_REGS:
6432 case FCC_REGS:
6433 case CC_REGS:
6434 case ICR_REGS:
6435 case FCR_REGS:
6436 case CR_REGS:
6437 case LCR_REG:
6438 case LR_REG:
6439 case SPR_REGS:
6440 case QUAD_ACC_REGS:
6441 case EVEN_ACC_REGS:
6442 case ACC_REGS:
6443 case ACCG_REGS:
6444 return TRUE;
6445 }
6446
6447 return FALSE;
6448 }
6449
6450 \f
6451 /* An expression for the alignment of a structure field FIELD if the
6452 alignment computed in the usual way is COMPUTED. GCC uses this
6453 value instead of the value in `BIGGEST_ALIGNMENT' or
6454 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6455
6456 /* A bit-field is declared with one of the types char, short, long or
6457 long long; its maximum width is the number of bits of that type.
6458
6459 Each bit-field is assigned to the lowest-addressed storage unit that
6460 is large enough to hold a field of its type.
6461
6462 Consecutive bit-fields are packed into consecutive bits of the same
6463 storage unit, with regard to the type, beginning with the MSB and
6464 continuing toward the LSB.
6465
6466 If a field would otherwise straddle a boundary of its bit-field type,
6467 its assignment is completed by aligning it to the next boundary
6468 suitable for that type.
6469
6470 A bit-field declared with a width of 0 forces the following field into
6471 the next storage unit.
6472
6473 For example:
6474 struct {
6475 int a:2;
6476 int b:6;
6477 char c:4;
6478 int d:10;
6479 int :0;
6480 int f:2;
6481 } x;
6482
6483 +0 +1 +2 +3
6484 &x 00000000 00000000 00000000 00000000
6485 MLM----L
6486 a b
6487 &x+4 00000000 00000000 00000000 00000000
6488 M--L
6489 c
6490 &x+8 00000000 00000000 00000000 00000000
6491 M----------L
6492 d
6493 &x+12 00000000 00000000 00000000 00000000
6494 ML
6495 f
6496 */
6497
6498 int
6499 frv_adjust_field_align (tree field, int computed)
6500 {
6501 /* Make sure that the bitfield is not wider than the type. */
6502 if (DECL_BIT_FIELD (field)
6503 && !DECL_ARTIFICIAL (field))
6504 {
6505 tree parent = DECL_CONTEXT (field);
6506 tree prev = NULL_TREE;
6507 tree cur;
6508
6509 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = TREE_CHAIN (cur))
6510 {
6511 if (TREE_CODE (cur) != FIELD_DECL)
6512 continue;
6513
6514 prev = cur;
6515 }
6516
6517 gcc_assert (cur);
6518
6519 /* If this isn't a :0 field and if the previous element is a bitfield
6520 also, see if the type is different, if so, we will need to align the
6521 bit-field to the next boundary. */
6522 if (prev
6523 && ! DECL_PACKED (field)
6524 && ! integer_zerop (DECL_SIZE (field))
6525 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6526 {
6527 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6528 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6529 computed = (prev_align > cur_align) ? prev_align : cur_align;
6530 }
6531 }
6532
6533 return computed;
6534 }
6535
6536 \f
6537 /* A C expression that is nonzero if it is permissible to store a value of mode
6538 MODE in hard register number REGNO (or in several registers starting with
6539 that one). For a machine where all registers are equivalent, a suitable
6540 definition is
6541
6542 #define HARD_REGNO_MODE_OK(REGNO, MODE) 1
6543
6544 It is not necessary for this macro to check for the numbers of fixed
6545 registers, because the allocation mechanism considers them to be always
6546 occupied.
6547
6548 On some machines, double-precision values must be kept in even/odd register
6549 pairs. The way to implement that is to define this macro to reject odd
6550 register numbers for such modes.
6551
6552 The minimum requirement for a mode to be OK in a register is that the
6553 `movMODE' instruction pattern support moves between the register and any
6554 other hard register for which the mode is OK; and that moving a value into
6555 the register and back out not alter it.
6556
6557 Since the same instruction used to move `SImode' will work for all narrower
6558 integer modes, it is not necessary on any machine for `HARD_REGNO_MODE_OK'
6559 to distinguish between these modes, provided you define patterns `movhi',
6560 etc., to take advantage of this. This is useful because of the interaction
6561 between `HARD_REGNO_MODE_OK' and `MODES_TIEABLE_P'; it is very desirable for
6562 all integer modes to be tieable.
6563
6564 Many machines have special registers for floating point arithmetic. Often
6565 people assume that floating point machine modes are allowed only in floating
6566 point registers. This is not true. Any registers that can hold integers
6567 can safely *hold* a floating point machine mode, whether or not floating
6568 arithmetic can be done on it in those registers. Integer move instructions
6569 can be used to move the values.
6570
6571 On some machines, though, the converse is true: fixed-point machine modes
6572 may not go in floating registers. This is true if the floating registers
6573 normalize any value stored in them, because storing a non-floating value
6574 there would garble it. In this case, `HARD_REGNO_MODE_OK' should reject
6575 fixed-point machine modes in floating registers. But if the floating
6576 registers do not automatically normalize, if you can store any bit pattern
6577 in one and retrieve it unchanged without a trap, then any machine mode may
6578 go in a floating register, so you can define this macro to say so.
6579
6580 The primary significance of special floating registers is rather that they
6581 are the registers acceptable in floating point arithmetic instructions.
6582 However, this is of no concern to `HARD_REGNO_MODE_OK'. You handle it by
6583 writing the proper constraints for those instructions.
6584
6585 On some machines, the floating registers are especially slow to access, so
6586 that it is better to store a value in a stack frame than in such a register
6587 if floating point arithmetic is not being done. As long as the floating
6588 registers are not in class `GENERAL_REGS', they will not be used unless some
6589 pattern's constraint asks for one. */
6590
6591 int
6592 frv_hard_regno_mode_ok (int regno, enum machine_mode mode)
6593 {
6594 int base;
6595 int mask;
6596
6597 switch (mode)
6598 {
6599 case CCmode:
6600 case CC_UNSmode:
6601 case CC_NZmode:
6602 return ICC_P (regno) || GPR_P (regno);
6603
6604 case CC_CCRmode:
6605 return CR_P (regno) || GPR_P (regno);
6606
6607 case CC_FPmode:
6608 return FCC_P (regno) || GPR_P (regno);
6609
6610 default:
6611 break;
6612 }
6613
6614 /* Set BASE to the first register in REGNO's class. Set MASK to the
6615 bits that must be clear in (REGNO - BASE) for the register to be
6616 well-aligned. */
6617 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6618 {
6619 if (ACCG_P (regno))
6620 {
6621 /* ACCGs store one byte. Two-byte quantities must start in
6622 even-numbered registers, four-byte ones in registers whose
6623 numbers are divisible by four, and so on. */
6624 base = ACCG_FIRST;
6625 mask = GET_MODE_SIZE (mode) - 1;
6626 }
6627 else
6628 {
6629 /* The other registers store one word. */
6630 if (GPR_P (regno) || regno == AP_FIRST)
6631 base = GPR_FIRST;
6632
6633 else if (FPR_P (regno))
6634 base = FPR_FIRST;
6635
6636 else if (ACC_P (regno))
6637 base = ACC_FIRST;
6638
6639 else if (SPR_P (regno))
6640 return mode == SImode;
6641
6642 /* Fill in the table. */
6643 else
6644 return 0;
6645
6646 /* Anything smaller than an SI is OK in any word-sized register. */
6647 if (GET_MODE_SIZE (mode) < 4)
6648 return 1;
6649
6650 mask = (GET_MODE_SIZE (mode) / 4) - 1;
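/* For example, a DImode value has mask 1 and so must start in an
   even-numbered register of its class; an SImode value has mask 0 and
   can start anywhere.  */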
6651 }
6652 return (((regno - base) & mask) == 0);
6653 }
6654
6655 return 0;
6656 }
6657
6658 \f
6659 /* A C expression for the number of consecutive hard registers, starting at
6660 register number REGNO, required to hold a value of mode MODE.
6661
6662 On a machine where all registers are exactly one word, a suitable definition
6663 of this macro is
6664
6665 #define HARD_REGNO_NREGS(REGNO, MODE) \
6666 ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
6667 / UNITS_PER_WORD) */
6668
6669 /* On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6670 that we can build the appropriate instructions to properly reload the
6671 values. Also, make the byte-sized accumulator guards use one guard
6672 for each byte. */
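/* For example, a DImode value occupies two GPRs but eight ACCG registers.  */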
6673
6674 int
6675 frv_hard_regno_nregs (int regno, enum machine_mode mode)
6676 {
6677 if (ACCG_P (regno))
6678 return GET_MODE_SIZE (mode);
6679 else
6680 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6681 }
6682
6683 \f
6684 /* A C expression for the maximum number of consecutive registers of
6685 class CLASS needed to hold a value of mode MODE.
6686
6687 This is closely related to the macro `HARD_REGNO_NREGS'. In fact, the value
6688 of the macro `CLASS_MAX_NREGS (CLASS, MODE)' should be the maximum value of
6689 `HARD_REGNO_NREGS (REGNO, MODE)' for all REGNO values in the class CLASS.
6690
6691 This macro helps control the handling of multiple-word values in
6692 the reload pass.
6693
6694 This declaration is required. */
6695
6696 int
6697 frv_class_max_nregs (enum reg_class class, enum machine_mode mode)
6698 {
6699 if (class == ACCG_REGS)
6700 /* An N-byte value requires N accumulator guards. */
6701 return GET_MODE_SIZE (mode);
6702 else
6703 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6704 }
6705
6706 \f
6707 /* A C expression that is nonzero if X is a legitimate constant for an
6708 immediate operand on the target machine. You can assume that X satisfies
6709 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6710 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6711
6712 int
6713 frv_legitimate_constant_p (rtx x)
6714 {
6715 enum machine_mode mode = GET_MODE (x);
6716
6717 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6718 means that the move expanders will be expected to deal with most
6719 kinds of constant, regardless of what we return here.
6720
6721 However, among its other duties, LEGITIMATE_CONSTANT_P decides whether
6722 a constant can be entered into reg_equiv_constant[]. If we return true,
6723 reload can create new instances of the constant whenever it likes.
6724
6725 The idea is therefore to accept as many constants as possible (to give
6726 reload more freedom) while rejecting constants that can only be created
6727 at certain times. In particular, anything with a symbolic component will
6728 require use of the pseudo FDPIC register, which is only available before
6729 reload. */
6730 if (TARGET_FDPIC)
6731 return LEGITIMATE_PIC_OPERAND_P (x);
6732
6733 /* All of the integer constants are ok. */
6734 if (GET_CODE (x) != CONST_DOUBLE)
6735 return TRUE;
6736
6737 /* double integer constants are ok. */
6738 if (mode == VOIDmode || mode == DImode)
6739 return TRUE;
6740
6741 /* 0 is always ok. */
6742 if (x == CONST0_RTX (mode))
6743 return TRUE;
6744
6745 /* If floating point is just emulated, allow any constant, since it will be
6746 constructed in the GPRs. */
6747 if (!TARGET_HAS_FPRS)
6748 return TRUE;
6749
6750 if (mode == DFmode && !TARGET_DOUBLE)
6751 return TRUE;
6752
6753 /* Otherwise store the constant away and do a load. */
6754 return FALSE;
6755 }
6756
6757 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6758 CC_NZ for comparisons against zero in which a single Z or N flag test
6759 is enough, CC_UNS for other unsigned comparisons, and CC for other
6760 signed comparisons. */
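/* For example, (eq x (const_int 0)) and (gtu x (const_int 0)) both select
   CC_NZmode, while (ltu x y) selects CC_UNSmode and (lt x y) selects CCmode
   when Y is not const0_rtx.  */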
6761
6762 enum machine_mode
6763 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6764 {
6765 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6766 return CC_FPmode;
6767
6768 switch (code)
6769 {
6770 case EQ:
6771 case NE:
6772 case LT:
6773 case GE:
6774 return y == const0_rtx ? CC_NZmode : CCmode;
6775
6776 case GTU:
6777 case GEU:
6778 case LTU:
6779 case LEU:
6780 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6781
6782 default:
6783 return CCmode;
6784 }
6785 }
6786 \f
6787 /* A C expression for the cost of moving data from a register in class FROM to
6788 one in class TO. The classes are expressed using the enumeration values
6789 such as `GENERAL_REGS'. A value of 4 is the default; other values are
6790 interpreted relative to that.
6791
6792 It is not required that the cost always equal 2 when FROM is the same as TO;
6793 on some machines it is expensive to move between registers if they are not
6794 general registers.
6795
6796 If reload sees an insn consisting of a single `set' between two hard
6797 registers, and if `REGISTER_MOVE_COST' applied to their classes returns a
6798 value of 2, reload does not check to ensure that the constraints of the insn
6799 are met. Setting a cost of other than 2 will allow reload to verify that
6800 the constraints are met. You should do this if the `movM' pattern's
6801 constraints do not allow such copying. */
6802
6803 #define HIGH_COST 40
6804 #define MEDIUM_COST 3
6805 #define LOW_COST 1
6806
6807 int
6808 frv_register_move_cost (enum reg_class from, enum reg_class to)
6809 {
6810 switch (from)
6811 {
6812 default:
6813 break;
6814
6815 case QUAD_REGS:
6816 case EVEN_REGS:
6817 case GPR_REGS:
6818 switch (to)
6819 {
6820 default:
6821 break;
6822
6823 case QUAD_REGS:
6824 case EVEN_REGS:
6825 case GPR_REGS:
6826 return LOW_COST;
6827
6828 case FEVEN_REGS:
6829 case FPR_REGS:
6830 return LOW_COST;
6831
6832 case LCR_REG:
6833 case LR_REG:
6834 case SPR_REGS:
6835 return LOW_COST;
6836 }
6837
6838 case FEVEN_REGS:
6839 case FPR_REGS:
6840 switch (to)
6841 {
6842 default:
6843 break;
6844
6845 case QUAD_REGS:
6846 case EVEN_REGS:
6847 case GPR_REGS:
6848 case ACC_REGS:
6849 case EVEN_ACC_REGS:
6850 case QUAD_ACC_REGS:
6851 case ACCG_REGS:
6852 return MEDIUM_COST;
6853
6854 case FEVEN_REGS:
6855 case FPR_REGS:
6856 return LOW_COST;
6857 }
6858
6859 case LCR_REG:
6860 case LR_REG:
6861 case SPR_REGS:
6862 switch (to)
6863 {
6864 default:
6865 break;
6866
6867 case QUAD_REGS:
6868 case EVEN_REGS:
6869 case GPR_REGS:
6870 return MEDIUM_COST;
6871 }
6872
6873 case ACC_REGS:
6874 case EVEN_ACC_REGS:
6875 case QUAD_ACC_REGS:
6876 case ACCG_REGS:
6877 switch (to)
6878 {
6879 default:
6880 break;
6881
6882 case FEVEN_REGS:
6883 case FPR_REGS:
6884 return MEDIUM_COST;
6885
6886 }
6887 }
6888
6889 return HIGH_COST;
6890 }
6891 \f
6892 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6893 use ".picptr" to generate safe relocations for PIC code. We also
6894 need a fixup entry for aligned (non-debugging) code. */
6895
6896 static bool
6897 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6898 {
6899 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6900 {
6901 if (GET_CODE (value) == CONST
6902 || GET_CODE (value) == SYMBOL_REF
6903 || GET_CODE (value) == LABEL_REF)
6904 {
6905 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6906 && SYMBOL_REF_FUNCTION_P (value))
6907 {
6908 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6909 output_addr_const (asm_out_file, value);
6910 fputs (")\n", asm_out_file);
6911 return true;
6912 }
6913 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6914 && frv_function_symbol_referenced_p (value))
6915 return false;
6916 if (aligned_p && !TARGET_FDPIC)
6917 {
6918 static int label_num = 0;
6919 char buf[256];
6920 const char *p;
6921
6922 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
6923 p = (* targetm.strip_name_encoding) (buf);
6924
6925 fprintf (asm_out_file, "%s:\n", p);
6926 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
6927 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
6928 fprintf (asm_out_file, "\t.previous\n");
6929 }
6930 assemble_integer_with_op ("\t.picptr\t", value);
6931 return true;
6932 }
6933 if (!aligned_p)
6934 {
6935 /* We've set the unaligned SI op to NULL, so we always have to
6936 handle the unaligned case here. */
6937 assemble_integer_with_op ("\t.4byte\t", value);
6938 return true;
6939 }
6940 }
6941 return default_assemble_integer (value, size, aligned_p);
6942 }
6943
6944 /* Function to set up the backend function structure. */
6945
6946 static struct machine_function *
6947 frv_init_machine_status (void)
6948 {
6949 return ggc_alloc_cleared (sizeof (struct machine_function));
6950 }
6951 \f
6952 /* Implement TARGET_SCHED_ISSUE_RATE. */
6953
6954 int
6955 frv_issue_rate (void)
6956 {
6957 if (!TARGET_PACK)
6958 return 1;
6959
6960 switch (frv_cpu_type)
6961 {
6962 default:
6963 case FRV_CPU_FR300:
6964 case FRV_CPU_SIMPLE:
6965 return 1;
6966
6967 case FRV_CPU_FR400:
6968 case FRV_CPU_FR405:
6969 case FRV_CPU_FR450:
6970 return 2;
6971
6972 case FRV_CPU_GENERIC:
6973 case FRV_CPU_FR500:
6974 case FRV_CPU_TOMCAT:
6975 return 4;
6976
6977 case FRV_CPU_FR550:
6978 return 8;
6979 }
6980 }
6981 \f
6982 /* A for_each_rtx callback. If X refers to an accumulator, return
6983 ACC_GROUP_ODD if bit 2 of the register number is set and
6984 ACC_GROUP_EVEN if it is clear. Return 0 (ACC_GROUP_NONE)
6985 otherwise. */
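/* For example, acc0-acc3 and accg0-accg3 fall in the even group, while
   acc4-acc7 and accg4-accg7 fall in the odd group.  */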
6986
6987 static int
6988 frv_acc_group_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
6989 {
6990 if (REG_P (*x))
6991 {
6992 if (ACC_P (REGNO (*x)))
6993 return (REGNO (*x) - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6994 if (ACCG_P (REGNO (*x)))
6995 return (REGNO (*x) - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6996 }
6997 return 0;
6998 }
6999
7000 /* Return the value of INSN's acc_group attribute. */
7001
7002 int
7003 frv_acc_group (rtx insn)
7004 {
7005 /* This distinction only applies to the FR550 packing constraints. */
7006 if (frv_cpu_type != FRV_CPU_FR550)
7007 return ACC_GROUP_NONE;
7008 return for_each_rtx (&PATTERN (insn), frv_acc_group_1, 0);
7009 }
7010
7011 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
7012 INSN will try to claim first. Since this value depends only on the
7013 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
7014
7015 static unsigned int
7016 frv_insn_unit (rtx insn)
7017 {
7018 enum attr_type type;
7019
7020 type = get_attr_type (insn);
7021 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
7022 {
7023 /* We haven't seen this type of instruction before. */
7024 state_t state;
7025 unsigned int unit;
7026
7027 /* Issue the instruction on its own to see which unit it prefers. */
7028 state = alloca (state_size ());
7029 state_reset (state);
7030 state_transition (state, insn);
7031
7032 /* Find out which unit was taken. */
7033 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
7034 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
7035 break;
7036
7037 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
7038
7039 frv_type_to_unit[type] = unit;
7040 }
7041 return frv_type_to_unit[type];
7042 }
7043
7044 /* Return true if INSN issues to a branch unit. */
7045
7046 static bool
7047 frv_issues_to_branch_unit_p (rtx insn)
7048 {
7049 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
7050 }
7051 \f
7052 /* The current state of the packing pass, implemented by frv_pack_insns. */
7053 static struct {
7054 /* The state of the pipeline DFA. */
7055 state_t dfa_state;
7056
7057 /* Which hardware registers are set within the current packet,
7058 and the conditions under which they are set. */
7059 regstate_t regstate[FIRST_PSEUDO_REGISTER];
7060
7061 /* The memory locations that have been modified so far in this
7062 packet. MEM is the memref and COND is the regstate_t condition
7063 under which it is set. */
7064 struct {
7065 rtx mem;
7066 regstate_t cond;
7067 } mems[2];
7068
7069 /* The number of valid entries in MEMS. The value is larger than
7070 ARRAY_SIZE (mems) if there were too many mems to record. */
7071 unsigned int num_mems;
7072
7073 /* The maximum number of instructions that can be packed together. */
7074 unsigned int issue_rate;
7075
7076 /* The instructions in the packet, partitioned into groups. */
7077 struct frv_packet_group {
7078 /* How many instructions in the packet belong to this group. */
7079 unsigned int num_insns;
7080
7081 /* A list of the instructions that belong to this group, in the order
7082 they appear in the rtl stream. */
7083 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7084
7085 /* The contents of INSNS after they have been sorted into the correct
7086 assembly-language order. Element X issues to unit X. The list may
7087 contain extra nops. */
7088 rtx sorted[ARRAY_SIZE (frv_unit_codes)];
7089
7090 /* The member of frv_nops[] to use in sorted[]. */
7091 rtx nop;
7092 } groups[NUM_GROUPS];
7093
7094 /* The instructions that make up the current packet. */
7095 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7096 unsigned int num_insns;
7097 } frv_packet;
7098
7099 /* Return the regstate_t flags for the given COND_EXEC condition.
7100 Abort if the condition isn't in the right form. */
7101
7102 static int
7103 frv_cond_flags (rtx cond)
7104 {
7105 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7106 && GET_CODE (XEXP (cond, 0)) == REG
7107 && CR_P (REGNO (XEXP (cond, 0)))
7108 && XEXP (cond, 1) == const0_rtx);
7109 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7110 | (GET_CODE (cond) == NE
7111 ? REGSTATE_IF_TRUE
7112 : REGSTATE_IF_FALSE));
7113 }
7114
7115
7116 /* Return true if something accessed under condition COND2 can
7117 conflict with something written under condition COND1. */
7118
7119 static bool
7120 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7121 {
7122 /* If either reference was unconditional, we have a conflict. */
7123 if ((cond1 & REGSTATE_IF_EITHER) == 0
7124 || (cond2 & REGSTATE_IF_EITHER) == 0)
7125 return true;
7126
7127 /* The references might conflict if they were controlled by
7128 different CRs. */
7129 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7130 return true;
7131
7132 /* They definitely conflict if they are controlled by the
7133 same condition. */
7134 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7135 return true;
7136
7137 return false;
7138 }
7139
7140
7141 /* A for_each_rtx callback. Return 1 if *X depends on an instruction in
7142 the current packet. DATA points to a regstate_t that describes the
7143 condition under which *X might be set or used. */
7144
7145 static int
7146 frv_registers_conflict_p_1 (rtx *x, void *data)
7147 {
7148 unsigned int regno, i;
7149 regstate_t cond;
7150
7151 cond = *(regstate_t *) data;
7152
7153 if (GET_CODE (*x) == REG)
7154 FOR_EACH_REGNO (regno, *x)
7155 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7156 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7157 return 1;
7158
7159 if (GET_CODE (*x) == MEM)
7160 {
7161 /* If we ran out of memory slots, assume a conflict. */
7162 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7163 return 1;
7164
7165 /* Check for output or true dependencies with earlier MEMs. */
7166 for (i = 0; i < frv_packet.num_mems; i++)
7167 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7168 {
7169 if (true_dependence (frv_packet.mems[i].mem, VOIDmode,
7170 *x, rtx_varies_p))
7171 return 1;
7172
7173 if (output_dependence (frv_packet.mems[i].mem, *x))
7174 return 1;
7175 }
7176 }
7177
7178 /* The return values of calls aren't significant: they describe
7179 the effect of the call as a whole, not of the insn itself. */
7180 if (GET_CODE (*x) == SET && GET_CODE (SET_SRC (*x)) == CALL)
7181 {
7182 if (for_each_rtx (&SET_SRC (*x), frv_registers_conflict_p_1, data))
7183 return 1;
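      /* A negative return value tells for_each_rtx to skip the
	 subexpressions of this SET but continue the rest of the traversal.  */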
7184 return -1;
7185 }
7186
7187 /* Check subexpressions. */
7188 return 0;
7189 }
7190
7191
7192 /* Return true if something in X might depend on an instruction
7193 in the current packet. */
7194
7195 static bool
7196 frv_registers_conflict_p (rtx x)
7197 {
7198 regstate_t flags;
7199
7200 flags = 0;
7201 if (GET_CODE (x) == COND_EXEC)
7202 {
7203 if (for_each_rtx (&XEXP (x, 0), frv_registers_conflict_p_1, &flags))
7204 return true;
7205
7206 flags |= frv_cond_flags (XEXP (x, 0));
7207 x = XEXP (x, 1);
7208 }
7209 return for_each_rtx (&x, frv_registers_conflict_p_1, &flags);
7210 }
7211
7212
7213 /* A note_stores callback. DATA points to the regstate_t condition
7214 under which X is modified. Update FRV_PACKET accordingly. */
7215
7216 static void
7217 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7218 {
7219 unsigned int regno;
7220
7221 if (GET_CODE (x) == REG)
7222 FOR_EACH_REGNO (regno, x)
7223 frv_packet.regstate[regno] |= *(regstate_t *) data;
7224
7225 if (GET_CODE (x) == MEM)
7226 {
7227 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7228 {
7229 frv_packet.mems[frv_packet.num_mems].mem = x;
7230 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7231 }
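      /* Increment NUM_MEMS even when the array is full; the overflow is
	 detected later by comparing it against ARRAY_SIZE (mems).  */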
7232 frv_packet.num_mems++;
7233 }
7234 }
7235
7236
7237 /* Update the register state information for an instruction whose
7238 body is X. */
7239
7240 static void
7241 frv_registers_update (rtx x)
7242 {
7243 regstate_t flags;
7244
7245 flags = REGSTATE_MODIFIED;
7246 if (GET_CODE (x) == COND_EXEC)
7247 {
7248 flags |= frv_cond_flags (XEXP (x, 0));
7249 x = XEXP (x, 1);
7250 }
7251 note_stores (x, frv_registers_update_1, &flags);
7252 }
7253
7254
7255 /* Initialize frv_packet for the start of a new packet. */
7256
7257 static void
7258 frv_start_packet (void)
7259 {
7260 enum frv_insn_group group;
7261
7262 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7263 frv_packet.num_mems = 0;
7264 frv_packet.num_insns = 0;
7265 for (group = 0; group < NUM_GROUPS; group++)
7266 frv_packet.groups[group].num_insns = 0;
7267 }
7268
7269
7270 /* Likewise for the start of a new basic block. */
7271
7272 static void
7273 frv_start_packet_block (void)
7274 {
7275 state_reset (frv_packet.dfa_state);
7276 frv_start_packet ();
7277 }
7278
7279
7280 /* Finish the current packet, if any, and start a new one. Call
7281 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7282
7283 static void
7284 frv_finish_packet (void (*handle_packet) (void))
7285 {
7286 if (frv_packet.num_insns > 0)
7287 {
7288 handle_packet ();
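      /* Passing a null insn advances the DFA state to the next cycle.  */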
7289 state_transition (frv_packet.dfa_state, 0);
7290 frv_start_packet ();
7291 }
7292 }
7293
7294
7295 /* Return true if INSN can be added to the current packet. Update
7296 the DFA state on success. */
7297
7298 static bool
7299 frv_pack_insn_p (rtx insn)
7300 {
7301 /* See if the packet is already as long as it can be. */
7302 if (frv_packet.num_insns == frv_packet.issue_rate)
7303 return false;
7304
7305 /* If the scheduler thought that an instruction should start a packet,
7306 it's usually a good idea to believe it. It knows much more about
7307 the latencies than we do.
7308
7309 There are some exceptions though:
7310
7311 - Conditional instructions are scheduled on the assumption that
7312 they will be executed. This is usually a good thing, since it
7313 tends to avoid unnecessary stalls in the conditional code.
7314 But we want to pack conditional instructions as tightly as
7315 possible, in order to optimize the case where they aren't
7316 executed.
7317
7318 - The scheduler will always put branches on their own, even
7319 if there's no real dependency.
7320
7321 - There's no point putting a call in its own packet unless
7322 we have to. */
7323 if (frv_packet.num_insns > 0
7324 && GET_CODE (insn) == INSN
7325 && GET_MODE (insn) == TImode
7326 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7327 return false;
7328
7329 /* Check for register conflicts. Don't do this for setlo since any
7330 conflict will be with the partnering sethi, with which it can
7331 be packed. */
7332 if (get_attr_type (insn) != TYPE_SETLO)
7333 if (frv_registers_conflict_p (PATTERN (insn)))
7334 return false;
7335
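  /* state_transition returns a negative value when INSN can be issued in
     the current cycle, i.e. when it fits in the current packet.  */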
7336 return state_transition (frv_packet.dfa_state, insn) < 0;
7337 }
7338
7339
7340 /* Add instruction INSN to the current packet. */
7341
7342 static void
7343 frv_add_insn_to_packet (rtx insn)
7344 {
7345 struct frv_packet_group *packet_group;
7346
7347 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7348 packet_group->insns[packet_group->num_insns++] = insn;
7349 frv_packet.insns[frv_packet.num_insns++] = insn;
7350
7351 frv_registers_update (PATTERN (insn));
7352 }
7353
7354
7355 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7356 packet ends in a branch or call, insert the nop before it, otherwise
7357 add to the end. */
7358
7359 static void
7360 frv_insert_nop_in_packet (rtx insn)
7361 {
7362 struct frv_packet_group *packet_group;
7363 rtx last;
7364
7365 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7366 last = frv_packet.insns[frv_packet.num_insns - 1];
7367 if (GET_CODE (last) != INSN)
7368 {
7369 insn = emit_insn_before (PATTERN (insn), last);
7370 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7371 frv_packet.insns[frv_packet.num_insns++] = last;
7372 }
7373 else
7374 {
7375 insn = emit_insn_after (PATTERN (insn), last);
7376 frv_packet.insns[frv_packet.num_insns++] = insn;
7377 }
7378 packet_group->insns[packet_group->num_insns++] = insn;
7379 }
7380
7381
7382 /* If packing is enabled, divide the instructions into packets and
7383 return true. Call HANDLE_PACKET for each complete packet. */
7384
7385 static bool
7386 frv_for_each_packet (void (*handle_packet) (void))
7387 {
7388 rtx insn, next_insn;
7389
7390 frv_packet.issue_rate = frv_issue_rate ();
7391
7392 /* Early exit if we don't want to pack insns. */
7393 if (!optimize
7394 || !flag_schedule_insns_after_reload
7395 || !TARGET_VLIW_BRANCH
7396 || frv_packet.issue_rate == 1)
7397 return false;
7398
7399 /* Set up the initial packing state. */
7400 dfa_start ();
7401 frv_packet.dfa_state = alloca (state_size ());
7402
7403 frv_start_packet_block ();
7404 for (insn = get_insns (); insn != 0; insn = next_insn)
7405 {
7406 enum rtx_code code;
7407 bool eh_insn_p;
7408
7409 code = GET_CODE (insn);
7410 next_insn = NEXT_INSN (insn);
7411
7412 if (code == CODE_LABEL)
7413 {
7414 frv_finish_packet (handle_packet);
7415 frv_start_packet_block ();
7416 }
7417
7418 if (INSN_P (insn))
7419 switch (GET_CODE (PATTERN (insn)))
7420 {
7421 case USE:
7422 case CLOBBER:
7423 case ADDR_VEC:
7424 case ADDR_DIFF_VEC:
7425 break;
7426
7427 default:
7428 /* Calls mustn't be packed on a TOMCAT. */
7429 if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
7430 frv_finish_packet (handle_packet);
7431
7432 /* Since the last instruction in a packet determines the EH
7433 region, any exception-throwing instruction must come at
7434 the end of the reordered packet. Insns that issue to a
7435 branch unit are bound to come last; for others it's
7436 too hard to predict. */
7437 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7438 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7439 frv_finish_packet (handle_packet);
7440
7441 /* Finish the current packet if we can't add INSN to it.
7442 Simulate cycles until INSN is ready to issue. */
7443 if (!frv_pack_insn_p (insn))
7444 {
7445 frv_finish_packet (handle_packet);
7446 while (!frv_pack_insn_p (insn))
7447 state_transition (frv_packet.dfa_state, 0);
7448 }
7449
7450 /* Add the instruction to the packet. */
7451 frv_add_insn_to_packet (insn);
7452
7453 /* Calls and jumps end a packet, as do insns that throw
7454 an exception. */
7455 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7456 frv_finish_packet (handle_packet);
7457 break;
7458 }
7459 }
7460 frv_finish_packet (handle_packet);
7461 dfa_finish ();
7462 return true;
7463 }
7464 \f
7465 /* Subroutine of frv_sort_insn_group. We are trying to sort
7466 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7467 language order. We have already picked a new position for
7468 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7469 These instructions will occupy elements [0, LOWER_SLOT) and
7470 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7471 the DFA state after issuing these instructions.
7472
7473 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7474 of the unused instructions. Return true if one such permutation gives
7475 a valid ordering, leaving the successful permutation in sorted[].
7476 Do not modify sorted[] until a valid permutation is found. */
7477
7478 static bool
7479 frv_sort_insn_group_1 (enum frv_insn_group group,
7480 unsigned int lower_slot, unsigned int upper_slot,
7481 unsigned int issued, unsigned int num_insns,
7482 state_t state)
7483 {
7484 struct frv_packet_group *packet_group;
7485 unsigned int i;
7486 state_t test_state;
7487 size_t dfa_size;
7488 rtx insn;
7489
7490 /* Early success if we've filled all the slots. */
7491 if (lower_slot == upper_slot)
7492 return true;
7493
7494 packet_group = &frv_packet.groups[group];
7495 dfa_size = state_size ();
7496 test_state = alloca (dfa_size);
7497
7498 /* Try issuing each unused instruction. */
7499 for (i = num_insns - 1; i + 1 != 0; i--)
7500 if (~issued & (1 << i))
7501 {
7502 insn = packet_group->sorted[i];
7503 memcpy (test_state, state, dfa_size);
7504 if (state_transition (test_state, insn) < 0
7505 && cpu_unit_reservation_p (test_state,
7506 NTH_UNIT (group, upper_slot - 1))
7507 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7508 issued | (1 << i), num_insns,
7509 test_state))
7510 {
7511 packet_group->sorted[upper_slot - 1] = insn;
7512 return true;
7513 }
7514 }
7515
7516 return false;
7517 }
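/* Illustrative worked example (hypothetical values, not a recorded trace):
   suppose sorted[] holds { A, B, C } for GROUP_I and the ascending scan in
   frv_sort_insn_group stopped at FIRST = 1, so only A is known to issue to
   I0.  A call such as:

       frv_sort_insn_group_1 (GROUP_I, 1, 3, 0x1, 3, state);

   first tries C, then B, in the last slot (unit I2).  If the DFA accepts C
   on I2, the recursive call with ISSUED = 0x5 tries to place B in slot 1
   (unit I1); if that also succeeds, sorted[] ends up as { A, B, C } with
   each element known to occupy the unit matching its index.  If no
   permutation works, the caller retries with more trailing nops.  */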
7518
7519 /* Compare two instructions by their frv_insn_unit. */
7520
7521 static int
7522 frv_compare_insns (const void *first, const void *second)
7523 {
7524 const rtx *insn1 = first, *insn2 = second;
7525 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7526 }
7527
7528 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7529 and sort it into assembly language order. See frv.md for a description of
7530 the algorithm. */
7531
7532 static void
7533 frv_sort_insn_group (enum frv_insn_group group)
7534 {
7535 struct frv_packet_group *packet_group;
7536 unsigned int first, i, nop, max_unit, num_slots;
7537 state_t state, test_state;
7538 size_t dfa_size;
7539
7540 packet_group = &frv_packet.groups[group];
7541
7542 /* Assume no nop is needed. */
7543 packet_group->nop = 0;
7544
7545 if (packet_group->num_insns == 0)
7546 return;
7547
7548 /* Copy insns[] to sorted[]. */
7549 memcpy (packet_group->sorted, packet_group->insns,
7550 sizeof (rtx) * packet_group->num_insns);
7551
7552 /* Sort sorted[] by the unit that each insn tries to take first. */
7553 if (packet_group->num_insns > 1)
7554 qsort (packet_group->sorted, packet_group->num_insns,
7555 sizeof (rtx), frv_compare_insns);
7556
7557 /* That's always enough for branch and control insns. */
7558 if (group == GROUP_B || group == GROUP_C)
7559 return;
7560
7561 dfa_size = state_size ();
7562 state = alloca (dfa_size);
7563 test_state = alloca (dfa_size);
7564
7565 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7566 consecutively and such that the DFA takes unit X when sorted[X]
7567 is added. Set STATE to the new DFA state. */
7568 state_reset (test_state);
7569 for (first = 0; first < packet_group->num_insns; first++)
7570 {
7571 memcpy (state, test_state, dfa_size);
7572 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7573 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7574 break;
7575 }
7576
7577 /* If all the instructions issued in ascending order, we're done. */
7578 if (first == packet_group->num_insns)
7579 return;
7580
7581 /* Add nops to the end of sorted[] and try each permutation until
7582 we find one that works. */
7583 for (nop = 0; nop < frv_num_nops; nop++)
7584 {
7585 max_unit = frv_insn_unit (frv_nops[nop]);
7586 if (frv_unit_groups[max_unit] == group)
7587 {
7588 packet_group->nop = frv_nops[nop];
7589 num_slots = UNIT_NUMBER (max_unit) + 1;
7590 for (i = packet_group->num_insns; i < num_slots; i++)
7591 packet_group->sorted[i] = frv_nops[nop];
7592 if (frv_sort_insn_group_1 (group, first, num_slots,
7593 (1 << first) - 1, num_slots, state))
7594 return;
7595 }
7596 }
7597 gcc_unreachable ();
7598 }
7599 \f
7600 /* Sort the current packet into assembly-language order. Set packing
7601 flags as appropriate. */
7602
7603 static void
7604 frv_reorder_packet (void)
7605 {
7606 unsigned int cursor[NUM_GROUPS];
7607 rtx insns[ARRAY_SIZE (frv_unit_groups)];
7608 unsigned int unit, to, from;
7609 enum frv_insn_group group;
7610 struct frv_packet_group *packet_group;
7611
7612 /* First sort each group individually. */
7613 for (group = 0; group < NUM_GROUPS; group++)
7614 {
7615 cursor[group] = 0;
7616 frv_sort_insn_group (group);
7617 }
7618
7619 /* Go through the unit template and try to add an instruction from
7620 that unit's group. */
7621 to = 0;
7622 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7623 {
7624 group = frv_unit_groups[unit];
7625 packet_group = &frv_packet.groups[group];
7626 if (cursor[group] < packet_group->num_insns)
7627 {
7628 /* frv_reorg should have added nops for us. */
7629 gcc_assert (packet_group->sorted[cursor[group]]
7630 != packet_group->nop);
7631 insns[to++] = packet_group->sorted[cursor[group]++];
7632 }
7633 }
7634
7635 gcc_assert (to == frv_packet.num_insns);
7636
7637 /* Clear the last instruction's packing flag, thus marking the end of
7638 a packet. Reorder the other instructions relative to it. */
7639 CLEAR_PACKING_FLAG (insns[to - 1]);
7640 for (from = 0; from < to - 1; from++)
7641 {
7642 remove_insn (insns[from]);
7643 add_insn_before (insns[from], insns[to - 1], NULL);
7644 SET_PACKING_FLAG (insns[from]);
7645 }
7646 }
7647
7648
7649 /* Divide instructions into packets. Reorder the contents of each
7650 packet so that they are in the correct assembly-language order.
7651
7652 Since this pass can change the raw meaning of the rtl stream, it must
7653 only be called at the last minute, just before the instructions are
7654 written out. */
7655
7656 static void
7657 frv_pack_insns (void)
7658 {
7659 if (frv_for_each_packet (frv_reorder_packet))
7660 frv_insn_packing_flag = 0;
7661 else
7662 frv_insn_packing_flag = -1;
7663 }
7664 \f
7665 /* See whether we need to add nops to group GROUP in order to
7666 make a valid packet. */
7667
7668 static void
7669 frv_fill_unused_units (enum frv_insn_group group)
7670 {
7671 unsigned int non_nops, nops, i;
7672 struct frv_packet_group *packet_group;
7673
7674 packet_group = &frv_packet.groups[group];
7675
7676 /* Sort the instructions into assembly-language order.
7677 Use nops to fill slots that are otherwise unused. */
7678 frv_sort_insn_group (group);
7679
7680 /* See how many nops are needed before the final useful instruction. */
7681 i = nops = 0;
7682 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7683 while (packet_group->sorted[i++] == packet_group->nop)
7684 nops++;
7685
7686 /* Insert that many nops into the instruction stream. */
7687 while (nops-- > 0)
7688 frv_insert_nop_in_packet (packet_group->nop);
7689 }
7690
7691 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7692
7693 static bool
7694 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7695 {
7696 if (io1->const_address != 0 && io2->const_address != 0)
7697 return io1->const_address == io2->const_address;
7698
7699 if (io1->var_address != 0 && io2->var_address != 0)
7700 return rtx_equal_p (io1->var_address, io2->var_address);
7701
7702 return false;
7703 }
7704
7705 /* Return true if operations IO1 and IO2 are guaranteed to complete
7706 in order. */
7707
7708 static bool
7709 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7710 {
7711 /* The order of writes is always preserved. */
7712 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7713 return true;
7714
7715 /* The order of reads isn't preserved. */
7716 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7717 return false;
7718
7719 /* One operation is a write and the other is (or could be) a read.
7720 The order is only guaranteed if the accesses are to the same
7721 doubleword. */
7722 return frv_same_doubleword_p (io1, io2);
7723 }
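/* For example (illustrative only): a __builtin_write32 to address 0x1000
   followed by a __builtin_read32 from 0x1004 touches the same doubleword
   (0x1000-0x1007), so their order is guaranteed; two reads, or a write and
   a read of different doublewords, are not.  */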
7724
7725 /* Generalize I/O operation X so that it covers both X and Y. */
7726
7727 static void
7728 frv_io_union (struct frv_io *x, const struct frv_io *y)
7729 {
7730 if (x->type != y->type)
7731 x->type = FRV_IO_UNKNOWN;
7732 if (!frv_same_doubleword_p (x, y))
7733 {
7734 x->const_address = 0;
7735 x->var_address = 0;
7736 }
7737 }
7738
7739 /* Fill IO with information about the load or store associated with
7740 membar instruction INSN. */
7741
7742 static void
7743 frv_extract_membar (struct frv_io *io, rtx insn)
7744 {
7745 extract_insn (insn);
7746 io->type = INTVAL (recog_data.operand[2]);
7747 io->const_address = INTVAL (recog_data.operand[1]);
7748 io->var_address = XEXP (recog_data.operand[0], 0);
7749 }
7750
7751 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7752 if X is a register and *DATA depends on X. */
7753
7754 static void
7755 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7756 {
7757 rtx *other = data;
7758
7759 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7760 *other = 0;
7761 }
7762
7763 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7764 Remove every modified register from the set. */
7765
7766 static void
7767 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7768 {
7769 HARD_REG_SET *set = data;
7770 unsigned int regno;
7771
7772 if (REG_P (x))
7773 FOR_EACH_REGNO (regno, x)
7774 CLEAR_HARD_REG_BIT (*set, regno);
7775 }
7776
7777 /* A for_each_rtx callback for which DATA points to a HARD_REG_SET.
7778 Add every register in *X to the set. */
7779
7780 static int
7781 frv_io_handle_use_1 (rtx *x, void *data)
7782 {
7783 HARD_REG_SET *set = data;
7784 unsigned int regno;
7785
7786 if (REG_P (*x))
7787 FOR_EACH_REGNO (regno, *x)
7788 SET_HARD_REG_BIT (*set, regno);
7789
7790 return 0;
7791 }
7792
7793 /* A note_uses callback that applies frv_io_handle_use_1 to an
7794 entire rhs value. */
7795
7796 static void
7797 frv_io_handle_use (rtx *x, void *data)
7798 {
7799 for_each_rtx (x, frv_io_handle_use_1, data);
7800 }
7801
7802 /* Go through block BB looking for membars to remove. There are two
7803 cases where intra-block analysis is enough:
7804
7805 - a membar is redundant if it occurs between two consecutive I/O
7806 operations and if those operations are guaranteed to complete
7807 in order.
7808
7809 - a membar for a __builtin_read is redundant if the result is
7810 used before the next I/O operation is issued.
7811
7812 If the last membar in the block could not be removed, and there
7813 are guaranteed to be no I/O operations between that membar and
7814 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7815 store null.
7816
7817 Describe the block's first I/O operation in *NEXT_IO. Describe
7818 an unknown operation if the block doesn't do any I/O. */
7819
7820 static void
7821 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7822 rtx *last_membar)
7823 {
7824 HARD_REG_SET used_regs;
7825 rtx next_membar, set, insn;
7826 bool next_is_end_p;
7827
7828 /* NEXT_IO is the next I/O operation to be performed after the current
7829 instruction. It starts off as being an unknown operation. */
7830 memset (next_io, 0, sizeof (*next_io));
7831
7832 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7833 next_is_end_p = true;
7834
7835 /* If the current instruction is a __builtin_read or __builtin_write,
7836 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7837 is null if the membar has already been deleted.
7838
7839 Note that the initialization here should only be needed to
7840 suppress warnings. */
7841 next_membar = 0;
7842
7843 /* USED_REGS is the set of registers that are used before the
7844 next I/O instruction. */
7845 CLEAR_HARD_REG_SET (used_regs);
7846
7847 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7848 if (GET_CODE (insn) == CALL_INSN)
7849 {
7850 /* We can't predict what a call will do to volatile memory. */
7851 memset (next_io, 0, sizeof (struct frv_io));
7852 next_is_end_p = false;
7853 CLEAR_HARD_REG_SET (used_regs);
7854 }
7855 else if (INSN_P (insn))
7856 switch (recog_memoized (insn))
7857 {
7858 case CODE_FOR_optional_membar_qi:
7859 case CODE_FOR_optional_membar_hi:
7860 case CODE_FOR_optional_membar_si:
7861 case CODE_FOR_optional_membar_di:
7862 next_membar = insn;
7863 if (next_is_end_p)
7864 {
7865 /* Local information isn't enough to decide whether this
7866 membar is needed. Stash it away for later. */
7867 *last_membar = insn;
7868 frv_extract_membar (next_io, insn);
7869 next_is_end_p = false;
7870 }
7871 else
7872 {
7873 /* Check whether the I/O operation before INSN could be
7874 reordered with one described by NEXT_IO. If it can't,
7875 INSN will not be needed. */
7876 struct frv_io prev_io;
7877
7878 frv_extract_membar (&prev_io, insn);
7879 if (frv_io_fixed_order_p (&prev_io, next_io))
7880 {
7881 if (dump_file)
7882 fprintf (dump_file,
7883 ";; [Local] Removing membar %d since order"
7884 " of accesses is guaranteed\n",
7885 INSN_UID (next_membar));
7886
7887 insn = NEXT_INSN (insn);
7888 delete_insn (next_membar);
7889 next_membar = 0;
7890 }
7891 *next_io = prev_io;
7892 }
7893 break;
7894
7895 default:
7896 /* Invalidate NEXT_IO's address if it depends on something that
7897 is clobbered by INSN. */
7898 if (next_io->var_address)
7899 note_stores (PATTERN (insn), frv_io_check_address,
7900 &next_io->var_address);
7901
7902 /* If the next membar is associated with a __builtin_read,
7903 see if INSN reads from that address. If it does, and if
7904 the destination register is used before the next I/O access,
7905 there is no need for the membar. */
7906 set = PATTERN (insn);
7907 if (next_io->type == FRV_IO_READ
7908 && next_io->var_address != 0
7909 && next_membar != 0
7910 && GET_CODE (set) == SET
7911 && GET_CODE (SET_DEST (set)) == REG
7912 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7913 {
7914 rtx src;
7915
7916 src = SET_SRC (set);
7917 if (GET_CODE (src) == ZERO_EXTEND)
7918 src = XEXP (src, 0);
7919
7920 if (GET_CODE (src) == MEM
7921 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
7922 {
7923 if (dump_file)
7924 fprintf (dump_file,
7925 ";; [Local] Removing membar %d since the target"
7926 " of %d is used before the I/O operation\n",
7927 INSN_UID (next_membar), INSN_UID (insn));
7928
7929 if (next_membar == *last_membar)
7930 *last_membar = 0;
7931
7932 delete_insn (next_membar);
7933 next_membar = 0;
7934 }
7935 }
7936
7937 /* If INSN has volatile references, forget about any registers
7938 that are used after it. Otherwise forget about uses that
7939 are (or might be) defined by INSN. */
7940 if (volatile_refs_p (PATTERN (insn)))
7941 CLEAR_HARD_REG_SET (used_regs);
7942 else
7943 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
7944
7945 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
7946 break;
7947 }
7948 }
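/* Illustrative source-level example of the first local case above
   (hypothetical user code, assuming these are the block's last I/O
   operations):

       __builtin_write32 (p, x);      -- store + membar A
       __builtin_write32 (q, y);      -- store + membar B

   Scanning the block backwards, membar B is stashed in *LAST_MEMBAR and
   described by NEXT_IO; when membar A is reached, frv_io_fixed_order_p
   sees two writes, whose order is always preserved, so membar A is
   deleted.  Whether membar B can go too is left to
   frv_optimize_membar_global.  */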
7949
7950 /* See if MEMBAR, the last membar instruction in BB, can be removed.
7951 FIRST_IO[X] describes the first operation performed by basic block X. */
7952
7953 static void
7954 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
7955 rtx membar)
7956 {
7957 struct frv_io this_io, next_io;
7958 edge succ;
7959 edge_iterator ei;
7960
7961 /* We need to keep the membar if there is an edge to the exit block. */
7962 FOR_EACH_EDGE (succ, ei, bb->succs)
7963 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
7964 if (succ->dest == EXIT_BLOCK_PTR)
7965 return;
7966
7967 /* Work out the union of the successor blocks' first I/O operations. */
7968 ei = ei_start (bb->succs);
7969 ei_cond (ei, &succ);
7970 /* next_io = first_io[bb->succ->dest->index]; */
7971 next_io = first_io[succ->dest->index];
7972 ei = ei_start (bb->succs);
7973 if (ei_cond (ei, &succ))
7974 {
7975 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
7976 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
7977 frv_io_union (&next_io, &first_io[succ->dest->index]);
7978 }
7979 else
7980 gcc_unreachable ();
7981
7982 frv_extract_membar (&this_io, membar);
7983 if (frv_io_fixed_order_p (&this_io, &next_io))
7984 {
7985 if (dump_file)
7986 fprintf (dump_file,
7987 ";; [Global] Removing membar %d since order of accesses"
7988 " is guaranteed\n", INSN_UID (membar));
7989
7990 delete_insn (membar);
7991 }
7992 }
7993
7994 /* Remove redundant membars from the current function. */
7995
7996 static void
7997 frv_optimize_membar (void)
7998 {
7999 basic_block bb;
8000 struct frv_io *first_io;
8001 rtx *last_membar;
8002
8003 compute_bb_for_insn ();
8004 first_io = xcalloc (last_basic_block, sizeof (struct frv_io));
8005 last_membar = xcalloc (last_basic_block, sizeof (rtx));
8006
8007 FOR_EACH_BB (bb)
8008 frv_optimize_membar_local (bb, &first_io[bb->index],
8009 &last_membar[bb->index]);
8010
8011 FOR_EACH_BB (bb)
8012 if (last_membar[bb->index] != 0)
8013 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
8014
8015 free (first_io);
8016 free (last_membar);
8017 }
8018 \f
8019 /* Used by frv_reorg to keep track of the current packet's address. */
8020 static unsigned int frv_packet_address;
8021
8022 /* If the current packet falls through to a label, try to pad the packet
8023 with nops in order to fit the label's alignment requirements. */
8024
8025 static void
8026 frv_align_label (void)
8027 {
8028 unsigned int alignment, target, nop;
8029 rtx x, last, barrier, label;
8030
8031 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
8032 maximum alignment of that packet, LABEL to the last label between
8033 the packets, and BARRIER to the last barrier. */
8034 last = frv_packet.insns[frv_packet.num_insns - 1];
8035 label = barrier = 0;
8036 alignment = 4;
8037 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
8038 {
8039 if (LABEL_P (x))
8040 {
8041 unsigned int subalign = 1 << label_to_alignment (x);
8042 alignment = MAX (alignment, subalign);
8043 label = x;
8044 }
8045 if (BARRIER_P (x))
8046 barrier = x;
8047 }
8048
8049 /* If -malign-labels, and the packet falls through to an unaligned
8050 label, try introducing a nop to align that label to 8 bytes. */
8051 if (TARGET_ALIGN_LABELS
8052 && label != 0
8053 && barrier == 0
8054 && frv_packet.num_insns < frv_packet.issue_rate)
8055 alignment = MAX (alignment, 8);
8056
8057 /* Advance the address to the end of the current packet. */
8058 frv_packet_address += frv_packet.num_insns * 4;
8059
8060 /* Work out the target address, after alignment. */
8061 target = (frv_packet_address + alignment - 1) & -alignment;
8062
8063 /* If the packet falls through to the label, try to find an efficient
8064 padding sequence. */
8065 if (barrier == 0)
8066 {
8067 /* First try adding nops to the current packet. */
8068 for (nop = 0; nop < frv_num_nops; nop++)
8069 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
8070 {
8071 frv_insert_nop_in_packet (frv_nops[nop]);
8072 frv_packet_address += 4;
8073 }
8074
8075 /* If we still haven't reached the target, add some new packets that
8076 contain only nops. If there are two types of nop, insert an
8077 alternating sequence of frv_nops[0] and frv_nops[1], which will
8078 lead to packets like:
8079
8080 nop.p
8081 mnop.p/fnop.p
8082 nop.p
8083 mnop/fnop
8084
8085 etc. Just emit frv_nops[0] if that's the only nop we have. */
8086 last = frv_packet.insns[frv_packet.num_insns - 1];
8087 nop = 0;
8088 while (frv_packet_address < target)
8089 {
8090 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
8091 frv_packet_address += 4;
8092 if (frv_num_nops > 1)
8093 nop ^= 1;
8094 }
8095 }
8096
8097 frv_packet_address = target;
8098 }
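/* Worked example: if the packet ends at address 0x1c and the label that
   follows wants 8-byte alignment, TARGET = (0x1c + 7) & -8 = 0x20, so a
   single 4-byte nop suffices -- added to the current packet if a slot is
   free, otherwise emitted as a nop-only packet.  */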
8099
8100 /* Subroutine of frv_reorg, called after each packet has been constructed
8101 in frv_packet. */
8102
8103 static void
8104 frv_reorg_packet (void)
8105 {
8106 frv_fill_unused_units (GROUP_I);
8107 frv_fill_unused_units (GROUP_FM);
8108 frv_align_label ();
8109 }
8110
8111 /* Add an instruction with pattern NOP to frv_nops[]. */
8112
8113 static void
8114 frv_register_nop (rtx nop)
8115 {
8116 nop = make_insn_raw (nop);
8117 NEXT_INSN (nop) = 0;
8118 PREV_INSN (nop) = 0;
8119 frv_nops[frv_num_nops++] = nop;
8120 }
8121
8122 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
8123 into packets and check whether we need to insert nops in order to
8124 fulfill the processor's issue requirements. Also, if the user has
8125 requested a certain alignment for a label, try to meet that alignment
8126 by inserting nops in the previous packet. */
8127
8128 static void
8129 frv_reorg (void)
8130 {
8131 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8132 frv_optimize_membar ();
8133
8134 frv_num_nops = 0;
8135 frv_register_nop (gen_nop ());
8136 if (TARGET_MEDIA)
8137 frv_register_nop (gen_mnop ());
8138 if (TARGET_HARD_FLOAT)
8139 frv_register_nop (gen_fnop ());
8140
8141 /* Estimate the length of each branch. Although this may change after
8142 we've inserted nops, it will only do so in big functions. */
8143 shorten_branches (get_insns ());
8144
8145 frv_packet_address = 0;
8146 frv_for_each_packet (frv_reorg_packet);
8147 }
8148 \f
8149 #define def_builtin(name, type, code) \
8150 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
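/* For example, the call below

       def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);

   expands to

       add_builtin_function ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT,
                             BUILT_IN_MD, NULL, NULL);  */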
8151
8152 struct builtin_description
8153 {
8154 enum insn_code icode;
8155 const char *name;
8156 enum frv_builtins code;
8157 enum rtx_code comparison;
8158 unsigned int flag;
8159 };
8160
8161 /* Media intrinsics that take a single, constant argument. */
8162
8163 static struct builtin_description bdesc_set[] =
8164 {
8165 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, 0, 0 }
8166 };
8167
8168 /* Media intrinsics that take just one argument. */
8169
8170 static struct builtin_description bdesc_1arg[] =
8171 {
8172 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, 0, 0 },
8173 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, 0, 0 },
8174 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, 0, 0 },
8175 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, 0, 0 },
8176 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, 0, 0 },
8177 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, 0, 0 }
8178 };
8179
8180 /* Media intrinsics that take two arguments. */
8181
8182 static struct builtin_description bdesc_2arg[] =
8183 {
8184 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, 0, 0 },
8185 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, 0, 0 },
8186 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, 0, 0 },
8187 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, 0, 0 },
8188 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, 0, 0 },
8189 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, 0, 0 },
8190 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, 0, 0 },
8191 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, 0, 0 },
8192 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, 0, 0 },
8193 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, 0, 0 },
8194 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, 0, 0 },
8195 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, 0, 0 },
8196 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, 0, 0 },
8197 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, 0, 0 },
8198 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, 0, 0 },
8199 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, 0, 0 },
8200 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, 0, 0 },
8201 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, 0, 0 },
8202 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, 0, 0 },
8203 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, 0, 0 },
8204 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, 0, 0 },
8205 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, 0, 0 },
8206 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, 0, 0 },
8207 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, 0, 0 },
8208 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, 0, 0 },
8209 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, 0, 0 },
8210 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, 0, 0 }
8211 };
8212
8213 /* Integer intrinsics that take two arguments and have no return value. */
8214
8215 static struct builtin_description bdesc_int_void2arg[] =
8216 {
8217 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, 0, 0 },
8218 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, 0, 0 },
8219 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, 0, 0 }
8220 };
8221
8222 static struct builtin_description bdesc_prefetches[] =
8223 {
8224 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, 0, 0 },
8225 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, 0, 0 }
8226 };
8227
8228 /* Media intrinsics that take two arguments, the first being an ACC number. */
8229
8230 static struct builtin_description bdesc_cut[] =
8231 {
8232 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, 0, 0 },
8233 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, 0, 0 },
8234 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, 0, 0 }
8235 };
8236
8237 /* Two-argument media intrinsics with an immediate second argument. */
8238
8239 static struct builtin_description bdesc_2argimm[] =
8240 {
8241 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, 0, 0 },
8242 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, 0, 0 },
8243 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, 0, 0 },
8244 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, 0, 0 },
8245 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, 0, 0 },
8246 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, 0, 0 },
8247 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, 0, 0 },
8248 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, 0, 0 },
8249 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, 0, 0 },
8250 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, 0, 0 },
8251 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, 0, 0 },
8252 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, 0, 0 },
8253 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, 0, 0 },
8254 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, 0, 0 },
8255 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, 0, 0 },
8256 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, 0, 0 },
8257 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, 0, 0 }
8258 };
8259
8260 /* Media intrinsics that take two arguments and return void, the first argument
8261 being a pointer to 4 words in memory. */
8262
8263 static struct builtin_description bdesc_void2arg[] =
8264 {
8265 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, 0, 0 },
8266 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, 0, 0 },
8267 };
8268
8269 /* Media intrinsics that take three arguments, the first being a const_int that
8270 denotes an accumulator, and that return void. */
8271
8272 static struct builtin_description bdesc_void3arg[] =
8273 {
8274 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, 0, 0 },
8275 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, 0, 0 },
8276 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, 0, 0 },
8277 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, 0, 0 },
8278 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, 0, 0 },
8279 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, 0, 0 },
8280 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, 0, 0 },
8281 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, 0, 0 },
8282 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, 0, 0 },
8283 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, 0, 0 },
8284 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, 0, 0 },
8285 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, 0, 0 },
8286 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, 0, 0 },
8287 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, 0, 0 },
8288 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, 0, 0 },
8289 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, 0, 0 },
8290 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, 0, 0 },
8291 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, 0, 0 },
8292 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, 0, 0 },
8293 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, 0, 0 },
8294 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, 0, 0 },
8295 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, 0, 0 },
8296 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, 0, 0 },
8297 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, 0, 0 },
8298 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, 0, 0 }
8299 };
8300
8301 /* Media intrinsics that take two accumulator numbers as argument and
8302 return void. */
8303
8304 static struct builtin_description bdesc_voidacc[] =
8305 {
8306 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, 0, 0 },
8307 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, 0, 0 },
8308 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, 0, 0 },
8309 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, 0, 0 },
8310 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, 0, 0 },
8311 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, 0, 0 }
8312 };
8313
8314 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8315 a normal move and the ICODE is for the membar. */
8316
8317 static struct builtin_description bdesc_loads[] =
8318 {
8319 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8320 FRV_BUILTIN_READ8, 0, 0 },
8321 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8322 FRV_BUILTIN_READ16, 0, 0 },
8323 { CODE_FOR_optional_membar_si, "__builtin_read32",
8324 FRV_BUILTIN_READ32, 0, 0 },
8325 { CODE_FOR_optional_membar_di, "__builtin_read64",
8326 FRV_BUILTIN_READ64, 0, 0 }
8327 };
8328
8329 /* Likewise stores. */
8330
8331 static struct builtin_description bdesc_stores[] =
8332 {
8333 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8334 FRV_BUILTIN_WRITE8, 0, 0 },
8335 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8336 FRV_BUILTIN_WRITE16, 0, 0 },
8337 { CODE_FOR_optional_membar_si, "__builtin_write32",
8338 FRV_BUILTIN_WRITE32, 0, 0 },
8339 { CODE_FOR_optional_membar_di, "__builtin_write64",
8340 FRV_BUILTIN_WRITE64, 0, 0 },
8341 };
8342
8343 /* Initialize media builtins. */
8344
8345 static void
8346 frv_init_builtins (void)
8347 {
8348 tree endlink = void_list_node;
8349 tree accumulator = integer_type_node;
8350 tree integer = integer_type_node;
8351 tree voidt = void_type_node;
8352 tree uhalf = short_unsigned_type_node;
8353 tree sword1 = long_integer_type_node;
8354 tree uword1 = long_unsigned_type_node;
8355 tree sword2 = long_long_integer_type_node;
8356 tree uword2 = long_long_unsigned_type_node;
8357 tree uword4 = build_pointer_type (uword1);
8358 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8359 tree ubyte = unsigned_char_type_node;
8360 tree iacc = integer_type_node;
8361
8362 #define UNARY(RET, T1) \
8363 build_function_type (RET, tree_cons (NULL_TREE, T1, endlink))
8364
8365 #define BINARY(RET, T1, T2) \
8366 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8367 tree_cons (NULL_TREE, T2, endlink)))
8368
8369 #define TRINARY(RET, T1, T2, T3) \
8370 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8371 tree_cons (NULL_TREE, T2, \
8372 tree_cons (NULL_TREE, T3, endlink))))
8373
8374 #define QUAD(RET, T1, T2, T3, T4) \
8375 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8376 tree_cons (NULL_TREE, T2, \
8377 tree_cons (NULL_TREE, T3, \
8378 tree_cons (NULL_TREE, T4, endlink)))))
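/* For example, BINARY (uword1, uword1, uword1) builds the prototype
   "unsigned long (unsigned long, unsigned long)"; it is used below as
   uw1_ftype_uw1_uw1 for builtins such as __MAND.  */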
8379
8380 tree void_ftype_void = build_function_type (voidt, endlink);
8381
8382 tree void_ftype_acc = UNARY (voidt, accumulator);
8383 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8384 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8385 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8386 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8387 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8388 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8389 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8390 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8391
8392 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8393 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8394 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8395 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8396 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8397 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8398 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8399 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8400 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8401 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8402 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8403
8404 tree sw1_ftype_int = UNARY (sword1, integer);
8405 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8406 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8407
8408 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8409 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8410 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8411 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8412 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8413 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8414
8415 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8416 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8417 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8418 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8419 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8420 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8421 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8422 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8423 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8424 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8425 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8426 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8427 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8428 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8429 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8430 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8431 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8432
8433 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8434 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8435 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8436 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8437 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8438 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8439 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8440 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8441 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8442 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8443 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8444 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8445 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8446 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8447 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8448 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8449 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8450 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8451 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8452 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8453 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8454 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8455 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8456 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8457 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8458 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8459 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8460 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8461 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8462 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8463 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8464 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8465 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8466 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8467 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8468 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8469 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8470 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8471 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8472 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8473 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8474 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8475 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8476 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8477 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8478 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8479 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8480 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8481 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8482 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8483 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8484 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8485 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8486 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8487 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8488 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8489 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8490 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8491 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8492 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8493 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8494 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8495 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8496 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8497 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8498 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8499 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8500 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8501 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8502 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8503 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8504 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8505 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8506 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8507 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8508 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8509 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8510 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8511 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8512 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8513 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8514 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8515 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8516 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8517 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8518 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8519 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8520 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8521 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8522 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8523 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8524 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8525 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8526 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8527 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8528 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8529 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8530 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8531 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8532 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8533 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8534 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8535 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8536 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8537 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8538 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8539 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8540 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8541
8542 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8543 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8544 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8545 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8546
8547 #undef UNARY
8548 #undef BINARY
8549 #undef TRINARY
8550 #undef QUAD
8551 }
8552
8553 /* Set the names for various arithmetic operations according to the
8554 FRV ABI. */
8555 static void
8556 frv_init_libfuncs (void)
8557 {
8558 set_optab_libfunc (smod_optab, SImode, "__modi");
8559 set_optab_libfunc (umod_optab, SImode, "__umodi");
8560
8561 set_optab_libfunc (add_optab, DImode, "__addll");
8562 set_optab_libfunc (sub_optab, DImode, "__subll");
8563 set_optab_libfunc (smul_optab, DImode, "__mulll");
8564 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8565 set_optab_libfunc (smod_optab, DImode, "__modll");
8566 set_optab_libfunc (umod_optab, DImode, "__umodll");
8567 set_optab_libfunc (and_optab, DImode, "__andll");
8568 set_optab_libfunc (ior_optab, DImode, "__orll");
8569 set_optab_libfunc (xor_optab, DImode, "__xorll");
8570 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8571
8572 set_optab_libfunc (add_optab, SFmode, "__addf");
8573 set_optab_libfunc (sub_optab, SFmode, "__subf");
8574 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8575 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8576
8577 set_optab_libfunc (add_optab, DFmode, "__addd");
8578 set_optab_libfunc (sub_optab, DFmode, "__subd");
8579 set_optab_libfunc (smul_optab, DFmode, "__muld");
8580 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8581
8582 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8583 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8584
8585 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8586 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8587 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8588 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8589
8590 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8591 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8592 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8593 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8594
8595 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8596 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8597 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8598 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8599 }
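/* With these entries in place, a 64-bit signed division such as
   "long long q = a / b;" is expanded as a call to __divll rather than
   the default libgcc routine (__divdi3), and likewise for the other
   operations above.  */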
8600
8601 /* Convert an integer constant to an accumulator register. ICODE is the
8602 code of the target instruction, OPNUM is the number of the
8603 accumulator operand and OPVAL is the constant integer. Try both
8604 ACC and ACCG registers; only report an error if neither fits the
8605 instruction. */
8606
8607 static rtx
8608 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8609 {
8610 rtx reg;
8611 int i;
8612
8613 /* ACCs and ACCGs are implicit global registers if media intrinsics
8614 are being used. We set this up lazily to avoid creating lots of
8615 unnecessary call_insn rtl in non-media code. */
8616 for (i = 0; i <= ACC_MASK; i++)
8617 if ((i & ACC_MASK) == i)
8618 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8619
8620 if (GET_CODE (opval) != CONST_INT)
8621 {
8622 error ("accumulator is not a constant integer");
8623 return NULL_RTX;
8624 }
8625 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8626 {
8627 error ("accumulator number is out of bounds");
8628 return NULL_RTX;
8629 }
8630
8631 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8632 ACC_FIRST + INTVAL (opval));
8633 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8634 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8635
8636 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8637 {
8638 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8639 return NULL_RTX;
8640 }
8641 return reg;
8642 }
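/* For example, an accumulator argument of (const_int 2) normally yields
   the register ACC2 in the operand's mode; if the instruction's predicate
   only accepts guard registers, the number is reinterpreted as ACCG2
   instead.  Anything else is reported as an error.  */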
8643
8644 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8645 should have. */
8646
8647 static enum machine_mode
8648 frv_matching_accg_mode (enum machine_mode mode)
8649 {
8650 switch (mode)
8651 {
8652 case V4SImode:
8653 return V4QImode;
8654
8655 case DImode:
8656 return HImode;
8657
8658 case SImode:
8659 return QImode;
8660
8661 default:
8662 gcc_unreachable ();
8663 }
8664 }
8665
8666 /* Given that a __builtin_read or __builtin_write function is accessing
8667 address ADDRESS, return the value that should be used as operand 1
8668 of the membar. */
8669
8670 static rtx
8671 frv_io_address_cookie (rtx address)
8672 {
8673 return (GET_CODE (address) == CONST_INT
8674 ? GEN_INT (INTVAL (address) / 8 * 8)
8675 : const0_rtx);
8676 }
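/* For example, a constant address of 0x1006 yields the cookie
   (const_int 0x1000), identifying the containing doubleword; any
   non-constant address yields (const_int 0).  */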
8677
8678 /* Return the accumulator guard that should be paired with accumulator
8679 register ACC. The mode of the returned register is in the same
8680 class as ACC, but is four times smaller. */
8681
8682 rtx
8683 frv_matching_accg_for_acc (rtx acc)
8684 {
8685 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8686 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8687 }
8688
8689 /* Read from call expression EXP the argument given by INDEX.
8690 Return the value as an rtx. */
8691
8692 static rtx
8693 frv_read_argument (tree exp, unsigned int index)
8694 {
8695 return expand_expr (CALL_EXPR_ARG (exp, index),
8696 NULL_RTX, VOIDmode, 0);
8697 }
8698
8699 /* Like frv_read_argument, but interpret the argument as the number
8700 of an IACC register and return a (reg:MODE ...) rtx for it. */
8701
8702 static rtx
8703 frv_read_iacc_argument (enum machine_mode mode, tree call,
8704 unsigned int index)
8705 {
8706 int i, regno;
8707 rtx op;
8708
8709 op = frv_read_argument (call, index);
8710 if (GET_CODE (op) != CONST_INT
8711 || INTVAL (op) < 0
8712 || INTVAL (op) > IACC_LAST - IACC_FIRST
8713 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8714 {
8715 error ("invalid IACC argument");
8716 op = const0_rtx;
8717 }
8718
8719 /* IACCs are implicit global registers. We set this up lazily to
8720 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8721 being used. */
8722 regno = INTVAL (op) + IACC_FIRST;
8723 for (i = 0; i < HARD_REGNO_NREGS (regno, mode); i++)
8724 global_regs[regno + i] = 1;
8725
8726 return gen_rtx_REG (mode, regno);
8727 }
8728
8729 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8730 The instruction should require a constant operand of some sort. The
8731 function prints an error if OPVAL is not valid. */
8732
8733 static int
8734 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8735 {
8736 if (GET_CODE (opval) != CONST_INT)
8737 {
8738 error ("%qs expects a constant argument", insn_data[icode].name);
8739 return FALSE;
8740 }
8741 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8742 {
8743 error ("constant argument out of range for %qs", insn_data[icode].name);
8744 return FALSE;
8745 }
8746 return TRUE;
8747 }
8748
8749 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8750 if it's not null, has the right mode, and satisfies operand 0's
8751 predicate. */
8752
8753 static rtx
8754 frv_legitimize_target (enum insn_code icode, rtx target)
8755 {
8756 enum machine_mode mode = insn_data[icode].operand[0].mode;
8757
8758 if (! target
8759 || GET_MODE (target) != mode
8760 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8761 return gen_reg_rtx (mode);
8762 else
8763 return target;
8764 }
8765
8766 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8767 check whether ARG satisfies the operand's constraints. If it doesn't,
8768 copy ARG to a temporary register and return that. Otherwise return ARG
8769 itself. */
8770
8771 static rtx
8772 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8773 {
8774 enum machine_mode mode = insn_data[icode].operand[opnum].mode;
8775
8776 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8777 return arg;
8778 else
8779 return copy_to_mode_reg (mode, arg);
8780 }
8781
8782 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8783
8784 static rtx
8785 frv_volatile_memref (enum machine_mode mode, rtx arg)
8786 {
8787 rtx mem;
8788
8789 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8790 MEM_VOLATILE_P (mem) = 1;
8791 return mem;
8792 }
8793
8794 /* Expand builtins that take a single, constant argument. At the moment,
8795 only MHDSETS falls into this category. */
8796
8797 static rtx
8798 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8799 {
8800 rtx pat;
8801 rtx op0 = frv_read_argument (call, 0);
8802
8803 if (! frv_check_constant_argument (icode, 1, op0))
8804 return NULL_RTX;
8805
8806 target = frv_legitimize_target (icode, target);
8807 pat = GEN_FCN (icode) (target, op0);
8808 if (! pat)
8809 return NULL_RTX;
8810
8811 emit_insn (pat);
8812 return target;
8813 }
8814
8815 /* Expand builtins that take one operand. */
8816
8817 static rtx
8818 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8819 {
8820 rtx pat;
8821 rtx op0 = frv_read_argument (call, 0);
8822
8823 target = frv_legitimize_target (icode, target);
8824 op0 = frv_legitimize_argument (icode, 1, op0);
8825 pat = GEN_FCN (icode) (target, op0);
8826 if (! pat)
8827 return NULL_RTX;
8828
8829 emit_insn (pat);
8830 return target;
8831 }
8832
8833 /* Expand builtins that take two operands. */
8834
8835 static rtx
8836 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8837 {
8838 rtx pat;
8839 rtx op0 = frv_read_argument (call, 0);
8840 rtx op1 = frv_read_argument (call, 1);
8841
8842 target = frv_legitimize_target (icode, target);
8843 op0 = frv_legitimize_argument (icode, 1, op0);
8844 op1 = frv_legitimize_argument (icode, 2, op1);
8845 pat = GEN_FCN (icode) (target, op0, op1);
8846 if (! pat)
8847 return NULL_RTX;
8848
8849 emit_insn (pat);
8850 return target;
8851 }
8852
8853 /* Expand cut-style builtins, which take two explicit operands and an
8854 implicit ACCG operand. */
8855
8856 static rtx
8857 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8858 {
8859 rtx pat;
8860 rtx op0 = frv_read_argument (call, 0);
8861 rtx op1 = frv_read_argument (call, 1);
8862 rtx op2;
8863
8864 target = frv_legitimize_target (icode, target);
8865 op0 = frv_int_to_acc (icode, 1, op0);
8866 if (! op0)
8867 return NULL_RTX;
8868
8869 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8870 {
8871 if (! frv_check_constant_argument (icode, 2, op1))
8872 return NULL_RTX;
8873 }
8874 else
8875 op1 = frv_legitimize_argument (icode, 2, op1);
8876
8877 op2 = frv_matching_accg_for_acc (op0);
8878 pat = GEN_FCN (icode) (target, op0, op1, op2);
8879 if (! pat)
8880 return NULL_RTX;
8881
8882 emit_insn (pat);
8883 return target;
8884 }
8885
8886 /* Expand builtins that take two operands, the second of which must be an immediate. */
8887
8888 static rtx
8889 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8890 {
8891 rtx pat;
8892 rtx op0 = frv_read_argument (call, 0);
8893 rtx op1 = frv_read_argument (call, 1);
8894
8895 if (! frv_check_constant_argument (icode, 2, op1))
8896 return NULL_RTX;
8897
8898 target = frv_legitimize_target (icode, target);
8899 op0 = frv_legitimize_argument (icode, 1, op0);
8900 pat = GEN_FCN (icode) (target, op0, op1);
8901 if (! pat)
8902 return NULL_RTX;
8903
8904 emit_insn (pat);
8905 return target;
8906 }
8907
8908 /* Expand builtins that take two operands, the first being a pointer
8909 to ints, and that return void. */
8910
8911 static rtx
8912 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8913 {
8914 rtx pat;
8915 rtx op0 = frv_read_argument (call, 0);
8916 rtx op1 = frv_read_argument (call, 1);
8917 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8918 rtx addr;
8919
8920 if (GET_CODE (op0) != MEM)
8921 {
8922 rtx reg = op0;
8923
8924 if (! offsettable_address_p (0, mode0, op0))
8925 {
8926 reg = gen_reg_rtx (Pmode);
8927 emit_insn (gen_rtx_SET (VOIDmode, reg, op0));
8928 }
8929
8930 op0 = gen_rtx_MEM (SImode, reg);
8931 }
8932
8933 addr = XEXP (op0, 0);
8934 if (! offsettable_address_p (0, mode0, addr))
8935 addr = copy_to_mode_reg (Pmode, op0);
8936
8937 op0 = change_address (op0, V4SImode, addr);
8938 op1 = frv_legitimize_argument (icode, 1, op1);
8939 pat = GEN_FCN (icode) (op0, op1);
8940 if (! pat)
8941 return 0;
8942
8943 emit_insn (pat);
8944 return 0;
8945 }
8946
8947 /* Expand builtins that take two long operands and return void. */
8948
8949 static rtx
8950 frv_expand_int_void2arg (enum insn_code icode, tree call)
8951 {
8952 rtx pat;
8953 rtx op0 = frv_read_argument (call, 0);
8954 rtx op1 = frv_read_argument (call, 1);
8955
8956 op0 = frv_legitimize_argument (icode, 1, op0);
8957 op1 = frv_legitimize_argument (icode, 1, op1);
8958 pat = GEN_FCN (icode) (op0, op1);
8959 if (! pat)
8960 return NULL_RTX;
8961
8962 emit_insn (pat);
8963 return NULL_RTX;
8964 }
8965
8966 /* Expand prefetch builtins. These take a single address as argument. */
8967
8968 static rtx
8969 frv_expand_prefetches (enum insn_code icode, tree call)
8970 {
8971 rtx pat;
8972 rtx op0 = frv_read_argument (call, 0);
8973
8974 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
8975 if (! pat)
8976 return 0;
8977
8978 emit_insn (pat);
8979 return 0;
8980 }
8981
8982 /* Expand builtins that take three operands and return void. The first
8983 argument must be a constant that describes a pair or quad of accumulators.
8984 A fourth operand, the accumulator guard register that corresponds to the
8985 accumulator, is added internally. */
8986
8987 static rtx
8988 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
8989 {
8990 rtx pat;
8991 rtx op0 = frv_read_argument (call, 0);
8992 rtx op1 = frv_read_argument (call, 1);
8993 rtx op2 = frv_read_argument (call, 2);
8994 rtx op3;
8995
8996 op0 = frv_int_to_acc (icode, 0, op0);
8997 if (! op0)
8998 return NULL_RTX;
8999
9000 op1 = frv_legitimize_argument (icode, 1, op1);
9001 op2 = frv_legitimize_argument (icode, 2, op2);
9002 op3 = frv_matching_accg_for_acc (op0);
9003 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9004 if (! pat)
9005 return NULL_RTX;
9006
9007 emit_insn (pat);
9008 return NULL_RTX;
9009 }
9010
9011 /* Expand builtins that perform accumulator-to-accumulator operations.
9012 These builtins take two accumulator numbers as argument and return
9013 void. */
9014
9015 static rtx
9016 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
9017 {
9018 rtx pat;
9019 rtx op0 = frv_read_argument (call, 0);
9020 rtx op1 = frv_read_argument (call, 1);
9021 rtx op2;
9022 rtx op3;
9023
9024 op0 = frv_int_to_acc (icode, 0, op0);
9025 if (! op0)
9026 return NULL_RTX;
9027
9028 op1 = frv_int_to_acc (icode, 1, op1);
9029 if (! op1)
9030 return NULL_RTX;
9031
9032 op2 = frv_matching_accg_for_acc (op0);
9033 op3 = frv_matching_accg_for_acc (op1);
9034 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9035 if (! pat)
9036 return NULL_RTX;
9037
9038 emit_insn (pat);
9039 return NULL_RTX;
9040 }
9041
9042 /* Expand a __builtin_read* function. ICODE is the instruction code for the
9043 membar and TARGET_MODE is the mode that the loaded value should have. */
9044
9045 static rtx
9046 frv_expand_load_builtin (enum insn_code icode, enum machine_mode target_mode,
9047 tree call, rtx target)
9048 {
9049 rtx op0 = frv_read_argument (call, 0);
9050 rtx cookie = frv_io_address_cookie (op0);
9051
9052 if (target == 0 || !REG_P (target))
9053 target = gen_reg_rtx (target_mode);
9054 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9055 convert_move (target, op0, 1);
9056 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
9057 cfun->machine->has_membar_p = 1;
9058 return target;
9059 }
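/* Editor's note: illustrative only.  For a call such as __builtin_read32 (p)
   (the user-level spelling is assumed), the expander above emits roughly

     (set (reg:SI target) (mem/v:SI (reg p)))   -- volatile load from *p
     [membar insn carrying the address cookie and FRV_IO_READ]

   and records that the function contains a membar.  */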
9060
9061 /* Likewise __builtin_write* functions. */
9062
9063 static rtx
9064 frv_expand_store_builtin (enum insn_code icode, tree call)
9065 {
9066 rtx op0 = frv_read_argument (call, 0);
9067 rtx op1 = frv_read_argument (call, 1);
9068 rtx cookie = frv_io_address_cookie (op0);
9069
9070 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9071 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
9072 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
9073 cfun->machine->has_membar_p = 1;
9074 return NULL_RTX;
9075 }
9076
9077 /* Expand the MDPACKH builtin. It takes four unsigned short arguments;
9078 each argument supplies the low halfword of one word of the two double-word
9079 input registers. CALL is the tree for the call and TARGET, if nonnull,
9080 suggests a good place to put the return value. */
9081
9082 static rtx
9083 frv_expand_mdpackh_builtin (tree call, rtx target)
9084 {
9085 enum insn_code icode = CODE_FOR_mdpackh;
9086 rtx pat, op0, op1;
9087 rtx arg1 = frv_read_argument (call, 0);
9088 rtx arg2 = frv_read_argument (call, 1);
9089 rtx arg3 = frv_read_argument (call, 2);
9090 rtx arg4 = frv_read_argument (call, 3);
9091
9092 target = frv_legitimize_target (icode, target);
9093 op0 = gen_reg_rtx (DImode);
9094 op1 = gen_reg_rtx (DImode);
9095
9096 /* The high half of each word is not explicitly initialized, so indicate
9097 that the input operands are not live before this point. */
9098 emit_insn (gen_rtx_CLOBBER (DImode, op0));
9099 emit_insn (gen_rtx_CLOBBER (DImode, op1));
9100
9101 /* Move each argument into the low half of its associated input word. */
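  /* Byte offsets 2 and 6 select the low halfword of the upper and the lower
     32-bit word of a big-endian DImode value, respectively.  */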
9102 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
9103 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
9104 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
9105 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
9106
9107 pat = GEN_FCN (icode) (target, op0, op1);
9108 if (! pat)
9109 return NULL_RTX;
9110
9111 emit_insn (pat);
9112 return target;
9113 }
9114
9115 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
9116 number as argument. */
9117
9118 static rtx
9119 frv_expand_mclracc_builtin (tree call)
9120 {
9121 enum insn_code icode = CODE_FOR_mclracc;
9122 rtx pat;
9123 rtx op0 = frv_read_argument (call, 0);
9124
9125 op0 = frv_int_to_acc (icode, 0, op0);
9126 if (! op0)
9127 return NULL_RTX;
9128
9129 pat = GEN_FCN (icode) (op0);
9130 if (pat)
9131 emit_insn (pat);
9132
9133 return NULL_RTX;
9134 }
9135
9136 /* Expand builtins that take no arguments. */
9137
9138 static rtx
9139 frv_expand_noargs_builtin (enum insn_code icode)
9140 {
9141 rtx pat = GEN_FCN (icode) (const0_rtx);
9142 if (pat)
9143 emit_insn (pat);
9144
9145 return NULL_RTX;
9146 }
9147
9148 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9149 number or accumulator guard number as argument and return an SI integer. */
9150
9151 static rtx
9152 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9153 {
9154 rtx pat;
9155 rtx target = gen_reg_rtx (SImode);
9156 rtx op0 = frv_read_argument (call, 0);
9157
9158 op0 = frv_int_to_acc (icode, 1, op0);
9159 if (! op0)
9160 return NULL_RTX;
9161
9162 pat = GEN_FCN (icode) (target, op0);
9163 if (! pat)
9164 return NULL_RTX;
9165
9166 emit_insn (pat);
9167 return target;
9168 }
9169
9170 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9171 accumulator guard as their first argument and an SImode value as their
9172 second. */
9173
9174 static rtx
9175 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9176 {
9177 rtx pat;
9178 rtx op0 = frv_read_argument (call, 0);
9179 rtx op1 = frv_read_argument (call, 1);
9180
9181 op0 = frv_int_to_acc (icode, 0, op0);
9182 if (! op0)
9183 return NULL_RTX;
9184
9185 op1 = frv_legitimize_argument (icode, 1, op1);
9186 pat = GEN_FCN (icode) (op0, op1);
9187 if (pat)
9188 emit_insn (pat);
9189
9190 return NULL_RTX;
9191 }
9192
9193 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9194 to move DImode values into and out of IACC0. */
9195
9196 static void
9197 frv_split_iacc_move (rtx dest, rtx src)
9198 {
9199 enum machine_mode inner;
9200 int i;
9201
9202 inner = GET_MODE (dest);
9203 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
9204 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
9205 simplify_gen_subreg (SImode, src, inner, i));
9206 }
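/* Editor's note: illustrative only.  For DImode operands the loop above
   amounts to two word-sized moves (byte offsets 0 and 4 are the high and
   low words of a big-endian DImode value):

     emit_move_insn (simplify_gen_subreg (SImode, dest, DImode, 0),
                     simplify_gen_subreg (SImode, src, DImode, 0));
     emit_move_insn (simplify_gen_subreg (SImode, dest, DImode, 4),
                     simplify_gen_subreg (SImode, src, DImode, 4));
*/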
9207
9208 /* Expand builtins. */
9209
9210 static rtx
9211 frv_expand_builtin (tree exp,
9212 rtx target,
9213 rtx subtarget ATTRIBUTE_UNUSED,
9214 enum machine_mode mode ATTRIBUTE_UNUSED,
9215 int ignore ATTRIBUTE_UNUSED)
9216 {
9217 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9218 unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl);
9219 unsigned i;
9220 struct builtin_description *d;
9221
9222 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
9223 {
9224 error ("media functions are not available unless -mmedia is used");
9225 return NULL_RTX;
9226 }
9227
9228 switch (fcode)
9229 {
9230 case FRV_BUILTIN_MCOP1:
9231 case FRV_BUILTIN_MCOP2:
9232 case FRV_BUILTIN_MDUNPACKH:
9233 case FRV_BUILTIN_MBTOHE:
9234 if (! TARGET_MEDIA_REV1)
9235 {
9236 error ("this media function is only available on the fr500");
9237 return NULL_RTX;
9238 }
9239 break;
9240
9241 case FRV_BUILTIN_MQXMACHS:
9242 case FRV_BUILTIN_MQXMACXHS:
9243 case FRV_BUILTIN_MQMACXHS:
9244 case FRV_BUILTIN_MADDACCS:
9245 case FRV_BUILTIN_MSUBACCS:
9246 case FRV_BUILTIN_MASACCS:
9247 case FRV_BUILTIN_MDADDACCS:
9248 case FRV_BUILTIN_MDSUBACCS:
9249 case FRV_BUILTIN_MDASACCS:
9250 case FRV_BUILTIN_MABSHS:
9251 case FRV_BUILTIN_MDROTLI:
9252 case FRV_BUILTIN_MCPLHI:
9253 case FRV_BUILTIN_MCPLI:
9254 case FRV_BUILTIN_MDCUTSSI:
9255 case FRV_BUILTIN_MQSATHS:
9256 case FRV_BUILTIN_MHSETLOS:
9257 case FRV_BUILTIN_MHSETLOH:
9258 case FRV_BUILTIN_MHSETHIS:
9259 case FRV_BUILTIN_MHSETHIH:
9260 case FRV_BUILTIN_MHDSETS:
9261 case FRV_BUILTIN_MHDSETH:
9262 if (! TARGET_MEDIA_REV2)
9263 {
9264 error ("this media function is only available on the fr400"
9265 " and fr550");
9266 return NULL_RTX;
9267 }
9268 break;
9269
9270 case FRV_BUILTIN_SMASS:
9271 case FRV_BUILTIN_SMSSS:
9272 case FRV_BUILTIN_SMU:
9273 case FRV_BUILTIN_ADDSS:
9274 case FRV_BUILTIN_SUBSS:
9275 case FRV_BUILTIN_SLASS:
9276 case FRV_BUILTIN_SCUTSS:
9277 case FRV_BUILTIN_IACCreadll:
9278 case FRV_BUILTIN_IACCreadl:
9279 case FRV_BUILTIN_IACCsetll:
9280 case FRV_BUILTIN_IACCsetl:
9281 if (!TARGET_FR405_BUILTINS)
9282 {
9283 error ("this builtin function is only available"
9284 " on the fr405 and fr450");
9285 return NULL_RTX;
9286 }
9287 break;
9288
9289 case FRV_BUILTIN_PREFETCH:
9290 if (!TARGET_FR500_FR550_BUILTINS)
9291 {
9292 error ("this builtin function is only available on the fr500"
9293 " and fr550");
9294 return NULL_RTX;
9295 }
9296 break;
9297
9298 case FRV_BUILTIN_MQLCLRHS:
9299 case FRV_BUILTIN_MQLMTHS:
9300 case FRV_BUILTIN_MQSLLHI:
9301 case FRV_BUILTIN_MQSRAHI:
9302 if (!TARGET_MEDIA_FR450)
9303 {
9304 error ("this builtin function is only available on the fr450");
9305 return NULL_RTX;
9306 }
9307 break;
9308
9309 default:
9310 break;
9311 }
9312
9313 /* Expand unique builtins. */
9314
9315 switch (fcode)
9316 {
9317 case FRV_BUILTIN_MTRAP:
9318 return frv_expand_noargs_builtin (CODE_FOR_mtrap);
9319
9320 case FRV_BUILTIN_MCLRACC:
9321 return frv_expand_mclracc_builtin (exp);
9322
9323 case FRV_BUILTIN_MCLRACCA:
9324 if (TARGET_ACC_8)
9325 return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
9326 else
9327 return frv_expand_noargs_builtin (CODE_FOR_mclracca4);
9328
9329 case FRV_BUILTIN_MRDACC:
9330 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);
9331
9332 case FRV_BUILTIN_MRDACCG:
9333 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);
9334
9335 case FRV_BUILTIN_MWTACC:
9336 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);
9337
9338 case FRV_BUILTIN_MWTACCG:
9339 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);
9340
9341 case FRV_BUILTIN_MDPACKH:
9342 return frv_expand_mdpackh_builtin (exp, target);
9343
9344 case FRV_BUILTIN_IACCreadll:
9345 {
9346 rtx src = frv_read_iacc_argument (DImode, exp, 0);
9347 if (target == 0 || !REG_P (target))
9348 target = gen_reg_rtx (DImode);
9349 frv_split_iacc_move (target, src);
9350 return target;
9351 }
9352
9353 case FRV_BUILTIN_IACCreadl:
9354 return frv_read_iacc_argument (SImode, exp, 0);
9355
9356 case FRV_BUILTIN_IACCsetll:
9357 {
9358 rtx dest = frv_read_iacc_argument (DImode, exp, 0);
9359 rtx src = frv_read_argument (exp, 1);
9360 frv_split_iacc_move (dest, force_reg (DImode, src));
9361 return 0;
9362 }
9363
9364 case FRV_BUILTIN_IACCsetl:
9365 {
9366 rtx dest = frv_read_iacc_argument (SImode, exp, 0);
9367 rtx src = frv_read_argument (exp, 1);
9368 emit_move_insn (dest, force_reg (SImode, src));
9369 return 0;
9370 }
9371
9372 default:
9373 break;
9374 }
9375
9376 /* Expand groups of builtins. */
9377
9378 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
9379 if (d->code == fcode)
9380 return frv_expand_set_builtin (d->icode, exp, target);
9381
9382 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9383 if (d->code == fcode)
9384 return frv_expand_unop_builtin (d->icode, exp, target);
9385
9386 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9387 if (d->code == fcode)
9388 return frv_expand_binop_builtin (d->icode, exp, target);
9389
9390 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
9391 if (d->code == fcode)
9392 return frv_expand_cut_builtin (d->icode, exp, target);
9393
9394 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
9395 if (d->code == fcode)
9396 return frv_expand_binopimm_builtin (d->icode, exp, target);
9397
9398 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
9399 if (d->code == fcode)
9400 return frv_expand_voidbinop_builtin (d->icode, exp);
9401
9402 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
9403 if (d->code == fcode)
9404 return frv_expand_voidtriop_builtin (d->icode, exp);
9405
9406 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
9407 if (d->code == fcode)
9408 return frv_expand_voidaccop_builtin (d->icode, exp);
9409
9410 for (i = 0, d = bdesc_int_void2arg;
9411 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
9412 if (d->code == fcode)
9413 return frv_expand_int_void2arg (d->icode, exp);
9414
9415 for (i = 0, d = bdesc_prefetches;
9416 i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
9417 if (d->code == fcode)
9418 return frv_expand_prefetches (d->icode, exp);
9419
9420 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
9421 if (d->code == fcode)
9422 return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
9423 exp, target);
9424
9425 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
9426 if (d->code == fcode)
9427 return frv_expand_store_builtin (d->icode, exp);
9428
9429 return 0;
9430 }
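/* Editor's note: illustrative only.  frv_expand_builtin is not called
   directly; it is reached through the TARGET_EXPAND_BUILTIN hook, which is
   installed elsewhere in this file with the usual idiom (spelling assumed):

     #undef  TARGET_EXPAND_BUILTIN
     #define TARGET_EXPAND_BUILTIN frv_expand_builtin
*/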
9431
9432 static bool
9433 frv_in_small_data_p (tree decl)
9434 {
9435 HOST_WIDE_INT size;
9436 tree section_name;
9437
9438 /* Don't apply the -G flag to internal compiler structures. We
9439 should leave such structures in the main data section, partly
9440 for efficiency and partly because the size of some of them
9441 (such as C++ typeinfos) is not known until later. */
9442 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
9443 return false;
9444
9445 /* If we already know which section the decl should be in, see if
9446 it's a small data section. */
9447 section_name = DECL_SECTION_NAME (decl);
9448 if (section_name)
9449 {
9450 gcc_assert (TREE_CODE (section_name) == STRING_CST);
9451 if (frv_string_begins_with (section_name, ".sdata"))
9452 return true;
9453 if (frv_string_begins_with (section_name, ".sbss"))
9454 return true;
9455 return false;
9456 }
9457
9458 size = int_size_in_bytes (TREE_TYPE (decl));
9459 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
9460 return true;
9461
9462 return false;
9463 }
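/* Editor's note: illustrative only.  With -G 8 (so g_switch_value is 8),
   definitions such as

     int counter;       -- 4 bytes: placed in small data (.sdata/.sbss)
     int table[100];    -- 400 bytes: left in the ordinary data sections

   are split by the size test above, while an explicit section attribute
   whose name begins with ".sdata" or ".sbss" always counts as small data.  */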
9464 \f
9465 static bool
9466 frv_rtx_costs (rtx x,
9467 int code ATTRIBUTE_UNUSED,
9468 int outer_code ATTRIBUTE_UNUSED,
9469 int *total)
9470 {
9471 if (outer_code == MEM)
9472 {
9473 /* Don't differentiate between memory addresses. All the ones
9474 we accept have equal cost. */
9475 *total = COSTS_N_INSNS (0);
9476 return true;
9477 }
9478
9479 switch (code)
9480 {
9481 case CONST_INT:
9482 /* Make 12-bit integers really cheap. */
9483 if (IN_RANGE_P (INTVAL (x), -2048, 2047))
9484 {
9485 *total = 0;
9486 return true;
9487 }
9488 /* Fall through. */
9489
9490 case CONST:
9491 case LABEL_REF:
9492 case SYMBOL_REF:
9493 case CONST_DOUBLE:
9494 *total = COSTS_N_INSNS (2);
9495 return true;
9496
9497 case PLUS:
9498 case MINUS:
9499 case AND:
9500 case IOR:
9501 case XOR:
9502 case ASHIFT:
9503 case ASHIFTRT:
9504 case LSHIFTRT:
9505 case NOT:
9506 case NEG:
9507 case COMPARE:
9508 if (GET_MODE (x) == SImode)
9509 *total = COSTS_N_INSNS (1);
9510 else if (GET_MODE (x) == DImode)
9511 *total = COSTS_N_INSNS (2);
9512 else
9513 *total = COSTS_N_INSNS (3);
9514 return true;
9515
9516 case MULT:
9517 if (GET_MODE (x) == SImode)
9518 *total = COSTS_N_INSNS (2);
9519 else
9520 *total = COSTS_N_INSNS (6); /* guess */
9521 return true;
9522
9523 case DIV:
9524 case UDIV:
9525 case MOD:
9526 case UMOD:
9527 *total = COSTS_N_INSNS (18);
9528 return true;
9529
9530 case MEM:
9531 *total = COSTS_N_INSNS (3);
9532 return true;
9533
9534 default:
9535 return false;
9536 }
9537 }
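/* Editor's note: illustrative only.  Sample costs assigned by the switch
   above:

     (const_int 100)          -- 0 (fits in the signed 12-bit range)
     (symbol_ref "x")         -- COSTS_N_INSNS (2)
     (plus:SI (reg) (reg))    -- COSTS_N_INSNS (1)
     (mult:DI (reg) (reg))    -- COSTS_N_INSNS (6), a guess per the comment
     (udiv:SI (reg) (reg))    -- COSTS_N_INSNS (18)
*/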
9538 \f
9539 static void
9540 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9541 {
9542 switch_to_section (ctors_section);
9543 assemble_align (POINTER_SIZE);
9544 if (TARGET_FDPIC)
9545 {
9546 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9547
9548 gcc_assert (ok);
9549 return;
9550 }
9551 assemble_integer_with_op ("\t.picptr\t", symbol);
9552 }
9553
9554 static void
9555 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9556 {
9557 switch_to_section (dtors_section);
9558 assemble_align (POINTER_SIZE);
9559 if (TARGET_FDPIC)
9560 {
9561 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9562
9563 gcc_assert (ok);
9564 return;
9565 }
9566 assemble_integer_with_op ("\t.picptr\t", symbol);
9567 }
9568
9569 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9570
9571 static rtx
9572 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9573 int incoming ATTRIBUTE_UNUSED)
9574 {
9575 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
9576 }
9577
9578 #define TLS_BIAS (2048 - 16)
9579
9580 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
9581 We need to emit DTP-relative relocations. */
9582
9583 static void
9584 frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
9585 {
9586 gcc_assert (size == 4);
9587 fputs ("\t.picptr\ttlsmoff(", file);
9588 /* We want the unbiased TLS offset, so add the bias to the
9589 expression, such that the implicit biasing cancels out. */
9590 output_addr_const (file, plus_constant (x, TLS_BIAS));
9591 fputs (")", file);
9592 }
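/* Editor's note: illustrative only.  For a DTP-relative reference to a
   symbol x, the function above emits a directive of the form

     .picptr tlsmoff(x+2032)

   where 2032 is TLS_BIAS (2048 - 16); adding the bias here makes the
   relocation's implicit biasing cancel out, as the comment above says.  */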
9593
9594 #include "gt-frv.h"