1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "double-int.h"
36 #include "fold-const.h"
39 #include "stringpool.h"
40 #include "stor-layout.h"
42 #include "hard-reg-set.h"
43 #include "insn-config.h"
44 #include "conditions.h"
45 #include "insn-flags.h"
47 #include "insn-attr.h"
56 #include "insn-codes.h"
61 #include "diagnostic-core.h"
63 #include "target-def.h"
64 #include "langhooks.h"
65 #include "dominance.h"
71 #include "cfgcleanup.h"
73 #include "basic-block.h"
75 #include "hash-table.h"
76 #include "tree-ssa-alias.h"
77 #include "internal-fn.h"
78 #include "gimple-fold.h"
80 #include "gimple-expr.h"
89 /* Structure of this file:
91 + Command Line Option Support
92 + Pattern support - constraints, predicates, expanders
95 + Functions to save and restore machine-specific function data.
96 + Frame/Epilog/Prolog Related
98 + Function args in registers
99 + Handle pipeline hazards
102 + Machine-dependent Reorg
107 Symbols are encoded as @ <char> . <name> where <char> is one of these:
115 c - cb (control bus) */
/* Per-function machine-specific data for the MeP backend, allocated by
   mep_init_machine_status and GC-tracked via GTY(()).
   NOTE(review): this extraction has elided lines; the fields below are
   reproduced verbatim from the visible fragment, and some field
   declarations / comment terminators in between are missing.  */
117 struct GTY(()) machine_function
/* Nonzero if this function needs a frame pointer — presumably cached
   here so prologue/epilogue code can consult it; TODO confirm against
   the full source.  */
119 int mep_frame_pointer_needed
;
/* Number of argument registers that must be saved — assumption from the
   name; verify against the frame-layout code.  */
122 int arg_regs_to_save
;
127 /* Records __builtin_return address. */
/* Per-hard-register save-slot offsets and saved flags, indexed by
   register number.  */
131 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
132 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
134 /* 2 if the current function has an interrupt attribute, 1 if not, 0
135 if unknown. This is here because resource.c uses EPILOGUE_USES
137 int interrupt_handler
;
139 /* Likewise, for disinterrupt attribute. */
140 int disable_interrupts
;
142 /* Number of doloop tags used so far. */
145 /* True if the last tag was allocated to a doloop_end. */
146 bool doloop_tag_from_end
;
148 /* True if reload changes $TP. */
149 bool reload_changes_tp
;
151 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
152 We only set this if the function is an interrupt handler. */
153 int asms_without_operands
;
/* True iff X is a REG rtx naming one of the MeP control registers
   (delegates the register-number test to ANY_CONTROL_REGNO_P).  */
156 #define MEP_CONTROL_REG(x) \
157 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
159 static GTY(()) section
* based_section
;
160 static GTY(()) section
* tinybss_section
;
161 static GTY(()) section
* far_section
;
162 static GTY(()) section
* farbss_section
;
163 static GTY(()) section
* frodata_section
;
164 static GTY(()) section
* srodata_section
;
166 static GTY(()) section
* vtext_section
;
167 static GTY(()) section
* vftext_section
;
168 static GTY(()) section
* ftext_section
;
170 static void mep_set_leaf_registers (int);
171 static bool symbol_p (rtx
);
172 static bool symbolref_p (rtx
);
173 static void encode_pattern_1 (rtx
);
174 static void encode_pattern (rtx
);
175 static bool const_in_range (rtx
, int, int);
176 static void mep_rewrite_mult (rtx_insn
*, rtx
);
177 static void mep_rewrite_mulsi3 (rtx_insn
*, rtx
, rtx
, rtx
);
178 static void mep_rewrite_maddsi3 (rtx_insn
*, rtx
, rtx
, rtx
, rtx
);
179 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx_insn
*, bool);
180 static bool move_needs_splitting (rtx
, rtx
, machine_mode
);
181 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
182 static bool mep_nongeneral_reg (rtx
);
183 static bool mep_general_copro_reg (rtx
);
184 static bool mep_nonregister (rtx
);
185 static struct machine_function
* mep_init_machine_status (void);
186 static rtx
mep_tp_rtx (void);
187 static rtx
mep_gp_rtx (void);
188 static bool mep_interrupt_p (void);
189 static bool mep_disinterrupt_p (void);
190 static bool mep_reg_set_p (rtx
, rtx
);
191 static bool mep_reg_set_in_function (int);
192 static bool mep_interrupt_saved_reg (int);
193 static bool mep_call_saves_register (int);
194 static rtx_insn
*F (rtx_insn
*);
195 static void add_constant (int, int, int, int);
196 static rtx_insn
*maybe_dead_move (rtx
, rtx
, bool);
197 static void mep_reload_pointer (int, const char *);
198 static void mep_start_function (FILE *, HOST_WIDE_INT
);
199 static bool mep_function_ok_for_sibcall (tree
, tree
);
200 static int unique_bit_in (HOST_WIDE_INT
);
201 static int bit_size_for_clip (HOST_WIDE_INT
);
202 static int bytesize (const_tree
, machine_mode
);
203 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
204 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
205 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
206 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
207 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
208 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
209 static bool mep_function_attribute_inlinable_p (const_tree
);
210 static bool mep_can_inline_p (tree
, tree
);
211 static bool mep_lookup_pragma_disinterrupt (const char *);
212 static int mep_multiple_address_regions (tree
, bool);
213 static int mep_attrlist_to_encoding (tree
, tree
);
214 static void mep_insert_attributes (tree
, tree
*);
215 static void mep_encode_section_info (tree
, rtx
, int);
216 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
217 static void mep_unique_section (tree
, int);
218 static unsigned int mep_section_type_flags (tree
, const char *, int);
219 static void mep_asm_named_section (const char *, unsigned int, tree
);
220 static bool mep_mentioned_p (rtx
, rtx
, int);
221 static void mep_reorg_regmove (rtx_insn
*);
222 static rtx_insn
*mep_insert_repeat_label_last (rtx_insn
*, rtx_code_label
*,
224 static void mep_reorg_repeat (rtx_insn
*);
225 static bool mep_invertable_branch_p (rtx_insn
*);
226 static void mep_invert_branch (rtx_insn
*, rtx_insn
*);
227 static void mep_reorg_erepeat (rtx_insn
*);
228 static void mep_jmp_return_reorg (rtx_insn
*);
229 static void mep_reorg_addcombine (rtx_insn
*);
230 static void mep_reorg (void);
231 static void mep_init_intrinsics (void);
232 static void mep_init_builtins (void);
233 static void mep_intrinsic_unavailable (int);
234 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
235 static bool mep_get_move_insn (int, const struct cgen_insn
**);
236 static rtx
mep_convert_arg (machine_mode
, rtx
);
237 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
238 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
239 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
240 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
241 static int mep_adjust_cost (rtx_insn
*, rtx
, rtx_insn
*, int);
242 static int mep_issue_rate (void);
243 static rtx_insn
*mep_find_ready_insn (rtx_insn
**, int, enum attr_slot
, int);
244 static void mep_move_ready_insn (rtx_insn
**, int, rtx_insn
*);
245 static int mep_sched_reorder (FILE *, int, rtx_insn
**, int *, int);
246 static rtx_insn
*mep_make_bundle (rtx
, rtx_insn
*);
247 static void mep_bundle_insns (rtx_insn
*);
248 static bool mep_rtx_cost (rtx
, int, int, int, int *, bool);
249 static int mep_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
250 static void mep_setup_incoming_varargs (cumulative_args_t
, machine_mode
,
252 static bool mep_pass_by_reference (cumulative_args_t cum
, machine_mode
,
254 static rtx
mep_function_arg (cumulative_args_t
, machine_mode
,
256 static void mep_function_arg_advance (cumulative_args_t
, machine_mode
,
258 static bool mep_vector_mode_supported_p (machine_mode
);
259 static rtx
mep_allocate_initial_value (rtx
);
260 static void mep_asm_init_sections (void);
261 static int mep_comp_type_attributes (const_tree
, const_tree
);
262 static bool mep_narrow_volatile_bitfield (void);
263 static rtx
mep_expand_builtin_saveregs (void);
264 static tree
mep_build_builtin_va_list (void);
265 static void mep_expand_va_start (tree
, rtx
);
266 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
267 static bool mep_can_eliminate (const int, const int);
268 static void mep_conditional_register_usage (void);
269 static void mep_trampoline_init (rtx
, tree
, rtx
);
271 #define WANT_GCC_DEFINITIONS
272 #include "mep-intrin.h"
273 #undef WANT_GCC_DEFINITIONS
276 /* Command Line Option Support. */
278 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
280 /* True if we can use cmov instructions to move values back and forth
281 between core and coprocessor registers. */
282 bool mep_have_core_copro_moves_p
;
284 /* True if we can use cmov instructions (or a work-alike) to move
285 values between coprocessor registers. */
286 bool mep_have_copro_copro_moves_p
;
288 /* A table of all coprocessor instructions that can act like
289 a coprocessor-to-coprocessor cmov. */
290 static const int mep_cmov_insns
[] = {
305 mep_set_leaf_registers (int enable
)
309 if (mep_leaf_registers
[0] != enable
)
310 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
311 mep_leaf_registers
[i
] = enable
;
315 mep_conditional_register_usage (void)
319 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
321 fixed_regs
[HI_REGNO
] = 1;
322 fixed_regs
[LO_REGNO
] = 1;
323 call_used_regs
[HI_REGNO
] = 1;
324 call_used_regs
[LO_REGNO
] = 1;
327 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
332 mep_option_override (void)
336 cl_deferred_option
*opt
;
337 vec
<cl_deferred_option
> *v
= (vec
<cl_deferred_option
> *) mep_deferred_options
;
340 FOR_EACH_VEC_ELT (*v
, i
, opt
)
342 switch (opt
->opt_index
)
345 for (j
= 0; j
< 32; j
++)
346 fixed_regs
[j
+ 48] = 0;
347 for (j
= 0; j
< 32; j
++)
348 call_used_regs
[j
+ 48] = 1;
349 for (j
= 6; j
< 8; j
++)
350 call_used_regs
[j
+ 48] = 0;
352 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
387 warning (OPT_fpic
, "-fpic is not supported");
389 warning (OPT_fPIC
, "-fPIC is not supported");
390 if (TARGET_S
&& TARGET_M
)
391 error ("only one of -ms and -mm may be given");
392 if (TARGET_S
&& TARGET_L
)
393 error ("only one of -ms and -ml may be given");
394 if (TARGET_M
&& TARGET_L
)
395 error ("only one of -mm and -ml may be given");
396 if (TARGET_S
&& global_options_set
.x_mep_tiny_cutoff
)
397 error ("only one of -ms and -mtiny= may be given");
398 if (TARGET_M
&& global_options_set
.x_mep_tiny_cutoff
)
399 error ("only one of -mm and -mtiny= may be given");
400 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
401 warning (0, "-mclip currently has no effect without -mminmax");
403 if (mep_const_section
)
405 if (strcmp (mep_const_section
, "tiny") != 0
406 && strcmp (mep_const_section
, "near") != 0
407 && strcmp (mep_const_section
, "far") != 0)
408 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
412 mep_tiny_cutoff
= 65536;
415 if (TARGET_L
&& ! global_options_set
.x_mep_tiny_cutoff
)
418 if (TARGET_64BIT_CR_REGS
)
419 flag_split_wide_types
= 0;
421 init_machine_status
= mep_init_machine_status
;
422 mep_init_intrinsics ();
425 /* Pattern Support - constraints, predicates, expanders. */
427 /* MEP has very few instructions that can refer to the span of
428 addresses used by symbols, so it's common to check for them. */
433 int c
= GET_CODE (x
);
435 return (c
== CONST_INT
445 if (GET_CODE (x
) != MEM
)
448 c
= GET_CODE (XEXP (x
, 0));
449 return (c
== CONST_INT
454 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
456 #define GEN_REG(R, STRICT) \
459 && ((R) == ARG_POINTER_REGNUM \
460 || (R) >= FIRST_PSEUDO_REGISTER)))
462 static char pattern
[12], *patternp
;
463 static GTY(()) rtx patternr
[12];
464 #define RTX_IS(x) (strcmp (pattern, x) == 0)
467 encode_pattern_1 (rtx x
)
471 if (patternp
== pattern
+ sizeof (pattern
) - 2)
477 patternr
[patternp
-pattern
] = x
;
479 switch (GET_CODE (x
))
487 encode_pattern_1 (XEXP(x
, 0));
491 encode_pattern_1 (XEXP(x
, 0));
492 encode_pattern_1 (XEXP(x
, 1));
496 encode_pattern_1 (XEXP(x
, 0));
497 encode_pattern_1 (XEXP(x
, 1));
501 encode_pattern_1 (XEXP(x
, 0));
515 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
516 for (i
=0; i
<XVECLEN (x
, 0); i
++)
517 encode_pattern_1 (XVECEXP (x
, 0, i
));
525 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
534 encode_pattern (rtx x
)
537 encode_pattern_1 (x
);
542 mep_section_tag (rtx x
)
548 switch (GET_CODE (x
))
555 x
= XVECEXP (x
, 0, 0);
558 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
567 if (GET_CODE (x
) != SYMBOL_REF
)
570 if (name
[0] == '@' && name
[2] == '.')
572 if (name
[1] == 'i' || name
[1] == 'I')
575 return 'f'; /* near */
576 return 'n'; /* far */
584 mep_regno_reg_class (int regno
)
588 case SP_REGNO
: return SP_REGS
;
589 case TP_REGNO
: return TP_REGS
;
590 case GP_REGNO
: return GP_REGS
;
591 case 0: return R0_REGS
;
592 case HI_REGNO
: return HI_REGS
;
593 case LO_REGNO
: return LO_REGS
;
594 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
597 if (GR_REGNO_P (regno
))
598 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
599 if (CONTROL_REGNO_P (regno
))
602 if (CR_REGNO_P (regno
))
606 /* Search for the register amongst user-defined subclasses of
607 the coprocessor registers. */
608 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
610 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
612 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
614 enum reg_class sub
= reg_class_subclasses
[i
][j
];
616 if (sub
== LIM_REG_CLASSES
)
618 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
623 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
626 if (CCR_REGNO_P (regno
))
629 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
/* Return true iff X is a CONST_INT whose value lies in the inclusive
   range [MINV, MAXV].  Used by the address/operand legitimacy checks
   below.  NOTE(review): the return type and braces are elided in this
   extraction.  */
634 const_in_range (rtx x
, int minv
, int maxv
)
636 return (GET_CODE (x
) == CONST_INT
637 && INTVAL (x
) >= minv
638 && INTVAL (x
) <= maxv
);
641 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
642 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
643 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
644 at the end of the insn stream. */
/* NOTE(review): the branch structure (return statements for the
   dest==src1 / dest==src2 cases, and the if (insn) test selecting
   emit_insn vs. emit_insn_before) is elided in this extraction —
   only the calls themselves are visible below.  */
647 mep_mulr_source (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
649 if (rtx_equal_p (dest
, src1
))
651 else if (rtx_equal_p (dest
, src2
))
656 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
658 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
663 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
664 Change the last element of PATTERN from (clobber (scratch:SI))
665 to (clobber (reg:SI HI_REGNO)). */
668 mep_rewrite_mult (rtx_insn
*insn
, rtx pattern
)
/* Grab the last vector element of the PARALLEL — the scratch clobber —
   and retarget it at the hard $hi register.  */
672 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
673 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
674 PATTERN (insn
) = pattern
;
/* Invalidate the cached insn code so the rewritten insn is
   re-recognized.  */
675 INSN_CODE (insn
) = -1;
678 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
679 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
680 store the result in DEST if nonnull. */
683 mep_rewrite_mulsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
687 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
/* NOTE(review): the conditional selecting between the two pattern
   forms below (presumably "if (dest)") is elided in this extraction.
   gen_mulsi3r keeps DEST updated as well as $lo; gen_mulsi3_lo only
   sets $lo.  */
689 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
690 mep_mulr_source (insn
, dest
, src1
, src2
));
692 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
/* Install the chosen PARALLEL, fixing up the $hi clobber.  */
693 mep_rewrite_mult (insn
, pattern
);
696 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
697 SRC3 into $lo, then use either madd or maddr. The move into $lo will
698 be deleted by a peephole2 if SRC3 is already in $lo. */
701 mep_rewrite_maddsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
705 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
/* Seed $lo with the addend; redundant copies are cleaned up later by
   peephole2 (per the header comment above).  */
706 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
/* NOTE(review): the conditional selecting between the maddr-style and
   madd-style patterns below (presumably "if (dest)") is elided in this
   extraction.  */
708 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
709 mep_mulr_source (insn
, dest
, src1
, src2
),
712 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
/* Install the chosen PARALLEL, fixing up the $hi clobber.  */
713 mep_rewrite_mult (insn
, pattern
);
716 /* Return true if $lo has the same value as integer register GPR when
717 instruction INSN is reached. If necessary, rewrite the instruction
718 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
719 rtx for (reg:SI LO_REGNO).
721 This function is intended to be used by the peephole2 pass. Since
722 that pass goes from the end of a basic block to the beginning, and
723 propagates liveness information on the way, there is no need to
724 update register notes here.
726 If GPR_DEAD_P is true on entry, and this function returns true,
727 then the caller will replace _every_ use of GPR in and after INSN
728 with LO. This means that if the instruction that sets $lo is a
729 mulr- or maddr-type instruction, we can rewrite it to use mul or
730 madd instead. In combination with the copy progagation pass,
731 this allows us to replace sequences like:
740 if GPR is no longer used. */
743 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
747 insn
= PREV_INSN (insn
);
749 switch (recog_memoized (insn
))
751 case CODE_FOR_mulsi3_1
:
753 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
755 mep_rewrite_mulsi3 (insn
,
756 gpr_dead_p
? NULL
: recog_data
.operand
[0],
757 recog_data
.operand
[1],
758 recog_data
.operand
[2]);
763 case CODE_FOR_maddsi3
:
765 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
767 mep_rewrite_maddsi3 (insn
,
768 gpr_dead_p
? NULL
: recog_data
.operand
[0],
769 recog_data
.operand
[1],
770 recog_data
.operand
[2],
771 recog_data
.operand
[3]);
776 case CODE_FOR_mulsi3r
:
777 case CODE_FOR_maddsi3r
:
779 return rtx_equal_p (recog_data
.operand
[1], gpr
);
782 if (reg_set_p (lo
, insn
)
783 || reg_set_p (gpr
, insn
)
784 || volatile_insn_p (PATTERN (insn
)))
787 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
792 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
796 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
799 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
/* NOTE(review): the save/restore of recog_data around this call and
   the "return result;" are elided in this extraction —
   mep_reuse_lo_p_1 calls recog_memoized, which clobbers recog_data.  */
801 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
806 /* Return true if SET can be turned into a post-modify load or store
807 that adds OFFSET to GPR. In other words, return true if SET can be
810 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
812 It's OK to change SET to an equivalent operation in order to
816 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
819 unsigned int reg_bytes
, mem_bytes
;
820 machine_mode reg_mode
, mem_mode
;
822 /* Only simple SETs can be converted. */
823 if (GET_CODE (set
) != SET
)
826 /* Point REG to what we hope will be the register side of the set and
827 MEM to what we hope will be the memory side. */
828 if (GET_CODE (SET_DEST (set
)) == MEM
)
830 mem
= &SET_DEST (set
);
831 reg
= &SET_SRC (set
);
835 reg
= &SET_DEST (set
);
836 mem
= &SET_SRC (set
);
837 if (GET_CODE (*mem
) == SIGN_EXTEND
)
838 mem
= &XEXP (*mem
, 0);
841 /* Check that *REG is a suitable coprocessor register. */
842 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
845 /* Check that *MEM is a suitable memory reference. */
846 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
849 /* Get the number of bytes in each operand. */
850 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
851 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
853 /* Check that OFFSET is suitably aligned. */
854 if (INTVAL (offset
) & (mem_bytes
- 1))
857 /* Convert *MEM to a normal integer mode. */
858 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
859 *mem
= change_address (*mem
, mem_mode
, NULL
);
861 /* Adjust *REG as well. */
862 *reg
= shallow_copy_rtx (*reg
);
863 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
865 /* SET is a subword load. Convert it to an explicit extension. */
866 PUT_MODE (*reg
, SImode
);
867 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
871 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
872 PUT_MODE (*reg
, reg_mode
);
877 /* Return the effect of frame-related instruction INSN. */
880 mep_frame_expr (rtx_insn
*insn
)
/* Prefer an explicit REG_FRAME_RELATED_EXPR note; otherwise fall back
   to a copy of the insn's own pattern.  */
884 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
885 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
/* Mark the returned expression itself as frame-related so DWARF CFI
   generation picks it up.  */
886 RTX_FRAME_RELATED_P (expr
) = 1;
890 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
891 new pattern in INSN1; INSN2 will be deleted by the caller. */
894 mep_make_parallel (rtx_insn
*insn1
, rtx_insn
*insn2
)
898 if (RTX_FRAME_RELATED_P (insn2
))
900 expr
= mep_frame_expr (insn2
);
901 if (RTX_FRAME_RELATED_P (insn1
))
902 expr
= gen_rtx_SEQUENCE (VOIDmode
,
903 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
904 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
905 RTX_FRAME_RELATED_P (insn1
) = 1;
908 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
909 gen_rtvec (2, PATTERN (insn1
),
911 INSN_CODE (insn1
) = -1;
914 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
915 the basic block to see if any previous load or store instruction can
916 be persuaded to do SET_INSN as a side-effect. Return true if so. */
919 mep_use_post_modify_p_1 (rtx_insn
*set_insn
, rtx reg
, rtx offset
)
926 insn
= PREV_INSN (insn
);
929 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
931 mep_make_parallel (insn
, set_insn
);
935 if (reg_set_p (reg
, insn
)
936 || reg_referenced_p (reg
, PATTERN (insn
))
937 || volatile_insn_p (PATTERN (insn
)))
941 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
945 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
948 mep_use_post_modify_p (rtx_insn
*insn
, rtx reg
, rtx offset
)
/* NOTE(review): the save/restore of recog_data and the
   "return result;" are elided in this extraction.  */
950 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
956 mep_allow_clip (rtx ux
, rtx lx
, int s
)
958 HOST_WIDE_INT u
= INTVAL (ux
);
959 HOST_WIDE_INT l
= INTVAL (lx
);
962 if (!TARGET_OPT_CLIP
)
967 for (i
= 0; i
< 30; i
++)
968 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
969 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
977 for (i
= 0; i
< 30; i
++)
978 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
985 mep_bit_position_p (rtx x
, bool looking_for
)
987 if (GET_CODE (x
) != CONST_INT
)
989 switch ((int) INTVAL(x
) & 0xff)
991 case 0x01: case 0x02: case 0x04: case 0x08:
992 case 0x10: case 0x20: case 0x40: case 0x80:
994 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
995 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1002 move_needs_splitting (rtx dest
, rtx src
,
1003 machine_mode mode ATTRIBUTE_UNUSED
)
1005 int s
= mep_section_tag (src
);
1009 if (GET_CODE (src
) == CONST
1010 || GET_CODE (src
) == MEM
)
1011 src
= XEXP (src
, 0);
1012 else if (GET_CODE (src
) == SYMBOL_REF
1013 || GET_CODE (src
) == LABEL_REF
1014 || GET_CODE (src
) == PLUS
)
1020 || (GET_CODE (src
) == PLUS
1021 && GET_CODE (XEXP (src
, 1)) == CONST_INT
1022 && (INTVAL (XEXP (src
, 1)) < -65536
1023 || INTVAL (XEXP (src
, 1)) > 0xffffff))
1024 || (GET_CODE (dest
) == REG
1025 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1031 mep_split_mov (rtx
*operands
, int symbolic
)
1035 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1040 if (GET_CODE (operands
[1]) != CONST_INT
)
1043 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1044 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1045 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1048 if (((!reload_completed
&& !reload_in_progress
)
1049 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1050 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1056 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1057 it to one specific value. So the insn chosen depends on whether
1058 the source and destination modes match. */
1061 mep_vliw_mode_match (rtx tgt
)
/* TGT is a CONST_INT flag: nonzero means the call target is a VLIW
   function.  Compare against the mode of the current function.  */
1063 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1064 bool tgt_vliw
= INTVAL (tgt
);
1066 return src_vliw
== tgt_vliw
;
1069 /* Like the above, but also test for near/far mismatches. */
1072 mep_vliw_jmp_match (rtx tgt
)
1074 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1075 bool tgt_vliw
= INTVAL (tgt
);
/* A far-section ('f') current function cannot use the matching jmp —
   NOTE(review): the early-return body for this test is elided in this
   extraction.  */
1077 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1080 return src_vliw
== tgt_vliw
;
/* Return true iff insn X is assigned the MULTI issue slot, per the
   machine description's "slot" attribute.  */
1084 mep_multi_slot (rtx_insn
*x
)
1086 return get_attr_slot (x
) == SLOT_MULTI
;
1089 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1092 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1094 /* We can't convert symbol values to gp- or tp-rel values after
1095 reload, as reload might have used $gp or $tp for other
1097 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1099 char e
= mep_section_tag (x
);
1100 return (e
!= 't' && e
!= 'b');
1105 /* Be careful not to use macros that need to be compiled one way for
1106 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1109 mep_legitimate_address (machine_mode mode
, rtx x
, int strict
)
1113 #define DEBUG_LEGIT 0
1115 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1119 if (GET_CODE (x
) == LO_SUM
1120 && GET_CODE (XEXP (x
, 0)) == REG
1121 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1122 && CONSTANT_P (XEXP (x
, 1)))
1124 if (GET_MODE_SIZE (mode
) > 4)
1126 /* We will end up splitting this, and lo_sums are not
1127 offsettable for us. */
1129 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1134 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1139 if (GET_CODE (x
) == REG
1140 && GEN_REG (REGNO (x
), strict
))
1143 fprintf (stderr
, " - yup, [reg]\n");
1148 if (GET_CODE (x
) == PLUS
1149 && GET_CODE (XEXP (x
, 0)) == REG
1150 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1151 && const_in_range (XEXP (x
, 1), -32768, 32767))
1154 fprintf (stderr
, " - yup, [reg+const]\n");
1159 if (GET_CODE (x
) == PLUS
1160 && GET_CODE (XEXP (x
, 0)) == REG
1161 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1162 && GET_CODE (XEXP (x
, 1)) == CONST
1163 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1164 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1165 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1166 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1169 fprintf (stderr
, " - yup, [reg+unspec]\n");
1174 the_tag
= mep_section_tag (x
);
1179 fprintf (stderr
, " - nope, [far]\n");
1184 if (mode
== VOIDmode
1185 && GET_CODE (x
) == SYMBOL_REF
)
1188 fprintf (stderr
, " - yup, call [symbol]\n");
1193 if ((mode
== SImode
|| mode
== SFmode
)
1195 && mep_legitimate_constant_p (mode
, x
)
1196 && the_tag
!= 't' && the_tag
!= 'b')
1198 if (GET_CODE (x
) != CONST_INT
1199 || (INTVAL (x
) <= 0xfffff
1201 && (INTVAL (x
) % 4) == 0))
1204 fprintf (stderr
, " - yup, [const]\n");
1211 fprintf (stderr
, " - nope.\n");
1217 mep_legitimize_reload_address (rtx
*x
, machine_mode mode
, int opnum
,
1219 int ind_levels ATTRIBUTE_UNUSED
)
1221 enum reload_type type
= (enum reload_type
) type_i
;
1223 if (GET_CODE (*x
) == PLUS
1224 && GET_CODE (XEXP (*x
, 0)) == MEM
1225 && GET_CODE (XEXP (*x
, 1)) == REG
)
1227 /* GCC will by default copy the MEM into a REG, which results in
1228 an invalid address. For us, the best thing to do is move the
1229 whole expression to a REG. */
1230 push_reload (*x
, NULL_RTX
, x
, NULL
,
1231 GENERAL_REGS
, mode
, VOIDmode
,
1236 if (GET_CODE (*x
) == PLUS
1237 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1238 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1240 char e
= mep_section_tag (XEXP (*x
, 0));
1242 if (e
!= 't' && e
!= 'b')
1244 /* GCC thinks that (sym+const) is a valid address. Well,
1245 sometimes it is, this time it isn't. The best thing to
1246 do is reload the symbol to a register, since reg+int
1247 tends to work, and we can't just add the symbol and
1249 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1250 GENERAL_REGS
, mode
, VOIDmode
,
1259 mep_core_address_length (rtx_insn
*insn
, int opn
)
1261 rtx set
= single_set (insn
);
1262 rtx mem
= XEXP (set
, opn
);
1263 rtx other
= XEXP (set
, 1-opn
);
1264 rtx addr
= XEXP (mem
, 0);
1266 if (register_operand (addr
, Pmode
))
1268 if (GET_CODE (addr
) == PLUS
)
1270 rtx addend
= XEXP (addr
, 1);
1272 gcc_assert (REG_P (XEXP (addr
, 0)));
1274 switch (REGNO (XEXP (addr
, 0)))
1276 case STACK_POINTER_REGNUM
:
1277 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1278 && mep_imm7a4_operand (addend
, VOIDmode
))
1283 gcc_assert (REG_P (other
));
1285 if (REGNO (other
) >= 8)
1288 if (GET_CODE (addend
) == CONST
1289 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1290 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1293 if (GET_CODE (addend
) == CONST_INT
1294 && INTVAL (addend
) >= 0
1295 && INTVAL (addend
) <= 127
1296 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1306 mep_cop_address_length (rtx_insn
*insn
, int opn
)
1308 rtx set
= single_set (insn
);
1309 rtx mem
= XEXP (set
, opn
);
1310 rtx addr
= XEXP (mem
, 0);
1312 if (GET_CODE (mem
) != MEM
)
1314 if (register_operand (addr
, Pmode
))
1316 if (GET_CODE (addr
) == POST_INC
)
1322 #define DEBUG_EXPAND_MOV 0
1324 mep_expand_mov (rtx
*operands
, machine_mode mode
)
1329 int post_reload
= 0;
1331 tag
[0] = mep_section_tag (operands
[0]);
1332 tag
[1] = mep_section_tag (operands
[1]);
1334 if (!reload_in_progress
1335 && !reload_completed
1336 && GET_CODE (operands
[0]) != REG
1337 && GET_CODE (operands
[0]) != SUBREG
1338 && GET_CODE (operands
[1]) != REG
1339 && GET_CODE (operands
[1]) != SUBREG
)
1340 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1342 #if DEBUG_EXPAND_MOV
1343 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1344 reload_in_progress
|| reload_completed
);
1345 debug_rtx (operands
[0]);
1346 debug_rtx (operands
[1]);
1349 if (mode
== DImode
|| mode
== DFmode
)
1352 if (reload_in_progress
|| reload_completed
)
1356 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1357 cfun
->machine
->reload_changes_tp
= true;
1359 if (tag
[0] == 't' || tag
[1] == 't')
1361 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1362 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1365 if (tag
[0] == 'b' || tag
[1] == 'b')
1367 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1368 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1371 if (cfun
->machine
->reload_changes_tp
== true)
1378 if (symbol_p (operands
[1]))
1380 t
= mep_section_tag (operands
[1]);
1381 if (t
== 'b' || t
== 't')
1384 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1386 tpsym
= operands
[1];
1387 n
= gen_rtx_UNSPEC (mode
,
1388 gen_rtvec (1, operands
[1]),
1389 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1390 n
= gen_rtx_CONST (mode
, n
);
1392 else if (GET_CODE (operands
[1]) == CONST
1393 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1394 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1395 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1397 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1398 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1399 n
= gen_rtx_UNSPEC (mode
,
1400 gen_rtvec (1, tpsym
),
1401 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1402 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1403 n
= gen_rtx_CONST (mode
, n
);
1405 else if (GET_CODE (operands
[1]) == CONST
1406 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1410 error ("unusual TP-relative address");
1414 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1415 : mep_gp_rtx ()), n
);
1416 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1417 #if DEBUG_EXPAND_MOV
1418 fprintf(stderr
, "mep_expand_mov emitting ");
1425 for (i
=0; i
< 2; i
++)
1427 t
= mep_section_tag (operands
[i
]);
1428 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1433 sym
= XEXP (operands
[i
], 0);
1434 if (GET_CODE (sym
) == CONST
1435 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1436 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1449 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1450 n
= gen_rtx_CONST (Pmode
, n
);
1451 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1452 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1457 if ((GET_CODE (operands
[1]) != REG
1458 && MEP_CONTROL_REG (operands
[0]))
1459 || (GET_CODE (operands
[0]) != REG
1460 && MEP_CONTROL_REG (operands
[1])))
1463 #if DEBUG_EXPAND_MOV
1464 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1466 temp
= gen_reg_rtx (mode
);
1467 emit_move_insn (temp
, operands
[1]);
1471 if (symbolref_p (operands
[0])
1472 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1473 || (GET_MODE_SIZE (mode
) != 4)))
1477 gcc_assert (!reload_in_progress
&& !reload_completed
);
1479 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1480 operands
[0] = replace_equiv_address (operands
[0], temp
);
1481 emit_move_insn (operands
[0], operands
[1]);
1485 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1488 if (symbol_p (operands
[1])
1489 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1491 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1492 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1496 if (symbolref_p (operands
[1])
1497 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1501 if (reload_in_progress
|| reload_completed
)
1504 temp
= gen_reg_rtx (Pmode
);
1506 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1507 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1508 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1515 /* Cases where the pattern can't be made to use at all. */
1518 mep_mov_ok (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1522 #define DEBUG_MOV_OK 0
1524 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1525 mep_section_tag (operands
[1]));
1526 debug_rtx (operands
[0]);
1527 debug_rtx (operands
[1]);
1530 /* We want the movh patterns to get these. */
1531 if (GET_CODE (operands
[1]) == HIGH
)
1534 /* We can't store a register to a far variable without using a
1535 scratch register to hold the address. Using far variables should
1536 be split by mep_emit_mov anyway. */
1537 if (mep_section_tag (operands
[0]) == 'f'
1538 || mep_section_tag (operands
[1]) == 'f')
1541 fprintf (stderr
, " - no, f\n");
1545 i
= mep_section_tag (operands
[1]);
1546 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1547 /* These are supposed to be generated with adds of the appropriate
1548 register. During and after reload, however, we allow them to
1549 be accessed as normal symbols because adding a dependency on
1550 the base register now might cause problems. */
1553 fprintf (stderr
, " - no, bt\n");
1558 /* The only moves we can allow involve at least one general
1559 register, so require it. */
1560 for (i
= 0; i
< 2; i
++)
1562 /* Allow subregs too, before reload. */
1563 rtx x
= operands
[i
];
1565 if (GET_CODE (x
) == SUBREG
)
1567 if (GET_CODE (x
) == REG
1568 && ! MEP_CONTROL_REG (x
))
1571 fprintf (stderr
, " - ok\n");
1577 fprintf (stderr
, " - no, no gen reg\n");
1582 #define DEBUG_SPLIT_WIDE_MOVE 0
1584 mep_split_wide_move (rtx
*operands
, machine_mode mode
)
1588 #if DEBUG_SPLIT_WIDE_MOVE
1589 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1590 debug_rtx (operands
[0]);
1591 debug_rtx (operands
[1]);
1594 for (i
= 0; i
<= 1; i
++)
1596 rtx op
= operands
[i
], hi
, lo
;
1598 switch (GET_CODE (op
))
1602 unsigned int regno
= REGNO (op
);
1604 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1608 lo
= gen_rtx_REG (SImode
, regno
);
1610 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1611 gen_rtx_REG (DImode
, regno
),
1616 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1617 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1625 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1626 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1633 /* The high part of CR <- GPR moves must be done after the low part. */
1634 operands
[i
+ 4] = lo
;
1635 operands
[i
+ 2] = hi
;
1638 if (reg_mentioned_p (operands
[2], operands
[5])
1639 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1640 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1644 /* Overlapping register pairs -- make sure we don't
1645 early-clobber ourselves. */
1647 operands
[2] = operands
[4];
1650 operands
[3] = operands
[5];
1654 #if DEBUG_SPLIT_WIDE_MOVE
1655 fprintf(stderr
, "\033[34m");
1656 debug_rtx (operands
[2]);
1657 debug_rtx (operands
[3]);
1658 debug_rtx (operands
[4]);
1659 debug_rtx (operands
[5]);
1660 fprintf(stderr
, "\033[0m");
1664 /* Emit a setcc instruction in its entirity. */
1667 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1675 tmp
= op1
, op1
= op2
, op2
= tmp
;
1676 code
= swap_condition (code
);
1681 op1
= force_reg (SImode
, op1
);
1682 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1683 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1687 if (op2
!= const0_rtx
)
1688 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1689 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1693 /* Branchful sequence:
1695 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1698 Branchless sequence:
1699 add3 tmp, op1, -op2 32-bit (or mov + sub)
1700 sltu3 tmp, tmp, 1 16-bit
1701 xor3 dest, tmp, 1 32-bit
1703 if (optimize_size
&& op2
!= const0_rtx
)
1706 if (op2
!= const0_rtx
)
1707 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1709 op2
= gen_reg_rtx (SImode
);
1710 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1712 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1713 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1717 if (GET_CODE (op2
) != CONST_INT
1718 || INTVAL (op2
) == 0x7ffffff)
1720 op2
= GEN_INT (INTVAL (op2
) + 1);
1721 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1724 if (GET_CODE (op2
) != CONST_INT
1725 || INTVAL (op2
) == -1)
1727 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1728 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1731 if (GET_CODE (op2
) != CONST_INT
1732 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1734 op2
= GEN_INT (INTVAL (op2
) - 1);
1735 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1738 if (GET_CODE (op2
) != CONST_INT
1739 || op2
== const0_rtx
)
1741 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1742 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1750 mep_expand_setcc (rtx
*operands
)
1752 rtx dest
= operands
[0];
1753 enum rtx_code code
= GET_CODE (operands
[1]);
1754 rtx op0
= operands
[2];
1755 rtx op1
= operands
[3];
1757 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1761 mep_expand_cbranch (rtx
*operands
)
1763 enum rtx_code code
= GET_CODE (operands
[0]);
1764 rtx op0
= operands
[1];
1765 rtx op1
= operands
[2];
1772 if (mep_imm4_operand (op1
, SImode
))
1775 tmp
= gen_reg_rtx (SImode
);
1776 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1783 if (mep_imm4_operand (op1
, SImode
))
1786 tmp
= gen_reg_rtx (SImode
);
1787 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1796 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1797 op1
= force_reg (SImode
, op1
);
1802 if (GET_CODE (op1
) == CONST_INT
1803 && INTVAL (op1
) != 0x7fffffff)
1805 op1
= GEN_INT (INTVAL (op1
) + 1);
1806 code
= (code
== LE
? LT
: GE
);
1810 tmp
= gen_reg_rtx (SImode
);
1811 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1813 code
= (code
== LE
? EQ
: NE
);
1819 if (op1
== const1_rtx
)
1826 tmp
= gen_reg_rtx (SImode
);
1827 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1834 tmp
= gen_reg_rtx (SImode
);
1835 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1837 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1846 tmp
= gen_reg_rtx (SImode
);
1847 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1848 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1855 tmp
= gen_reg_rtx (SImode
);
1856 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1858 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1870 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1874 mep_emit_cbranch (rtx
*operands
, int ne
)
1876 if (GET_CODE (operands
[1]) == REG
)
1877 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1878 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1879 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1881 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1885 mep_expand_call (rtx
*operands
, int returns_value
)
1887 rtx addr
= operands
[returns_value
];
1888 rtx tp
= mep_tp_rtx ();
1889 rtx gp
= mep_gp_rtx ();
1891 gcc_assert (GET_CODE (addr
) == MEM
);
1893 addr
= XEXP (addr
, 0);
1895 if (! mep_call_address_operand (addr
, VOIDmode
))
1896 addr
= force_reg (SImode
, addr
);
1898 if (! operands
[returns_value
+2])
1899 operands
[returns_value
+2] = const0_rtx
;
1902 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1903 operands
[3], tp
, gp
));
1905 emit_call_insn (gen_call_internal (addr
, operands
[1],
1906 operands
[2], tp
, gp
));
1909 /* Aliasing Support. */
1911 /* If X is a machine specific address (i.e. a symbol or label being
1912 referenced as a displacement from the GOT implemented using an
1913 UNSPEC), then return the base term. Otherwise return X. */
1916 mep_find_base_term (rtx x
)
1921 if (GET_CODE (x
) != PLUS
)
1926 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1927 && base
== mep_tp_rtx ())
1929 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1930 && base
== mep_gp_rtx ())
1935 if (GET_CODE (term
) != CONST
)
1937 term
= XEXP (term
, 0);
1939 if (GET_CODE (term
) != UNSPEC
1940 || XINT (term
, 1) != unspec
)
1943 return XVECEXP (term
, 0, 0);
1946 /* Reload Support. */
1948 /* Return true if the registers in CLASS cannot represent the change from
1949 modes FROM to TO. */
1952 mep_cannot_change_mode_class (machine_mode from
, machine_mode to
,
1953 enum reg_class regclass
)
1958 /* 64-bit COP regs must remain 64-bit COP regs. */
1959 if (TARGET_64BIT_CR_REGS
1960 && (regclass
== CR_REGS
1961 || regclass
== LOADABLE_CR_REGS
)
1962 && (GET_MODE_SIZE (to
) < 8
1963 || GET_MODE_SIZE (from
) < 8))
1969 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1972 mep_general_reg (rtx x
)
1974 while (GET_CODE (x
) == SUBREG
)
1976 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
1980 mep_nongeneral_reg (rtx x
)
1982 while (GET_CODE (x
) == SUBREG
)
1984 return (GET_CODE (x
) == REG
1985 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
1989 mep_general_copro_reg (rtx x
)
1991 while (GET_CODE (x
) == SUBREG
)
1993 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
1997 mep_nonregister (rtx x
)
1999 while (GET_CODE (x
) == SUBREG
)
2001 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
2004 #define DEBUG_RELOAD 0
2006 /* Return the secondary reload class needed for moving value X to or
2007 from a register in coprocessor register class CLASS. */
2009 static enum reg_class
2010 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
2012 if (mep_general_reg (x
))
2013 /* We can do the move directly if mep_have_core_copro_moves_p,
2014 otherwise we need to go through memory. Either way, no secondary
2015 register is needed. */
2018 if (mep_general_copro_reg (x
))
2020 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2021 if (mep_have_copro_copro_moves_p
)
2024 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2025 if (mep_have_core_copro_moves_p
)
2026 return GENERAL_REGS
;
2028 /* Otherwise we need to do it through memory. No secondary
2029 register is needed. */
2033 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2034 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2035 /* X is a memory value that we can access directly. */
2038 /* We have to move X into a GPR first and then copy it to
2039 the coprocessor register. The move from the GPR to the
2040 coprocessor might be done directly or through memory,
2041 depending on mep_have_core_copro_moves_p. */
2042 return GENERAL_REGS
;
2045 /* Copying X to register in RCLASS. */
2048 mep_secondary_input_reload_class (enum reg_class rclass
,
2049 machine_mode mode ATTRIBUTE_UNUSED
,
2055 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2059 if (reg_class_subset_p (rclass
, CR_REGS
))
2060 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2061 else if (MEP_NONGENERAL_CLASS (rclass
)
2062 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2066 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2068 return (enum reg_class
) rv
;
2071 /* Copying register in RCLASS to X. */
2074 mep_secondary_output_reload_class (enum reg_class rclass
,
2075 machine_mode mode ATTRIBUTE_UNUSED
,
2081 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2085 if (reg_class_subset_p (rclass
, CR_REGS
))
2086 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2087 else if (MEP_NONGENERAL_CLASS (rclass
)
2088 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2092 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2095 return (enum reg_class
) rv
;
2098 /* Implement SECONDARY_MEMORY_NEEDED. */
2101 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2102 machine_mode mode ATTRIBUTE_UNUSED
)
2104 if (!mep_have_core_copro_moves_p
)
2106 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2107 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2109 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2110 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2112 if (!mep_have_copro_copro_moves_p
2113 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2114 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2121 mep_expand_reload (rtx
*operands
, machine_mode mode
)
2123 /* There are three cases for each direction:
2128 int s0
= mep_section_tag (operands
[0]) == 'f';
2129 int s1
= mep_section_tag (operands
[1]) == 'f';
2130 int c0
= mep_nongeneral_reg (operands
[0]);
2131 int c1
= mep_nongeneral_reg (operands
[1]);
2132 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2135 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2136 debug_rtx (operands
[0]);
2137 debug_rtx (operands
[1]);
2142 case 00: /* Don't know why this gets here. */
2143 case 02: /* general = far */
2144 emit_move_insn (operands
[0], operands
[1]);
2147 case 10: /* cr = mem */
2148 case 11: /* cr = cr */
2149 case 01: /* mem = cr */
2150 case 12: /* cr = far */
2151 emit_move_insn (operands
[2], operands
[1]);
2152 emit_move_insn (operands
[0], operands
[2]);
2155 case 20: /* far = general */
2156 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2157 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2160 case 21: /* far = cr */
2161 case 22: /* far = far */
2163 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2164 which
, mode_name
[mode
]);
2165 debug_rtx (operands
[0]);
2166 debug_rtx (operands
[1]);
2171 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2172 can be moved directly into registers 0 to 7, but not into the rest.
2173 If so, and if the required class includes registers 0 to 7, restrict
2174 it to those registers. */
2177 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2179 switch (GET_CODE (x
))
2182 if (INTVAL (x
) >= 0x10000
2183 && INTVAL (x
) < 0x01000000
2184 && (INTVAL (x
) & 0xffff) != 0
2185 && reg_class_subset_p (TPREL_REGS
, rclass
))
2186 rclass
= TPREL_REGS
;
2192 if (mep_section_tag (x
) != 'f'
2193 && reg_class_subset_p (TPREL_REGS
, rclass
))
2194 rclass
= TPREL_REGS
;
2203 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2204 moves, 4 for direct double-register moves, and 1000 for anything
2205 that requires a temporary register or temporary stack slot. */
2208 mep_register_move_cost (machine_mode mode
, enum reg_class from
, enum reg_class to
)
2210 if (mep_have_copro_copro_moves_p
2211 && reg_class_subset_p (from
, CR_REGS
)
2212 && reg_class_subset_p (to
, CR_REGS
))
2214 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2218 if (reg_class_subset_p (from
, CR_REGS
)
2219 && reg_class_subset_p (to
, CR_REGS
))
2221 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2225 if (reg_class_subset_p (from
, CR_REGS
)
2226 || reg_class_subset_p (to
, CR_REGS
))
2228 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2232 if (mep_secondary_memory_needed (from
, to
, mode
))
2234 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2237 if (GET_MODE_SIZE (mode
) > 4)
2244 /* Functions to save and restore machine-specific function data. */
2246 static struct machine_function
*
2247 mep_init_machine_status (void)
2249 return ggc_cleared_alloc
<machine_function
> ();
2253 mep_allocate_initial_value (rtx reg
)
2257 if (GET_CODE (reg
) != REG
)
2260 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2263 /* In interrupt functions, the "initial" values of $gp and $tp are
2264 provided by the prologue. They are not necessarily the same as
2265 the values that the caller was using. */
2266 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2267 if (mep_interrupt_p ())
2270 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2272 cfun
->machine
->reg_save_size
+= 4;
2273 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2276 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2277 return gen_rtx_MEM (SImode
, plus_constant (Pmode
, arg_pointer_rtx
, -rss
));
2281 mep_return_addr_rtx (int count
)
2286 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2292 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2298 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2302 mep_interrupt_p (void)
2304 if (cfun
->machine
->interrupt_handler
== 0)
2306 int interrupt_handler
2307 = (lookup_attribute ("interrupt",
2308 DECL_ATTRIBUTES (current_function_decl
))
2310 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2312 return cfun
->machine
->interrupt_handler
== 2;
2316 mep_disinterrupt_p (void)
2318 if (cfun
->machine
->disable_interrupts
== 0)
2320 int disable_interrupts
2321 = (lookup_attribute ("disinterrupt",
2322 DECL_ATTRIBUTES (current_function_decl
))
2324 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2326 return cfun
->machine
->disable_interrupts
== 2;
2330 /* Frame/Epilog/Prolog Related. */
2333 mep_reg_set_p (rtx reg
, rtx insn
)
2335 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2338 if (FIND_REG_INC_NOTE (insn
, reg
))
2340 insn
= PATTERN (insn
);
2343 if (GET_CODE (insn
) == SET
2344 && GET_CODE (XEXP (insn
, 0)) == REG
2345 && GET_CODE (XEXP (insn
, 1)) == REG
2346 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2349 return set_of (reg
, insn
) != NULL_RTX
;
2353 #define MEP_SAVES_UNKNOWN 0
2354 #define MEP_SAVES_YES 1
2355 #define MEP_SAVES_MAYBE 2
2356 #define MEP_SAVES_NO 3
2359 mep_reg_set_in_function (int regno
)
2364 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2367 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2370 push_topmost_sequence ();
2371 insn
= get_insns ();
2372 pop_topmost_sequence ();
2377 reg
= gen_rtx_REG (SImode
, regno
);
2379 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2380 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2386 mep_asm_without_operands_p (void)
2388 if (cfun
->machine
->asms_without_operands
== 0)
2392 push_topmost_sequence ();
2393 insn
= get_insns ();
2394 pop_topmost_sequence ();
2396 cfun
->machine
->asms_without_operands
= 1;
2400 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2402 cfun
->machine
->asms_without_operands
= 2;
2405 insn
= NEXT_INSN (insn
);
2409 return cfun
->machine
->asms_without_operands
== 2;
2412 /* Interrupt functions save/restore every call-preserved register, and
2413 any call-used register it uses (or all if it calls any function,
2414 since they may get clobbered there too). Here we check to see
2415 which call-used registers need saving. */
2417 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2418 && (r == FIRST_CCR_REGNO + 1 \
2419 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2420 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2423 mep_interrupt_saved_reg (int r
)
2425 if (!mep_interrupt_p ())
2427 if (r
== REGSAVE_CONTROL_TEMP
2428 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2430 if (mep_asm_without_operands_p ()
2432 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2433 || IVC2_ISAVED_REG (r
)))
2436 /* Function calls mean we need to save $lp. */
2437 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2439 if (!crtl
->is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2440 /* The interrupt handler might use these registers for repeat blocks,
2441 or it might call a function that does so. */
2442 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2444 if (crtl
->is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2446 /* Functions we call might clobber these. */
2447 if (call_used_regs
[r
] && !fixed_regs
[r
])
2449 /* Additional registers that need to be saved for IVC2. */
2450 if (IVC2_ISAVED_REG (r
))
2457 mep_call_saves_register (int r
)
2459 if (! cfun
->machine
->frame_locked
)
2461 int rv
= MEP_SAVES_NO
;
2463 if (cfun
->machine
->reg_save_slot
[r
])
2465 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2467 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2469 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2471 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2472 /* We need these to have stack slots so that they can be set during
2475 else if (mep_interrupt_saved_reg (r
))
2477 cfun
->machine
->reg_saved
[r
] = rv
;
2479 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2482 /* Return true if epilogue uses register REGNO. */
2485 mep_epilogue_uses (int regno
)
2487 /* Since $lp is a call-saved register, the generic code will normally
2488 mark it used in the epilogue if it needs to be saved and restored.
2489 However, when profiling is enabled, the profiling code will implicitly
2490 clobber $11. This case has to be handled specially both here and in
2491 mep_call_saves_register. */
2492 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2494 /* Interrupt functions save/restore pretty much everything. */
2495 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2499 mep_reg_size (int regno
)
2501 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2506 /* Worker function for TARGET_CAN_ELIMINATE. */
2509 mep_can_eliminate (const int from
, const int to
)
2511 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2512 ? ! frame_pointer_needed
2517 mep_elimination_offset (int from
, int to
)
2521 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2524 if (!cfun
->machine
->frame_locked
)
2525 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2527 /* We don't count arg_regs_to_save in the arg pointer offset, because
2528 gcc thinks the arg pointer has moved along with the saved regs.
2529 However, we do count it when we adjust $sp in the prologue. */
2531 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2532 if (mep_call_saves_register (i
))
2533 reg_save_size
+= mep_reg_size (i
);
2535 if (reg_save_size
% 8)
2536 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2538 cfun
->machine
->regsave_filler
= 0;
2540 /* This is what our total stack adjustment looks like. */
2541 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2544 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2546 cfun
->machine
->frame_filler
= 0;
2549 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2550 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2552 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2553 return cfun
->machine
->frame_filler
+ frame_size
;
2555 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2556 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2564 RTX_FRAME_RELATED_P (x
) = 1;
2568 /* Since the prologue/epilogue code is generated after optimization,
2569 we can't rely on gcc to split constants for us. So, this code
2570 captures all the ways to add a constant to a register in one logic
2571 chunk, including optimizing away insns we just don't need. This
2572 makes the prolog/epilog code easier to follow. */
2574 add_constant (int dest
, int src
, int value
, int mark_frame
)
2579 if (src
== dest
&& value
== 0)
2584 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2585 gen_rtx_REG (SImode
, src
));
2587 RTX_FRAME_RELATED_P(insn
) = 1;
2591 if (value
>= -32768 && value
<= 32767)
2593 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2594 gen_rtx_REG (SImode
, src
),
2597 RTX_FRAME_RELATED_P(insn
) = 1;
2601 /* Big constant, need to use a temp register. We use
2602 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2603 area is always small enough to directly add to). */
2605 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2606 lo
= value
& 0xffff;
2608 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2613 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2614 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2618 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2619 gen_rtx_REG (SImode
, src
),
2620 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2623 RTX_FRAME_RELATED_P(insn
) = 1;
2624 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2625 gen_rtx_SET (SImode
,
2626 gen_rtx_REG (SImode
, dest
),
2627 gen_rtx_PLUS (SImode
,
2628 gen_rtx_REG (SImode
, dest
),
2633 /* Move SRC to DEST. Mark the move as being potentially dead if
2637 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2639 rtx_insn
*insn
= emit_move_insn (dest
, src
);
2642 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2647 /* Used for interrupt functions, which can't assume that $tp and $gp
2648 contain the correct pointers. */
2651 mep_reload_pointer (int regno
, const char *symbol
)
2655 if (!df_regs_ever_live_p(regno
) && crtl
->is_leaf
)
2658 reg
= gen_rtx_REG (SImode
, regno
);
2659 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2660 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2661 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2664 /* Assign save slots for any register not already saved. DImode
2665 registers go at the end of the reg save area; the rest go at the
2666 beginning. This is for alignment purposes. Returns true if a frame
2667 is really needed. */
2669 mep_assign_save_slots (int reg_save_size
)
2671 bool really_need_stack_frame
= false;
2675 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2676 if (mep_call_saves_register(i
))
2678 int regsize
= mep_reg_size (i
);
2680 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2681 || mep_reg_set_in_function (i
))
2682 really_need_stack_frame
= true;
2684 if (cfun
->machine
->reg_save_slot
[i
])
2689 cfun
->machine
->reg_save_size
+= regsize
;
2690 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2694 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2698 cfun
->machine
->frame_locked
= 1;
2699 return really_need_stack_frame
;
2703 mep_expand_prologue (void)
2705 int i
, rss
, sp_offset
= 0;
2708 int really_need_stack_frame
;
2710 /* We must not allow register renaming in interrupt functions,
2711 because that invalidates the correctness of the set of call-used
2712 registers we're going to save/restore. */
2713 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2715 if (mep_disinterrupt_p ())
2716 emit_insn (gen_mep_disable_int ());
2718 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2720 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2721 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2722 really_need_stack_frame
= frame_size
;
2724 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2726 sp_offset
= reg_save_size
;
2727 if (sp_offset
+ frame_size
< 128)
2728 sp_offset
+= frame_size
;
2730 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2732 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2733 if (mep_call_saves_register(i
))
2739 rss
= cfun
->machine
->reg_save_slot
[i
];
2741 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2742 && (!mep_reg_set_in_function (i
)
2743 && !mep_interrupt_p ()))
2746 if (mep_reg_size (i
) == 8)
2751 /* If there is a pseudo associated with this register's initial value,
2752 reload might have already spilt it to the stack slot suggested by
2753 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2755 mem
= gen_rtx_MEM (rmode
,
2756 plus_constant (Pmode
, stack_pointer_rtx
,
2758 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2760 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2761 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2762 else if (rmode
== DImode
)
2765 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2767 mem
= gen_rtx_MEM (SImode
,
2768 plus_constant (Pmode
, stack_pointer_rtx
,
2769 sp_offset
- rss
+ be
));
2771 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2772 gen_rtx_REG (SImode
, i
),
2774 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2775 gen_rtx_ZERO_EXTRACT (SImode
,
2776 gen_rtx_REG (DImode
, i
),
2780 insn
= maybe_dead_move (mem
,
2781 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2783 RTX_FRAME_RELATED_P (insn
) = 1;
2785 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2786 gen_rtx_SET (VOIDmode
,
2788 gen_rtx_REG (rmode
, i
)));
2789 mem
= gen_rtx_MEM (SImode
,
2790 plus_constant (Pmode
, stack_pointer_rtx
,
2791 sp_offset
- rss
+ (4-be
)));
2792 insn
= maybe_dead_move (mem
,
2793 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2799 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2800 gen_rtx_REG (rmode
, i
),
2802 insn
= maybe_dead_move (mem
,
2803 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2805 RTX_FRAME_RELATED_P (insn
) = 1;
2807 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2808 gen_rtx_SET (VOIDmode
,
2810 gen_rtx_REG (rmode
, i
)));
2814 if (frame_pointer_needed
)
2816 /* We've already adjusted down by sp_offset. Total $sp change
2817 is reg_save_size + frame_size. We want a net change here of
2818 just reg_save_size. */
2819 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2822 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2824 if (mep_interrupt_p ())
2826 mep_reload_pointer(GP_REGNO
, "__sdabase");
2827 mep_reload_pointer(TP_REGNO
, "__tpbase");
2832 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2834 int local
= hwi_local
;
2835 int frame_size
= local
+ crtl
->outgoing_args_size
;
2840 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2842 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2843 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2844 sp_offset
= reg_save_size
+ frame_size
;
2846 ffill
= cfun
->machine
->frame_filler
;
2848 if (cfun
->machine
->mep_frame_pointer_needed
)
2849 reg_names
[FP_REGNO
] = "$fp";
2851 reg_names
[FP_REGNO
] = "$8";
2856 if (debug_info_level
== DINFO_LEVEL_NONE
)
2858 fprintf (file
, "\t# frame: %d", sp_offset
);
2860 fprintf (file
, " %d regs", reg_save_size
);
2862 fprintf (file
, " %d locals", local
);
2863 if (crtl
->outgoing_args_size
)
2864 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2865 fprintf (file
, "\n");
2869 fprintf (file
, "\t#\n");
2870 fprintf (file
, "\t# Initial Frame Information:\n");
2871 if (sp_offset
|| !frame_pointer_needed
)
2872 fprintf (file
, "\t# Entry ---------- 0\n");
2874 /* Sort registers by save slots, so they're printed in the order
2875 they appear in memory, not the order they're saved in. */
2876 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2878 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2879 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2880 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2881 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2883 int t
= slot_map
[si
];
2884 slot_map
[si
] = slot_map
[sj
];
2889 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2892 int r
= slot_map
[i
];
2893 int rss
= cfun
->machine
->reg_save_slot
[r
];
2895 if (!mep_call_saves_register (r
))
2898 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2899 && (!mep_reg_set_in_function (r
)
2900 && !mep_interrupt_p ()))
2903 rsize
= mep_reg_size(r
);
2904 skip
= rss
- (sp
+rsize
);
2906 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2907 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2908 rsize
, reg_names
[r
], sp_offset
- rss
);
2912 skip
= reg_save_size
- sp
;
2914 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2916 if (frame_pointer_needed
)
2917 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2919 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2921 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2922 if (crtl
->outgoing_args_size
)
2923 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2924 crtl
->outgoing_args_size
);
2925 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2926 fprintf (file
, "\t#\n");
2930 static int mep_prevent_lp_restore
= 0;
2931 static int mep_sibcall_epilogue
= 0;
2934 mep_expand_epilogue (void)
2936 int i
, sp_offset
= 0;
2937 int reg_save_size
= 0;
2939 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2940 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2941 int interrupt_handler
= mep_interrupt_p ();
2943 if (profile_arc_flag
== 2)
2944 emit_insn (gen_mep_bb_trace_ret ());
2946 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2947 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2949 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2951 if (frame_pointer_needed
)
2953 /* If we have a frame pointer, we won't have a reliable stack
2954 pointer (alloca, you know), so rebase SP from FP */
2955 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2956 gen_rtx_REG (SImode
, FP_REGNO
));
2957 sp_offset
= reg_save_size
;
2961 /* SP is right under our local variable space. Adjust it if
2963 sp_offset
= reg_save_size
+ frame_size
;
2964 if (sp_offset
>= 128)
2966 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
2967 sp_offset
-= frame_size
;
2971 /* This is backwards so that we restore the control and coprocessor
2972 registers before the temporary registers we use to restore
2974 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
2975 if (mep_call_saves_register (i
))
2978 int rss
= cfun
->machine
->reg_save_slot
[i
];
2980 if (mep_reg_size (i
) == 8)
2985 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2986 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
2988 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
2990 if (!mep_prevent_lp_restore
2991 && !interrupt_handler
2992 && (i
== 10 || i
== 11))
2995 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2996 emit_move_insn (gen_rtx_REG (rmode
, i
),
2998 plus_constant (Pmode
, stack_pointer_rtx
,
3002 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
3003 /* Defer this one so we can jump indirect rather than
3004 copying the RA to $lp and "ret". EH epilogues
3005 automatically skip this anyway. */
3006 lp_slot
= sp_offset
-rss
;
3009 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
3011 plus_constant (Pmode
,
3014 emit_move_insn (gen_rtx_REG (rmode
, i
),
3015 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
3021 /* Restore this one last so we know it will be in the temp
3022 register when we return by jumping indirectly via the temp. */
3023 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
3024 gen_rtx_MEM (SImode
,
3025 plus_constant (Pmode
, stack_pointer_rtx
,
3027 lp_temp
= REGSAVE_CONTROL_TEMP
;
3031 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3033 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3034 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3035 gen_rtx_REG (SImode
, SP_REGNO
),
3036 cfun
->machine
->eh_stack_adjust
));
3038 if (mep_sibcall_epilogue
)
3041 if (mep_disinterrupt_p ())
3042 emit_insn (gen_mep_enable_int ());
3044 if (mep_prevent_lp_restore
)
3046 emit_jump_insn (gen_eh_return_internal ());
3049 else if (interrupt_handler
)
3050 emit_jump_insn (gen_mep_reti ());
3052 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3056 mep_expand_eh_return (rtx
*operands
)
3058 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3060 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3061 emit_move_insn (ra
, operands
[0]);
3065 emit_insn (gen_eh_epilogue (operands
[0]));
3069 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3071 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3072 mep_prevent_lp_restore
= 1;
3073 mep_expand_epilogue ();
3074 mep_prevent_lp_restore
= 0;
3078 mep_expand_sibcall_epilogue (void)
3080 mep_sibcall_epilogue
= 1;
3081 mep_expand_epilogue ();
3082 mep_sibcall_epilogue
= 0;
3086 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3091 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3094 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3095 if (mep_interrupt_p () || mep_disinterrupt_p ())
3102 mep_return_stackadj_rtx (void)
3104 return gen_rtx_REG (SImode
, 10);
3108 mep_return_handler_rtx (void)
3110 return gen_rtx_REG (SImode
, LP_REGNO
);
3114 mep_function_profiler (FILE *file
)
3116 /* Always right at the beginning of the function. */
3117 fprintf (file
, "\t# mep function profiler\n");
3118 fprintf (file
, "\tadd\t$sp, -8\n");
3119 fprintf (file
, "\tsw\t$0, ($sp)\n");
3120 fprintf (file
, "\tldc\t$0, $lp\n");
3121 fprintf (file
, "\tsw\t$0, 4($sp)\n");
3122 fprintf (file
, "\tbsr\t__mep_mcount\n");
3123 fprintf (file
, "\tlw\t$0, 4($sp)\n");
3124 fprintf (file
, "\tstc\t$0, $lp\n");
3125 fprintf (file
, "\tlw\t$0, ($sp)\n");
3126 fprintf (file
, "\tadd\t$sp, 8\n\n");
3130 mep_emit_bb_trace_ret (void)
3132 fprintf (asm_out_file
, "\t# end of block profiling\n");
3133 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3134 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3135 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3136 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3137 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3138 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3139 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3140 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3141 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3148 /* Operand Printing. */
3151 mep_print_operand_address (FILE *stream
, rtx address
)
3153 if (GET_CODE (address
) == MEM
)
3154 address
= XEXP (address
, 0);
3156 /* cf: gcc.dg/asm-4.c. */
3157 gcc_assert (GET_CODE (address
) == REG
);
3159 mep_print_operand (stream
, address
, 0);
3165 const char *pattern
;
3168 const conversions
[] =
3171 { 0, "m+ri", "3(2)" },
3175 { 0, "mLrs", "%lo(3)(2)" },
3176 { 0, "mLr+si", "%lo(4+5)(2)" },
3177 { 0, "m+ru2s", "%tpoff(5)(2)" },
3178 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3179 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3180 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3181 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3182 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3184 { 0, "m+si", "(2+3)" },
3185 { 0, "m+li", "(2+3)" },
3188 { 0, "+si", "1+2" },
3189 { 0, "+u2si", "%tpoff(3+4)" },
3190 { 0, "+u3si", "%sdaoff(3+4)" },
3196 { 'h', "Hs", "%hi(1)" },
3198 { 'I', "u2s", "%tpoff(2)" },
3199 { 'I', "u3s", "%sdaoff(2)" },
3200 { 'I', "+u2si", "%tpoff(3+4)" },
3201 { 'I', "+u3si", "%sdaoff(3+4)" },
3203 { 'P', "mr", "(1\\+),\\0" },
3209 unique_bit_in (HOST_WIDE_INT i
)
3213 case 0x01: case 0xfe: return 0;
3214 case 0x02: case 0xfd: return 1;
3215 case 0x04: case 0xfb: return 2;
3216 case 0x08: case 0xf7: return 3;
3217 case 0x10: case 0x7f: return 4;
3218 case 0x20: case 0xbf: return 5;
3219 case 0x40: case 0xdf: return 6;
3220 case 0x80: case 0xef: return 7;
3227 bit_size_for_clip (HOST_WIDE_INT i
)
3231 for (rv
= 0; rv
< 31; rv
++)
3232 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3237 /* Print an operand to a assembler instruction. */
3240 mep_print_operand (FILE *file
, rtx x
, int code
)
3243 const char *real_name
;
3247 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3248 we're using, then skip over the "mep_" part of its name. */
3249 const struct cgen_insn
*insn
;
3251 if (mep_get_move_insn (mep_cmov
, &insn
))
3252 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3254 mep_intrinsic_unavailable (mep_cmov
);
3259 switch (GET_CODE (x
))
3262 fputs ("clr", file
);
3265 fputs ("set", file
);
3268 fputs ("not", file
);
3271 output_operand_lossage ("invalid %%L code");
3276 /* Print the second operand of a CR <- CR move. If we're using
3277 a two-operand instruction (i.e., a real cmov), then just print
3278 the operand normally. If we're using a "reg, reg, immediate"
3279 instruction such as caddi3, print the operand followed by a
3280 zero field. If we're using a three-register instruction,
3281 print the operand twice. */
3282 const struct cgen_insn
*insn
;
3284 mep_print_operand (file
, x
, 0);
3285 if (mep_get_move_insn (mep_cmov
, &insn
)
3286 && insn_data
[insn
->icode
].n_operands
== 3)
3289 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3290 mep_print_operand (file
, x
, 0);
3292 mep_print_operand (file
, const0_rtx
, 0);
3298 for (i
= 0; conversions
[i
].pattern
; i
++)
3299 if (conversions
[i
].code
== code
3300 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3302 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3303 if (conversions
[i
].format
[j
] == '\\')
3305 fputc (conversions
[i
].format
[j
+1], file
);
3308 else if (ISDIGIT(conversions
[i
].format
[j
]))
3310 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3311 switch (GET_CODE (r
))
3314 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3320 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3323 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3326 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3329 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3332 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3335 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3336 && !(INTVAL (r
) & 0xff))
3337 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3339 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3342 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3343 && conversions
[i
].format
[j
+1] == 0)
3345 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3346 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3349 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3352 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3357 fprintf(file
, "[const_double 0x%lx]",
3358 (unsigned long) CONST_DOUBLE_HIGH(r
));
3361 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3362 assemble_name (file
, real_name
);
3365 output_asm_label (r
);
3368 fprintf (stderr
, "don't know how to print this operand:");
3375 if (conversions
[i
].format
[j
] == '+'
3376 && (!code
|| code
== 'I')
3377 && ISDIGIT (conversions
[i
].format
[j
+1])
3378 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3379 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3381 fputc(conversions
[i
].format
[j
], file
);
3385 if (!conversions
[i
].pattern
)
3387 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3395 mep_final_prescan_insn (rtx_insn
*insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3396 int noperands ATTRIBUTE_UNUSED
)
3398 /* Despite the fact that MeP is perfectly capable of branching and
3399 doing something else in the same bundle, gcc does jump
3400 optimization *after* scheduling, so we cannot trust the bundling
3401 flags on jump instructions. */
3402 if (GET_MODE (insn
) == BImode
3403 && get_attr_slots (insn
) != SLOTS_CORE
)
3404 fputc ('+', asm_out_file
);
3407 /* Function args in registers. */
3410 mep_setup_incoming_varargs (cumulative_args_t cum
,
3411 machine_mode mode ATTRIBUTE_UNUSED
,
3412 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3413 int second_time ATTRIBUTE_UNUSED
)
3415 int nsave
= 4 - (get_cumulative_args (cum
)->nregs
+ 1);
3418 cfun
->machine
->arg_regs_to_save
= nsave
;
3419 *pretend_size
= nsave
* 4;
3423 bytesize (const_tree type
, machine_mode mode
)
3425 if (mode
== BLKmode
)
3426 return int_size_in_bytes (type
);
3427 return GET_MODE_SIZE (mode
);
3431 mep_expand_builtin_saveregs (void)
3436 ns
= cfun
->machine
->arg_regs_to_save
;
3439 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3440 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3445 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3448 move_block_from_reg (5-ns
, regbuf
, ns
);
3452 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3453 int ofs
= 8 * ((ns
+1)/2);
3455 for (i
=0; i
<ns
; i
++)
3457 int rn
= (4-ns
) + i
+ 49;
3460 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3461 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3465 return XEXP (regbuf
, 0);
3469 mep_build_builtin_va_list (void)
3471 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3475 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3477 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3478 get_identifier ("__va_next_gp"), ptr_type_node
);
3479 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3480 get_identifier ("__va_next_gp_limit"),
3482 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3484 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3487 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3488 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3489 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3490 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3492 TYPE_FIELDS (record
) = f_next_gp
;
3493 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3494 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3495 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3497 layout_type (record
);
3503 mep_expand_va_start (tree valist
, rtx nextarg
)
3505 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3506 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3510 ns
= cfun
->machine
->arg_regs_to_save
;
3512 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3513 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3514 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3515 f_next_stack
= DECL_CHAIN (f_next_cop
);
3517 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3519 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3520 valist
, f_next_gp_limit
, NULL_TREE
);
3521 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3523 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3524 valist
, f_next_stack
, NULL_TREE
);
3526 /* va_list.next_gp = expand_builtin_saveregs (); */
3527 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3528 u
= fold_convert (ptr_type_node
, u
);
3529 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3530 TREE_SIDE_EFFECTS (t
) = 1;
3531 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3533 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3534 u
= fold_build_pointer_plus_hwi (u
, 4 * ns
);
3535 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3536 TREE_SIDE_EFFECTS (t
) = 1;
3537 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3539 u
= fold_build_pointer_plus_hwi (u
, 8 * ((ns
+1)/2));
3540 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3541 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3542 TREE_SIDE_EFFECTS (t
) = 1;
3543 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3545 /* va_list.next_stack = nextarg; */
3546 u
= make_tree (ptr_type_node
, nextarg
);
3547 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3548 TREE_SIDE_EFFECTS (t
) = 1;
3549 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3553 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3555 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3557 HOST_WIDE_INT size
, rsize
;
3558 bool by_reference
, ivc2_vec
;
3559 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3560 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3561 tree label_sover
, label_selse
;
3564 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3566 size
= int_size_in_bytes (type
);
3567 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3571 type
= build_pointer_type (type
);
3574 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3576 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3577 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3578 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3579 f_next_stack
= DECL_CHAIN (f_next_cop
);
3581 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3583 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3584 valist
, f_next_gp_limit
, NULL_TREE
);
3585 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3587 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3588 valist
, f_next_stack
, NULL_TREE
);
3590 /* if f_next_gp < f_next_gp_limit
3591 IF (VECTOR_P && IVC2)
3599 val = *f_next_stack;
3600 f_next_stack += rsize;
3604 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3605 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3606 res_addr
= create_tmp_var (ptr_type_node
);
3608 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3609 unshare_expr (next_gp_limit
));
3610 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3611 build1 (GOTO_EXPR
, void_type_node
,
3612 unshare_expr (label_selse
)),
3614 gimplify_and_add (tmp
, pre_p
);
3618 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3619 gimplify_and_add (tmp
, pre_p
);
3623 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3624 gimplify_and_add (tmp
, pre_p
);
3627 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_gp
), 4);
3628 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3630 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_cop
), 8);
3631 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3633 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3634 gimplify_and_add (tmp
, pre_p
);
3638 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3639 gimplify_and_add (tmp
, pre_p
);
3641 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3642 gimplify_and_add (tmp
, pre_p
);
3644 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_stack
), rsize
);
3645 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3649 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3650 gimplify_and_add (tmp
, pre_p
);
3652 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3655 res_addr
= build_va_arg_indirect_ref (res_addr
);
3657 return build_va_arg_indirect_ref (res_addr
);
3661 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3662 rtx libname ATTRIBUTE_UNUSED
,
3663 tree fndecl ATTRIBUTE_UNUSED
)
3667 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3673 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3674 larger than 4 bytes are passed indirectly. Return value in 0,
3675 unless bigger than 4 bytes, then the caller passes a pointer as the
3676 first arg. For varargs, we copy $1..$4 to the stack. */
3679 mep_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
3680 const_tree type ATTRIBUTE_UNUSED
,
3681 bool named ATTRIBUTE_UNUSED
)
3683 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3685 /* VOIDmode is a signal for the backend to pass data to the call
3686 expander via the second operand to the call pattern. We use
3687 this to determine whether to use "jsr" or "jsrv". */
3688 if (mode
== VOIDmode
)
3689 return GEN_INT (cum
->vliw
);
3691 /* If we havn't run out of argument registers, return the next. */
3694 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3695 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3697 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3700 /* Otherwise the argument goes on the stack. */
3705 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
3708 bool named ATTRIBUTE_UNUSED
)
3710 int size
= bytesize (type
, mode
);
3712 /* This is non-obvious, but yes, large values passed after we've run
3713 out of registers are *still* passed by reference - we put the
3714 address of the parameter on the stack, as well as putting the
3715 parameter itself elsewhere on the stack. */
3717 if (size
<= 0 || size
> 8)
3721 if (TARGET_IVC2
&& get_cumulative_args (cum
)->nregs
< 4
3722 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3728 mep_function_arg_advance (cumulative_args_t pcum
,
3729 machine_mode mode ATTRIBUTE_UNUSED
,
3730 const_tree type ATTRIBUTE_UNUSED
,
3731 bool named ATTRIBUTE_UNUSED
)
3733 get_cumulative_args (pcum
)->nregs
+= 1;
3737 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3739 int size
= bytesize (type
, BLKmode
);
3740 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3741 return size
> 0 && size
<= 8 ? 0 : 1;
3742 return size
> 0 && size
<= 4 ? 0 : 1;
3746 mep_narrow_volatile_bitfield (void)
3752 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3755 mep_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
3757 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3758 return gen_rtx_REG (TYPE_MODE (type
), 48);
3759 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3762 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3765 mep_libcall_value (machine_mode mode
)
3767 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3770 /* Handle pipeline hazards. */
3772 typedef enum { op_none
, op_stc
, op_fsft
, op_ret
} op_num
;
3773 static const char *opnames
[] = { "", "stc", "fsft", "ret" };
3775 static int prev_opcode
= 0;
3777 /* This isn't as optimal as it could be, because we don't know what
3778 control register the STC opcode is storing in. We only need to add
3779 the nop if it's the relevant register, but we add it for irrelevant
3783 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3785 int this_opcode
= op_none
;
3786 const char *hazard
= 0;
3791 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3792 this_opcode
= op_fsft
;
3795 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3796 this_opcode
= op_ret
;
3799 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3800 this_opcode
= op_stc
;
3804 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3806 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3810 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3811 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3813 prev_opcode
= this_opcode
;
3816 /* Handle attributes. */
3819 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3820 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3822 if (TREE_CODE (*node
) != VAR_DECL
3823 && TREE_CODE (*node
) != POINTER_TYPE
3824 && TREE_CODE (*node
) != TYPE_DECL
)
3826 warning (0, "%qE attribute only applies to variables", name
);
3829 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3831 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3833 warning (0, "address region attributes not allowed with auto storage class");
3836 /* Ignore storage attribute of pointed to variable: char __far * x; */
3837 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3839 warning (0, "address region attributes on pointed-to types ignored");
3848 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3851 int count_sections
= 0;
3852 int section_attr_count
= 0;
3854 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3856 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3857 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3858 || is_attribute_p ("near", TREE_PURPOSE (a
))
3859 || is_attribute_p ("far", TREE_PURPOSE (a
))
3860 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3862 if (check_section_attr
)
3863 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3866 if (check_section_attr
)
3867 return section_attr_count
;
3869 return count_sections
;
3872 #define MEP_ATTRIBUTES(decl) \
3873 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3874 : DECL_ATTRIBUTES (decl) \
3875 ? (DECL_ATTRIBUTES (decl)) \
3876 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3879 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3880 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3882 if (TREE_CODE (*node
) != VAR_DECL
3883 && TREE_CODE (*node
) != FUNCTION_DECL
3884 && TREE_CODE (*node
) != METHOD_TYPE
3885 && TREE_CODE (*node
) != POINTER_TYPE
3886 && TREE_CODE (*node
) != TYPE_DECL
)
3888 warning (0, "%qE attribute only applies to variables and functions",
3892 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3894 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3896 warning (0, "address region attributes not allowed with auto storage class");
3899 /* Ignore storage attribute of pointed to variable: char __far * x; */
3900 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3902 warning (0, "address region attributes on pointed-to types ignored");
3906 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3908 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3909 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3910 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3916 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3917 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3919 if (TREE_CODE (*node
) != FUNCTION_DECL
3920 && TREE_CODE (*node
) != METHOD_TYPE
)
3922 warning (0, "%qE attribute only applies to functions", name
);
3929 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3930 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3934 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3936 warning (0, "%qE attribute only applies to functions", name
);
3941 if (DECL_DECLARED_INLINE_P (*node
))
3942 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3943 DECL_UNINLINABLE (*node
) = 1;
3945 function_type
= TREE_TYPE (*node
);
3947 if (TREE_TYPE (function_type
) != void_type_node
)
3948 error ("interrupt function must have return type of void");
3950 if (prototype_p (function_type
)
3951 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
3952 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
3953 error ("interrupt function must have no arguments");
3959 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
3960 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3962 if (TREE_CODE (*node
) != VAR_DECL
)
3964 warning (0, "%qE attribute only applies to variables", name
);
3968 if (args
!= NULL_TREE
)
3970 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
3971 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
3972 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
3974 warning (0, "%qE attribute allows only an integer constant argument",
3980 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
3981 TREE_THIS_VOLATILE (*node
) = 1;
3987 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3988 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3990 if (TREE_CODE (*node
) != FUNCTION_TYPE
3991 && TREE_CODE (*node
) != FUNCTION_DECL
3992 && TREE_CODE (*node
) != METHOD_TYPE
3993 && TREE_CODE (*node
) != FIELD_DECL
3994 && TREE_CODE (*node
) != TYPE_DECL
)
3996 static int gave_pointer_note
= 0;
3997 static int gave_array_note
= 0;
3998 static const char * given_type
= NULL
;
4000 given_type
= get_tree_code_name (TREE_CODE (*node
));
4001 if (TREE_CODE (*node
) == POINTER_TYPE
)
4002 given_type
= "pointers";
4003 if (TREE_CODE (*node
) == ARRAY_TYPE
)
4004 given_type
= "arrays";
4007 warning (0, "%qE attribute only applies to functions, not %s",
4010 warning (0, "%qE attribute only applies to functions",
4014 if (TREE_CODE (*node
) == POINTER_TYPE
4015 && !gave_pointer_note
)
4017 inform (input_location
,
4018 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4019 " typedef int (__vliw *vfuncptr) ();");
4020 gave_pointer_note
= 1;
4023 if (TREE_CODE (*node
) == ARRAY_TYPE
4024 && !gave_array_note
)
4026 inform (input_location
,
4027 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4028 " typedef int (__vliw *vfuncptr[]) ();");
4029 gave_array_note
= 1;
4033 error ("VLIW functions are not allowed without a VLIW configuration");
4037 static const struct attribute_spec mep_attribute_table
[11] =
4039 /* name min max decl type func handler
4040 affects_type_identity */
4041 { "based", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4042 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4043 { "near", 0, 0, false, false, false, mep_validate_near_far
, false },
4044 { "far", 0, 0, false, false, false, mep_validate_near_far
, false },
4045 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
,
4047 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
, false },
4048 { "io", 0, 1, false, false, false, mep_validate_io_cb
, false },
4049 { "cb", 0, 1, false, false, false, mep_validate_io_cb
, false },
4050 { "vliw", 0, 0, false, true, false, mep_validate_vliw
, false },
4051 { NULL
, 0, 0, false, false, false, NULL
, false }
4055 mep_function_attribute_inlinable_p (const_tree callee
)
4057 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4058 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4059 return (lookup_attribute ("disinterrupt", attrs
) == 0
4060 && lookup_attribute ("interrupt", attrs
) == 0);
4064 mep_can_inline_p (tree caller
, tree callee
)
4066 if (TREE_CODE (callee
) == ADDR_EXPR
)
4067 callee
= TREE_OPERAND (callee
, 0);
4069 if (!mep_vliw_function_p (caller
)
4070 && mep_vliw_function_p (callee
))
4078 #define FUNC_DISINTERRUPT 2
4081 struct GTY(()) pragma_entry
{
4086 struct pragma_traits
: default_hashmap_traits
4088 static hashval_t
hash (const char *s
) { return htab_hash_string (s
); }
4090 equal_keys (const char *a
, const char *b
)
4092 return strcmp (a
, b
) == 0;
4096 /* Hash table of farcall-tagged sections. */
4097 static GTY(()) hash_map
<const char *, pragma_entry
, pragma_traits
> *
4101 mep_note_pragma_flag (const char *funcname
, int flag
)
4105 = hash_map
<const char *, pragma_entry
, pragma_traits
>::create_ggc (31);
4108 const char *name
= ggc_strdup (funcname
);
4109 pragma_entry
*slot
= &pragma_htab
->get_or_insert (name
, &existed
);
4119 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4124 if (funcname
[0] == '@' && funcname
[2] == '.')
4127 pragma_entry
*slot
= pragma_htab
->get (funcname
);
4128 if (slot
&& (slot
->flag
& flag
))
4137 mep_lookup_pragma_call (const char *funcname
)
4139 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4143 mep_note_pragma_call (const char *funcname
)
4145 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4149 mep_lookup_pragma_disinterrupt (const char *funcname
)
4151 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4155 mep_note_pragma_disinterrupt (const char *funcname
)
4157 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
/* Hash-map traversal callback: S is a function name, E its pragma
   entry.  Warn when "#pragma disinterrupt" was recorded for S
   (e.flag has FUNC_DISINTERRUPT) but never consumed (e.used lacks it).
   NOTE(review): damaged extraction -- the third parameter and the
   return statement are not visible in this chunk.  */
4161 note_unused_pragma_disinterrupt (const char *const &s
, const pragma_entry
&e
,
4164 if ((e
.flag
& FUNC_DISINTERRUPT
)
4165 && !(e
.used
& FUNC_DISINTERRUPT
))
4166 warning (0, "\"#pragma disinterrupt %s\" not used", s
);
/* End-of-compilation cleanup: walk the pragma hash map and emit a
   warning for each unused "#pragma disinterrupt" via the
   note_unused_pragma_disinterrupt callback.
   NOTE(review): damaged extraction -- the guard checking pragma_htab
   for NULL before the traverse is not visible here; confirm against
   the original source.  */
4171 mep_file_cleanups (void)
4174 pragma_htab
->traverse
<void *, note_unused_pragma_disinterrupt
> (NULL
);
4177 /* These three functions provide a bridge between the pragmas that
4178 affect register classes, and the functions that maintain them. We
4179 can't call those functions directly as pragma handling is part of
4180 the front end and doesn't have direct access to them. */
/* Pragma->register-class bridge: simply forwards to
   save_register_info (see the bridge comment above).  */
4183 mep_save_register_info (void)
4185 save_register_info ();
/* Pragma->register-class bridge: re-initialize register information
   after a pragma changes it.
   NOTE(review): the function body is missing from this extraction.  */
4189 mep_reinit_regs (void)
/* Pragma->register-class bridge: initial register setup.
   NOTE(review): the function body is missing from this extraction.  */
4195 mep_init_regs (void)
4203 mep_attrlist_to_encoding (tree list
, tree decl
)
4205 if (mep_multiple_address_regions (list
, false) > 1)
4207 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4208 TREE_PURPOSE (TREE_CHAIN (list
)),
4210 DECL_SOURCE_LINE (decl
));
4211 TREE_CHAIN (list
) = NULL_TREE
;
4216 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4218 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4220 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4222 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4224 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
4226 if (TREE_VALUE (list
)
4227 && TREE_VALUE (TREE_VALUE (list
))
4228 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4230 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4232 && location
<= 0x1000000)
4237 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4239 list
= TREE_CHAIN (list
);
4242 && TREE_CODE (decl
) == FUNCTION_DECL
4243 && DECL_SECTION_NAME (decl
) == 0)
/* Type-compatibility hook: compute whether function types T1 and T2
   agree on the "vliw" attribute.  Only the two lookup_attribute tests
   filling vliw1/vliw2 are visible here.
   NOTE(review): damaged extraction -- the declarations of vliw1/vliw2
   and the comparison/return logic are missing from this chunk.  */
4249 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4253 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4254 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
4263 mep_insert_attributes (tree decl
, tree
*attributes
)
4266 const char *secname
= 0;
4267 tree attrib
, attrlist
;
4270 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4272 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4274 if (mep_lookup_pragma_disinterrupt (funcname
))
4276 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4277 *attributes
= chainon (*attributes
, attrib
);
4281 if (TREE_CODE (decl
) != VAR_DECL
4282 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4285 if (TREE_READONLY (decl
) && TARGET_DC
)
4286 /* -mdc means that const variables default to the near section,
4287 regardless of the size cutoff. */
4290 /* User specified an attribute, so override the default.
4291 Ignore storage attribute of pointed to variable. char __far * x; */
4292 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4294 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4295 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4296 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4297 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
4300 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4301 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4302 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4304 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4305 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4309 /* This means that the declaration has a specific section
4310 attribute, so we should not apply the default rules. */
4312 if (encoding
== 'i' || encoding
== 'I')
4314 tree attr
= lookup_attribute ("io", attrlist
);
4316 && TREE_VALUE (attr
)
4317 && TREE_VALUE (TREE_VALUE(attr
)))
4319 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4320 static tree previous_value
= 0;
4321 static int previous_location
= 0;
4322 static tree previous_name
= 0;
4324 /* We take advantage of the fact that gcc will reuse the
4325 same tree pointer when applying an attribute to a
4326 list of decls, but produce a new tree for attributes
4327 on separate source lines, even when they're textually
4328 identical. This is the behavior we want. */
4329 if (TREE_VALUE (attr
) == previous_value
4330 && location
== previous_location
)
4332 warning(0, "__io address 0x%x is the same for %qE and %qE",
4333 location
, previous_name
, DECL_NAME (decl
));
4335 previous_name
= DECL_NAME (decl
);
4336 previous_location
= location
;
4337 previous_value
= TREE_VALUE (attr
);
4344 /* Declarations of arrays can change size. Don't trust them. */
4345 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4348 size
= int_size_in_bytes (TREE_TYPE (decl
));
4350 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4352 if (TREE_PUBLIC (decl
)
4353 || DECL_EXTERNAL (decl
)
4354 || TREE_STATIC (decl
))
4356 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4380 if (size
<= mep_based_cutoff
&& size
> 0)
4382 else if (size
<= mep_tiny_cutoff
&& size
> 0)
4388 if (mep_const_section
&& TREE_READONLY (decl
))
4390 if (strcmp (mep_const_section
, "tiny") == 0)
4392 else if (strcmp (mep_const_section
, "near") == 0)
4394 else if (strcmp (mep_const_section
, "far") == 0)
4401 if (!mep_multiple_address_regions (*attributes
, true)
4402 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4404 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4406 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4407 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4408 and mep_validate_based_tiny. */
4409 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
4414 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4417 const char *oldname
;
4418 const char *secname
;
4424 tree mep_attributes
;
4429 if (TREE_CODE (decl
) != VAR_DECL
4430 && TREE_CODE (decl
) != FUNCTION_DECL
)
4433 rtlname
= XEXP (rtl
, 0);
4434 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4435 oldname
= XSTR (rtlname
, 0);
4436 else if (GET_CODE (rtlname
) == MEM
4437 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4438 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4442 type
= TREE_TYPE (decl
);
4443 if (type
== error_mark_node
)
4445 mep_attributes
= MEP_ATTRIBUTES (decl
);
4447 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
4451 newname
= (char *) alloca (strlen (oldname
) + 4);
4452 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4453 idp
= get_identifier (newname
);
4455 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4456 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4457 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4470 maxsize
= 0x1000000;
4478 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4480 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4482 (long) int_size_in_bytes (TREE_TYPE (decl
)),
4490 mep_strip_name_encoding (const char *sym
)
4496 else if (*sym
== '@' && sym
[2] == '.')
4504 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4505 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4510 switch (TREE_CODE (decl
))
4513 if (!TREE_READONLY (decl
)
4514 || TREE_SIDE_EFFECTS (decl
)
4515 || !DECL_INITIAL (decl
)
4516 || (DECL_INITIAL (decl
) != error_mark_node
4517 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4521 if (! TREE_CONSTANT (decl
))
4529 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4531 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4533 if (name
[0] == '@' && name
[2] == '.')
4538 if (flag_function_sections
|| DECL_COMDAT_GROUP (decl
))
4539 mep_unique_section (decl
, 0);
4540 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4542 if (encoding
== 'f')
4543 return vftext_section
;
4545 return vtext_section
;
4547 else if (encoding
== 'f')
4548 return ftext_section
;
4550 return text_section
;
4553 if (TREE_CODE (decl
) == VAR_DECL
)
4555 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4557 if (name
[0] == '@' && name
[2] == '.')
4561 return based_section
;
4565 return srodata_section
;
4566 if (DECL_INITIAL (decl
))
4567 return sdata_section
;
4568 return tinybss_section
;
4572 return frodata_section
;
4577 error_at (DECL_SOURCE_LOCATION (decl
),
4578 "variable %D of type %<io%> must be uninitialized", decl
);
4579 return data_section
;
4582 error_at (DECL_SOURCE_LOCATION (decl
),
4583 "variable %D of type %<cb%> must be uninitialized", decl
);
4584 return data_section
;
4589 return readonly_data_section
;
4591 return data_section
;
4595 mep_unique_section (tree decl
, int reloc
)
4597 static const char *prefixes
[][2] =
4599 { ".text.", ".gnu.linkonce.t." },
4600 { ".rodata.", ".gnu.linkonce.r." },
4601 { ".data.", ".gnu.linkonce.d." },
4602 { ".based.", ".gnu.linkonce.based." },
4603 { ".sdata.", ".gnu.linkonce.s." },
4604 { ".far.", ".gnu.linkonce.far." },
4605 { ".ftext.", ".gnu.linkonce.ft." },
4606 { ".frodata.", ".gnu.linkonce.frd." },
4607 { ".srodata.", ".gnu.linkonce.srd." },
4608 { ".vtext.", ".gnu.linkonce.v." },
4609 { ".vftext.", ".gnu.linkonce.vf." }
4611 int sec
= 2; /* .data */
4613 const char *name
, *prefix
;
4616 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4617 if (DECL_RTL (decl
))
4618 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4620 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4622 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4623 sec
= 9; /* .vtext */
4625 sec
= 0; /* .text */
4627 else if (decl_readonly_section (decl
, reloc
))
4628 sec
= 1; /* .rodata */
4630 if (name
[0] == '@' && name
[2] == '.')
4635 sec
= 3; /* .based */
4639 sec
= 8; /* .srodata */
4641 sec
= 4; /* .sdata */
4645 sec
= 6; /* .ftext */
4647 sec
= 10; /* .vftext */
4649 sec
= 7; /* .frodata */
4651 sec
= 5; /* .far. */
4657 prefix
= prefixes
[sec
][DECL_COMDAT_GROUP(decl
) != NULL
];
4658 len
= strlen (name
) + strlen (prefix
);
4659 string
= (char *) alloca (len
+ 1);
4661 sprintf (string
, "%s%s", prefix
, name
);
4663 set_decl_section_name (decl
, string
);
4666 /* Given a decl, a section name, and whether the decl initializer
4667 has relocs, choose attributes for the section. */
4669 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4672 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4674 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4676 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4677 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4678 flags
|= SECTION_MEP_VLIW
;
4683 /* Switch to an arbitrary section NAME with attributes as specified
4684 by FLAGS. ALIGN specifies any known alignment requirements for
4685 the section; 0 if the default should be used.
4687 Differs from the standard ELF version only in support of VLIW mode. */
4690 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4692 char flagchars
[8], *f
= flagchars
;
4695 if (!(flags
& SECTION_DEBUG
))
4697 if (flags
& SECTION_WRITE
)
4699 if (flags
& SECTION_CODE
)
4701 if (flags
& SECTION_SMALL
)
4703 if (flags
& SECTION_MEP_VLIW
)
4707 if (flags
& SECTION_BSS
)
4712 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4713 name
, flagchars
, type
);
4715 if (flags
& SECTION_CODE
)
4716 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
4721 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4722 int size
, int align
, int global
)
4724 /* We intentionally don't use mep_section_tag() here. */
4726 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4730 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4731 DECL_ATTRIBUTES (decl
));
4733 && TREE_VALUE (attr
)
4734 && TREE_VALUE (TREE_VALUE(attr
)))
4735 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4740 fprintf (stream
, "\t.globl\t");
4741 assemble_name (stream
, name
);
4742 fprintf (stream
, "\n");
4744 assemble_name (stream
, name
);
4745 fprintf (stream
, " = %d\n", location
);
4748 if (name
[0] == '@' && name
[2] == '.')
4750 const char *sec
= 0;
4754 switch_to_section (based_section
);
4758 switch_to_section (tinybss_section
);
4762 switch_to_section (farbss_section
);
4771 while (align
> BITS_PER_UNIT
)
4776 name2
= targetm
.strip_name_encoding (name
);
4778 fprintf (stream
, "\t.globl\t%s\n", name2
);
4779 fprintf (stream
, "\t.p2align %d\n", p2align
);
4780 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4781 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4782 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
4789 fprintf (stream
, "\t.local\t");
4790 assemble_name (stream
, name
);
4791 fprintf (stream
, "\n");
4793 fprintf (stream
, "\t.comm\t");
4794 assemble_name (stream
, name
);
4795 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
/* Trampoline setup: initialize the trampoline at M_TRAMP by calling
   the runtime helper __mep_trampoline_helper, passing (per the "3"
   argument count) the trampoline address, the target function address
   and STATIC_CHAIN.
   NOTE(review): damaged extraction -- the argument lines passing addr
   and fnaddr with their modes (original lines 4808-4809) are missing
   from this chunk.  */
4801 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4803 rtx addr
= XEXP (m_tramp
, 0);
4804 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4806 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4807 LCT_NORMAL
, VOIDmode
, 3,
4810 static_chain
, Pmode
);
4813 /* Experimental Reorg. */
4816 mep_mentioned_p (rtx in
,
4817 rtx reg
, /* NULL for mem */
4818 int modes_too
) /* if nonzero, modes must match also. */
4826 if (reg
&& GET_CODE (reg
) != REG
)
4829 if (GET_CODE (in
) == LABEL_REF
)
4832 code
= GET_CODE (in
);
4838 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
4844 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4846 return (REGNO (in
) == REGNO (reg
));
4859 /* Set's source should be read-only. */
4860 if (code
== SET
&& !reg
)
4861 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
4863 fmt
= GET_RTX_FORMAT (code
);
4865 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4870 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4871 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4874 else if (fmt
[i
] == 'e'
4875 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4881 #define EXPERIMENTAL_REGMOVE_REORG 1
4883 #if EXPERIMENTAL_REGMOVE_REORG
/* Return whether registers R1 and R2 belong to the same MeP register
   class for the regmove reorg: both general registers (GR_REGNO_P) or
   both coprocessor registers (CR_REGNO_P).
   NOTE(review): damaged extraction -- the return statements for each
   branch and the fallthrough are missing from this chunk.  */
4886 mep_compatible_reg_class (int r1
, int r2
)
4888 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4890 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))
4896 mep_reorg_regmove (rtx_insn
*insns
)
4898 rtx_insn
*insn
, *next
, *follow
;
4900 int count
= 0, done
= 0, replace
, before
= 0;
4903 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4904 if (NONJUMP_INSN_P (insn
))
4907 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4908 set that uses the r2 and r2 dies there. We replace r2 with r1
4909 and see if it's still a valid insn. If so, delete the first set.
4910 Copied from reorg.c. */
4915 for (insn
= insns
; insn
; insn
= next
)
4917 next
= next_nonnote_nondebug_insn (insn
);
4918 if (! NONJUMP_INSN_P (insn
))
4920 pat
= PATTERN (insn
);
4924 if (GET_CODE (pat
) == SET
4925 && GET_CODE (SET_SRC (pat
)) == REG
4926 && GET_CODE (SET_DEST (pat
)) == REG
4927 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4928 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4930 follow
= next_nonnote_nondebug_insn (insn
);
4932 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
4934 while (follow
&& NONJUMP_INSN_P (follow
)
4935 && GET_CODE (PATTERN (follow
)) == SET
4936 && !dead_or_set_p (follow
, SET_SRC (pat
))
4937 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4938 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4941 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4942 follow
= next_nonnote_insn (follow
);
4946 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
4947 if (follow
&& NONJUMP_INSN_P (follow
)
4948 && GET_CODE (PATTERN (follow
)) == SET
4949 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
4951 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
4953 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
4956 where
= & SET_SRC (PATTERN (follow
));
4959 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
4961 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
4964 where
= & PATTERN (follow
);
4970 /* If so, follow is the corresponding insn */
4977 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
4978 for (x
= insn
; x
;x
= NEXT_INSN (x
))
4980 print_rtl_single (dump_file
, x
);
4983 fprintf (dump_file
, "\n");
4987 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
4994 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
4995 print_rtl_single (dump_file
, follow
);
5005 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
5006 fprintf (dump_file
, "=====\n");
5012 /* Figure out where to put LABEL, which is the label for a repeat loop.
5013 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5014 the loop ends just before LAST_INSN. If SHARED, insns other than the
5015 "repeat" might use LABEL to jump to the loop's continuation point.
5017 Return the last instruction in the adjusted loop. */
5020 mep_insert_repeat_label_last (rtx_insn
*last_insn
, rtx_code_label
*label
,
5021 bool including
, bool shared
)
5023 rtx_insn
*next
, *prev
;
5024 int count
= 0, code
, icode
;
5027 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5028 INSN_UID (last_insn
));
5030 /* Set PREV to the last insn in the loop. */
5033 prev
= PREV_INSN (prev
);
5035 /* Set NEXT to the next insn after the repeat label. */
5040 code
= GET_CODE (prev
);
5041 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5046 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5047 prev
= as_a
<rtx_insn
*> (XVECEXP (PATTERN (prev
), 0, 1));
5049 /* Other insns that should not be in the last two opcodes. */
5050 icode
= recog_memoized (prev
);
5052 || icode
== CODE_FOR_repeat
5053 || icode
== CODE_FOR_erepeat
5054 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5057 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5058 is the second instruction in a VLIW bundle. In that case,
5059 loop again: if the first instruction also satisfies the
5060 conditions above then we will reach here again and put
5061 both of them into the repeat epilogue. Otherwise both
5062 should remain outside. */
5063 if (GET_MODE (prev
) != BImode
)
5068 print_rtl_single (dump_file
, next
);
5073 prev
= PREV_INSN (prev
);
5076 /* See if we're adding the label immediately after the repeat insn.
5077 If so, we need to separate them with a nop. */
5078 prev
= prev_real_insn (next
);
5080 switch (recog_memoized (prev
))
5082 case CODE_FOR_repeat
:
5083 case CODE_FOR_erepeat
:
5085 fprintf (dump_file
, "Adding nop inside loop\n");
5086 emit_insn_before (gen_nop (), next
);
5093 /* Insert the label. */
5094 emit_label_before (label
, next
);
5096 /* Insert the nops. */
5097 if (dump_file
&& count
< 2)
5098 fprintf (dump_file
, "Adding %d nop%s\n\n",
5099 2 - count
, count
== 1 ? "" : "s");
5101 for (; count
< 2; count
++)
5103 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5105 emit_insn_before (gen_nop (), last_insn
);
/* Emit a doloop_begin or doloop_end placeholder for a repeat loop;
   IS_END is nonzero for the loop end.  A new per-function loop tag is
   allocated when none is outstanding (doloop_tags == 0) or when the
   previous placeholder came from the same end of a loop
   (doloop_tag_from_end == is_end); TAG is then the current tag
   (doloop_tags - 1).
   NOTE(review): damaged extraction -- the "if (is_end)" selector
   between the two emit calls is not visible here; presumably the
   end-placeholder jump is emitted for ends and the begin-placeholder
   otherwise -- confirm against the original source.  */
5112 mep_emit_doloop (rtx
*operands
, int is_end
)
5116 if (cfun
->machine
->doloop_tags
== 0
5117 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5119 cfun
->machine
->doloop_tags
++;
5120 cfun
->machine
->doloop_tag_from_end
= is_end
;
5123 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5125 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[1], tag
));
5127 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5131 /* Code for converting doloop_begins and doloop_ends into valid
5132 MeP instructions. A doloop_begin is just a placeholder:
5134 $count = unspec ($count)
5136 where $count is initially the number of iterations - 1.
5137 doloop_end has the form:
5139 if ($count-- == 0) goto label
5141 The counter variable is private to the doloop insns, nothing else
5142 relies on its value.
5144 There are three cases, in decreasing order of preference:
5146 1. A loop has exactly one doloop_begin and one doloop_end.
5147 The doloop_end branches to the first instruction after
5150 In this case we can replace the doloop_begin with a repeat
5151 instruction and remove the doloop_end. I.e.:
5153 $count1 = unspec ($count1)
5158 if ($count2-- == 0) goto label
5162 repeat $count1,repeat_label
5170 2. As for (1), except there are several doloop_ends. One of them
5171 (call it X) falls through to a label L. All the others fall
5172 through to branches to L.
5174 In this case, we remove X and replace the other doloop_ends
5175 with branches to the repeat label. For example:
5177 $count1 = unspec ($count1)
5180 if ($count2-- == 0) goto label
5183 if ($count3-- == 0) goto label
5188 repeat $count1,repeat_label
5199 3. The fallback case. Replace doloop_begins with:
5203 Replace doloop_ends with the equivalent of:
5206 if ($count == 0) goto label
5208 Note that this might need a scratch register if $count
5209 is stored in memory. */
5211 /* A structure describing one doloop_begin. */
5212 struct mep_doloop_begin
{
5213 /* The next doloop_begin with the same tag. */
5214 struct mep_doloop_begin
*next
;
5216 /* The instruction itself. */
5219 /* The initial counter value. This is known to be a general register. */
5223 /* A structure describing a doloop_end. */
5224 struct mep_doloop_end
{
5225 /* The next doloop_end with the same loop tag. */
5226 struct mep_doloop_end
*next
;
5228 /* The instruction itself. */
5231 /* The first instruction after INSN when the branch isn't taken. */
5232 rtx_insn
*fallthrough
;
5234 /* The location of the counter value. Since doloop_end_internal is a
5235 jump instruction, it has to allow the counter to be stored anywhere
5236 (any non-fixed register or memory location). */
5239 /* The target label (the place where the insn branches when the counter
5243 /* A scratch register. Only available when COUNTER isn't stored
5244 in a general register. */
5249 /* One do-while loop. */
5251 /* All the doloop_begins for this loop (in no particular order). */
5252 struct mep_doloop_begin
*begin
;
5254 /* All the doloop_ends. When there is more than one, arrange things
5255 so that the first one is the most likely to be X in case (2) above. */
5256 struct mep_doloop_end
*end
;
/* NOTE(review): damaged extraction -- the early "return false"
   statements after each failed test and the final "return true" are
   not visible in this chunk.  */
5260 /* Return true if LOOP can be converted into repeat/repeat_end form
5261 (that is, if it matches cases (1) or (2) above). */
5264 mep_repeat_loop_p (struct mep_doloop
*loop
)
5266 struct mep_doloop_end
*end
;
5269 /* There must be exactly one doloop_begin and at least one doloop_end. */
5270 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5273 /* The first doloop_end (X) must branch back to the insn after
5274 the doloop_begin. */
5275 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5278 /* All the other doloop_ends must branch to the same place as X.
5279 When the branch isn't taken, they must jump to the instruction
5281 fallthrough
= loop
->end
->fallthrough
;
5282 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5283 if (end
->label
!= loop
->end
->label
5284 || !simplejump_p (end
->fallthrough
)
5285 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5292 /* The main repeat reorg function. See comment above for details. */
5295 mep_reorg_repeat (rtx_insn
*insns
)
5298 struct mep_doloop
*loops
, *loop
;
5299 struct mep_doloop_begin
*begin
;
5300 struct mep_doloop_end
*end
;
5302 /* Quick exit if we haven't created any loops. */
5303 if (cfun
->machine
->doloop_tags
== 0)
5306 /* Create an array of mep_doloop structures. */
5307 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5308 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5310 /* Search the function for do-while insns and group them by loop tag. */
5311 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5313 switch (recog_memoized (insn
))
5315 case CODE_FOR_doloop_begin_internal
:
5316 insn_extract (insn
);
5317 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5319 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5320 begin
->next
= loop
->begin
;
5322 begin
->counter
= recog_data
.operand
[0];
5324 loop
->begin
= begin
;
5327 case CODE_FOR_doloop_end_internal
:
5328 insn_extract (insn
);
5329 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5331 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5333 end
->fallthrough
= next_real_insn (insn
);
5334 end
->counter
= recog_data
.operand
[0];
5335 end
->label
= recog_data
.operand
[1];
5336 end
->scratch
= recog_data
.operand
[3];
5338 /* If this insn falls through to an unconditional jump,
5339 give it a lower priority than the others. */
5340 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5342 end
->next
= loop
->end
->next
;
5343 loop
->end
->next
= end
;
5347 end
->next
= loop
->end
;
5353 /* Convert the insns for each loop in turn. */
5354 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5355 if (mep_repeat_loop_p (loop
))
5357 /* Case (1) or (2). */
5358 rtx_code_label
*repeat_label
;
5361 /* Create a new label for the repeat insn. */
5362 repeat_label
= gen_label_rtx ();
5364 /* Replace the doloop_begin with a repeat. */
5365 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5366 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5368 delete_insn (loop
->begin
->insn
);
5370 /* Insert the repeat label before the first doloop_end.
5371 Fill the gap with nops if there are other doloop_ends. */
5372 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5373 false, loop
->end
->next
!= 0);
5375 /* Emit a repeat_end (to improve the readability of the output). */
5376 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5378 /* Delete the first doloop_end. */
5379 delete_insn (loop
->end
->insn
);
5381 /* Replace the others with branches to REPEAT_LABEL. */
5382 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5384 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5385 delete_insn (end
->insn
);
5386 delete_insn (end
->fallthrough
);
5391 /* Case (3). First replace all the doloop_begins with increment
5393 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5395 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5396 begin
->counter
, const1_rtx
),
5398 delete_insn (begin
->insn
);
5401 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5402 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5408 /* Load the counter value into a general register. */
5410 if (!REG_P (reg
) || REGNO (reg
) > 15)
5413 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5416 /* Decrement the counter. */
5417 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5420 /* Copy it back to its original location. */
5421 if (reg
!= end
->counter
)
5422 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5424 /* Jump back to the start label. */
5425 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5427 JUMP_LABEL (insn
) = end
->label
;
5428 LABEL_NUSES (end
->label
)++;
5430 /* Emit the whole sequence before the doloop_end. */
5431 insn
= get_insns ();
5433 emit_insn_before (insn
, end
->insn
);
5435 /* Delete the doloop_end. */
5436 delete_insn (end
->insn
);
5443 mep_invertable_branch_p (rtx_insn
*insn
)
5446 enum rtx_code old_code
;
5449 set
= PATTERN (insn
);
5450 if (GET_CODE (set
) != SET
)
5452 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5454 cond
= XEXP (XEXP (set
, 1), 0);
5455 old_code
= GET_CODE (cond
);
5459 PUT_CODE (cond
, NE
);
5462 PUT_CODE (cond
, EQ
);
5465 PUT_CODE (cond
, GE
);
5468 PUT_CODE (cond
, LT
);
5473 INSN_CODE (insn
) = -1;
5474 i
= recog_memoized (insn
);
5475 PUT_CODE (cond
, old_code
);
5476 INSN_CODE (insn
) = -1;
5481 mep_invert_branch (rtx_insn
*insn
, rtx_insn
*after
)
5483 rtx cond
, set
, label
;
5486 set
= PATTERN (insn
);
5488 gcc_assert (GET_CODE (set
) == SET
);
5489 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5491 cond
= XEXP (XEXP (set
, 1), 0);
5492 switch (GET_CODE (cond
))
5495 PUT_CODE (cond
, NE
);
5498 PUT_CODE (cond
, EQ
);
5501 PUT_CODE (cond
, GE
);
5504 PUT_CODE (cond
, LT
);
5509 label
= gen_label_rtx ();
5510 emit_label_after (label
, after
);
5511 for (i
=1; i
<=2; i
++)
5512 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5514 rtx ref
= XEXP (XEXP (set
, 1), i
);
5515 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5516 delete_insn (XEXP (ref
, 0));
5517 XEXP (ref
, 0) = label
;
5518 LABEL_NUSES (label
) ++;
5519 JUMP_LABEL (insn
) = label
;
5521 INSN_CODE (insn
) = -1;
5522 i
= recog_memoized (insn
);
5523 gcc_assert (i
>= 0);
5527 mep_reorg_erepeat (rtx_insn
*insns
)
5529 rtx_insn
*insn
, *prev
;
5534 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5536 && mep_invertable_branch_p (insn
))
5540 fprintf (dump_file
, "\n------------------------------\n");
5541 fprintf (dump_file
, "erepeat: considering this jump:\n");
5542 print_rtl_single (dump_file
, insn
);
5544 count
= simplejump_p (insn
) ? 0 : 1;
5545 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5547 if (CALL_P (prev
) || BARRIER_P (prev
))
5550 if (prev
== JUMP_LABEL (insn
))
5554 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5556 if (LABEL_NUSES (prev
) == 1)
5557 /* We're the only user, always safe */ ;
5558 else if (LABEL_NUSES (prev
) == 2)
5560 /* See if there's a barrier before this label. If
5561 so, we know nobody inside the loop uses it.
5562 But we must be careful to put the erepeat
5563 *after* the label. */
5565 for (barrier
= PREV_INSN (prev
);
5566 barrier
&& NOTE_P (barrier
);
5567 barrier
= PREV_INSN (barrier
))
5569 if (barrier
&& ! BARRIER_P (barrier
))
5574 /* We don't know who else, within or without our loop, uses this */
5576 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5580 /* Generate a label to be used by the erepat insn. */
5581 l
= gen_label_rtx ();
5583 /* Insert the erepeat after INSN's target label. */
5584 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5586 emit_insn_after (x
, prev
);
5588 /* Insert the erepeat label. */
5589 newlast
= (mep_insert_repeat_label_last
5590 (insn
, l
, !simplejump_p (insn
), false));
5591 if (simplejump_p (insn
))
5593 emit_insn_before (gen_erepeat_end (), insn
);
5598 mep_invert_branch (insn
, newlast
);
5599 emit_insn_after (gen_erepeat_end (), newlast
);
5606 /* A label is OK if there is exactly one user, and we
5607 can find that user before the next label. */
5610 if (LABEL_NUSES (prev
) == 1)
5612 for (user
= PREV_INSN (prev
);
5613 user
&& (INSN_P (user
) || NOTE_P (user
));
5614 user
= PREV_INSN (user
))
5615 if (JUMP_P (user
) && JUMP_LABEL (user
) == prev
)
5617 safe
= INSN_UID (user
);
5624 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5625 safe
, INSN_UID (prev
));
5635 fprintf (dump_file
, "\n==============================\n");
5638 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5639 always do this on its own. */
5642 mep_jmp_return_reorg (rtx_insn
*insns
)
5644 rtx_insn
*insn
, *label
, *ret
;
5647 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5648 if (simplejump_p (insn
))
5650 /* Find the fist real insn the jump jumps to. */
5651 label
= ret
= safe_as_a
<rtx_insn
*> (JUMP_LABEL (insn
));
5655 || GET_CODE (PATTERN (ret
)) == USE
))
5656 ret
= NEXT_INSN (ret
);
5660 /* Is it a return? */
5661 ret_code
= recog_memoized (ret
);
5662 if (ret_code
== CODE_FOR_return_internal
5663 || ret_code
== CODE_FOR_eh_return_internal
)
5665 /* It is. Replace the jump with a return. */
5666 LABEL_NUSES (label
) --;
5667 if (LABEL_NUSES (label
) == 0)
5668 delete_insn (label
);
5669 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5670 INSN_CODE (insn
) = -1;
5678 mep_reorg_addcombine (rtx_insn
*insns
)
5682 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5684 && INSN_CODE (i
) == CODE_FOR_addsi3
5685 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5686 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5687 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5688 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5692 && INSN_CODE (n
) == CODE_FOR_addsi3
5693 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5694 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5695 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5696 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5698 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5699 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5700 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5702 && ic
+ nc
> -32768)
5704 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5705 SET_NEXT_INSN (i
) = NEXT_INSN (n
);
5707 SET_PREV_INSN (NEXT_INSN (i
)) = i
;
5713 /* If this insn adjusts the stack, return the adjustment, else return
5716 add_sp_insn_p (rtx_insn
*insn
)
5720 if (! single_set (insn
))
5722 pat
= PATTERN (insn
);
5723 if (GET_CODE (SET_DEST (pat
)) != REG
)
5725 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5727 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5729 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5731 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5733 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5735 return INTVAL (XEXP (SET_SRC (pat
), 1));
5738 /* Check for trivial functions that set up an unneeded stack
5741 mep_reorg_noframe (rtx_insn
*insns
)
5743 rtx_insn
*start_frame_insn
;
5744 rtx_insn
*end_frame_insn
= 0;
5748 /* The first insn should be $sp = $sp + N */
5749 while (insns
&& ! INSN_P (insns
))
5750 insns
= NEXT_INSN (insns
);
5754 sp_adjust
= add_sp_insn_p (insns
);
5758 start_frame_insn
= insns
;
5759 sp
= SET_DEST (PATTERN (start_frame_insn
));
5761 insns
= next_real_insn (insns
);
5765 rtx_insn
*next
= next_real_insn (insns
);
5769 sp2
= add_sp_insn_p (insns
);
5774 end_frame_insn
= insns
;
5775 if (sp2
!= -sp_adjust
)
5778 else if (mep_mentioned_p (insns
, sp
, 0))
5780 else if (CALL_P (insns
))
5788 delete_insn (start_frame_insn
);
5789 delete_insn (end_frame_insn
);
5796 rtx_insn
*insns
= get_insns ();
5798 /* We require accurate REG_DEAD notes. */
5799 compute_bb_for_insn ();
5800 df_note_add_problem ();
5803 mep_reorg_addcombine (insns
);
5804 #if EXPERIMENTAL_REGMOVE_REORG
5805 /* VLIW packing has been done already, so we can't just delete things. */
5806 if (!mep_vliw_function_p (cfun
->decl
))
5807 mep_reorg_regmove (insns
);
5809 mep_jmp_return_reorg (insns
);
5810 mep_bundle_insns (insns
);
5811 mep_reorg_repeat (insns
);
5814 && !profile_arc_flag
5815 && TARGET_OPT_REPEAT
5816 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5817 mep_reorg_erepeat (insns
);
5819 /* This may delete *insns so make sure it's last. */
5820 mep_reorg_noframe (insns
);
5822 df_finish_pass (false);
5827 /*----------------------------------------------------------------------*/
5829 /*----------------------------------------------------------------------*/
5831 /* Element X gives the index into cgen_insns[] of the most general
5832 implementation of intrinsic X. Unimplemented intrinsics are
5834 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5836 /* Element X gives the index of another instruction that is mapped to
5837 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5840 Things are set up so that mep_intrinsic_chain[X] < X. */
/* NOTE(review): this chain is walked by mep_get_intrinsic_insn, which
   follows mep_intrinsic_chain[i] until it finds an implementation for
   which CGEN_ENABLE_INSN_P holds.  The table is filled in by
   mep_init_intrinsics from the entries of cgen_insns[].  */
5841 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5843 /* The bitmask for the current ISA. The ISA masks are declared
5845 unsigned int mep_selected_isa
;
5848 const char *config_name
;
5852 static struct mep_config mep_configs
[] = {
5853 #ifdef COPROC_SELECTION_TABLE
5854 COPROC_SELECTION_TABLE
,
5859 /* Initialize the global intrinsics variables above. */
5862 mep_init_intrinsics (void)
5866 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5867 mep_selected_isa
= mep_configs
[0].isa
;
5868 if (mep_config_string
!= 0)
5869 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5870 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5872 mep_selected_isa
= mep_configs
[i
].isa
;
5876 /* Assume all intrinsics are unavailable. */
5877 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5878 mep_intrinsic_insn
[i
] = -1;
5880 /* Build up the global intrinsic tables. */
5881 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5882 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5884 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5885 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5887 /* See whether we can directly move values between one coprocessor
5888 register and another. */
5889 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5890 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5891 mep_have_copro_copro_moves_p
= true;
5893 /* See whether we can directly move values between core and
5894 coprocessor registers. */
5895 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5896 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5898 mep_have_core_copro_moves_p
= 1;
5901 /* Declare all available intrinsic functions. Called once only. */
/* Cached tree nodes for the coprocessor builtin types.  They are
   created/assigned in mep_init_builtins (cp_data_bus_int picks
   long or long long depending on TARGET_64BIT_CR_REGS; the vector
   nodes come from build_vector_type / build_opaque_vector_type)
   and are returned by mep_cgen_regnum_to_type when mapping cgen
   regnum operand types to trees.  */
5903 static tree cp_data_bus_int_type_node
;
/* Opaque vector type (8 x QI) for cgen_regnum_operand_type_VECTOR.  */
5904 static tree opaque_vector_type_node
;
/* Signed vector types: 8 x QI, 4 x HI, 2 x SI.  */
5905 static tree v8qi_type_node
;
5906 static tree v4hi_type_node
;
5907 static tree v2si_type_node
;
/* Unsigned counterparts of the vector types above.  */
5908 static tree v8uqi_type_node
;
5909 static tree v4uhi_type_node
;
5910 static tree v2usi_type_node
;
5913 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5917 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5918 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5919 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5920 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5921 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5922 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5923 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5924 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5925 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5926 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5927 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5928 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5929 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5930 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5931 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5932 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5933 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5935 return void_type_node
;
5940 mep_init_builtins (void)
5944 if (TARGET_64BIT_CR_REGS
)
5945 cp_data_bus_int_type_node
= long_long_integer_type_node
;
5947 cp_data_bus_int_type_node
= long_integer_type_node
;
5949 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
5950 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
5951 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
5952 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
5953 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
5954 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
5955 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
5957 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node
);
5959 add_builtin_type ("cp_vector", opaque_vector_type_node
);
5961 add_builtin_type ("cp_v8qi", v8qi_type_node
);
5962 add_builtin_type ("cp_v4hi", v4hi_type_node
);
5963 add_builtin_type ("cp_v2si", v2si_type_node
);
5965 add_builtin_type ("cp_v8uqi", v8uqi_type_node
);
5966 add_builtin_type ("cp_v4uhi", v4uhi_type_node
);
5967 add_builtin_type ("cp_v2usi", v2usi_type_node
);
5969 /* Intrinsics like mep_cadd3 are implemented with two groups of
5970 instructions, one which uses UNSPECs and one which uses a specific
5971 rtl code such as PLUS. Instructions in the latter group belong
5972 to GROUP_KNOWN_CODE.
5974 In such cases, the intrinsic will have two entries in the global
5975 tables above. The unspec form is accessed using builtin functions
5976 while the specific form is accessed using the mep_* enum in
5979 The idea is that __cop arithmetic and builtin functions have
5980 different optimization requirements. If mep_cadd3() appears in
5981 the source code, the user will surely expect gcc to use cadd3
5982 rather than a work-alike such as add3. However, if the user
5983 just writes "a + b", where a or b are __cop variables, it is
5984 reasonable for gcc to choose a core instruction rather than
5985 cadd3 if it believes that is more optimal. */
5986 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5987 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
5988 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
5990 tree ret_type
= void_type_node
;
5993 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
5996 if (cgen_insns
[i
].cret_p
)
5997 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
5999 bi_type
= build_function_type_list (ret_type
, NULL_TREE
);
6000 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
6002 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
6006 /* Report the unavailability of the given intrinsic. */
6010 mep_intrinsic_unavailable (int intrinsic
)
6012 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
6014 if (already_reported_p
[intrinsic
])
6017 if (mep_intrinsic_insn
[intrinsic
] < 0)
6018 error ("coprocessor intrinsic %qs is not available in this configuration",
6019 cgen_intrinsics
[intrinsic
]);
6020 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6021 error ("%qs is not available in VLIW functions",
6022 cgen_intrinsics
[intrinsic
]);
6024 error ("%qs is not available in non-VLIW functions",
6025 cgen_intrinsics
[intrinsic
]);
6027 already_reported_p
[intrinsic
] = 1;
6032 /* See if any implementation of INTRINSIC is available to the
6033 current function. If so, store the most general implementation
6034 in *INSN_PTR and return true. Return false otherwise. */
6037 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6041 i
= mep_intrinsic_insn
[intrinsic
];
6042 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6043 i
= mep_intrinsic_chain
[i
];
6047 *insn_ptr
= &cgen_insns
[i
];
6054 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6055 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6056 try using a work-alike instead. In this case, the returned insn
6057 may have three operands rather than two. */
6060 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6064 if (intrinsic
== mep_cmov
)
6066 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6067 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6071 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6075 /* If ARG is a register operand that is the same size as MODE, convert it
6076 to MODE using a subreg. Otherwise return ARG as-is. */
6079 mep_convert_arg (machine_mode mode
, rtx arg
)
6081 if (GET_MODE (arg
) != mode
6082 && register_operand (arg
, VOIDmode
)
6083 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6084 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6089 /* Apply regnum conversions to ARG using the description given by REGNUM.
6090 Return the new argument on success and null on failure. */
6093 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6095 if (regnum
->count
== 0)
6098 if (GET_CODE (arg
) != CONST_INT
6100 || INTVAL (arg
) >= regnum
->count
)
6103 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6107 /* Try to make intrinsic argument ARG match the given operand.
6108 UNSIGNED_P is true if the argument has an unsigned type. */
6111 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6114 if (GET_CODE (arg
) == CONST_INT
)
6116 /* CONST_INTs can only be bound to integer operands. */
6117 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6120 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6121 /* These hold vector constants. */;
6122 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6124 /* If the argument is a different size from what's expected, we must
6125 have a value in the right mode class in order to convert it. */
6126 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6129 /* If the operand is an rvalue, promote or demote it to match the
6130 operand's size. This might not need extra instructions when
6131 ARG is a register value. */
6132 if (operand
->constraint
[0] != '=')
6133 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6136 /* If the operand is an lvalue, bind the operand to a new register.
6137 The caller will copy this value into ARG after the main
6138 instruction. By doing this always, we produce slightly more
6140 /* But not for control registers. */
6141 if (operand
->constraint
[0] == '='
6143 || ! (CONTROL_REGNO_P (REGNO (arg
))
6144 || CCR_REGNO_P (REGNO (arg
))
6145 || CR_REGNO_P (REGNO (arg
)))
6147 return gen_reg_rtx (operand
->mode
);
6149 /* Try simple mode punning. */
6150 arg
= mep_convert_arg (operand
->mode
, arg
);
6151 if (operand
->predicate (arg
, operand
->mode
))
6154 /* See if forcing the argument into a register will make it match. */
6155 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6156 arg
= force_reg (operand
->mode
, arg
);
6158 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6159 if (operand
->predicate (arg
, operand
->mode
))
6166 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6167 function FNNAME. OPERAND describes the operand to which ARGNUM
6171 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6172 int argnum
, tree fnname
)
6176 if (GET_CODE (arg
) == CONST_INT
)
6177 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6178 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6180 const struct cgen_immediate_predicate
*predicate
;
6181 HOST_WIDE_INT argval
;
6183 predicate
= &cgen_immediate_predicates
[i
];
6184 argval
= INTVAL (arg
);
6185 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6186 error ("argument %d of %qE must be in the range %d...%d",
6187 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6189 error ("argument %d of %qE must be a multiple of %d",
6190 argnum
, fnname
, predicate
->align
);
6194 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6198 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6199 rtx subtarget ATTRIBUTE_UNUSED
,
6200 machine_mode mode ATTRIBUTE_UNUSED
,
6201 int ignore ATTRIBUTE_UNUSED
)
6203 rtx pat
, op
[10], arg
[10];
6205 int opindex
, unsigned_p
[10];
6207 unsigned int n_args
;
6209 const struct cgen_insn
*cgen_insn
;
6210 const struct insn_data_d
*idata
;
6211 unsigned int first_arg
= 0;
6212 unsigned int builtin_n_args
;
6214 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6215 fnname
= DECL_NAME (fndecl
);
6217 /* Find out which instruction we should emit. Note that some coprocessor
6218 intrinsics may only be available in VLIW mode, or only in normal mode. */
6219 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6221 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6224 idata
= &insn_data
[cgen_insn
->icode
];
6226 builtin_n_args
= cgen_insn
->num_args
;
6228 if (cgen_insn
->cret_p
)
6230 if (cgen_insn
->cret_p
> 1)
6233 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6237 /* Evaluate each argument. */
6238 n_args
= call_expr_nargs (exp
);
6240 if (n_args
< builtin_n_args
)
6242 error ("too few arguments to %qE", fnname
);
6245 if (n_args
> builtin_n_args
)
6247 error ("too many arguments to %qE", fnname
);
6251 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6255 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6260 if (cgen_insn
->regnums
[a
].reference_p
)
6262 if (TREE_CODE (value
) != ADDR_EXPR
)
6265 error ("argument %d of %qE must be an address", a
+1, fnname
);
6268 value
= TREE_OPERAND (value
, 0);
6272 /* If the argument has been promoted to int, get the unpromoted
6273 value. This is necessary when sub-int memory values are bound
6274 to reference parameters. */
6275 if (TREE_CODE (value
) == NOP_EXPR
6276 && TREE_TYPE (value
) == integer_type_node
6277 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6278 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6279 < TYPE_PRECISION (TREE_TYPE (value
))))
6280 value
= TREE_OPERAND (value
, 0);
6282 /* If the argument has been promoted to double, get the unpromoted
6283 SFmode value. This is necessary for FMAX support, for example. */
6284 if (TREE_CODE (value
) == NOP_EXPR
6285 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6286 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6287 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6288 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6289 value
= TREE_OPERAND (value
, 0);
6291 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6292 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6293 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6294 if (cgen_insn
->regnums
[a
].reference_p
)
6296 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6297 machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6299 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6303 error ("argument %d of %qE must be in the range %d...%d",
6304 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6309 for (a
= 0; a
< first_arg
; a
++)
6311 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6314 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6317 /* Convert the arguments into a form suitable for the intrinsic.
6318 Report an error if this isn't possible. */
6319 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6321 a
= cgen_insn
->op_mapping
[opindex
];
6322 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6323 arg
[a
], unsigned_p
[a
]);
6324 if (op
[opindex
] == 0)
6326 mep_incompatible_arg (&idata
->operand
[opindex
],
6327 arg
[a
], a
+ 1 - first_arg
, fnname
);
6332 /* Emit the instruction. */
6333 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6334 op
[5], op
[6], op
[7], op
[8], op
[9]);
6336 if (GET_CODE (pat
) == SET
6337 && GET_CODE (SET_DEST (pat
)) == PC
6338 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6339 emit_jump_insn (pat
);
6343 /* Copy lvalues back to their final locations. */
6344 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6345 if (idata
->operand
[opindex
].constraint
[0] == '=')
6347 a
= cgen_insn
->op_mapping
[opindex
];
6350 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6351 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6352 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6356 /* First convert the operand to the right mode, then copy it
6357 into the destination. Doing the conversion as a separate
6358 step (rather than using convert_move) means that we can
6359 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6360 refer to the same register. */
6361 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6362 op
[opindex
], unsigned_p
[a
]);
6363 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6364 emit_move_insn (arg
[a
], op
[opindex
]);
6369 if (first_arg
> 0 && target
&& target
!= op
[0])
6371 emit_move_insn (target
, op
[0]);
6378 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED
)
6383 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6384 a global register. */
6387 global_reg_mentioned_p_1 (const_rtx x
)
6391 switch (GET_CODE (x
))
6394 if (REG_P (SUBREG_REG (x
)))
6396 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6397 && global_regs
[subreg_regno (x
)])
6405 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6410 /* A non-constant call might use a global register. */
6420 /* Returns nonzero if X mentions a global register. */
6423 global_reg_mentioned_p (rtx x
)
6429 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6431 x
= CALL_INSN_FUNCTION_USAGE (x
);
6439 subrtx_iterator::array_type array
;
6440 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6441 if (global_reg_mentioned_p_1 (*iter
))
6445 /* Scheduling hooks for VLIW mode.
6447 Conceptually this is very simple: we have a two-pack architecture
6448 that takes one core insn and one coprocessor insn to make up either
6449 a 32- or 64-bit instruction word (depending on the option bit set in
6450 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6451 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6452 and one 48-bit cop insn or two 32-bit core/cop insns.
6454 In practice, instruction selection will be a bear. Consider in
6455 VL64 mode the following insns
6460 these cannot pack, since the add is a 16-bit core insn and cmov
6461 is a 32-bit cop insn. However,
6466 packs just fine. For good VLIW code generation in VL64 mode, we
6467 will have to have 32-bit alternatives for many of the common core
6468 insns. Not implemented. */
6471 mep_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
, int cost
)
6475 if (REG_NOTE_KIND (link
) != 0)
6477 /* See whether INSN and DEP_INSN are intrinsics that set the same
6478 hard register. If so, it is more important to free up DEP_INSN
6479 than it is to free up INSN.
6481 Note that intrinsics like mep_mulr are handled differently from
6482 the equivalent mep.md patterns. In mep.md, if we don't care
6483 about the value of $lo and $hi, the pattern will just clobber
6484 the registers, not set them. Since clobbers don't count as
6485 output dependencies, it is often possible to reorder two mulrs,
6488 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6489 so any pair of mep_mulr()s will be inter-dependent. We should
6490 therefore give the first mep_mulr() a higher priority. */
6491 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6492 && global_reg_mentioned_p (PATTERN (insn
))
6493 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6496 /* If the dependence is an anti or output dependence, assume it
6501 /* If we can't recognize the insns, we can't really do anything. */
6502 if (recog_memoized (dep_insn
) < 0)
6505 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6506 attribute instead. */
6509 cost_specified
= get_attr_latency (dep_insn
);
6510 if (cost_specified
!= 0)
6511 return cost_specified
;
6517 /* ??? We don't properly compute the length of a load/store insn,
6518 taking into account the addressing mode. */
6521 mep_issue_rate (void)
6523 return TARGET_IVC2
? 3 : 2;
6526 /* Return true if function DECL was declared with the vliw attribute. */
6529 mep_vliw_function_p (tree decl
)
6531 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6535 mep_find_ready_insn (rtx_insn
**ready
, int nready
, enum attr_slot slot
,
6540 for (i
= nready
- 1; i
>= 0; --i
)
6542 rtx_insn
*insn
= ready
[i
];
6543 if (recog_memoized (insn
) >= 0
6544 && get_attr_slot (insn
) == slot
6545 && get_attr_length (insn
) == length
)
6553 mep_move_ready_insn (rtx_insn
**ready
, int nready
, rtx_insn
*insn
)
6557 for (i
= 0; i
< nready
; ++i
)
6558 if (ready
[i
] == insn
)
6560 for (; i
< nready
- 1; ++i
)
6561 ready
[i
] = ready
[i
+ 1];
6570 mep_print_sched_insn (FILE *dump
, rtx_insn
*insn
)
6572 const char *slots
= "none";
6573 const char *name
= NULL
;
6577 if (GET_CODE (PATTERN (insn
)) == SET
6578 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6580 switch (get_attr_slots (insn
))
6582 case SLOTS_CORE
: slots
= "core"; break;
6583 case SLOTS_C3
: slots
= "c3"; break;
6584 case SLOTS_P0
: slots
= "p0"; break;
6585 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6586 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6587 case SLOTS_P0S
: slots
= "p0s"; break;
6588 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6589 case SLOTS_P1
: slots
= "p1"; break;
6591 sprintf(buf
, "%d", get_attr_slots (insn
));
6596 if (GET_CODE (PATTERN (insn
)) == USE
)
6599 code
= INSN_CODE (insn
);
6601 name
= get_insn_name (code
);
6606 "insn %4d %4d %8s %s\n",
6614 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6615 int sched_verbose ATTRIBUTE_UNUSED
, rtx_insn
**ready
,
6616 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6618 int nready
= *pnready
;
6619 rtx_insn
*core_insn
, *cop_insn
;
6622 if (dump
&& sched_verbose
> 1)
6624 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6625 for (i
=0; i
<nready
; i
++)
6626 mep_print_sched_insn (dump
, ready
[i
]);
6627 fprintf (dump
, "\n");
6630 if (!mep_vliw_function_p (cfun
->decl
))
6635 /* IVC2 uses a DFA to determine what's ready and what's not. */
6639 /* We can issue either a core or coprocessor instruction.
6640 Look for a matched pair of insns to reorder. If we don't
6641 find any, don't second-guess the scheduler's priorities. */
6643 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6644 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6645 TARGET_OPT_VL64
? 6 : 2)))
6647 else if (TARGET_OPT_VL64
6648 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6649 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6652 /* We didn't find a pair. Issue the single insn at the head
6653 of the ready list. */
6656 /* Reorder the two insns first. */
6657 mep_move_ready_insn (ready
, nready
, core_insn
);
6658 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6662 /* Return true if X contains a register that is set by insn PREV. */
6665 mep_store_find_set (const_rtx x
, const rtx_insn
*prev
)
6667 subrtx_iterator::array_type array
;
6668 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6669 if (REG_P (x
) && reg_set_p (x
, prev
))
6674 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6675 not the containing insn. */
6678 mep_store_data_bypass_1 (rtx_insn
*prev
, rtx pat
)
6680 /* Cope with intrinsics like swcpa. */
6681 if (GET_CODE (pat
) == PARALLEL
)
6685 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6686 if (mep_store_data_bypass_p (prev
,
6687 as_a
<rtx_insn
*> (XVECEXP (pat
, 0, i
))))
6693 /* Check for some sort of store. */
6694 if (GET_CODE (pat
) != SET
6695 || GET_CODE (SET_DEST (pat
)) != MEM
)
6698 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6699 The first operand to the unspec is the store data and the other operands
6700 are used to calculate the address. */
6701 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6706 src
= SET_SRC (pat
);
6707 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6708 if (mep_store_find_set (XVECEXP (src
, 0, i
), prev
))
6714 /* Otherwise just check that PREV doesn't modify any register mentioned
6715 in the memory destination. */
6716 return !mep_store_find_set (SET_DEST (pat
), prev
);
6719 /* Return true if INSN is a store instruction and if the store address
6720 has no true dependence on PREV. */
6723 mep_store_data_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6725 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6728 /* Return true if, apart from HI/LO, there are no true dependencies
6729 between multiplication instructions PREV and INSN. */
6732 mep_mul_hilo_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6736 pat
= PATTERN (insn
);
6737 if (GET_CODE (pat
) == PARALLEL
)
6738 pat
= XVECEXP (pat
, 0, 0);
6739 if (GET_CODE (pat
) != SET
)
6741 subrtx_iterator::array_type array
;
6742 FOR_EACH_SUBRTX (iter
, array
, SET_SRC (pat
), NONCONST
)
6744 const_rtx x
= *iter
;
6746 && REGNO (x
) != LO_REGNO
6747 && REGNO (x
) != HI_REGNO
6748 && reg_set_p (x
, prev
))
6754 /* Return true if INSN is an ldc instruction that issues to the
6755 MeP-h1 integer pipeline. This is true for instructions that
6756 read from PSW, LP, SAR, HI and LO. */
6759 mep_ipipe_ldc_p (rtx_insn
*insn
)
6763 pat
= PATTERN (insn
);
6765 /* Cope with intrinsics that set both a hard register and its shadow.
6766 The set of the hard register comes first. */
6767 if (GET_CODE (pat
) == PARALLEL
)
6768 pat
= XVECEXP (pat
, 0, 0);
6770 if (GET_CODE (pat
) == SET
)
6772 src
= SET_SRC (pat
);
6774 /* Cope with intrinsics. The first operand to the unspec is
6775 the source register. */
6776 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6777 src
= XVECEXP (src
, 0, 0);
6780 switch (REGNO (src
))
6793 /* Create a VLIW bundle from core instruction CORE and coprocessor
6794 instruction COP. COP always satisfies INSN_P, but CORE can be
6795 either a new pattern or an existing instruction.
6797 Emit the bundle in place of COP and return it. */
6800 mep_make_bundle (rtx core_insn_or_pat
, rtx_insn
*cop
)
6803 rtx_insn
*core_insn
;
6806 /* If CORE is an existing instruction, remove it, otherwise put
6807 the new pattern in an INSN harness. */
6808 if (INSN_P (core_insn_or_pat
))
6810 core_insn
= as_a
<rtx_insn
*> (core_insn_or_pat
);
6811 remove_insn (core_insn
);
6814 core_insn
= make_insn_raw (core_insn_or_pat
);
6816 /* Generate the bundle sequence and replace COP with it. */
6817 seq
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core_insn
, cop
));
6818 insn
= emit_insn_after (seq
, cop
);
6821 /* Set up the links of the insns inside the SEQUENCE. */
6822 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
6823 SET_NEXT_INSN (core_insn
) = cop
;
6824 SET_PREV_INSN (cop
) = core_insn
;
6825 SET_NEXT_INSN (cop
) = NEXT_INSN (insn
);
6827 /* Set the VLIW flag for the coprocessor instruction. */
6828 PUT_MODE (core_insn
, VOIDmode
);
6829 PUT_MODE (cop
, BImode
);
6831 /* Derive a location for the bundle. Individual instructions cannot
6832 have their own location because there can be no assembler labels
6833 between CORE_INSN and COP. */
6834 INSN_LOCATION (insn
) = INSN_LOCATION (INSN_LOCATION (core_insn
) ? core_insn
: cop
);
6835 INSN_LOCATION (core_insn
) = 0;
6836 INSN_LOCATION (cop
) = 0;
6841 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6844 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6846 rtx
* pinsn
= (rtx
*) data
;
6848 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6852 /* Return true if anything in insn X is (anti,output,true) dependent on
6853 anything in insn Y. */
6856 mep_insn_dependent_p (rtx x
, rtx y
)
6860 gcc_assert (INSN_P (x
));
6861 gcc_assert (INSN_P (y
));
6864 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6865 if (tmp
== NULL_RTX
)
6869 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6870 if (tmp
== NULL_RTX
)
6877 core_insn_p (rtx_insn
*insn
)
6879 if (GET_CODE (PATTERN (insn
)) == USE
)
6881 if (get_attr_slot (insn
) == SLOT_CORE
)
6886 /* Mark coprocessor instructions that can be bundled together with
6887 the immediately preceding core instruction. This is later used
6888 to emit the "+" that tells the assembler to create a VLIW insn.
6890 For unbundled insns, the assembler will automatically add coprocessor
6891 nops, and 16-bit core nops. Due to an apparent oversight in the
6892 spec, the assembler will _not_ automatically add 32-bit core nops,
6893 so we have to emit those here.
6895 Called from mep_insn_reorg. */
6898 mep_bundle_insns (rtx_insn
*insns
)
6900 rtx_insn
*insn
, *last
= NULL
, *first
= NULL
;
6901 int saw_scheduling
= 0;
6903 /* Only do bundling if we're in vliw mode. */
6904 if (!mep_vliw_function_p (cfun
->decl
))
6907 /* The first insn in a bundle are TImode, the remainder are
6908 VOIDmode. After this function, the first has VOIDmode and the
6909 rest have BImode. */
6911 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6913 /* First, move any NOTEs that are within a bundle, to the beginning
6915 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6917 if (NOTE_P (insn
) && first
)
6918 /* Don't clear FIRST. */;
6920 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
6923 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
6925 rtx_insn
*note
, *prev
;
6927 /* INSN is part of a bundle; FIRST is the first insn in that
6928 bundle. Move all intervening notes out of the bundle.
6929 In addition, since the debug pass may insert a label
6930 whenever the current line changes, set the location info
6931 for INSN to match FIRST. */
6933 INSN_LOCATION (insn
) = INSN_LOCATION (first
);
6935 note
= PREV_INSN (insn
);
6936 while (note
&& note
!= first
)
6938 prev
= PREV_INSN (note
);
6942 /* Remove NOTE from here... */
6943 SET_PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
6944 SET_NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
6945 /* ...and put it in here. */
6946 SET_NEXT_INSN (note
) = first
;
6947 SET_PREV_INSN (note
) = PREV_INSN (first
);
6948 SET_NEXT_INSN (PREV_INSN (note
)) = note
;
6949 SET_PREV_INSN (NEXT_INSN (note
)) = note
;
6956 else if (!NONJUMP_INSN_P (insn
))
6960 /* Now fix up the bundles. */
6961 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6966 if (!NONJUMP_INSN_P (insn
))
6972 /* If we're not optimizing enough, there won't be scheduling
6973 info. We detect that here. */
6974 if (GET_MODE (insn
) == TImode
)
6976 if (!saw_scheduling
)
6981 rtx_insn
*core_insn
= NULL
;
6983 /* IVC2 slots are scheduled by DFA, so we just accept
6984 whatever the scheduler gives us. However, we must make
6985 sure the core insn (if any) is the first in the bundle.
6986 The IVC2 assembler can insert whatever NOPs are needed,
6987 and allows a COP insn to be first. */
6989 if (NONJUMP_INSN_P (insn
)
6990 && GET_CODE (PATTERN (insn
)) != USE
6991 && GET_MODE (insn
) == TImode
)
6995 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
6996 && NONJUMP_INSN_P (NEXT_INSN (last
));
6997 last
= NEXT_INSN (last
))
6999 if (core_insn_p (last
))
7002 if (core_insn_p (last
))
7005 if (core_insn
&& core_insn
!= insn
)
7007 /* Swap core insn to first in the bundle. */
7009 /* Remove core insn. */
7010 if (PREV_INSN (core_insn
))
7011 SET_NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
7012 if (NEXT_INSN (core_insn
))
7013 SET_PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
7015 /* Re-insert core insn. */
7016 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
7017 SET_NEXT_INSN (core_insn
) = insn
;
7019 if (PREV_INSN (core_insn
))
7020 SET_NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7021 SET_PREV_INSN (insn
) = core_insn
;
7023 PUT_MODE (core_insn
, TImode
);
7024 PUT_MODE (insn
, VOIDmode
);
7028 /* The first insn has TImode, the rest have VOIDmode */
7029 if (GET_MODE (insn
) == TImode
)
7030 PUT_MODE (insn
, VOIDmode
);
7032 PUT_MODE (insn
, BImode
);
7036 PUT_MODE (insn
, VOIDmode
);
7037 if (recog_memoized (insn
) >= 0
7038 && get_attr_slot (insn
) == SLOT_COP
)
7042 || recog_memoized (last
) < 0
7043 || get_attr_slot (last
) != SLOT_CORE
7044 || (get_attr_length (insn
)
7045 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7046 || mep_insn_dependent_p (insn
, last
))
7048 switch (get_attr_length (insn
))
7053 insn
= mep_make_bundle (gen_nop (), insn
);
7056 if (TARGET_OPT_VL64
)
7057 insn
= mep_make_bundle (gen_nop32 (), insn
);
7060 if (TARGET_OPT_VL64
)
7061 error ("2 byte cop instructions are"
7062 " not allowed in 64-bit VLIW mode");
7064 insn
= mep_make_bundle (gen_nop (), insn
);
7067 error ("unexpected %d byte cop instruction",
7068 get_attr_length (insn
));
7073 insn
= mep_make_bundle (last
, insn
);
7081 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7082 Return true on success. This function can fail if the intrinsic
7083 is unavailable or if the operands don't satisfy their predicates. */
7086 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7088 const struct cgen_insn
*cgen_insn
;
7089 const struct insn_data_d
*idata
;
7093 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7096 idata
= &insn_data
[cgen_insn
->icode
];
7097 for (i
= 0; i
< idata
->n_operands
; i
++)
7099 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7100 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7104 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7105 newop
[3], newop
[4], newop
[5],
7106 newop
[6], newop
[7], newop
[8]));
7112 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7113 OPERANDS[0]. Report an error if the instruction could not
7114 be synthesized. OPERANDS[1] is a register_operand. For sign
7115 and zero extensions, it may be smaller than SImode. */
/* NOTE(review): the body of this function (original lines ~7120-7123)
   was lost in extraction -- only the signature survives below.  Recover
   the implementation from upstream gcc/config/mep/mep.c before relying
   on this text; do not guess at the missing statements.  */
7118 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7119 rtx
* operands ATTRIBUTE_UNUSED
)
7125 /* Likewise, but apply a binary operation to OPERANDS[1] and
7126 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7127 can be a general_operand.
7129 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7130 third operand. REG and REG3 take register operands only. */
/* NOTE(review): the body of this function (original lines ~7138-7141)
   was lost in extraction -- only the signature survives below.  Recover
   the implementation from upstream gcc/config/mep/mep.c before relying
   on this text; do not guess at the missing statements.  */
7133 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7134 int ATTRIBUTE_UNUSED immediate3
,
7135 int ATTRIBUTE_UNUSED reg
,
7136 int ATTRIBUTE_UNUSED reg3
,
7137 rtx
* operands ATTRIBUTE_UNUSED
)
7143 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
7144 int opno ATTRIBUTE_UNUSED
, int *total
,
7145 bool ATTRIBUTE_UNUSED speed_t
)
7150 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7152 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7159 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7163 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7165 : COSTS_N_INSNS (2));
7172 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
,
7173 machine_mode mode ATTRIBUTE_UNUSED
,
7174 addr_space_t as ATTRIBUTE_UNUSED
,
7175 bool ATTRIBUTE_UNUSED speed_p
)
7181 mep_asm_init_sections (void)
7184 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7185 "\t.section .based,\"aw\"");
7188 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7189 "\t.section .sbss,\"aw\"");
7192 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7193 "\t.section .sdata,\"aw\",@progbits");
7196 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7197 "\t.section .far,\"aw\"");
7200 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7201 "\t.section .farbss,\"aw\"");
7204 = get_unnamed_section (0, output_section_asm_op
,
7205 "\t.section .frodata,\"a\"");
7208 = get_unnamed_section (0, output_section_asm_op
,
7209 "\t.section .srodata,\"a\"");
7212 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7213 "\t.section .vtext,\"axv\"\n\t.vliw");
7216 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7217 "\t.section .vftext,\"axv\"\n\t.vliw");
7220 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7221 "\t.section .ftext,\"ax\"\n\t.core");
/* Initialize the GCC target structure.  Each pair below routes a
   target hook in TARGET_INITIALIZER to its MeP implementation.  */

/* Assembly output and attributes.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section

/* Builtins and scheduling.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder

/* Sections and symbol encoding.  */
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info

/* Calls, costs, and reorg.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield

/* Varargs and miscellaneous.  */
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7308 struct gcc_target targetm
= TARGET_INITIALIZER
;