1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "fold-const.h"
32 #include "stringpool.h"
33 #include "stor-layout.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "insn-flags.h"
40 #include "insn-attr.h"
52 #include "insn-codes.h"
56 #include "diagnostic-core.h"
58 #include "target-def.h"
59 #include "langhooks.h"
60 #include "dominance.h"
66 #include "cfgcleanup.h"
68 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-fold.h"
74 #include "gimple-expr.h"
82 /* Structure of this file:
84 + Command Line Option Support
85 + Pattern support - constraints, predicates, expanders
88 + Functions to save and restore machine-specific function data.
89 + Frame/Epilog/Prolog Related
91 + Function args in registers
92 + Handle pipeline hazards
95 + Machine-dependent Reorg
100 Symbols are encoded as @ <char> . <name> where <char> is one of these:
108 c - cb (control bus) */
/* Per-function machine-dependent state, GC-managed via GTY(()).
   NOTE(review): the extraction this chunk came from dropped the
   struct's braces and several members; the code lines below are kept
   verbatim -- confirm the full member list against the original file.  */
110 struct GTY(()) machine_function
/* Nonzero if this function needs a frame pointer.  */
112 int mep_frame_pointer_needed
;
/* Count of argument registers to be saved -- presumably for varargs
   spilling; TODO confirm against the prologue code.  */
115 int arg_regs_to_save
;
120 /* Records __builtin_return address. */
/* Per-hard-register frame save-slot offsets, indexed by regno.  */
124 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
/* Per-hard-register "was saved" flags, indexed by regno.  */
125 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
127 /* 2 if the current function has an interrupt attribute, 1 if not, 0
128 if unknown. This is here because resource.c uses EPILOGUE_USES
130 int interrupt_handler
;
132 /* Likewise, for disinterrupt attribute. */
133 int disable_interrupts
;
135 /* Number of doloop tags used so far. */
138 /* True if the last tag was allocated to a doloop_end. */
139 bool doloop_tag_from_end
;
141 /* True if reload changes $TP. */
142 bool reload_changes_tp
;
144 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
145 We only set this if the function is an interrupt handler. */
146 int asms_without_operands
;
149 #define MEP_CONTROL_REG(x) \
150 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
152 static GTY(()) section
* based_section
;
153 static GTY(()) section
* tinybss_section
;
154 static GTY(()) section
* far_section
;
155 static GTY(()) section
* farbss_section
;
156 static GTY(()) section
* frodata_section
;
157 static GTY(()) section
* srodata_section
;
159 static GTY(()) section
* vtext_section
;
160 static GTY(()) section
* vftext_section
;
161 static GTY(()) section
* ftext_section
;
163 static void mep_set_leaf_registers (int);
164 static bool symbol_p (rtx
);
165 static bool symbolref_p (rtx
);
166 static void encode_pattern_1 (rtx
);
167 static void encode_pattern (rtx
);
168 static bool const_in_range (rtx
, int, int);
169 static void mep_rewrite_mult (rtx_insn
*, rtx
);
170 static void mep_rewrite_mulsi3 (rtx_insn
*, rtx
, rtx
, rtx
);
171 static void mep_rewrite_maddsi3 (rtx_insn
*, rtx
, rtx
, rtx
, rtx
);
172 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx_insn
*, bool);
173 static bool move_needs_splitting (rtx
, rtx
, machine_mode
);
174 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
175 static bool mep_nongeneral_reg (rtx
);
176 static bool mep_general_copro_reg (rtx
);
177 static bool mep_nonregister (rtx
);
178 static struct machine_function
* mep_init_machine_status (void);
179 static rtx
mep_tp_rtx (void);
180 static rtx
mep_gp_rtx (void);
181 static bool mep_interrupt_p (void);
182 static bool mep_disinterrupt_p (void);
183 static bool mep_reg_set_p (rtx
, rtx
);
184 static bool mep_reg_set_in_function (int);
185 static bool mep_interrupt_saved_reg (int);
186 static bool mep_call_saves_register (int);
187 static rtx_insn
*F (rtx_insn
*);
188 static void add_constant (int, int, int, int);
189 static rtx_insn
*maybe_dead_move (rtx
, rtx
, bool);
190 static void mep_reload_pointer (int, const char *);
191 static void mep_start_function (FILE *, HOST_WIDE_INT
);
192 static bool mep_function_ok_for_sibcall (tree
, tree
);
193 static int unique_bit_in (HOST_WIDE_INT
);
194 static int bit_size_for_clip (HOST_WIDE_INT
);
195 static int bytesize (const_tree
, machine_mode
);
196 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
197 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
198 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
199 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
200 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
201 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
202 static bool mep_function_attribute_inlinable_p (const_tree
);
203 static bool mep_can_inline_p (tree
, tree
);
204 static bool mep_lookup_pragma_disinterrupt (const char *);
205 static int mep_multiple_address_regions (tree
, bool);
206 static int mep_attrlist_to_encoding (tree
, tree
);
207 static void mep_insert_attributes (tree
, tree
*);
208 static void mep_encode_section_info (tree
, rtx
, int);
209 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
210 static void mep_unique_section (tree
, int);
211 static unsigned int mep_section_type_flags (tree
, const char *, int);
212 static void mep_asm_named_section (const char *, unsigned int, tree
);
213 static bool mep_mentioned_p (rtx
, rtx
, int);
214 static void mep_reorg_regmove (rtx_insn
*);
215 static rtx_insn
*mep_insert_repeat_label_last (rtx_insn
*, rtx_code_label
*,
217 static void mep_reorg_repeat (rtx_insn
*);
218 static bool mep_invertable_branch_p (rtx_insn
*);
219 static void mep_invert_branch (rtx_insn
*, rtx_insn
*);
220 static void mep_reorg_erepeat (rtx_insn
*);
221 static void mep_jmp_return_reorg (rtx_insn
*);
222 static void mep_reorg_addcombine (rtx_insn
*);
223 static void mep_reorg (void);
224 static void mep_init_intrinsics (void);
225 static void mep_init_builtins (void);
226 static void mep_intrinsic_unavailable (int);
227 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
228 static bool mep_get_move_insn (int, const struct cgen_insn
**);
229 static rtx
mep_convert_arg (machine_mode
, rtx
);
230 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
231 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
232 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
233 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
234 static int mep_adjust_cost (rtx_insn
*, rtx
, rtx_insn
*, int);
235 static int mep_issue_rate (void);
236 static rtx_insn
*mep_find_ready_insn (rtx_insn
**, int, enum attr_slot
, int);
237 static void mep_move_ready_insn (rtx_insn
**, int, rtx_insn
*);
238 static int mep_sched_reorder (FILE *, int, rtx_insn
**, int *, int);
239 static rtx_insn
*mep_make_bundle (rtx
, rtx_insn
*);
240 static void mep_bundle_insns (rtx_insn
*);
241 static bool mep_rtx_cost (rtx
, int, int, int, int *, bool);
242 static int mep_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
243 static void mep_setup_incoming_varargs (cumulative_args_t
, machine_mode
,
245 static bool mep_pass_by_reference (cumulative_args_t cum
, machine_mode
,
247 static rtx
mep_function_arg (cumulative_args_t
, machine_mode
,
249 static void mep_function_arg_advance (cumulative_args_t
, machine_mode
,
251 static bool mep_vector_mode_supported_p (machine_mode
);
252 static rtx
mep_allocate_initial_value (rtx
);
253 static void mep_asm_init_sections (void);
254 static int mep_comp_type_attributes (const_tree
, const_tree
);
255 static bool mep_narrow_volatile_bitfield (void);
256 static rtx
mep_expand_builtin_saveregs (void);
257 static tree
mep_build_builtin_va_list (void);
258 static void mep_expand_va_start (tree
, rtx
);
259 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
260 static bool mep_can_eliminate (const int, const int);
261 static void mep_conditional_register_usage (void);
262 static void mep_trampoline_init (rtx
, tree
, rtx
);
264 #define WANT_GCC_DEFINITIONS
265 #include "mep-intrin.h"
266 #undef WANT_GCC_DEFINITIONS
269 /* Command Line Option Support. */
271 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
273 /* True if we can use cmov instructions to move values back and forth
274 between core and coprocessor registers. */
275 bool mep_have_core_copro_moves_p
;
277 /* True if we can use cmov instructions (or a work-alike) to move
278 values between coprocessor registers. */
279 bool mep_have_copro_copro_moves_p
;
281 /* A table of all coprocessor instructions that can act like
282 a coprocessor-to-coprocessor cmov. */
283 static const int mep_cmov_insns
[] = {
298 mep_set_leaf_registers (int enable
)
302 if (mep_leaf_registers
[0] != enable
)
303 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
304 mep_leaf_registers
[i
] = enable
;
308 mep_conditional_register_usage (void)
312 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
314 fixed_regs
[HI_REGNO
] = 1;
315 fixed_regs
[LO_REGNO
] = 1;
316 call_used_regs
[HI_REGNO
] = 1;
317 call_used_regs
[LO_REGNO
] = 1;
320 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
325 mep_option_override (void)
329 cl_deferred_option
*opt
;
330 vec
<cl_deferred_option
> *v
= (vec
<cl_deferred_option
> *) mep_deferred_options
;
333 FOR_EACH_VEC_ELT (*v
, i
, opt
)
335 switch (opt
->opt_index
)
338 for (j
= 0; j
< 32; j
++)
339 fixed_regs
[j
+ 48] = 0;
340 for (j
= 0; j
< 32; j
++)
341 call_used_regs
[j
+ 48] = 1;
342 for (j
= 6; j
< 8; j
++)
343 call_used_regs
[j
+ 48] = 0;
345 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
380 warning (OPT_fpic
, "-fpic is not supported");
382 warning (OPT_fPIC
, "-fPIC is not supported");
383 if (TARGET_S
&& TARGET_M
)
384 error ("only one of -ms and -mm may be given");
385 if (TARGET_S
&& TARGET_L
)
386 error ("only one of -ms and -ml may be given");
387 if (TARGET_M
&& TARGET_L
)
388 error ("only one of -mm and -ml may be given");
389 if (TARGET_S
&& global_options_set
.x_mep_tiny_cutoff
)
390 error ("only one of -ms and -mtiny= may be given");
391 if (TARGET_M
&& global_options_set
.x_mep_tiny_cutoff
)
392 error ("only one of -mm and -mtiny= may be given");
393 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
394 warning (0, "-mclip currently has no effect without -mminmax");
396 if (mep_const_section
)
398 if (strcmp (mep_const_section
, "tiny") != 0
399 && strcmp (mep_const_section
, "near") != 0
400 && strcmp (mep_const_section
, "far") != 0)
401 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
405 mep_tiny_cutoff
= 65536;
408 if (TARGET_L
&& ! global_options_set
.x_mep_tiny_cutoff
)
411 if (TARGET_64BIT_CR_REGS
)
412 flag_split_wide_types
= 0;
414 init_machine_status
= mep_init_machine_status
;
415 mep_init_intrinsics ();
418 /* Pattern Support - constraints, predicates, expanders. */
420 /* MEP has very few instructions that can refer to the span of
421 addresses used by symbols, so it's common to check for them. */
426 int c
= GET_CODE (x
);
428 return (c
== CONST_INT
438 if (GET_CODE (x
) != MEM
)
441 c
= GET_CODE (XEXP (x
, 0));
442 return (c
== CONST_INT
447 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
449 #define GEN_REG(R, STRICT) \
452 && ((R) == ARG_POINTER_REGNUM \
453 || (R) >= FIRST_PSEUDO_REGISTER)))
455 static char pattern
[12], *patternp
;
456 static GTY(()) rtx patternr
[12];
457 #define RTX_IS(x) (strcmp (pattern, x) == 0)
460 encode_pattern_1 (rtx x
)
464 if (patternp
== pattern
+ sizeof (pattern
) - 2)
470 patternr
[patternp
-pattern
] = x
;
472 switch (GET_CODE (x
))
480 encode_pattern_1 (XEXP(x
, 0));
484 encode_pattern_1 (XEXP(x
, 0));
485 encode_pattern_1 (XEXP(x
, 1));
489 encode_pattern_1 (XEXP(x
, 0));
490 encode_pattern_1 (XEXP(x
, 1));
494 encode_pattern_1 (XEXP(x
, 0));
508 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
509 for (i
=0; i
<XVECLEN (x
, 0); i
++)
510 encode_pattern_1 (XVECEXP (x
, 0, i
));
518 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
527 encode_pattern (rtx x
)
530 encode_pattern_1 (x
);
535 mep_section_tag (rtx x
)
541 switch (GET_CODE (x
))
548 x
= XVECEXP (x
, 0, 0);
551 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
560 if (GET_CODE (x
) != SYMBOL_REF
)
563 if (name
[0] == '@' && name
[2] == '.')
565 if (name
[1] == 'i' || name
[1] == 'I')
568 return 'f'; /* near */
569 return 'n'; /* far */
577 mep_regno_reg_class (int regno
)
581 case SP_REGNO
: return SP_REGS
;
582 case TP_REGNO
: return TP_REGS
;
583 case GP_REGNO
: return GP_REGS
;
584 case 0: return R0_REGS
;
585 case HI_REGNO
: return HI_REGS
;
586 case LO_REGNO
: return LO_REGS
;
587 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
590 if (GR_REGNO_P (regno
))
591 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
592 if (CONTROL_REGNO_P (regno
))
595 if (CR_REGNO_P (regno
))
599 /* Search for the register amongst user-defined subclasses of
600 the coprocessor registers. */
601 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
603 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
605 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
607 enum reg_class sub
= reg_class_subclasses
[i
][j
];
609 if (sub
== LIM_REG_CLASSES
)
611 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
616 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
619 if (CCR_REGNO_P (regno
))
622 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
627 const_in_range (rtx x
, int minv
, int maxv
)
629 return (GET_CODE (x
) == CONST_INT
630 && INTVAL (x
) >= minv
631 && INTVAL (x
) <= maxv
);
634 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
635 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
636 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
637 at the end of the insn stream. */
640 mep_mulr_source (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
642 if (rtx_equal_p (dest
, src1
))
644 else if (rtx_equal_p (dest
, src2
))
649 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
651 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
656 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
657 Change the last element of PATTERN from (clobber (scratch:SI))
658 to (clobber (reg:SI HI_REGNO)). */
661 mep_rewrite_mult (rtx_insn
*insn
, rtx pattern
)
665 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
666 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
667 PATTERN (insn
) = pattern
;
668 INSN_CODE (insn
) = -1;
671 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
672 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
673 store the result in DEST if nonnull. */
676 mep_rewrite_mulsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
680 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
682 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
683 mep_mulr_source (insn
, dest
, src1
, src2
));
685 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
686 mep_rewrite_mult (insn
, pattern
);
689 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
690 SRC3 into $lo, then use either madd or maddr. The move into $lo will
691 be deleted by a peephole2 if SRC3 is already in $lo. */
694 mep_rewrite_maddsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
698 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
699 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
701 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
702 mep_mulr_source (insn
, dest
, src1
, src2
),
705 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
706 mep_rewrite_mult (insn
, pattern
);
709 /* Return true if $lo has the same value as integer register GPR when
710 instruction INSN is reached. If necessary, rewrite the instruction
711 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
712 rtx for (reg:SI LO_REGNO).
714 This function is intended to be used by the peephole2 pass. Since
715 that pass goes from the end of a basic block to the beginning, and
716 propagates liveness information on the way, there is no need to
717 update register notes here.
719 If GPR_DEAD_P is true on entry, and this function returns true,
720 then the caller will replace _every_ use of GPR in and after INSN
721 with LO. This means that if the instruction that sets $lo is a
722 mulr- or maddr-type instruction, we can rewrite it to use mul or
723 madd instead. In combination with the copy progagation pass,
724 this allows us to replace sequences like:
733 if GPR is no longer used. */
736 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
740 insn
= PREV_INSN (insn
);
742 switch (recog_memoized (insn
))
744 case CODE_FOR_mulsi3_1
:
746 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
748 mep_rewrite_mulsi3 (insn
,
749 gpr_dead_p
? NULL
: recog_data
.operand
[0],
750 recog_data
.operand
[1],
751 recog_data
.operand
[2]);
756 case CODE_FOR_maddsi3
:
758 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
760 mep_rewrite_maddsi3 (insn
,
761 gpr_dead_p
? NULL
: recog_data
.operand
[0],
762 recog_data
.operand
[1],
763 recog_data
.operand
[2],
764 recog_data
.operand
[3]);
769 case CODE_FOR_mulsi3r
:
770 case CODE_FOR_maddsi3r
:
772 return rtx_equal_p (recog_data
.operand
[1], gpr
);
775 if (reg_set_p (lo
, insn
)
776 || reg_set_p (gpr
, insn
)
777 || volatile_insn_p (PATTERN (insn
)))
780 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
785 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
789 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
792 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
794 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
799 /* Return true if SET can be turned into a post-modify load or store
800 that adds OFFSET to GPR. In other words, return true if SET can be
803 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
805 It's OK to change SET to an equivalent operation in order to
809 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
812 unsigned int reg_bytes
, mem_bytes
;
813 machine_mode reg_mode
, mem_mode
;
815 /* Only simple SETs can be converted. */
816 if (GET_CODE (set
) != SET
)
819 /* Point REG to what we hope will be the register side of the set and
820 MEM to what we hope will be the memory side. */
821 if (GET_CODE (SET_DEST (set
)) == MEM
)
823 mem
= &SET_DEST (set
);
824 reg
= &SET_SRC (set
);
828 reg
= &SET_DEST (set
);
829 mem
= &SET_SRC (set
);
830 if (GET_CODE (*mem
) == SIGN_EXTEND
)
831 mem
= &XEXP (*mem
, 0);
834 /* Check that *REG is a suitable coprocessor register. */
835 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
838 /* Check that *MEM is a suitable memory reference. */
839 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
842 /* Get the number of bytes in each operand. */
843 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
844 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
846 /* Check that OFFSET is suitably aligned. */
847 if (INTVAL (offset
) & (mem_bytes
- 1))
850 /* Convert *MEM to a normal integer mode. */
851 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
852 *mem
= change_address (*mem
, mem_mode
, NULL
);
854 /* Adjust *REG as well. */
855 *reg
= shallow_copy_rtx (*reg
);
856 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
858 /* SET is a subword load. Convert it to an explicit extension. */
859 PUT_MODE (*reg
, SImode
);
860 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
864 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
865 PUT_MODE (*reg
, reg_mode
);
870 /* Return the effect of frame-related instruction INSN. */
873 mep_frame_expr (rtx_insn
*insn
)
877 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
878 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
879 RTX_FRAME_RELATED_P (expr
) = 1;
883 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
884 new pattern in INSN1; INSN2 will be deleted by the caller. */
887 mep_make_parallel (rtx_insn
*insn1
, rtx_insn
*insn2
)
891 if (RTX_FRAME_RELATED_P (insn2
))
893 expr
= mep_frame_expr (insn2
);
894 if (RTX_FRAME_RELATED_P (insn1
))
895 expr
= gen_rtx_SEQUENCE (VOIDmode
,
896 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
897 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
898 RTX_FRAME_RELATED_P (insn1
) = 1;
901 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
902 gen_rtvec (2, PATTERN (insn1
),
904 INSN_CODE (insn1
) = -1;
907 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
908 the basic block to see if any previous load or store instruction can
909 be persuaded to do SET_INSN as a side-effect. Return true if so. */
912 mep_use_post_modify_p_1 (rtx_insn
*set_insn
, rtx reg
, rtx offset
)
919 insn
= PREV_INSN (insn
);
922 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
924 mep_make_parallel (insn
, set_insn
);
928 if (reg_set_p (reg
, insn
)
929 || reg_referenced_p (reg
, PATTERN (insn
))
930 || volatile_insn_p (PATTERN (insn
)))
934 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
938 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
941 mep_use_post_modify_p (rtx_insn
*insn
, rtx reg
, rtx offset
)
943 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
949 mep_allow_clip (rtx ux
, rtx lx
, int s
)
951 HOST_WIDE_INT u
= INTVAL (ux
);
952 HOST_WIDE_INT l
= INTVAL (lx
);
955 if (!TARGET_OPT_CLIP
)
960 for (i
= 0; i
< 30; i
++)
961 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
962 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
970 for (i
= 0; i
< 30; i
++)
971 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
978 mep_bit_position_p (rtx x
, bool looking_for
)
980 if (GET_CODE (x
) != CONST_INT
)
982 switch ((int) INTVAL(x
) & 0xff)
984 case 0x01: case 0x02: case 0x04: case 0x08:
985 case 0x10: case 0x20: case 0x40: case 0x80:
987 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
988 case 0xef: case 0xdf: case 0xbf: case 0x7f:
995 move_needs_splitting (rtx dest
, rtx src
,
996 machine_mode mode ATTRIBUTE_UNUSED
)
998 int s
= mep_section_tag (src
);
1002 if (GET_CODE (src
) == CONST
1003 || GET_CODE (src
) == MEM
)
1004 src
= XEXP (src
, 0);
1005 else if (GET_CODE (src
) == SYMBOL_REF
1006 || GET_CODE (src
) == LABEL_REF
1007 || GET_CODE (src
) == PLUS
)
1013 || (GET_CODE (src
) == PLUS
1014 && GET_CODE (XEXP (src
, 1)) == CONST_INT
1015 && (INTVAL (XEXP (src
, 1)) < -65536
1016 || INTVAL (XEXP (src
, 1)) > 0xffffff))
1017 || (GET_CODE (dest
) == REG
1018 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1024 mep_split_mov (rtx
*operands
, int symbolic
)
1028 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1033 if (GET_CODE (operands
[1]) != CONST_INT
)
1036 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1037 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1038 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1041 if (((!reload_completed
&& !reload_in_progress
)
1042 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1043 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1049 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1050 it to one specific value. So the insn chosen depends on whether
1051 the source and destination modes match. */
1054 mep_vliw_mode_match (rtx tgt
)
1056 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1057 bool tgt_vliw
= INTVAL (tgt
);
1059 return src_vliw
== tgt_vliw
;
1062 /* Like the above, but also test for near/far mismatches. */
1065 mep_vliw_jmp_match (rtx tgt
)
1067 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1068 bool tgt_vliw
= INTVAL (tgt
);
1070 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1073 return src_vliw
== tgt_vliw
;
1077 mep_multi_slot (rtx_insn
*x
)
1079 return get_attr_slot (x
) == SLOT_MULTI
;
1082 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1085 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1087 /* We can't convert symbol values to gp- or tp-rel values after
1088 reload, as reload might have used $gp or $tp for other
1090 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1092 char e
= mep_section_tag (x
);
1093 return (e
!= 't' && e
!= 'b');
1098 /* Be careful not to use macros that need to be compiled one way for
1099 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1102 mep_legitimate_address (machine_mode mode
, rtx x
, int strict
)
1106 #define DEBUG_LEGIT 0
1108 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1112 if (GET_CODE (x
) == LO_SUM
1113 && GET_CODE (XEXP (x
, 0)) == REG
1114 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1115 && CONSTANT_P (XEXP (x
, 1)))
1117 if (GET_MODE_SIZE (mode
) > 4)
1119 /* We will end up splitting this, and lo_sums are not
1120 offsettable for us. */
1122 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1127 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1132 if (GET_CODE (x
) == REG
1133 && GEN_REG (REGNO (x
), strict
))
1136 fprintf (stderr
, " - yup, [reg]\n");
1141 if (GET_CODE (x
) == PLUS
1142 && GET_CODE (XEXP (x
, 0)) == REG
1143 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1144 && const_in_range (XEXP (x
, 1), -32768, 32767))
1147 fprintf (stderr
, " - yup, [reg+const]\n");
1152 if (GET_CODE (x
) == PLUS
1153 && GET_CODE (XEXP (x
, 0)) == REG
1154 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1155 && GET_CODE (XEXP (x
, 1)) == CONST
1156 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1157 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1158 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1159 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1162 fprintf (stderr
, " - yup, [reg+unspec]\n");
1167 the_tag
= mep_section_tag (x
);
1172 fprintf (stderr
, " - nope, [far]\n");
1177 if (mode
== VOIDmode
1178 && GET_CODE (x
) == SYMBOL_REF
)
1181 fprintf (stderr
, " - yup, call [symbol]\n");
1186 if ((mode
== SImode
|| mode
== SFmode
)
1188 && mep_legitimate_constant_p (mode
, x
)
1189 && the_tag
!= 't' && the_tag
!= 'b')
1191 if (GET_CODE (x
) != CONST_INT
1192 || (INTVAL (x
) <= 0xfffff
1194 && (INTVAL (x
) % 4) == 0))
1197 fprintf (stderr
, " - yup, [const]\n");
1204 fprintf (stderr
, " - nope.\n");
1210 mep_legitimize_reload_address (rtx
*x
, machine_mode mode
, int opnum
,
1212 int ind_levels ATTRIBUTE_UNUSED
)
1214 enum reload_type type
= (enum reload_type
) type_i
;
1216 if (GET_CODE (*x
) == PLUS
1217 && GET_CODE (XEXP (*x
, 0)) == MEM
1218 && GET_CODE (XEXP (*x
, 1)) == REG
)
1220 /* GCC will by default copy the MEM into a REG, which results in
1221 an invalid address. For us, the best thing to do is move the
1222 whole expression to a REG. */
1223 push_reload (*x
, NULL_RTX
, x
, NULL
,
1224 GENERAL_REGS
, mode
, VOIDmode
,
1229 if (GET_CODE (*x
) == PLUS
1230 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1231 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1233 char e
= mep_section_tag (XEXP (*x
, 0));
1235 if (e
!= 't' && e
!= 'b')
1237 /* GCC thinks that (sym+const) is a valid address. Well,
1238 sometimes it is, this time it isn't. The best thing to
1239 do is reload the symbol to a register, since reg+int
1240 tends to work, and we can't just add the symbol and
1242 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1243 GENERAL_REGS
, mode
, VOIDmode
,
1252 mep_core_address_length (rtx_insn
*insn
, int opn
)
1254 rtx set
= single_set (insn
);
1255 rtx mem
= XEXP (set
, opn
);
1256 rtx other
= XEXP (set
, 1-opn
);
1257 rtx addr
= XEXP (mem
, 0);
1259 if (register_operand (addr
, Pmode
))
1261 if (GET_CODE (addr
) == PLUS
)
1263 rtx addend
= XEXP (addr
, 1);
1265 gcc_assert (REG_P (XEXP (addr
, 0)));
1267 switch (REGNO (XEXP (addr
, 0)))
1269 case STACK_POINTER_REGNUM
:
1270 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1271 && mep_imm7a4_operand (addend
, VOIDmode
))
1276 gcc_assert (REG_P (other
));
1278 if (REGNO (other
) >= 8)
1281 if (GET_CODE (addend
) == CONST
1282 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1283 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1286 if (GET_CODE (addend
) == CONST_INT
1287 && INTVAL (addend
) >= 0
1288 && INTVAL (addend
) <= 127
1289 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1299 mep_cop_address_length (rtx_insn
*insn
, int opn
)
1301 rtx set
= single_set (insn
);
1302 rtx mem
= XEXP (set
, opn
);
1303 rtx addr
= XEXP (mem
, 0);
1305 if (GET_CODE (mem
) != MEM
)
1307 if (register_operand (addr
, Pmode
))
1309 if (GET_CODE (addr
) == POST_INC
)
1315 #define DEBUG_EXPAND_MOV 0
1317 mep_expand_mov (rtx
*operands
, machine_mode mode
)
1322 int post_reload
= 0;
1324 tag
[0] = mep_section_tag (operands
[0]);
1325 tag
[1] = mep_section_tag (operands
[1]);
1327 if (!reload_in_progress
1328 && !reload_completed
1329 && GET_CODE (operands
[0]) != REG
1330 && GET_CODE (operands
[0]) != SUBREG
1331 && GET_CODE (operands
[1]) != REG
1332 && GET_CODE (operands
[1]) != SUBREG
)
1333 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1335 #if DEBUG_EXPAND_MOV
1336 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1337 reload_in_progress
|| reload_completed
);
1338 debug_rtx (operands
[0]);
1339 debug_rtx (operands
[1]);
1342 if (mode
== DImode
|| mode
== DFmode
)
1345 if (reload_in_progress
|| reload_completed
)
1349 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1350 cfun
->machine
->reload_changes_tp
= true;
1352 if (tag
[0] == 't' || tag
[1] == 't')
1354 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1355 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1358 if (tag
[0] == 'b' || tag
[1] == 'b')
1360 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1361 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1364 if (cfun
->machine
->reload_changes_tp
== true)
1371 if (symbol_p (operands
[1]))
1373 t
= mep_section_tag (operands
[1]);
1374 if (t
== 'b' || t
== 't')
1377 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1379 tpsym
= operands
[1];
1380 n
= gen_rtx_UNSPEC (mode
,
1381 gen_rtvec (1, operands
[1]),
1382 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1383 n
= gen_rtx_CONST (mode
, n
);
1385 else if (GET_CODE (operands
[1]) == CONST
1386 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1387 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1388 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1390 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1391 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1392 n
= gen_rtx_UNSPEC (mode
,
1393 gen_rtvec (1, tpsym
),
1394 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1395 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1396 n
= gen_rtx_CONST (mode
, n
);
1398 else if (GET_CODE (operands
[1]) == CONST
1399 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1403 error ("unusual TP-relative address");
1407 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1408 : mep_gp_rtx ()), n
);
1409 n
= emit_insn (gen_rtx_SET (operands
[0], n
));
1410 #if DEBUG_EXPAND_MOV
1411 fprintf(stderr
, "mep_expand_mov emitting ");
1418 for (i
=0; i
< 2; i
++)
1420 t
= mep_section_tag (operands
[i
]);
1421 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1426 sym
= XEXP (operands
[i
], 0);
1427 if (GET_CODE (sym
) == CONST
1428 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1429 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1442 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1443 n
= gen_rtx_CONST (Pmode
, n
);
1444 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1445 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1450 if ((GET_CODE (operands
[1]) != REG
1451 && MEP_CONTROL_REG (operands
[0]))
1452 || (GET_CODE (operands
[0]) != REG
1453 && MEP_CONTROL_REG (operands
[1])))
1456 #if DEBUG_EXPAND_MOV
1457 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1459 temp
= gen_reg_rtx (mode
);
1460 emit_move_insn (temp
, operands
[1]);
1464 if (symbolref_p (operands
[0])
1465 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1466 || (GET_MODE_SIZE (mode
) != 4)))
1470 gcc_assert (!reload_in_progress
&& !reload_completed
);
1472 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1473 operands
[0] = replace_equiv_address (operands
[0], temp
);
1474 emit_move_insn (operands
[0], operands
[1]);
1478 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1481 if (symbol_p (operands
[1])
1482 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1484 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1485 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1489 if (symbolref_p (operands
[1])
1490 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1494 if (reload_in_progress
|| reload_completed
)
1497 temp
= gen_reg_rtx (Pmode
);
1499 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1500 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1501 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1508 /* Cases where the pattern can't be made to use at all. */
1511 mep_mov_ok (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1515 #define DEBUG_MOV_OK 0
1517 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1518 mep_section_tag (operands
[1]));
1519 debug_rtx (operands
[0]);
1520 debug_rtx (operands
[1]);
1523 /* We want the movh patterns to get these. */
1524 if (GET_CODE (operands
[1]) == HIGH
)
1527 /* We can't store a register to a far variable without using a
1528 scratch register to hold the address. Using far variables should
1529 be split by mep_emit_mov anyway. */
1530 if (mep_section_tag (operands
[0]) == 'f'
1531 || mep_section_tag (operands
[1]) == 'f')
1534 fprintf (stderr
, " - no, f\n");
1538 i
= mep_section_tag (operands
[1]);
1539 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1540 /* These are supposed to be generated with adds of the appropriate
1541 register. During and after reload, however, we allow them to
1542 be accessed as normal symbols because adding a dependency on
1543 the base register now might cause problems. */
1546 fprintf (stderr
, " - no, bt\n");
1551 /* The only moves we can allow involve at least one general
1552 register, so require it. */
1553 for (i
= 0; i
< 2; i
++)
1555 /* Allow subregs too, before reload. */
1556 rtx x
= operands
[i
];
1558 if (GET_CODE (x
) == SUBREG
)
1560 if (GET_CODE (x
) == REG
1561 && ! MEP_CONTROL_REG (x
))
1564 fprintf (stderr
, " - ok\n");
1570 fprintf (stderr
, " - no, no gen reg\n");
1575 #define DEBUG_SPLIT_WIDE_MOVE 0
1577 mep_split_wide_move (rtx
*operands
, machine_mode mode
)
1581 #if DEBUG_SPLIT_WIDE_MOVE
1582 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1583 debug_rtx (operands
[0]);
1584 debug_rtx (operands
[1]);
1587 for (i
= 0; i
<= 1; i
++)
1589 rtx op
= operands
[i
], hi
, lo
;
1591 switch (GET_CODE (op
))
1595 unsigned int regno
= REGNO (op
);
1597 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1601 lo
= gen_rtx_REG (SImode
, regno
);
1603 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1604 gen_rtx_REG (DImode
, regno
),
1609 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1610 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1618 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1619 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1626 /* The high part of CR <- GPR moves must be done after the low part. */
1627 operands
[i
+ 4] = lo
;
1628 operands
[i
+ 2] = hi
;
1631 if (reg_mentioned_p (operands
[2], operands
[5])
1632 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1633 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1637 /* Overlapping register pairs -- make sure we don't
1638 early-clobber ourselves. */
1640 operands
[2] = operands
[4];
1643 operands
[3] = operands
[5];
1647 #if DEBUG_SPLIT_WIDE_MOVE
1648 fprintf(stderr
, "\033[34m");
1649 debug_rtx (operands
[2]);
1650 debug_rtx (operands
[3]);
1651 debug_rtx (operands
[4]);
1652 debug_rtx (operands
[5]);
1653 fprintf(stderr
, "\033[0m");
1657 /* Emit a setcc instruction in its entirity. */
1660 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1668 tmp
= op1
, op1
= op2
, op2
= tmp
;
1669 code
= swap_condition (code
);
1674 op1
= force_reg (SImode
, op1
);
1675 emit_insn (gen_rtx_SET (dest
, gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1679 if (op2
!= const0_rtx
)
1680 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1681 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1685 /* Branchful sequence:
1687 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1690 Branchless sequence:
1691 add3 tmp, op1, -op2 32-bit (or mov + sub)
1692 sltu3 tmp, tmp, 1 16-bit
1693 xor3 dest, tmp, 1 32-bit
1695 if (optimize_size
&& op2
!= const0_rtx
)
1698 if (op2
!= const0_rtx
)
1699 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1701 op2
= gen_reg_rtx (SImode
);
1702 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1704 emit_insn (gen_rtx_SET (dest
, gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1708 if (GET_CODE (op2
) != CONST_INT
1709 || INTVAL (op2
) == 0x7ffffff)
1711 op2
= GEN_INT (INTVAL (op2
) + 1);
1712 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1715 if (GET_CODE (op2
) != CONST_INT
1716 || INTVAL (op2
) == -1)
1718 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1719 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1722 if (GET_CODE (op2
) != CONST_INT
1723 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1725 op2
= GEN_INT (INTVAL (op2
) - 1);
1726 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1729 if (GET_CODE (op2
) != CONST_INT
1730 || op2
== const0_rtx
)
1732 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1733 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1741 mep_expand_setcc (rtx
*operands
)
1743 rtx dest
= operands
[0];
1744 enum rtx_code code
= GET_CODE (operands
[1]);
1745 rtx op0
= operands
[2];
1746 rtx op1
= operands
[3];
1748 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1752 mep_expand_cbranch (rtx
*operands
)
1754 enum rtx_code code
= GET_CODE (operands
[0]);
1755 rtx op0
= operands
[1];
1756 rtx op1
= operands
[2];
1763 if (mep_imm4_operand (op1
, SImode
))
1766 tmp
= gen_reg_rtx (SImode
);
1767 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1774 if (mep_imm4_operand (op1
, SImode
))
1777 tmp
= gen_reg_rtx (SImode
);
1778 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1787 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1788 op1
= force_reg (SImode
, op1
);
1793 if (GET_CODE (op1
) == CONST_INT
1794 && INTVAL (op1
) != 0x7fffffff)
1796 op1
= GEN_INT (INTVAL (op1
) + 1);
1797 code
= (code
== LE
? LT
: GE
);
1801 tmp
= gen_reg_rtx (SImode
);
1802 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1804 code
= (code
== LE
? EQ
: NE
);
1810 if (op1
== const1_rtx
)
1817 tmp
= gen_reg_rtx (SImode
);
1818 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1825 tmp
= gen_reg_rtx (SImode
);
1826 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1828 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1837 tmp
= gen_reg_rtx (SImode
);
1838 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1839 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1846 tmp
= gen_reg_rtx (SImode
);
1847 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1849 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1861 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1865 mep_emit_cbranch (rtx
*operands
, int ne
)
1867 if (GET_CODE (operands
[1]) == REG
)
1868 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1869 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1870 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1872 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1876 mep_expand_call (rtx
*operands
, int returns_value
)
1878 rtx addr
= operands
[returns_value
];
1879 rtx tp
= mep_tp_rtx ();
1880 rtx gp
= mep_gp_rtx ();
1882 gcc_assert (GET_CODE (addr
) == MEM
);
1884 addr
= XEXP (addr
, 0);
1886 if (! mep_call_address_operand (addr
, VOIDmode
))
1887 addr
= force_reg (SImode
, addr
);
1889 if (! operands
[returns_value
+2])
1890 operands
[returns_value
+2] = const0_rtx
;
1893 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1894 operands
[3], tp
, gp
));
1896 emit_call_insn (gen_call_internal (addr
, operands
[1],
1897 operands
[2], tp
, gp
));
1900 /* Aliasing Support. */
1902 /* If X is a machine specific address (i.e. a symbol or label being
1903 referenced as a displacement from the GOT implemented using an
1904 UNSPEC), then return the base term. Otherwise return X. */
1907 mep_find_base_term (rtx x
)
1912 if (GET_CODE (x
) != PLUS
)
1917 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1918 && base
== mep_tp_rtx ())
1920 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1921 && base
== mep_gp_rtx ())
1926 if (GET_CODE (term
) != CONST
)
1928 term
= XEXP (term
, 0);
1930 if (GET_CODE (term
) != UNSPEC
1931 || XINT (term
, 1) != unspec
)
1934 return XVECEXP (term
, 0, 0);
1937 /* Reload Support. */
1939 /* Return true if the registers in CLASS cannot represent the change from
1940 modes FROM to TO. */
1943 mep_cannot_change_mode_class (machine_mode from
, machine_mode to
,
1944 enum reg_class regclass
)
1949 /* 64-bit COP regs must remain 64-bit COP regs. */
1950 if (TARGET_64BIT_CR_REGS
1951 && (regclass
== CR_REGS
1952 || regclass
== LOADABLE_CR_REGS
)
1953 && (GET_MODE_SIZE (to
) < 8
1954 || GET_MODE_SIZE (from
) < 8))
1960 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1963 mep_general_reg (rtx x
)
1965 while (GET_CODE (x
) == SUBREG
)
1967 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
1971 mep_nongeneral_reg (rtx x
)
1973 while (GET_CODE (x
) == SUBREG
)
1975 return (GET_CODE (x
) == REG
1976 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
1980 mep_general_copro_reg (rtx x
)
1982 while (GET_CODE (x
) == SUBREG
)
1984 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
1988 mep_nonregister (rtx x
)
1990 while (GET_CODE (x
) == SUBREG
)
1992 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
1995 #define DEBUG_RELOAD 0
1997 /* Return the secondary reload class needed for moving value X to or
1998 from a register in coprocessor register class CLASS. */
2000 static enum reg_class
2001 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
2003 if (mep_general_reg (x
))
2004 /* We can do the move directly if mep_have_core_copro_moves_p,
2005 otherwise we need to go through memory. Either way, no secondary
2006 register is needed. */
2009 if (mep_general_copro_reg (x
))
2011 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2012 if (mep_have_copro_copro_moves_p
)
2015 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2016 if (mep_have_core_copro_moves_p
)
2017 return GENERAL_REGS
;
2019 /* Otherwise we need to do it through memory. No secondary
2020 register is needed. */
2024 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2025 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2026 /* X is a memory value that we can access directly. */
2029 /* We have to move X into a GPR first and then copy it to
2030 the coprocessor register. The move from the GPR to the
2031 coprocessor might be done directly or through memory,
2032 depending on mep_have_core_copro_moves_p. */
2033 return GENERAL_REGS
;
2036 /* Copying X to register in RCLASS. */
2039 mep_secondary_input_reload_class (enum reg_class rclass
,
2040 machine_mode mode ATTRIBUTE_UNUSED
,
2046 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2050 if (reg_class_subset_p (rclass
, CR_REGS
))
2051 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2052 else if (MEP_NONGENERAL_CLASS (rclass
)
2053 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2057 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2059 return (enum reg_class
) rv
;
2062 /* Copying register in RCLASS to X. */
2065 mep_secondary_output_reload_class (enum reg_class rclass
,
2066 machine_mode mode ATTRIBUTE_UNUSED
,
2072 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2076 if (reg_class_subset_p (rclass
, CR_REGS
))
2077 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2078 else if (MEP_NONGENERAL_CLASS (rclass
)
2079 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2083 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2086 return (enum reg_class
) rv
;
2089 /* Implement SECONDARY_MEMORY_NEEDED. */
2092 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2093 machine_mode mode ATTRIBUTE_UNUSED
)
2095 if (!mep_have_core_copro_moves_p
)
2097 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2098 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2100 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2101 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2103 if (!mep_have_copro_copro_moves_p
2104 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2105 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2112 mep_expand_reload (rtx
*operands
, machine_mode mode
)
2114 /* There are three cases for each direction:
2119 int s0
= mep_section_tag (operands
[0]) == 'f';
2120 int s1
= mep_section_tag (operands
[1]) == 'f';
2121 int c0
= mep_nongeneral_reg (operands
[0]);
2122 int c1
= mep_nongeneral_reg (operands
[1]);
2123 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2126 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2127 debug_rtx (operands
[0]);
2128 debug_rtx (operands
[1]);
2133 case 00: /* Don't know why this gets here. */
2134 case 02: /* general = far */
2135 emit_move_insn (operands
[0], operands
[1]);
2138 case 10: /* cr = mem */
2139 case 11: /* cr = cr */
2140 case 01: /* mem = cr */
2141 case 12: /* cr = far */
2142 emit_move_insn (operands
[2], operands
[1]);
2143 emit_move_insn (operands
[0], operands
[2]);
2146 case 20: /* far = general */
2147 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2148 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2151 case 21: /* far = cr */
2152 case 22: /* far = far */
2154 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2155 which
, mode_name
[mode
]);
2156 debug_rtx (operands
[0]);
2157 debug_rtx (operands
[1]);
2162 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2163 can be moved directly into registers 0 to 7, but not into the rest.
2164 If so, and if the required class includes registers 0 to 7, restrict
2165 it to those registers. */
2168 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2170 switch (GET_CODE (x
))
2173 if (INTVAL (x
) >= 0x10000
2174 && INTVAL (x
) < 0x01000000
2175 && (INTVAL (x
) & 0xffff) != 0
2176 && reg_class_subset_p (TPREL_REGS
, rclass
))
2177 rclass
= TPREL_REGS
;
2183 if (mep_section_tag (x
) != 'f'
2184 && reg_class_subset_p (TPREL_REGS
, rclass
))
2185 rclass
= TPREL_REGS
;
2194 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2195 moves, 4 for direct double-register moves, and 1000 for anything
2196 that requires a temporary register or temporary stack slot. */
2199 mep_register_move_cost (machine_mode mode
, enum reg_class from
, enum reg_class to
)
2201 if (mep_have_copro_copro_moves_p
2202 && reg_class_subset_p (from
, CR_REGS
)
2203 && reg_class_subset_p (to
, CR_REGS
))
2205 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2209 if (reg_class_subset_p (from
, CR_REGS
)
2210 && reg_class_subset_p (to
, CR_REGS
))
2212 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2216 if (reg_class_subset_p (from
, CR_REGS
)
2217 || reg_class_subset_p (to
, CR_REGS
))
2219 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2223 if (mep_secondary_memory_needed (from
, to
, mode
))
2225 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2228 if (GET_MODE_SIZE (mode
) > 4)
2235 /* Functions to save and restore machine-specific function data. */
2237 static struct machine_function
*
2238 mep_init_machine_status (void)
2240 return ggc_cleared_alloc
<machine_function
> ();
2244 mep_allocate_initial_value (rtx reg
)
2248 if (GET_CODE (reg
) != REG
)
2251 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2254 /* In interrupt functions, the "initial" values of $gp and $tp are
2255 provided by the prologue. They are not necessarily the same as
2256 the values that the caller was using. */
2257 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2258 if (mep_interrupt_p ())
2261 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2263 cfun
->machine
->reg_save_size
+= 4;
2264 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2267 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2268 return gen_rtx_MEM (SImode
, plus_constant (Pmode
, arg_pointer_rtx
, -rss
));
2272 mep_return_addr_rtx (int count
)
2277 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2283 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2289 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2293 mep_interrupt_p (void)
2295 if (cfun
->machine
->interrupt_handler
== 0)
2297 int interrupt_handler
2298 = (lookup_attribute ("interrupt",
2299 DECL_ATTRIBUTES (current_function_decl
))
2301 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2303 return cfun
->machine
->interrupt_handler
== 2;
2307 mep_disinterrupt_p (void)
2309 if (cfun
->machine
->disable_interrupts
== 0)
2311 int disable_interrupts
2312 = (lookup_attribute ("disinterrupt",
2313 DECL_ATTRIBUTES (current_function_decl
))
2315 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2317 return cfun
->machine
->disable_interrupts
== 2;
2321 /* Frame/Epilog/Prolog Related. */
2324 mep_reg_set_p (rtx reg
, rtx insn
)
2326 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2329 if (FIND_REG_INC_NOTE (insn
, reg
))
2331 insn
= PATTERN (insn
);
2334 if (GET_CODE (insn
) == SET
2335 && GET_CODE (XEXP (insn
, 0)) == REG
2336 && GET_CODE (XEXP (insn
, 1)) == REG
2337 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2340 return set_of (reg
, insn
) != NULL_RTX
;
2344 #define MEP_SAVES_UNKNOWN 0
2345 #define MEP_SAVES_YES 1
2346 #define MEP_SAVES_MAYBE 2
2347 #define MEP_SAVES_NO 3
2350 mep_reg_set_in_function (int regno
)
2355 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2358 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2361 push_topmost_sequence ();
2362 insn
= get_insns ();
2363 pop_topmost_sequence ();
2368 reg
= gen_rtx_REG (SImode
, regno
);
2370 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2371 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2377 mep_asm_without_operands_p (void)
2379 if (cfun
->machine
->asms_without_operands
== 0)
2383 push_topmost_sequence ();
2384 insn
= get_insns ();
2385 pop_topmost_sequence ();
2387 cfun
->machine
->asms_without_operands
= 1;
2391 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2393 cfun
->machine
->asms_without_operands
= 2;
2396 insn
= NEXT_INSN (insn
);
2400 return cfun
->machine
->asms_without_operands
== 2;
2403 /* Interrupt functions save/restore every call-preserved register, and
2404 any call-used register it uses (or all if it calls any function,
2405 since they may get clobbered there too). Here we check to see
2406 which call-used registers need saving. */
2408 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2409 && (r == FIRST_CCR_REGNO + 1 \
2410 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2411 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2414 mep_interrupt_saved_reg (int r
)
2416 if (!mep_interrupt_p ())
2418 if (r
== REGSAVE_CONTROL_TEMP
2419 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2421 if (mep_asm_without_operands_p ()
2423 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2424 || IVC2_ISAVED_REG (r
)))
2427 /* Function calls mean we need to save $lp. */
2428 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2430 if (!crtl
->is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2431 /* The interrupt handler might use these registers for repeat blocks,
2432 or it might call a function that does so. */
2433 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2435 if (crtl
->is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2437 /* Functions we call might clobber these. */
2438 if (call_used_regs
[r
] && !fixed_regs
[r
])
2440 /* Additional registers that need to be saved for IVC2. */
2441 if (IVC2_ISAVED_REG (r
))
2448 mep_call_saves_register (int r
)
2450 if (! cfun
->machine
->frame_locked
)
2452 int rv
= MEP_SAVES_NO
;
2454 if (cfun
->machine
->reg_save_slot
[r
])
2456 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2458 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2460 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2462 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2463 /* We need these to have stack slots so that they can be set during
2466 else if (mep_interrupt_saved_reg (r
))
2468 cfun
->machine
->reg_saved
[r
] = rv
;
2470 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2473 /* Return true if epilogue uses register REGNO. */
2476 mep_epilogue_uses (int regno
)
2478 /* Since $lp is a call-saved register, the generic code will normally
2479 mark it used in the epilogue if it needs to be saved and restored.
2480 However, when profiling is enabled, the profiling code will implicitly
2481 clobber $11. This case has to be handled specially both here and in
2482 mep_call_saves_register. */
2483 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2485 /* Interrupt functions save/restore pretty much everything. */
2486 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2490 mep_reg_size (int regno
)
2492 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2497 /* Worker function for TARGET_CAN_ELIMINATE. */
2500 mep_can_eliminate (const int from
, const int to
)
2502 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2503 ? ! frame_pointer_needed
2508 mep_elimination_offset (int from
, int to
)
2512 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2515 if (!cfun
->machine
->frame_locked
)
2516 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2518 /* We don't count arg_regs_to_save in the arg pointer offset, because
2519 gcc thinks the arg pointer has moved along with the saved regs.
2520 However, we do count it when we adjust $sp in the prologue. */
2522 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2523 if (mep_call_saves_register (i
))
2524 reg_save_size
+= mep_reg_size (i
);
2526 if (reg_save_size
% 8)
2527 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2529 cfun
->machine
->regsave_filler
= 0;
2531 /* This is what our total stack adjustment looks like. */
2532 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2535 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2537 cfun
->machine
->frame_filler
= 0;
2540 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2541 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2543 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2544 return cfun
->machine
->frame_filler
+ frame_size
;
2546 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2547 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2555 RTX_FRAME_RELATED_P (x
) = 1;
2559 /* Since the prologue/epilogue code is generated after optimization,
2560 we can't rely on gcc to split constants for us. So, this code
2561 captures all the ways to add a constant to a register in one logic
2562 chunk, including optimizing away insns we just don't need. This
2563 makes the prolog/epilog code easier to follow. */
2565 add_constant (int dest
, int src
, int value
, int mark_frame
)
2570 if (src
== dest
&& value
== 0)
2575 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2576 gen_rtx_REG (SImode
, src
));
2578 RTX_FRAME_RELATED_P(insn
) = 1;
2582 if (value
>= -32768 && value
<= 32767)
2584 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2585 gen_rtx_REG (SImode
, src
),
2588 RTX_FRAME_RELATED_P(insn
) = 1;
2592 /* Big constant, need to use a temp register. We use
2593 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2594 area is always small enough to directly add to). */
2596 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2597 lo
= value
& 0xffff;
2599 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2604 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2605 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2609 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2610 gen_rtx_REG (SImode
, src
),
2611 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2614 RTX_FRAME_RELATED_P(insn
) = 1;
2615 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2616 gen_rtx_SET (gen_rtx_REG (SImode
, dest
),
2617 gen_rtx_PLUS (SImode
,
2618 gen_rtx_REG (SImode
, dest
),
2623 /* Move SRC to DEST. Mark the move as being potentially dead if
2627 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2629 rtx_insn
*insn
= emit_move_insn (dest
, src
);
2632 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2637 /* Used for interrupt functions, which can't assume that $tp and $gp
2638 contain the correct pointers. */
2641 mep_reload_pointer (int regno
, const char *symbol
)
2645 if (!df_regs_ever_live_p(regno
) && crtl
->is_leaf
)
2648 reg
= gen_rtx_REG (SImode
, regno
);
2649 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2650 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2651 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2654 /* Assign save slots for any register not already saved. DImode
2655 registers go at the end of the reg save area; the rest go at the
2656 beginning. This is for alignment purposes. Returns true if a frame
2657 is really needed. */
2659 mep_assign_save_slots (int reg_save_size
)
2661 bool really_need_stack_frame
= false;
2665 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2666 if (mep_call_saves_register(i
))
2668 int regsize
= mep_reg_size (i
);
2670 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2671 || mep_reg_set_in_function (i
))
2672 really_need_stack_frame
= true;
2674 if (cfun
->machine
->reg_save_slot
[i
])
2679 cfun
->machine
->reg_save_size
+= regsize
;
2680 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2684 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2688 cfun
->machine
->frame_locked
= 1;
2689 return really_need_stack_frame
;
2693 mep_expand_prologue (void)
2695 int i
, rss
, sp_offset
= 0;
2698 int really_need_stack_frame
;
2700 /* We must not allow register renaming in interrupt functions,
2701 because that invalidates the correctness of the set of call-used
2702 registers we're going to save/restore. */
2703 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2705 if (mep_disinterrupt_p ())
2706 emit_insn (gen_mep_disable_int ());
2708 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2710 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2711 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2712 really_need_stack_frame
= frame_size
;
2714 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2716 sp_offset
= reg_save_size
;
2717 if (sp_offset
+ frame_size
< 128)
2718 sp_offset
+= frame_size
;
2720 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2722 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2723 if (mep_call_saves_register(i
))
2729 rss
= cfun
->machine
->reg_save_slot
[i
];
2731 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2732 && (!mep_reg_set_in_function (i
)
2733 && !mep_interrupt_p ()))
2736 if (mep_reg_size (i
) == 8)
2741 /* If there is a pseudo associated with this register's initial value,
2742 reload might have already spilt it to the stack slot suggested by
2743 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2745 mem
= gen_rtx_MEM (rmode
,
2746 plus_constant (Pmode
, stack_pointer_rtx
,
2748 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2750 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2751 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2752 else if (rmode
== DImode
)
2755 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2757 mem
= gen_rtx_MEM (SImode
,
2758 plus_constant (Pmode
, stack_pointer_rtx
,
2759 sp_offset
- rss
+ be
));
2761 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2762 gen_rtx_REG (SImode
, i
),
2764 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2765 gen_rtx_ZERO_EXTRACT (SImode
,
2766 gen_rtx_REG (DImode
, i
),
2770 insn
= maybe_dead_move (mem
,
2771 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2773 RTX_FRAME_RELATED_P (insn
) = 1;
2775 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2776 gen_rtx_SET (copy_rtx (mem
),
2777 gen_rtx_REG (rmode
, i
)));
2778 mem
= gen_rtx_MEM (SImode
,
2779 plus_constant (Pmode
, stack_pointer_rtx
,
2780 sp_offset
- rss
+ (4-be
)));
2781 insn
= maybe_dead_move (mem
,
2782 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2788 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2789 gen_rtx_REG (rmode
, i
),
2791 insn
= maybe_dead_move (mem
,
2792 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2794 RTX_FRAME_RELATED_P (insn
) = 1;
2796 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2797 gen_rtx_SET (copy_rtx (mem
),
2798 gen_rtx_REG (rmode
, i
)));
2802 if (frame_pointer_needed
)
2804 /* We've already adjusted down by sp_offset. Total $sp change
2805 is reg_save_size + frame_size. We want a net change here of
2806 just reg_save_size. */
2807 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2810 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2812 if (mep_interrupt_p ())
2814 mep_reload_pointer(GP_REGNO
, "__sdabase");
2815 mep_reload_pointer(TP_REGNO
, "__tpbase");
2820 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2822 int local
= hwi_local
;
2823 int frame_size
= local
+ crtl
->outgoing_args_size
;
2828 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2830 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2831 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2832 sp_offset
= reg_save_size
+ frame_size
;
2834 ffill
= cfun
->machine
->frame_filler
;
2836 if (cfun
->machine
->mep_frame_pointer_needed
)
2837 reg_names
[FP_REGNO
] = "$fp";
2839 reg_names
[FP_REGNO
] = "$8";
2844 if (debug_info_level
== DINFO_LEVEL_NONE
)
2846 fprintf (file
, "\t# frame: %d", sp_offset
);
2848 fprintf (file
, " %d regs", reg_save_size
);
2850 fprintf (file
, " %d locals", local
);
2851 if (crtl
->outgoing_args_size
)
2852 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2853 fprintf (file
, "\n");
2857 fprintf (file
, "\t#\n");
2858 fprintf (file
, "\t# Initial Frame Information:\n");
2859 if (sp_offset
|| !frame_pointer_needed
)
2860 fprintf (file
, "\t# Entry ---------- 0\n");
2862 /* Sort registers by save slots, so they're printed in the order
2863 they appear in memory, not the order they're saved in. */
2864 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2866 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2867 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2868 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2869 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2871 int t
= slot_map
[si
];
2872 slot_map
[si
] = slot_map
[sj
];
2877 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2880 int r
= slot_map
[i
];
2881 int rss
= cfun
->machine
->reg_save_slot
[r
];
2883 if (!mep_call_saves_register (r
))
2886 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2887 && (!mep_reg_set_in_function (r
)
2888 && !mep_interrupt_p ()))
2891 rsize
= mep_reg_size(r
);
2892 skip
= rss
- (sp
+rsize
);
2894 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2895 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2896 rsize
, reg_names
[r
], sp_offset
- rss
);
2900 skip
= reg_save_size
- sp
;
2902 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2904 if (frame_pointer_needed
)
2905 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2907 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2909 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2910 if (crtl
->outgoing_args_size
)
2911 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2912 crtl
->outgoing_args_size
);
2913 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2914 fprintf (file
, "\t#\n");
/* Nonzero while expanding an eh_return epilogue; suppresses restoring
   $lp so the EH return address installed by the runtime survives.  */
static int mep_prevent_lp_restore = 0;

/* Nonzero while expanding a sibcall epilogue, which emits no return.  */
static int mep_sibcall_epilogue = 0;
2922 mep_expand_epilogue (void)
2924 int i
, sp_offset
= 0;
2925 int reg_save_size
= 0;
2927 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2928 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2929 int interrupt_handler
= mep_interrupt_p ();
2931 if (profile_arc_flag
== 2)
2932 emit_insn (gen_mep_bb_trace_ret ());
2934 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2935 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2937 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2939 if (frame_pointer_needed
)
2941 /* If we have a frame pointer, we won't have a reliable stack
2942 pointer (alloca, you know), so rebase SP from FP */
2943 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2944 gen_rtx_REG (SImode
, FP_REGNO
));
2945 sp_offset
= reg_save_size
;
2949 /* SP is right under our local variable space. Adjust it if
2951 sp_offset
= reg_save_size
+ frame_size
;
2952 if (sp_offset
>= 128)
2954 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
2955 sp_offset
-= frame_size
;
2959 /* This is backwards so that we restore the control and coprocessor
2960 registers before the temporary registers we use to restore
2962 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
2963 if (mep_call_saves_register (i
))
2966 int rss
= cfun
->machine
->reg_save_slot
[i
];
2968 if (mep_reg_size (i
) == 8)
2973 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2974 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
2976 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
2978 if (!mep_prevent_lp_restore
2979 && !interrupt_handler
2980 && (i
== 10 || i
== 11))
2983 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2984 emit_move_insn (gen_rtx_REG (rmode
, i
),
2986 plus_constant (Pmode
, stack_pointer_rtx
,
2990 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
2991 /* Defer this one so we can jump indirect rather than
2992 copying the RA to $lp and "ret". EH epilogues
2993 automatically skip this anyway. */
2994 lp_slot
= sp_offset
-rss
;
2997 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2999 plus_constant (Pmode
,
3002 emit_move_insn (gen_rtx_REG (rmode
, i
),
3003 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
3009 /* Restore this one last so we know it will be in the temp
3010 register when we return by jumping indirectly via the temp. */
3011 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
3012 gen_rtx_MEM (SImode
,
3013 plus_constant (Pmode
, stack_pointer_rtx
,
3015 lp_temp
= REGSAVE_CONTROL_TEMP
;
3019 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3021 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3022 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3023 gen_rtx_REG (SImode
, SP_REGNO
),
3024 cfun
->machine
->eh_stack_adjust
));
3026 if (mep_sibcall_epilogue
)
3029 if (mep_disinterrupt_p ())
3030 emit_insn (gen_mep_enable_int ());
3032 if (mep_prevent_lp_restore
)
3034 emit_jump_insn (gen_eh_return_internal ());
3037 else if (interrupt_handler
)
3038 emit_jump_insn (gen_mep_reti ());
3040 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3044 mep_expand_eh_return (rtx
*operands
)
3046 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3048 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3049 emit_move_insn (ra
, operands
[0]);
3053 emit_insn (gen_eh_epilogue (operands
[0]));
3057 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3059 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3060 mep_prevent_lp_restore
= 1;
3061 mep_expand_epilogue ();
3062 mep_prevent_lp_restore
= 0;
3066 mep_expand_sibcall_epilogue (void)
3068 mep_sibcall_epilogue
= 1;
3069 mep_expand_epilogue ();
3070 mep_sibcall_epilogue
= 0;
3074 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3079 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3082 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3083 if (mep_interrupt_p () || mep_disinterrupt_p ())
3090 mep_return_stackadj_rtx (void)
3092 return gen_rtx_REG (SImode
, 10);
3096 mep_return_handler_rtx (void)
3098 return gen_rtx_REG (SImode
, LP_REGNO
);
/* FUNCTION_PROFILER: emit the mcount call stub at the very beginning of
   the function.  $0 and $lp are saved on the stack around the call to
   __mep_mcount and restored afterwards.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n",
  };
  size_t i;

  for (i = 0; i < sizeof mcount_seq / sizeof mcount_seq[0]; i++)
    fputs (mcount_seq[i], file);
}
3118 mep_emit_bb_trace_ret (void)
3120 fprintf (asm_out_file
, "\t# end of block profiling\n");
3121 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3122 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3123 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3124 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3125 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3126 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3127 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3128 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3129 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3136 /* Operand Printing. */
3139 mep_print_operand_address (FILE *stream
, rtx address
)
3141 if (GET_CODE (address
) == MEM
)
3142 address
= XEXP (address
, 0);
3144 /* cf: gcc.dg/asm-4.c. */
3145 gcc_assert (GET_CODE (address
) == REG
);
3147 mep_print_operand (stream
, address
, 0);
3153 const char *pattern
;
3156 const conversions
[] =
3159 { 0, "m+ri", "3(2)" },
3163 { 0, "mLrs", "%lo(3)(2)" },
3164 { 0, "mLr+si", "%lo(4+5)(2)" },
3165 { 0, "m+ru2s", "%tpoff(5)(2)" },
3166 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3167 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3168 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3169 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3170 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3172 { 0, "m+si", "(2+3)" },
3173 { 0, "m+li", "(2+3)" },
3176 { 0, "+si", "1+2" },
3177 { 0, "+u2si", "%tpoff(3+4)" },
3178 { 0, "+u3si", "%sdaoff(3+4)" },
3184 { 'h', "Hs", "%hi(1)" },
3186 { 'I', "u2s", "%tpoff(2)" },
3187 { 'I', "u3s", "%sdaoff(2)" },
3188 { 'I', "+u2si", "%tpoff(3+4)" },
3189 { 'I', "+u3si", "%sdaoff(3+4)" },
3191 { 'P', "mr", "(1\\+),\\0" },
3197 unique_bit_in (HOST_WIDE_INT i
)
3201 case 0x01: case 0xfe: return 0;
3202 case 0x02: case 0xfd: return 1;
3203 case 0x04: case 0xfb: return 2;
3204 case 0x08: case 0xf7: return 3;
3205 case 0x10: case 0x7f: return 4;
3206 case 0x20: case 0xbf: return 5;
3207 case 0x40: case 0xdf: return 6;
3208 case 0x80: case 0xef: return 7;
3215 bit_size_for_clip (HOST_WIDE_INT i
)
3219 for (rv
= 0; rv
< 31; rv
++)
3220 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3225 /* Print an operand to an assembler instruction. */
3228 mep_print_operand (FILE *file
, rtx x
, int code
)
3231 const char *real_name
;
3235 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3236 we're using, then skip over the "mep_" part of its name. */
3237 const struct cgen_insn
*insn
;
3239 if (mep_get_move_insn (mep_cmov
, &insn
))
3240 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3242 mep_intrinsic_unavailable (mep_cmov
);
3247 switch (GET_CODE (x
))
3250 fputs ("clr", file
);
3253 fputs ("set", file
);
3256 fputs ("not", file
);
3259 output_operand_lossage ("invalid %%L code");
3264 /* Print the second operand of a CR <- CR move. If we're using
3265 a two-operand instruction (i.e., a real cmov), then just print
3266 the operand normally. If we're using a "reg, reg, immediate"
3267 instruction such as caddi3, print the operand followed by a
3268 zero field. If we're using a three-register instruction,
3269 print the operand twice. */
3270 const struct cgen_insn
*insn
;
3272 mep_print_operand (file
, x
, 0);
3273 if (mep_get_move_insn (mep_cmov
, &insn
)
3274 && insn_data
[insn
->icode
].n_operands
== 3)
3277 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3278 mep_print_operand (file
, x
, 0);
3280 mep_print_operand (file
, const0_rtx
, 0);
3286 for (i
= 0; conversions
[i
].pattern
; i
++)
3287 if (conversions
[i
].code
== code
3288 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3290 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3291 if (conversions
[i
].format
[j
] == '\\')
3293 fputc (conversions
[i
].format
[j
+1], file
);
3296 else if (ISDIGIT(conversions
[i
].format
[j
]))
3298 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3299 switch (GET_CODE (r
))
3302 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3308 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3311 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3314 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3317 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3320 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3323 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3324 && !(INTVAL (r
) & 0xff))
3325 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3327 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3330 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3331 && conversions
[i
].format
[j
+1] == 0)
3333 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3334 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3337 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3340 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3345 fprintf(file
, "[const_double 0x%lx]",
3346 (unsigned long) CONST_DOUBLE_HIGH(r
));
3349 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3350 assemble_name (file
, real_name
);
3353 output_asm_label (r
);
3356 fprintf (stderr
, "don't know how to print this operand:");
3363 if (conversions
[i
].format
[j
] == '+'
3364 && (!code
|| code
== 'I')
3365 && ISDIGIT (conversions
[i
].format
[j
+1])
3366 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3367 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3369 fputc(conversions
[i
].format
[j
], file
);
3373 if (!conversions
[i
].pattern
)
3375 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3383 mep_final_prescan_insn (rtx_insn
*insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3384 int noperands ATTRIBUTE_UNUSED
)
3386 /* Despite the fact that MeP is perfectly capable of branching and
3387 doing something else in the same bundle, gcc does jump
3388 optimization *after* scheduling, so we cannot trust the bundling
3389 flags on jump instructions. */
3390 if (GET_MODE (insn
) == BImode
3391 && get_attr_slots (insn
) != SLOTS_CORE
)
3392 fputc ('+', asm_out_file
);
3395 /* Function args in registers. */
3398 mep_setup_incoming_varargs (cumulative_args_t cum
,
3399 machine_mode mode ATTRIBUTE_UNUSED
,
3400 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3401 int second_time ATTRIBUTE_UNUSED
)
3403 int nsave
= 4 - (get_cumulative_args (cum
)->nregs
+ 1);
3406 cfun
->machine
->arg_regs_to_save
= nsave
;
3407 *pretend_size
= nsave
* 4;
3411 bytesize (const_tree type
, machine_mode mode
)
3413 if (mode
== BLKmode
)
3414 return int_size_in_bytes (type
);
3415 return GET_MODE_SIZE (mode
);
3419 mep_expand_builtin_saveregs (void)
3424 ns
= cfun
->machine
->arg_regs_to_save
;
3427 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3428 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3433 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3436 move_block_from_reg (5-ns
, regbuf
, ns
);
3440 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3441 int ofs
= 8 * ((ns
+1)/2);
3443 for (i
=0; i
<ns
; i
++)
3445 int rn
= (4-ns
) + i
+ 49;
3448 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3449 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3453 return XEXP (regbuf
, 0);
3457 mep_build_builtin_va_list (void)
3459 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3463 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3465 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3466 get_identifier ("__va_next_gp"), ptr_type_node
);
3467 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3468 get_identifier ("__va_next_gp_limit"),
3470 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3472 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3475 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3476 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3477 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3478 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3480 TYPE_FIELDS (record
) = f_next_gp
;
3481 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3482 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3483 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3485 layout_type (record
);
3491 mep_expand_va_start (tree valist
, rtx nextarg
)
3493 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3494 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3498 ns
= cfun
->machine
->arg_regs_to_save
;
3500 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3501 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3502 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3503 f_next_stack
= DECL_CHAIN (f_next_cop
);
3505 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3507 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3508 valist
, f_next_gp_limit
, NULL_TREE
);
3509 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3511 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3512 valist
, f_next_stack
, NULL_TREE
);
3514 /* va_list.next_gp = expand_builtin_saveregs (); */
3515 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3516 u
= fold_convert (ptr_type_node
, u
);
3517 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3518 TREE_SIDE_EFFECTS (t
) = 1;
3519 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3521 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3522 u
= fold_build_pointer_plus_hwi (u
, 4 * ns
);
3523 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3524 TREE_SIDE_EFFECTS (t
) = 1;
3525 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3527 u
= fold_build_pointer_plus_hwi (u
, 8 * ((ns
+1)/2));
3528 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3529 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3530 TREE_SIDE_EFFECTS (t
) = 1;
3531 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3533 /* va_list.next_stack = nextarg; */
3534 u
= make_tree (ptr_type_node
, nextarg
);
3535 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3536 TREE_SIDE_EFFECTS (t
) = 1;
3537 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3541 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3543 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3545 HOST_WIDE_INT size
, rsize
;
3546 bool by_reference
, ivc2_vec
;
3547 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3548 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3549 tree label_sover
, label_selse
;
3552 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3554 size
= int_size_in_bytes (type
);
3555 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3559 type
= build_pointer_type (type
);
3562 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3564 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3565 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3566 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3567 f_next_stack
= DECL_CHAIN (f_next_cop
);
3569 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3571 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3572 valist
, f_next_gp_limit
, NULL_TREE
);
3573 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3575 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3576 valist
, f_next_stack
, NULL_TREE
);
3578 /* if f_next_gp < f_next_gp_limit
3579 IF (VECTOR_P && IVC2)
3587 val = *f_next_stack;
3588 f_next_stack += rsize;
3592 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3593 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3594 res_addr
= create_tmp_var (ptr_type_node
);
3596 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3597 unshare_expr (next_gp_limit
));
3598 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3599 build1 (GOTO_EXPR
, void_type_node
,
3600 unshare_expr (label_selse
)),
3602 gimplify_and_add (tmp
, pre_p
);
3606 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3607 gimplify_and_add (tmp
, pre_p
);
3611 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3612 gimplify_and_add (tmp
, pre_p
);
3615 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_gp
), 4);
3616 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3618 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_cop
), 8);
3619 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3621 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3622 gimplify_and_add (tmp
, pre_p
);
3626 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3627 gimplify_and_add (tmp
, pre_p
);
3629 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3630 gimplify_and_add (tmp
, pre_p
);
3632 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_stack
), rsize
);
3633 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3637 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3638 gimplify_and_add (tmp
, pre_p
);
3640 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3643 res_addr
= build_va_arg_indirect_ref (res_addr
);
3645 return build_va_arg_indirect_ref (res_addr
);
3649 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3650 rtx libname ATTRIBUTE_UNUSED
,
3651 tree fndecl ATTRIBUTE_UNUSED
)
3655 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3661 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3662 larger than 4 bytes are passed indirectly. Return value in 0,
3663 unless bigger than 4 bytes, then the caller passes a pointer as the
3664 first arg. For varargs, we copy $1..$4 to the stack. */
3667 mep_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
3668 const_tree type ATTRIBUTE_UNUSED
,
3669 bool named ATTRIBUTE_UNUSED
)
3671 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3673 /* VOIDmode is a signal for the backend to pass data to the call
3674 expander via the second operand to the call pattern. We use
3675 this to determine whether to use "jsr" or "jsrv". */
3676 if (mode
== VOIDmode
)
3677 return GEN_INT (cum
->vliw
);
3679 /* If we havn't run out of argument registers, return the next. */
3682 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3683 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3685 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3688 /* Otherwise the argument goes on the stack. */
3693 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
3696 bool named ATTRIBUTE_UNUSED
)
3698 int size
= bytesize (type
, mode
);
3700 /* This is non-obvious, but yes, large values passed after we've run
3701 out of registers are *still* passed by reference - we put the
3702 address of the parameter on the stack, as well as putting the
3703 parameter itself elsewhere on the stack. */
3705 if (size
<= 0 || size
> 8)
3709 if (TARGET_IVC2
&& get_cumulative_args (cum
)->nregs
< 4
3710 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3716 mep_function_arg_advance (cumulative_args_t pcum
,
3717 machine_mode mode ATTRIBUTE_UNUSED
,
3718 const_tree type ATTRIBUTE_UNUSED
,
3719 bool named ATTRIBUTE_UNUSED
)
3721 get_cumulative_args (pcum
)->nregs
+= 1;
3725 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3727 int size
= bytesize (type
, BLKmode
);
3728 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3729 return size
> 0 && size
<= 8 ? 0 : 1;
3730 return size
> 0 && size
<= 4 ? 0 : 1;
3734 mep_narrow_volatile_bitfield (void)
3740 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3743 mep_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
3745 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3746 return gen_rtx_REG (TYPE_MODE (type
), 48);
3747 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3750 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3753 mep_libcall_value (machine_mode mode
)
3755 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3758 /* Handle pipeline hazards. */
/* Opcode classes that take part in the stc pipeline-hazard checks.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;

/* Printable names, indexed by op_num, used in hazard annotations.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode.  */
static int prev_opcode = 0;
3765 /* This isn't as optimal as it could be, because we don't know what
3766 control register the STC opcode is storing in. We only need to add
3767 the nop if it's the relevant register, but we add it for irrelevant
3771 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3773 int this_opcode
= op_none
;
3774 const char *hazard
= 0;
3779 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3780 this_opcode
= op_fsft
;
3783 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3784 this_opcode
= op_ret
;
3787 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3788 this_opcode
= op_stc
;
3792 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3794 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3798 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3799 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3801 prev_opcode
= this_opcode
;
3804 /* Handle attributes. */
3807 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3808 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3810 if (TREE_CODE (*node
) != VAR_DECL
3811 && TREE_CODE (*node
) != POINTER_TYPE
3812 && TREE_CODE (*node
) != TYPE_DECL
)
3814 warning (0, "%qE attribute only applies to variables", name
);
3817 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3819 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3821 warning (0, "address region attributes not allowed with auto storage class");
3824 /* Ignore storage attribute of pointed to variable: char __far * x; */
3825 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3827 warning (0, "address region attributes on pointed-to types ignored");
3836 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3839 int count_sections
= 0;
3840 int section_attr_count
= 0;
3842 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3844 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3845 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3846 || is_attribute_p ("near", TREE_PURPOSE (a
))
3847 || is_attribute_p ("far", TREE_PURPOSE (a
))
3848 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3850 if (check_section_attr
)
3851 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3854 if (check_section_attr
)
3855 return section_attr_count
;
3857 return count_sections
;
/* Attribute list relevant to DECL: a type's own attributes; for a
   decl, its attributes if present, otherwise its type's.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
		: DECL_ATTRIBUTES (decl) \
		  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3867 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3868 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3870 if (TREE_CODE (*node
) != VAR_DECL
3871 && TREE_CODE (*node
) != FUNCTION_DECL
3872 && TREE_CODE (*node
) != METHOD_TYPE
3873 && TREE_CODE (*node
) != POINTER_TYPE
3874 && TREE_CODE (*node
) != TYPE_DECL
)
3876 warning (0, "%qE attribute only applies to variables and functions",
3880 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3882 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3884 warning (0, "address region attributes not allowed with auto storage class");
3887 /* Ignore storage attribute of pointed to variable: char __far * x; */
3888 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3890 warning (0, "address region attributes on pointed-to types ignored");
3894 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3896 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3897 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3898 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3904 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3905 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3907 if (TREE_CODE (*node
) != FUNCTION_DECL
3908 && TREE_CODE (*node
) != METHOD_TYPE
)
3910 warning (0, "%qE attribute only applies to functions", name
);
3917 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3918 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3922 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3924 warning (0, "%qE attribute only applies to functions", name
);
3929 if (DECL_DECLARED_INLINE_P (*node
))
3930 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3931 DECL_UNINLINABLE (*node
) = 1;
3933 function_type
= TREE_TYPE (*node
);
3935 if (TREE_TYPE (function_type
) != void_type_node
)
3936 error ("interrupt function must have return type of void");
3938 if (prototype_p (function_type
)
3939 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
3940 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
3941 error ("interrupt function must have no arguments");
3947 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
3948 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3950 if (TREE_CODE (*node
) != VAR_DECL
)
3952 warning (0, "%qE attribute only applies to variables", name
);
3956 if (args
!= NULL_TREE
)
3958 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
3959 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
3960 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
3962 warning (0, "%qE attribute allows only an integer constant argument",
3968 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
3969 TREE_THIS_VOLATILE (*node
) = 1;
3975 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3976 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3978 if (TREE_CODE (*node
) != FUNCTION_TYPE
3979 && TREE_CODE (*node
) != FUNCTION_DECL
3980 && TREE_CODE (*node
) != METHOD_TYPE
3981 && TREE_CODE (*node
) != FIELD_DECL
3982 && TREE_CODE (*node
) != TYPE_DECL
)
3984 static int gave_pointer_note
= 0;
3985 static int gave_array_note
= 0;
3986 static const char * given_type
= NULL
;
3988 given_type
= get_tree_code_name (TREE_CODE (*node
));
3989 if (TREE_CODE (*node
) == POINTER_TYPE
)
3990 given_type
= "pointers";
3991 if (TREE_CODE (*node
) == ARRAY_TYPE
)
3992 given_type
= "arrays";
3995 warning (0, "%qE attribute only applies to functions, not %s",
3998 warning (0, "%qE attribute only applies to functions",
4002 if (TREE_CODE (*node
) == POINTER_TYPE
4003 && !gave_pointer_note
)
4005 inform (input_location
,
4006 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4007 " typedef int (__vliw *vfuncptr) ();");
4008 gave_pointer_note
= 1;
4011 if (TREE_CODE (*node
) == ARRAY_TYPE
4012 && !gave_array_note
)
4014 inform (input_location
,
4015 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4016 " typedef int (__vliw *vfuncptr[]) ();");
4017 gave_array_note
= 1;
4021 error ("VLIW functions are not allowed without a VLIW configuration");
4025 static const struct attribute_spec mep_attribute_table
[11] =
4027 /* name min max decl type func handler
4028 affects_type_identity */
4029 { "based", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4030 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4031 { "near", 0, 0, false, false, false, mep_validate_near_far
, false },
4032 { "far", 0, 0, false, false, false, mep_validate_near_far
, false },
4033 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
,
4035 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
, false },
4036 { "io", 0, 1, false, false, false, mep_validate_io_cb
, false },
4037 { "cb", 0, 1, false, false, false, mep_validate_io_cb
, false },
4038 { "vliw", 0, 0, false, true, false, mep_validate_vliw
, false },
4039 { NULL
, 0, 0, false, false, false, NULL
, false }
4043 mep_function_attribute_inlinable_p (const_tree callee
)
4045 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4046 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4047 return (lookup_attribute ("disinterrupt", attrs
) == 0
4048 && lookup_attribute ("interrupt", attrs
) == 0);
4052 mep_can_inline_p (tree caller
, tree callee
)
4054 if (TREE_CODE (callee
) == ADDR_EXPR
)
4055 callee
= TREE_OPERAND (callee
, 0);
4057 if (!mep_vliw_function_p (caller
)
4058 && mep_vliw_function_p (callee
))
4066 #define FUNC_DISINTERRUPT 2
4069 struct GTY(()) pragma_entry
{
4074 struct pragma_traits
: default_hashmap_traits
4076 static hashval_t
hash (const char *s
) { return htab_hash_string (s
); }
4078 equal_keys (const char *a
, const char *b
)
4080 return strcmp (a
, b
) == 0;
4084 /* Hash table of farcall-tagged sections. */
4085 static GTY(()) hash_map
<const char *, pragma_entry
, pragma_traits
> *
4089 mep_note_pragma_flag (const char *funcname
, int flag
)
4093 = hash_map
<const char *, pragma_entry
, pragma_traits
>::create_ggc (31);
4096 const char *name
= ggc_strdup (funcname
);
4097 pragma_entry
*slot
= &pragma_htab
->get_or_insert (name
, &existed
);
4107 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4112 if (funcname
[0] == '@' && funcname
[2] == '.')
4115 pragma_entry
*slot
= pragma_htab
->get (funcname
);
4116 if (slot
&& (slot
->flag
& flag
))
4125 mep_lookup_pragma_call (const char *funcname
)
4127 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4131 mep_note_pragma_call (const char *funcname
)
4133 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4137 mep_lookup_pragma_disinterrupt (const char *funcname
)
4139 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4143 mep_note_pragma_disinterrupt (const char *funcname
)
4145 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4149 note_unused_pragma_disinterrupt (const char *const &s
, const pragma_entry
&e
,
4152 if ((e
.flag
& FUNC_DISINTERRUPT
)
4153 && !(e
.used
& FUNC_DISINTERRUPT
))
4154 warning (0, "\"#pragma disinterrupt %s\" not used", s
);
4159 mep_file_cleanups (void)
4162 pragma_htab
->traverse
<void *, note_unused_pragma_disinterrupt
> (NULL
);
4165 /* These three functions provide a bridge between the pramgas that
4166 affect register classes, and the functions that maintain them. We
4167 can't call those functions directly as pragma handling is part of
4168 the front end and doesn't have direct access to them. */
4171 mep_save_register_info (void)
4173 save_register_info ();
4177 mep_reinit_regs (void)
4183 mep_init_regs (void)
4191 mep_attrlist_to_encoding (tree list
, tree decl
)
4193 if (mep_multiple_address_regions (list
, false) > 1)
4195 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4196 TREE_PURPOSE (TREE_CHAIN (list
)),
4198 DECL_SOURCE_LINE (decl
));
4199 TREE_CHAIN (list
) = NULL_TREE
;
4204 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4206 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4208 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4210 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4212 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
4214 if (TREE_VALUE (list
)
4215 && TREE_VALUE (TREE_VALUE (list
))
4216 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4218 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4220 && location
<= 0x1000000)
4225 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4227 list
= TREE_CHAIN (list
);
4230 && TREE_CODE (decl
) == FUNCTION_DECL
4231 && DECL_SECTION_NAME (decl
) == 0)
4237 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4241 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4242 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
4251 mep_insert_attributes (tree decl
, tree
*attributes
)
4254 const char *secname
= 0;
4255 tree attrib
, attrlist
;
4258 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4260 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4262 if (mep_lookup_pragma_disinterrupt (funcname
))
4264 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4265 *attributes
= chainon (*attributes
, attrib
);
4269 if (TREE_CODE (decl
) != VAR_DECL
4270 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4273 if (TREE_READONLY (decl
) && TARGET_DC
)
4274 /* -mdc means that const variables default to the near section,
4275 regardless of the size cutoff. */
4278 /* User specified an attribute, so override the default.
4279 Ignore storage attribute of pointed to variable. char __far * x; */
4280 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4282 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4283 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4284 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4285 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
4288 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4289 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4290 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4292 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4293 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4297 /* This means that the declaration has a specific section
4298 attribute, so we should not apply the default rules. */
4300 if (encoding
== 'i' || encoding
== 'I')
4302 tree attr
= lookup_attribute ("io", attrlist
);
4304 && TREE_VALUE (attr
)
4305 && TREE_VALUE (TREE_VALUE(attr
)))
4307 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4308 static tree previous_value
= 0;
4309 static int previous_location
= 0;
4310 static tree previous_name
= 0;
4312 /* We take advantage of the fact that gcc will reuse the
4313 same tree pointer when applying an attribute to a
4314 list of decls, but produce a new tree for attributes
4315 on separate source lines, even when they're textually
4316 identical. This is the behavior we want. */
4317 if (TREE_VALUE (attr
) == previous_value
4318 && location
== previous_location
)
4320 warning(0, "__io address 0x%x is the same for %qE and %qE",
4321 location
, previous_name
, DECL_NAME (decl
));
4323 previous_name
= DECL_NAME (decl
);
4324 previous_location
= location
;
4325 previous_value
= TREE_VALUE (attr
);
4332 /* Declarations of arrays can change size. Don't trust them. */
4333 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4336 size
= int_size_in_bytes (TREE_TYPE (decl
));
4338 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4340 if (TREE_PUBLIC (decl
)
4341 || DECL_EXTERNAL (decl
)
4342 || TREE_STATIC (decl
))
4344 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4368 if (size
<= mep_based_cutoff
&& size
> 0)
4370 else if (size
<= mep_tiny_cutoff
&& size
> 0)
4376 if (mep_const_section
&& TREE_READONLY (decl
))
4378 if (strcmp (mep_const_section
, "tiny") == 0)
4380 else if (strcmp (mep_const_section
, "near") == 0)
4382 else if (strcmp (mep_const_section
, "far") == 0)
4389 if (!mep_multiple_address_regions (*attributes
, true)
4390 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4392 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4394 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4395 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4396 and mep_validate_based_tiny. */
4397 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
4402 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4405 const char *oldname
;
4406 const char *secname
;
4412 tree mep_attributes
;
4417 if (TREE_CODE (decl
) != VAR_DECL
4418 && TREE_CODE (decl
) != FUNCTION_DECL
)
4421 rtlname
= XEXP (rtl
, 0);
4422 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4423 oldname
= XSTR (rtlname
, 0);
4424 else if (GET_CODE (rtlname
) == MEM
4425 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4426 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4430 type
= TREE_TYPE (decl
);
4431 if (type
== error_mark_node
)
4433 mep_attributes
= MEP_ATTRIBUTES (decl
);
4435 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
4439 newname
= (char *) alloca (strlen (oldname
) + 4);
4440 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4441 idp
= get_identifier (newname
);
4443 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4444 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4445 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4458 maxsize
= 0x1000000;
4466 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4468 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4470 (long) int_size_in_bytes (TREE_TYPE (decl
)),
4478 mep_strip_name_encoding (const char *sym
)
4484 else if (*sym
== '@' && sym
[2] == '.')
4492 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4493 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4498 switch (TREE_CODE (decl
))
4501 if (!TREE_READONLY (decl
)
4502 || TREE_SIDE_EFFECTS (decl
)
4503 || !DECL_INITIAL (decl
)
4504 || (DECL_INITIAL (decl
) != error_mark_node
4505 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4509 if (! TREE_CONSTANT (decl
))
4517 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4519 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4521 if (name
[0] == '@' && name
[2] == '.')
4526 if (flag_function_sections
|| DECL_COMDAT_GROUP (decl
))
4527 mep_unique_section (decl
, 0);
4528 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4530 if (encoding
== 'f')
4531 return vftext_section
;
4533 return vtext_section
;
4535 else if (encoding
== 'f')
4536 return ftext_section
;
4538 return text_section
;
4541 if (TREE_CODE (decl
) == VAR_DECL
)
4543 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4545 if (name
[0] == '@' && name
[2] == '.')
4549 return based_section
;
4553 return srodata_section
;
4554 if (DECL_INITIAL (decl
))
4555 return sdata_section
;
4556 return tinybss_section
;
4560 return frodata_section
;
4565 error_at (DECL_SOURCE_LOCATION (decl
),
4566 "variable %D of type %<io%> must be uninitialized", decl
);
4567 return data_section
;
4570 error_at (DECL_SOURCE_LOCATION (decl
),
4571 "variable %D of type %<cb%> must be uninitialized", decl
);
4572 return data_section
;
4577 return readonly_data_section
;
4579 return data_section
;
4583 mep_unique_section (tree decl
, int reloc
)
4585 static const char *prefixes
[][2] =
4587 { ".text.", ".gnu.linkonce.t." },
4588 { ".rodata.", ".gnu.linkonce.r." },
4589 { ".data.", ".gnu.linkonce.d." },
4590 { ".based.", ".gnu.linkonce.based." },
4591 { ".sdata.", ".gnu.linkonce.s." },
4592 { ".far.", ".gnu.linkonce.far." },
4593 { ".ftext.", ".gnu.linkonce.ft." },
4594 { ".frodata.", ".gnu.linkonce.frd." },
4595 { ".srodata.", ".gnu.linkonce.srd." },
4596 { ".vtext.", ".gnu.linkonce.v." },
4597 { ".vftext.", ".gnu.linkonce.vf." }
4599 int sec
= 2; /* .data */
4601 const char *name
, *prefix
;
4604 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4605 if (DECL_RTL (decl
))
4606 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4608 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4610 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4611 sec
= 9; /* .vtext */
4613 sec
= 0; /* .text */
4615 else if (decl_readonly_section (decl
, reloc
))
4616 sec
= 1; /* .rodata */
4618 if (name
[0] == '@' && name
[2] == '.')
4623 sec
= 3; /* .based */
4627 sec
= 8; /* .srodata */
4629 sec
= 4; /* .sdata */
4633 sec
= 6; /* .ftext */
4635 sec
= 10; /* .vftext */
4637 sec
= 7; /* .frodata */
4639 sec
= 5; /* .far. */
4645 prefix
= prefixes
[sec
][DECL_COMDAT_GROUP(decl
) != NULL
];
4646 len
= strlen (name
) + strlen (prefix
);
4647 string
= (char *) alloca (len
+ 1);
4649 sprintf (string
, "%s%s", prefix
, name
);
4651 set_decl_section_name (decl
, string
);
4654 /* Given a decl, a section name, and whether the decl initializer
4655 has relocs, choose attributes for the section. */
4657 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4660 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4662 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4664 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4665 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4666 flags
|= SECTION_MEP_VLIW
;
4671 /* Switch to an arbitrary section NAME with attributes as specified
4672 by FLAGS. ALIGN specifies any known alignment requirements for
4673 the section; 0 if the default should be used.
4675 Differs from the standard ELF version only in support of VLIW mode. */
4678 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4680 char flagchars
[8], *f
= flagchars
;
4683 if (!(flags
& SECTION_DEBUG
))
4685 if (flags
& SECTION_WRITE
)
4687 if (flags
& SECTION_CODE
)
4689 if (flags
& SECTION_SMALL
)
4691 if (flags
& SECTION_MEP_VLIW
)
4695 if (flags
& SECTION_BSS
)
4700 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4701 name
, flagchars
, type
);
4703 if (flags
& SECTION_CODE
)
4704 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
4709 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4710 int size
, int align
, int global
)
4712 /* We intentionally don't use mep_section_tag() here. */
4714 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4718 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4719 DECL_ATTRIBUTES (decl
));
4721 && TREE_VALUE (attr
)
4722 && TREE_VALUE (TREE_VALUE(attr
)))
4723 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4728 fprintf (stream
, "\t.globl\t");
4729 assemble_name (stream
, name
);
4730 fprintf (stream
, "\n");
4732 assemble_name (stream
, name
);
4733 fprintf (stream
, " = %d\n", location
);
4736 if (name
[0] == '@' && name
[2] == '.')
4738 const char *sec
= 0;
4742 switch_to_section (based_section
);
4746 switch_to_section (tinybss_section
);
4750 switch_to_section (farbss_section
);
4759 while (align
> BITS_PER_UNIT
)
4764 name2
= targetm
.strip_name_encoding (name
);
4766 fprintf (stream
, "\t.globl\t%s\n", name2
);
4767 fprintf (stream
, "\t.p2align %d\n", p2align
);
4768 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4769 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4770 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
4777 fprintf (stream
, "\t.local\t");
4778 assemble_name (stream
, name
);
4779 fprintf (stream
, "\n");
4781 fprintf (stream
, "\t.comm\t");
4782 assemble_name (stream
, name
);
4783 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
4789 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4791 rtx addr
= XEXP (m_tramp
, 0);
4792 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4794 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4795 LCT_NORMAL
, VOIDmode
, 3,
4798 static_chain
, Pmode
);
4801 /* Experimental Reorg. */
4804 mep_mentioned_p (rtx in
,
4805 rtx reg
, /* NULL for mem */
4806 int modes_too
) /* if nonzero, modes must match also. */
4814 if (reg
&& GET_CODE (reg
) != REG
)
4817 if (GET_CODE (in
) == LABEL_REF
)
4820 code
= GET_CODE (in
);
4826 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
4832 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4834 return (REGNO (in
) == REGNO (reg
));
4847 /* Set's source should be read-only. */
4848 if (code
== SET
&& !reg
)
4849 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
4851 fmt
= GET_RTX_FORMAT (code
);
4853 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4858 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4859 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4862 else if (fmt
[i
] == 'e'
4863 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4869 #define EXPERIMENTAL_REGMOVE_REORG 1
4871 #if EXPERIMENTAL_REGMOVE_REORG
4874 mep_compatible_reg_class (int r1
, int r2
)
4876 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4878 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))
4884 mep_reorg_regmove (rtx_insn
*insns
)
4886 rtx_insn
*insn
, *next
, *follow
;
4888 int count
= 0, done
= 0, replace
, before
= 0;
4891 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4892 if (NONJUMP_INSN_P (insn
))
4895 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4896 set that uses the r2 and r2 dies there. We replace r2 with r1
4897 and see if it's still a valid insn. If so, delete the first set.
4898 Copied from reorg.c. */
4903 for (insn
= insns
; insn
; insn
= next
)
4905 next
= next_nonnote_nondebug_insn (insn
);
4906 if (! NONJUMP_INSN_P (insn
))
4908 pat
= PATTERN (insn
);
4912 if (GET_CODE (pat
) == SET
4913 && GET_CODE (SET_SRC (pat
)) == REG
4914 && GET_CODE (SET_DEST (pat
)) == REG
4915 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4916 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4918 follow
= next_nonnote_nondebug_insn (insn
);
4920 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
4922 while (follow
&& NONJUMP_INSN_P (follow
)
4923 && GET_CODE (PATTERN (follow
)) == SET
4924 && !dead_or_set_p (follow
, SET_SRC (pat
))
4925 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4926 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4929 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4930 follow
= next_nonnote_insn (follow
);
4934 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
4935 if (follow
&& NONJUMP_INSN_P (follow
)
4936 && GET_CODE (PATTERN (follow
)) == SET
4937 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
4939 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
4941 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
4944 where
= & SET_SRC (PATTERN (follow
));
4947 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
4949 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
4952 where
= & PATTERN (follow
);
4958 /* If so, follow is the corresponding insn */
4965 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
4966 for (x
= insn
; x
;x
= NEXT_INSN (x
))
4968 print_rtl_single (dump_file
, x
);
4971 fprintf (dump_file
, "\n");
4975 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
4982 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
4983 print_rtl_single (dump_file
, follow
);
4993 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
4994 fprintf (dump_file
, "=====\n");
5000 /* Figure out where to put LABEL, which is the label for a repeat loop.
5001 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5002 the loop ends just before LAST_INSN. If SHARED, insns other than the
5003 "repeat" might use LABEL to jump to the loop's continuation point.
5005 Return the last instruction in the adjusted loop. */
5008 mep_insert_repeat_label_last (rtx_insn
*last_insn
, rtx_code_label
*label
,
5009 bool including
, bool shared
)
5011 rtx_insn
*next
, *prev
;
5012 int count
= 0, code
, icode
;
5015 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5016 INSN_UID (last_insn
));
5018 /* Set PREV to the last insn in the loop. */
5021 prev
= PREV_INSN (prev
);
5023 /* Set NEXT to the next insn after the repeat label. */
5028 code
= GET_CODE (prev
);
5029 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5034 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5035 prev
= as_a
<rtx_insn
*> (XVECEXP (PATTERN (prev
), 0, 1));
5037 /* Other insns that should not be in the last two opcodes. */
5038 icode
= recog_memoized (prev
);
5040 || icode
== CODE_FOR_repeat
5041 || icode
== CODE_FOR_erepeat
5042 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5045 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5046 is the second instruction in a VLIW bundle. In that case,
5047 loop again: if the first instruction also satisfies the
5048 conditions above then we will reach here again and put
5049 both of them into the repeat epilogue. Otherwise both
5050 should remain outside. */
5051 if (GET_MODE (prev
) != BImode
)
5056 print_rtl_single (dump_file
, next
);
5061 prev
= PREV_INSN (prev
);
5064 /* See if we're adding the label immediately after the repeat insn.
5065 If so, we need to separate them with a nop. */
5066 prev
= prev_real_insn (next
);
5068 switch (recog_memoized (prev
))
5070 case CODE_FOR_repeat
:
5071 case CODE_FOR_erepeat
:
5073 fprintf (dump_file
, "Adding nop inside loop\n");
5074 emit_insn_before (gen_nop (), next
);
5081 /* Insert the label. */
5082 emit_label_before (label
, next
);
5084 /* Insert the nops. */
5085 if (dump_file
&& count
< 2)
5086 fprintf (dump_file
, "Adding %d nop%s\n\n",
5087 2 - count
, count
== 1 ? "" : "s");
5089 for (; count
< 2; count
++)
5091 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5093 emit_insn_before (gen_nop (), last_insn
);
5100 mep_emit_doloop (rtx
*operands
, int is_end
)
5104 if (cfun
->machine
->doloop_tags
== 0
5105 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5107 cfun
->machine
->doloop_tags
++;
5108 cfun
->machine
->doloop_tag_from_end
= is_end
;
5111 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5113 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[1], tag
));
5115 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5119 /* Code for converting doloop_begins and doloop_ends into valid
5120 MeP instructions. A doloop_begin is just a placeholder:
5122 $count = unspec ($count)
5124 where $count is initially the number of iterations - 1.
5125 doloop_end has the form:
5127 if ($count-- == 0) goto label
5129 The counter variable is private to the doloop insns, nothing else
5130 relies on its value.
5132 There are three cases, in decreasing order of preference:
5134 1. A loop has exactly one doloop_begin and one doloop_end.
5135 The doloop_end branches to the first instruction after
5138 In this case we can replace the doloop_begin with a repeat
5139 instruction and remove the doloop_end. I.e.:
5141 $count1 = unspec ($count1)
5146 if ($count2-- == 0) goto label
5150 repeat $count1,repeat_label
5158 2. As for (1), except there are several doloop_ends. One of them
5159 (call it X) falls through to a label L. All the others fall
5160 through to branches to L.
5162 In this case, we remove X and replace the other doloop_ends
5163 with branches to the repeat label. For example:
5165 $count1 = unspec ($count1)
5168 if ($count2-- == 0) goto label
5171 if ($count3-- == 0) goto label
5176 repeat $count1,repeat_label
5187 3. The fallback case. Replace doloop_begins with:
5191 Replace doloop_ends with the equivalent of:
5194 if ($count == 0) goto label
5196 Note that this might need a scratch register if $count
5197 is stored in memory. */
5199 /* A structure describing one doloop_begin. */
5200 struct mep_doloop_begin
{
5201 /* The next doloop_begin with the same tag. */
5202 struct mep_doloop_begin
*next
;
5204 /* The instruction itself. */
5207 /* The initial counter value. This is known to be a general register. */
5211 /* A structure describing a doloop_end. */
5212 struct mep_doloop_end
{
5213 /* The next doloop_end with the same loop tag. */
5214 struct mep_doloop_end
*next
;
5216 /* The instruction itself. */
5219 /* The first instruction after INSN when the branch isn't taken. */
5220 rtx_insn
*fallthrough
;
5222 /* The location of the counter value. Since doloop_end_internal is a
5223 jump instruction, it has to allow the counter to be stored anywhere
5224 (any non-fixed register or memory location). */
5227 /* The target label (the place where the insn branches when the counter
5231 /* A scratch register. Only available when COUNTER isn't stored
5232 in a general register. */
5237 /* One do-while loop. */
5239 /* All the doloop_begins for this loop (in no particular order). */
5240 struct mep_doloop_begin
*begin
;
5242 /* All the doloop_ends. When there is more than one, arrange things
5243 so that the first one is the most likely to be X in case (2) above. */
5244 struct mep_doloop_end
*end
;
5248 /* Return true if LOOP can be converted into repeat/repeat_end form
5249 (that is, if it matches cases (1) or (2) above). */
5252 mep_repeat_loop_p (struct mep_doloop
*loop
)
5254 struct mep_doloop_end
*end
;
5257 /* There must be exactly one doloop_begin and at least one doloop_end. */
5258 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5261 /* The first doloop_end (X) must branch back to the insn after
5262 the doloop_begin. */
5263 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5266 /* All the other doloop_ends must branch to the same place as X.
5267 When the branch isn't taken, they must jump to the instruction
5269 fallthrough
= loop
->end
->fallthrough
;
5270 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5271 if (end
->label
!= loop
->end
->label
5272 || !simplejump_p (end
->fallthrough
)
5273 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5280 /* The main repeat reorg function. See comment above for details. */
5283 mep_reorg_repeat (rtx_insn
*insns
)
5286 struct mep_doloop
*loops
, *loop
;
5287 struct mep_doloop_begin
*begin
;
5288 struct mep_doloop_end
*end
;
5290 /* Quick exit if we haven't created any loops. */
5291 if (cfun
->machine
->doloop_tags
== 0)
5294 /* Create an array of mep_doloop structures. */
5295 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5296 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5298 /* Search the function for do-while insns and group them by loop tag. */
5299 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5301 switch (recog_memoized (insn
))
5303 case CODE_FOR_doloop_begin_internal
:
5304 insn_extract (insn
);
5305 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5307 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5308 begin
->next
= loop
->begin
;
5310 begin
->counter
= recog_data
.operand
[0];
5312 loop
->begin
= begin
;
5315 case CODE_FOR_doloop_end_internal
:
5316 insn_extract (insn
);
5317 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5319 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5321 end
->fallthrough
= next_real_insn (insn
);
5322 end
->counter
= recog_data
.operand
[0];
5323 end
->label
= recog_data
.operand
[1];
5324 end
->scratch
= recog_data
.operand
[3];
5326 /* If this insn falls through to an unconditional jump,
5327 give it a lower priority than the others. */
5328 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5330 end
->next
= loop
->end
->next
;
5331 loop
->end
->next
= end
;
5335 end
->next
= loop
->end
;
5341 /* Convert the insns for each loop in turn. */
5342 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5343 if (mep_repeat_loop_p (loop
))
5345 /* Case (1) or (2). */
5346 rtx_code_label
*repeat_label
;
5349 /* Create a new label for the repeat insn. */
5350 repeat_label
= gen_label_rtx ();
5352 /* Replace the doloop_begin with a repeat. */
5353 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5354 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5356 delete_insn (loop
->begin
->insn
);
5358 /* Insert the repeat label before the first doloop_end.
5359 Fill the gap with nops if there are other doloop_ends. */
5360 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5361 false, loop
->end
->next
!= 0);
5363 /* Emit a repeat_end (to improve the readability of the output). */
5364 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5366 /* Delete the first doloop_end. */
5367 delete_insn (loop
->end
->insn
);
5369 /* Replace the others with branches to REPEAT_LABEL. */
5370 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5372 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5373 delete_insn (end
->insn
);
5374 delete_insn (end
->fallthrough
);
5379 /* Case (3). First replace all the doloop_begins with increment
5381 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5383 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5384 begin
->counter
, const1_rtx
),
5386 delete_insn (begin
->insn
);
5389 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5390 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5396 /* Load the counter value into a general register. */
5398 if (!REG_P (reg
) || REGNO (reg
) > 15)
5401 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5404 /* Decrement the counter. */
5405 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5408 /* Copy it back to its original location. */
5409 if (reg
!= end
->counter
)
5410 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5412 /* Jump back to the start label. */
5413 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5415 JUMP_LABEL (insn
) = end
->label
;
5416 LABEL_NUSES (end
->label
)++;
5418 /* Emit the whole sequence before the doloop_end. */
5419 insn
= get_insns ();
5421 emit_insn_before (insn
, end
->insn
);
5423 /* Delete the doloop_end. */
5424 delete_insn (end
->insn
);
5431 mep_invertable_branch_p (rtx_insn
*insn
)
5434 enum rtx_code old_code
;
5437 set
= PATTERN (insn
);
5438 if (GET_CODE (set
) != SET
)
5440 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5442 cond
= XEXP (XEXP (set
, 1), 0);
5443 old_code
= GET_CODE (cond
);
5447 PUT_CODE (cond
, NE
);
5450 PUT_CODE (cond
, EQ
);
5453 PUT_CODE (cond
, GE
);
5456 PUT_CODE (cond
, LT
);
5461 INSN_CODE (insn
) = -1;
5462 i
= recog_memoized (insn
);
5463 PUT_CODE (cond
, old_code
);
5464 INSN_CODE (insn
) = -1;
5469 mep_invert_branch (rtx_insn
*insn
, rtx_insn
*after
)
5471 rtx cond
, set
, label
;
5474 set
= PATTERN (insn
);
5476 gcc_assert (GET_CODE (set
) == SET
);
5477 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5479 cond
= XEXP (XEXP (set
, 1), 0);
5480 switch (GET_CODE (cond
))
5483 PUT_CODE (cond
, NE
);
5486 PUT_CODE (cond
, EQ
);
5489 PUT_CODE (cond
, GE
);
5492 PUT_CODE (cond
, LT
);
5497 label
= gen_label_rtx ();
5498 emit_label_after (label
, after
);
5499 for (i
=1; i
<=2; i
++)
5500 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5502 rtx ref
= XEXP (XEXP (set
, 1), i
);
5503 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5504 delete_insn (XEXP (ref
, 0));
5505 XEXP (ref
, 0) = label
;
5506 LABEL_NUSES (label
) ++;
5507 JUMP_LABEL (insn
) = label
;
5509 INSN_CODE (insn
) = -1;
5510 i
= recog_memoized (insn
);
5511 gcc_assert (i
>= 0);
5515 mep_reorg_erepeat (rtx_insn
*insns
)
5517 rtx_insn
*insn
, *prev
;
5522 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5524 && mep_invertable_branch_p (insn
))
5528 fprintf (dump_file
, "\n------------------------------\n");
5529 fprintf (dump_file
, "erepeat: considering this jump:\n");
5530 print_rtl_single (dump_file
, insn
);
5532 count
= simplejump_p (insn
) ? 0 : 1;
5533 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5535 if (CALL_P (prev
) || BARRIER_P (prev
))
5538 if (prev
== JUMP_LABEL (insn
))
5542 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5544 if (LABEL_NUSES (prev
) == 1)
5545 /* We're the only user, always safe */ ;
5546 else if (LABEL_NUSES (prev
) == 2)
5548 /* See if there's a barrier before this label. If
5549 so, we know nobody inside the loop uses it.
5550 But we must be careful to put the erepeat
5551 *after* the label. */
5553 for (barrier
= PREV_INSN (prev
);
5554 barrier
&& NOTE_P (barrier
);
5555 barrier
= PREV_INSN (barrier
))
5557 if (barrier
&& ! BARRIER_P (barrier
))
5562 /* We don't know who else, within or without our loop, uses this */
5564 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5568 /* Generate a label to be used by the erepat insn. */
5569 l
= gen_label_rtx ();
5571 /* Insert the erepeat after INSN's target label. */
5572 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5574 emit_insn_after (x
, prev
);
5576 /* Insert the erepeat label. */
5577 newlast
= (mep_insert_repeat_label_last
5578 (insn
, l
, !simplejump_p (insn
), false));
5579 if (simplejump_p (insn
))
5581 emit_insn_before (gen_erepeat_end (), insn
);
5586 mep_invert_branch (insn
, newlast
);
5587 emit_insn_after (gen_erepeat_end (), newlast
);
5594 /* A label is OK if there is exactly one user, and we
5595 can find that user before the next label. */
5598 if (LABEL_NUSES (prev
) == 1)
5600 for (user
= PREV_INSN (prev
);
5601 user
&& (INSN_P (user
) || NOTE_P (user
));
5602 user
= PREV_INSN (user
))
5603 if (JUMP_P (user
) && JUMP_LABEL (user
) == prev
)
5605 safe
= INSN_UID (user
);
5612 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5613 safe
, INSN_UID (prev
));
5623 fprintf (dump_file
, "\n==============================\n");
5626 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5627 always do this on its own. */
5630 mep_jmp_return_reorg (rtx_insn
*insns
)
5632 rtx_insn
*insn
, *label
, *ret
;
5635 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5636 if (simplejump_p (insn
))
5638 /* Find the fist real insn the jump jumps to. */
5639 label
= ret
= safe_as_a
<rtx_insn
*> (JUMP_LABEL (insn
));
5643 || GET_CODE (PATTERN (ret
)) == USE
))
5644 ret
= NEXT_INSN (ret
);
5648 /* Is it a return? */
5649 ret_code
= recog_memoized (ret
);
5650 if (ret_code
== CODE_FOR_return_internal
5651 || ret_code
== CODE_FOR_eh_return_internal
)
5653 /* It is. Replace the jump with a return. */
5654 LABEL_NUSES (label
) --;
5655 if (LABEL_NUSES (label
) == 0)
5656 delete_insn (label
);
5657 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5658 INSN_CODE (insn
) = -1;
5666 mep_reorg_addcombine (rtx_insn
*insns
)
5670 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5672 && INSN_CODE (i
) == CODE_FOR_addsi3
5673 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5674 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5675 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5676 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5680 && INSN_CODE (n
) == CODE_FOR_addsi3
5681 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5682 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5683 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5684 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5686 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5687 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5688 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5690 && ic
+ nc
> -32768)
5692 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5693 SET_NEXT_INSN (i
) = NEXT_INSN (n
);
5695 SET_PREV_INSN (NEXT_INSN (i
)) = i
;
5701 /* If this insn adjusts the stack, return the adjustment, else return
5704 add_sp_insn_p (rtx_insn
*insn
)
5708 if (! single_set (insn
))
5710 pat
= PATTERN (insn
);
5711 if (GET_CODE (SET_DEST (pat
)) != REG
)
5713 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5715 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5717 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5719 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5721 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5723 return INTVAL (XEXP (SET_SRC (pat
), 1));
5726 /* Check for trivial functions that set up an unneeded stack
5729 mep_reorg_noframe (rtx_insn
*insns
)
5731 rtx_insn
*start_frame_insn
;
5732 rtx_insn
*end_frame_insn
= 0;
5736 /* The first insn should be $sp = $sp + N */
5737 while (insns
&& ! INSN_P (insns
))
5738 insns
= NEXT_INSN (insns
);
5742 sp_adjust
= add_sp_insn_p (insns
);
5746 start_frame_insn
= insns
;
5747 sp
= SET_DEST (PATTERN (start_frame_insn
));
5749 insns
= next_real_insn (insns
);
5753 rtx_insn
*next
= next_real_insn (insns
);
5757 sp2
= add_sp_insn_p (insns
);
5762 end_frame_insn
= insns
;
5763 if (sp2
!= -sp_adjust
)
5766 else if (mep_mentioned_p (insns
, sp
, 0))
5768 else if (CALL_P (insns
))
5776 delete_insn (start_frame_insn
);
5777 delete_insn (end_frame_insn
);
5784 rtx_insn
*insns
= get_insns ();
5786 /* We require accurate REG_DEAD notes. */
5787 compute_bb_for_insn ();
5788 df_note_add_problem ();
5791 mep_reorg_addcombine (insns
);
5792 #if EXPERIMENTAL_REGMOVE_REORG
5793 /* VLIW packing has been done already, so we can't just delete things. */
5794 if (!mep_vliw_function_p (cfun
->decl
))
5795 mep_reorg_regmove (insns
);
5797 mep_jmp_return_reorg (insns
);
5798 mep_bundle_insns (insns
);
5799 mep_reorg_repeat (insns
);
5802 && !profile_arc_flag
5803 && TARGET_OPT_REPEAT
5804 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5805 mep_reorg_erepeat (insns
);
5807 /* This may delete *insns so make sure it's last. */
5808 mep_reorg_noframe (insns
);
5810 df_finish_pass (false);
5815 /*----------------------------------------------------------------------*/
5817 /*----------------------------------------------------------------------*/
5819 /* Element X gives the index into cgen_insns[] of the most general
5820 implementation of intrinsic X. Unimplemented intrinsics are
5822 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5824 /* Element X gives the index of another instruction that is mapped to
5825 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5828 Things are set up so that mep_intrinsic_chain[X] < X. */
5829 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5831 /* The bitmask for the current ISA. The ISA masks are declared
5833 unsigned int mep_selected_isa
;
5836 const char *config_name
;
5840 static struct mep_config mep_configs
[] = {
5841 #ifdef COPROC_SELECTION_TABLE
5842 COPROC_SELECTION_TABLE
,
5847 /* Initialize the global intrinsics variables above. */
5850 mep_init_intrinsics (void)
5854 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5855 mep_selected_isa
= mep_configs
[0].isa
;
5856 if (mep_config_string
!= 0)
5857 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5858 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5860 mep_selected_isa
= mep_configs
[i
].isa
;
5864 /* Assume all intrinsics are unavailable. */
5865 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5866 mep_intrinsic_insn
[i
] = -1;
5868 /* Build up the global intrinsic tables. */
5869 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5870 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5872 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5873 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5875 /* See whether we can directly move values between one coprocessor
5876 register and another. */
5877 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5878 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5879 mep_have_copro_copro_moves_p
= true;
5881 /* See whether we can directly move values between core and
5882 coprocessor registers. */
5883 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5884 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5886 mep_have_core_copro_moves_p
= 1;
5889 /* Declare all available intrinsic functions. Called once only. */
5891 static tree cp_data_bus_int_type_node
;
5892 static tree opaque_vector_type_node
;
5893 static tree v8qi_type_node
;
5894 static tree v4hi_type_node
;
5895 static tree v2si_type_node
;
5896 static tree v8uqi_type_node
;
5897 static tree v4uhi_type_node
;
5898 static tree v2usi_type_node
;
5901 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5905 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5906 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5907 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5908 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5909 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5910 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5911 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5912 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5913 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5914 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5915 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5916 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5917 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5918 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5919 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5920 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5921 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5923 return void_type_node
;
5928 mep_init_builtins (void)
5932 if (TARGET_64BIT_CR_REGS
)
5933 cp_data_bus_int_type_node
= long_long_integer_type_node
;
5935 cp_data_bus_int_type_node
= long_integer_type_node
;
5937 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
5938 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
5939 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
5940 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
5941 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
5942 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
5943 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
5945 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node
);
5947 add_builtin_type ("cp_vector", opaque_vector_type_node
);
5949 add_builtin_type ("cp_v8qi", v8qi_type_node
);
5950 add_builtin_type ("cp_v4hi", v4hi_type_node
);
5951 add_builtin_type ("cp_v2si", v2si_type_node
);
5953 add_builtin_type ("cp_v8uqi", v8uqi_type_node
);
5954 add_builtin_type ("cp_v4uhi", v4uhi_type_node
);
5955 add_builtin_type ("cp_v2usi", v2usi_type_node
);
5957 /* Intrinsics like mep_cadd3 are implemented with two groups of
5958 instructions, one which uses UNSPECs and one which uses a specific
5959 rtl code such as PLUS. Instructions in the latter group belong
5960 to GROUP_KNOWN_CODE.
5962 In such cases, the intrinsic will have two entries in the global
5963 tables above. The unspec form is accessed using builtin functions
5964 while the specific form is accessed using the mep_* enum in
5967 The idea is that __cop arithmetic and builtin functions have
5968 different optimization requirements. If mep_cadd3() appears in
5969 the source code, the user will surely except gcc to use cadd3
5970 rather than a work-alike such as add3. However, if the user
5971 just writes "a + b", where a or b are __cop variables, it is
5972 reasonable for gcc to choose a core instruction rather than
5973 cadd3 if it believes that is more optimal. */
5974 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5975 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
5976 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
5978 tree ret_type
= void_type_node
;
5981 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
5984 if (cgen_insns
[i
].cret_p
)
5985 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
5987 bi_type
= build_function_type_list (ret_type
, NULL_TREE
);
5988 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
5990 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
5994 /* Report the unavailablity of the given intrinsic. */
5998 mep_intrinsic_unavailable (int intrinsic
)
6000 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
6002 if (already_reported_p
[intrinsic
])
6005 if (mep_intrinsic_insn
[intrinsic
] < 0)
6006 error ("coprocessor intrinsic %qs is not available in this configuration",
6007 cgen_intrinsics
[intrinsic
]);
6008 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6009 error ("%qs is not available in VLIW functions",
6010 cgen_intrinsics
[intrinsic
]);
6012 error ("%qs is not available in non-VLIW functions",
6013 cgen_intrinsics
[intrinsic
]);
6015 already_reported_p
[intrinsic
] = 1;
6020 /* See if any implementation of INTRINSIC is available to the
6021 current function. If so, store the most general implementation
6022 in *INSN_PTR and return true. Return false otherwise. */
6025 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6029 i
= mep_intrinsic_insn
[intrinsic
];
6030 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6031 i
= mep_intrinsic_chain
[i
];
6035 *insn_ptr
= &cgen_insns
[i
];
6042 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6043 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6044 try using a work-alike instead. In this case, the returned insn
6045 may have three operands rather than two. */
6048 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6052 if (intrinsic
== mep_cmov
)
6054 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6055 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6059 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6063 /* If ARG is a register operand that is the same size as MODE, convert it
6064 to MODE using a subreg. Otherwise return ARG as-is. */
6067 mep_convert_arg (machine_mode mode
, rtx arg
)
6069 if (GET_MODE (arg
) != mode
6070 && register_operand (arg
, VOIDmode
)
6071 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6072 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6077 /* Apply regnum conversions to ARG using the description given by REGNUM.
6078 Return the new argument on success and null on failure. */
6081 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6083 if (regnum
->count
== 0)
6086 if (GET_CODE (arg
) != CONST_INT
6088 || INTVAL (arg
) >= regnum
->count
)
6091 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6095 /* Try to make intrinsic argument ARG match the given operand.
6096 UNSIGNED_P is true if the argument has an unsigned type. */
6099 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6102 if (GET_CODE (arg
) == CONST_INT
)
6104 /* CONST_INTs can only be bound to integer operands. */
6105 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6108 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6109 /* These hold vector constants. */;
6110 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6112 /* If the argument is a different size from what's expected, we must
6113 have a value in the right mode class in order to convert it. */
6114 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6117 /* If the operand is an rvalue, promote or demote it to match the
6118 operand's size. This might not need extra instructions when
6119 ARG is a register value. */
6120 if (operand
->constraint
[0] != '=')
6121 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6124 /* If the operand is an lvalue, bind the operand to a new register.
6125 The caller will copy this value into ARG after the main
6126 instruction. By doing this always, we produce slightly more
6128 /* But not for control registers. */
6129 if (operand
->constraint
[0] == '='
6131 || ! (CONTROL_REGNO_P (REGNO (arg
))
6132 || CCR_REGNO_P (REGNO (arg
))
6133 || CR_REGNO_P (REGNO (arg
)))
6135 return gen_reg_rtx (operand
->mode
);
6137 /* Try simple mode punning. */
6138 arg
= mep_convert_arg (operand
->mode
, arg
);
6139 if (operand
->predicate (arg
, operand
->mode
))
6142 /* See if forcing the argument into a register will make it match. */
6143 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6144 arg
= force_reg (operand
->mode
, arg
);
6146 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6147 if (operand
->predicate (arg
, operand
->mode
))
6154 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6155 function FNNAME. OPERAND describes the operand to which ARGNUM
6159 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6160 int argnum
, tree fnname
)
6164 if (GET_CODE (arg
) == CONST_INT
)
6165 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6166 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6168 const struct cgen_immediate_predicate
*predicate
;
6169 HOST_WIDE_INT argval
;
6171 predicate
= &cgen_immediate_predicates
[i
];
6172 argval
= INTVAL (arg
);
6173 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6174 error ("argument %d of %qE must be in the range %d...%d",
6175 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6177 error ("argument %d of %qE must be a multiple of %d",
6178 argnum
, fnname
, predicate
->align
);
6182 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6186 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6187 rtx subtarget ATTRIBUTE_UNUSED
,
6188 machine_mode mode ATTRIBUTE_UNUSED
,
6189 int ignore ATTRIBUTE_UNUSED
)
6191 rtx pat
, op
[10], arg
[10];
6193 int opindex
, unsigned_p
[10];
6195 unsigned int n_args
;
6197 const struct cgen_insn
*cgen_insn
;
6198 const struct insn_data_d
*idata
;
6199 unsigned int first_arg
= 0;
6200 unsigned int builtin_n_args
;
6202 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6203 fnname
= DECL_NAME (fndecl
);
6205 /* Find out which instruction we should emit. Note that some coprocessor
6206 intrinsics may only be available in VLIW mode, or only in normal mode. */
6207 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6209 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6212 idata
= &insn_data
[cgen_insn
->icode
];
6214 builtin_n_args
= cgen_insn
->num_args
;
6216 if (cgen_insn
->cret_p
)
6218 if (cgen_insn
->cret_p
> 1)
6221 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6225 /* Evaluate each argument. */
6226 n_args
= call_expr_nargs (exp
);
6228 if (n_args
< builtin_n_args
)
6230 error ("too few arguments to %qE", fnname
);
6233 if (n_args
> builtin_n_args
)
6235 error ("too many arguments to %qE", fnname
);
6239 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6243 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6248 if (cgen_insn
->regnums
[a
].reference_p
)
6250 if (TREE_CODE (value
) != ADDR_EXPR
)
6253 error ("argument %d of %qE must be an address", a
+1, fnname
);
6256 value
= TREE_OPERAND (value
, 0);
6260 /* If the argument has been promoted to int, get the unpromoted
6261 value. This is necessary when sub-int memory values are bound
6262 to reference parameters. */
6263 if (TREE_CODE (value
) == NOP_EXPR
6264 && TREE_TYPE (value
) == integer_type_node
6265 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6266 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6267 < TYPE_PRECISION (TREE_TYPE (value
))))
6268 value
= TREE_OPERAND (value
, 0);
6270 /* If the argument has been promoted to double, get the unpromoted
6271 SFmode value. This is necessary for FMAX support, for example. */
6272 if (TREE_CODE (value
) == NOP_EXPR
6273 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6274 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6275 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6276 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6277 value
= TREE_OPERAND (value
, 0);
6279 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6280 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6281 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6282 if (cgen_insn
->regnums
[a
].reference_p
)
6284 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6285 machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6287 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6291 error ("argument %d of %qE must be in the range %d...%d",
6292 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6297 for (a
= 0; a
< first_arg
; a
++)
6299 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6302 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6305 /* Convert the arguments into a form suitable for the intrinsic.
6306 Report an error if this isn't possible. */
6307 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6309 a
= cgen_insn
->op_mapping
[opindex
];
6310 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6311 arg
[a
], unsigned_p
[a
]);
6312 if (op
[opindex
] == 0)
6314 mep_incompatible_arg (&idata
->operand
[opindex
],
6315 arg
[a
], a
+ 1 - first_arg
, fnname
);
6320 /* Emit the instruction. */
6321 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6322 op
[5], op
[6], op
[7], op
[8], op
[9]);
6324 if (GET_CODE (pat
) == SET
6325 && GET_CODE (SET_DEST (pat
)) == PC
6326 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6327 emit_jump_insn (pat
);
6331 /* Copy lvalues back to their final locations. */
6332 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6333 if (idata
->operand
[opindex
].constraint
[0] == '=')
6335 a
= cgen_insn
->op_mapping
[opindex
];
6338 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6339 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6340 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6344 /* First convert the operand to the right mode, then copy it
6345 into the destination. Doing the conversion as a separate
6346 step (rather than using convert_move) means that we can
6347 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6348 refer to the same register. */
6349 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6350 op
[opindex
], unsigned_p
[a
]);
6351 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6352 emit_move_insn (arg
[a
], op
[opindex
]);
6357 if (first_arg
> 0 && target
&& target
!= op
[0])
6359 emit_move_insn (target
, op
[0]);
6366 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED
)
6371 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6372 a global register. */
6375 global_reg_mentioned_p_1 (const_rtx x
)
6379 switch (GET_CODE (x
))
6382 if (REG_P (SUBREG_REG (x
)))
6384 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6385 && global_regs
[subreg_regno (x
)])
6393 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6398 /* A non-constant call might use a global register. */
6408 /* Returns nonzero if X mentions a global register. */
6411 global_reg_mentioned_p (rtx x
)
6417 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6419 x
= CALL_INSN_FUNCTION_USAGE (x
);
6427 subrtx_iterator::array_type array
;
6428 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6429 if (global_reg_mentioned_p_1 (*iter
))
6433 /* Scheduling hooks for VLIW mode.
6435 Conceptually this is very simple: we have a two-pack architecture
6436 that takes one core insn and one coprocessor insn to make up either
6437 a 32- or 64-bit instruction word (depending on the option bit set in
6438 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6439 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6440 and one 48-bit cop insn or two 32-bit core/cop insns.
6442 In practice, instruction selection will be a bear. Consider in
6443 VL64 mode the following insns
6448 these cannot pack, since the add is a 16-bit core insn and cmov
6449 is a 32-bit cop insn. However,
6454 packs just fine. For good VLIW code generation in VL64 mode, we
6455 will have to have 32-bit alternatives for many of the common core
6456 insns. Not implemented. */
6459 mep_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
, int cost
)
6463 if (REG_NOTE_KIND (link
) != 0)
6465 /* See whether INSN and DEP_INSN are intrinsics that set the same
6466 hard register. If so, it is more important to free up DEP_INSN
6467 than it is to free up INSN.
6469 Note that intrinsics like mep_mulr are handled differently from
6470 the equivalent mep.md patterns. In mep.md, if we don't care
6471 about the value of $lo and $hi, the pattern will just clobber
6472 the registers, not set them. Since clobbers don't count as
6473 output dependencies, it is often possible to reorder two mulrs,
6476 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6477 so any pair of mep_mulr()s will be inter-dependent. We should
6478 therefore give the first mep_mulr() a higher priority. */
6479 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6480 && global_reg_mentioned_p (PATTERN (insn
))
6481 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6484 /* If the dependence is an anti or output dependence, assume it
6489 /* If we can't recognize the insns, we can't really do anything. */
6490 if (recog_memoized (dep_insn
) < 0)
6493 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6494 attribute instead. */
6497 cost_specified
= get_attr_latency (dep_insn
);
6498 if (cost_specified
!= 0)
6499 return cost_specified
;
6505 /* ??? We don't properly compute the length of a load/store insn,
6506 taking into account the addressing mode. */
6509 mep_issue_rate (void)
6511 return TARGET_IVC2
? 3 : 2;
6514 /* Return true if function DECL was declared with the vliw attribute. */
6517 mep_vliw_function_p (tree decl
)
6519 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6523 mep_find_ready_insn (rtx_insn
**ready
, int nready
, enum attr_slot slot
,
6528 for (i
= nready
- 1; i
>= 0; --i
)
6530 rtx_insn
*insn
= ready
[i
];
6531 if (recog_memoized (insn
) >= 0
6532 && get_attr_slot (insn
) == slot
6533 && get_attr_length (insn
) == length
)
6541 mep_move_ready_insn (rtx_insn
**ready
, int nready
, rtx_insn
*insn
)
6545 for (i
= 0; i
< nready
; ++i
)
6546 if (ready
[i
] == insn
)
6548 for (; i
< nready
- 1; ++i
)
6549 ready
[i
] = ready
[i
+ 1];
6558 mep_print_sched_insn (FILE *dump
, rtx_insn
*insn
)
6560 const char *slots
= "none";
6561 const char *name
= NULL
;
6565 if (GET_CODE (PATTERN (insn
)) == SET
6566 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6568 switch (get_attr_slots (insn
))
6570 case SLOTS_CORE
: slots
= "core"; break;
6571 case SLOTS_C3
: slots
= "c3"; break;
6572 case SLOTS_P0
: slots
= "p0"; break;
6573 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6574 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6575 case SLOTS_P0S
: slots
= "p0s"; break;
6576 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6577 case SLOTS_P1
: slots
= "p1"; break;
6579 sprintf(buf
, "%d", get_attr_slots (insn
));
6584 if (GET_CODE (PATTERN (insn
)) == USE
)
6587 code
= INSN_CODE (insn
);
6589 name
= get_insn_name (code
);
6594 "insn %4d %4d %8s %s\n",
6602 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6603 int sched_verbose ATTRIBUTE_UNUSED
, rtx_insn
**ready
,
6604 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6606 int nready
= *pnready
;
6607 rtx_insn
*core_insn
, *cop_insn
;
6610 if (dump
&& sched_verbose
> 1)
6612 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6613 for (i
=0; i
<nready
; i
++)
6614 mep_print_sched_insn (dump
, ready
[i
]);
6615 fprintf (dump
, "\n");
6618 if (!mep_vliw_function_p (cfun
->decl
))
6623 /* IVC2 uses a DFA to determine what's ready and what's not. */
6627 /* We can issue either a core or coprocessor instruction.
6628 Look for a matched pair of insns to reorder. If we don't
6629 find any, don't second-guess the scheduler's priorities. */
6631 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6632 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6633 TARGET_OPT_VL64
? 6 : 2)))
6635 else if (TARGET_OPT_VL64
6636 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6637 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6640 /* We didn't find a pair. Issue the single insn at the head
6641 of the ready list. */
6644 /* Reorder the two insns first. */
6645 mep_move_ready_insn (ready
, nready
, core_insn
);
6646 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6650 /* Return true if X contains a register that is set by insn PREV. */
6653 mep_store_find_set (const_rtx x
, const rtx_insn
*prev
)
6655 subrtx_iterator::array_type array
;
6656 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6657 if (REG_P (x
) && reg_set_p (x
, prev
))
6662 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6663 not the containing insn. */
6666 mep_store_data_bypass_1 (rtx_insn
*prev
, rtx pat
)
6668 /* Cope with intrinsics like swcpa. */
6669 if (GET_CODE (pat
) == PARALLEL
)
6673 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6674 if (mep_store_data_bypass_p (prev
,
6675 as_a
<rtx_insn
*> (XVECEXP (pat
, 0, i
))))
6681 /* Check for some sort of store. */
6682 if (GET_CODE (pat
) != SET
6683 || GET_CODE (SET_DEST (pat
)) != MEM
)
6686 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6687 The first operand to the unspec is the store data and the other operands
6688 are used to calculate the address. */
6689 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6694 src
= SET_SRC (pat
);
6695 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6696 if (mep_store_find_set (XVECEXP (src
, 0, i
), prev
))
6702 /* Otherwise just check that PREV doesn't modify any register mentioned
6703 in the memory destination. */
6704 return !mep_store_find_set (SET_DEST (pat
), prev
);
6707 /* Return true if INSN is a store instruction and if the store address
6708 has no true dependence on PREV. */
6711 mep_store_data_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6713 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6716 /* Return true if, apart from HI/LO, there are no true dependencies
6717 between multiplication instructions PREV and INSN. */
6720 mep_mul_hilo_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6724 pat
= PATTERN (insn
);
6725 if (GET_CODE (pat
) == PARALLEL
)
6726 pat
= XVECEXP (pat
, 0, 0);
6727 if (GET_CODE (pat
) != SET
)
6729 subrtx_iterator::array_type array
;
6730 FOR_EACH_SUBRTX (iter
, array
, SET_SRC (pat
), NONCONST
)
6732 const_rtx x
= *iter
;
6734 && REGNO (x
) != LO_REGNO
6735 && REGNO (x
) != HI_REGNO
6736 && reg_set_p (x
, prev
))
6742 /* Return true if INSN is an ldc instruction that issues to the
6743 MeP-h1 integer pipeline. This is true for instructions that
6744 read from PSW, LP, SAR, HI and LO. */
6747 mep_ipipe_ldc_p (rtx_insn
*insn
)
6751 pat
= PATTERN (insn
);
6753 /* Cope with instrinsics that set both a hard register and its shadow.
6754 The set of the hard register comes first. */
6755 if (GET_CODE (pat
) == PARALLEL
)
6756 pat
= XVECEXP (pat
, 0, 0);
6758 if (GET_CODE (pat
) == SET
)
6760 src
= SET_SRC (pat
);
6762 /* Cope with intrinsics. The first operand to the unspec is
6763 the source register. */
6764 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6765 src
= XVECEXP (src
, 0, 0);
6768 switch (REGNO (src
))
6781 /* Create a VLIW bundle from core instruction CORE and coprocessor
6782 instruction COP. COP always satisfies INSN_P, but CORE can be
6783 either a new pattern or an existing instruction.
6785 Emit the bundle in place of COP and return it. */
6788 mep_make_bundle (rtx core_insn_or_pat
, rtx_insn
*cop
)
6791 rtx_insn
*core_insn
;
6794 /* If CORE is an existing instruction, remove it, otherwise put
6795 the new pattern in an INSN harness. */
6796 if (INSN_P (core_insn_or_pat
))
6798 core_insn
= as_a
<rtx_insn
*> (core_insn_or_pat
);
6799 remove_insn (core_insn
);
6802 core_insn
= make_insn_raw (core_insn_or_pat
);
6804 /* Generate the bundle sequence and replace COP with it. */
6805 seq
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core_insn
, cop
));
6806 insn
= emit_insn_after (seq
, cop
);
6809 /* Set up the links of the insns inside the SEQUENCE. */
6810 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
6811 SET_NEXT_INSN (core_insn
) = cop
;
6812 SET_PREV_INSN (cop
) = core_insn
;
6813 SET_NEXT_INSN (cop
) = NEXT_INSN (insn
);
6815 /* Set the VLIW flag for the coprocessor instruction. */
6816 PUT_MODE (core_insn
, VOIDmode
);
6817 PUT_MODE (cop
, BImode
);
6819 /* Derive a location for the bundle. Individual instructions cannot
6820 have their own location because there can be no assembler labels
6821 between CORE_INSN and COP. */
6822 INSN_LOCATION (insn
) = INSN_LOCATION (INSN_LOCATION (core_insn
) ? core_insn
: cop
);
6823 INSN_LOCATION (core_insn
) = 0;
6824 INSN_LOCATION (cop
) = 0;
6829 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6832 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6834 rtx
* pinsn
= (rtx
*) data
;
6836 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6840 /* Return true if anything in insn X is (anti,output,true) dependent on
6841 anything in insn Y. */
6844 mep_insn_dependent_p (rtx x
, rtx y
)
6848 gcc_assert (INSN_P (x
));
6849 gcc_assert (INSN_P (y
));
6852 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6853 if (tmp
== NULL_RTX
)
6857 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6858 if (tmp
== NULL_RTX
)
6865 core_insn_p (rtx_insn
*insn
)
6867 if (GET_CODE (PATTERN (insn
)) == USE
)
6869 if (get_attr_slot (insn
) == SLOT_CORE
)
6874 /* Mark coprocessor instructions that can be bundled together with
6875 the immediately preceding core instruction. This is later used
6876 to emit the "+" that tells the assembler to create a VLIW insn.
6878 For unbundled insns, the assembler will automatically add coprocessor
6879 nops, and 16-bit core nops. Due to an apparent oversight in the
6880 spec, the assembler will _not_ automatically add 32-bit core nops,
6881 so we have to emit those here.
6883 Called from mep_insn_reorg. */
6886 mep_bundle_insns (rtx_insn
*insns
)
6888 rtx_insn
*insn
, *last
= NULL
, *first
= NULL
;
6889 int saw_scheduling
= 0;
6891 /* Only do bundling if we're in vliw mode. */
6892 if (!mep_vliw_function_p (cfun
->decl
))
6895 /* The first insn in a bundle are TImode, the remainder are
6896 VOIDmode. After this function, the first has VOIDmode and the
6897 rest have BImode. */
6899 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6901 /* First, move any NOTEs that are within a bundle, to the beginning
6903 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6905 if (NOTE_P (insn
) && first
)
6906 /* Don't clear FIRST. */;
6908 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
6911 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
6913 rtx_insn
*note
, *prev
;
6915 /* INSN is part of a bundle; FIRST is the first insn in that
6916 bundle. Move all intervening notes out of the bundle.
6917 In addition, since the debug pass may insert a label
6918 whenever the current line changes, set the location info
6919 for INSN to match FIRST. */
6921 INSN_LOCATION (insn
) = INSN_LOCATION (first
);
6923 note
= PREV_INSN (insn
);
6924 while (note
&& note
!= first
)
6926 prev
= PREV_INSN (note
);
6930 /* Remove NOTE from here... */
6931 SET_PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
6932 SET_NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
6933 /* ...and put it in here. */
6934 SET_NEXT_INSN (note
) = first
;
6935 SET_PREV_INSN (note
) = PREV_INSN (first
);
6936 SET_NEXT_INSN (PREV_INSN (note
)) = note
;
6937 SET_PREV_INSN (NEXT_INSN (note
)) = note
;
6944 else if (!NONJUMP_INSN_P (insn
))
6948 /* Now fix up the bundles. */
6949 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6954 if (!NONJUMP_INSN_P (insn
))
6960 /* If we're not optimizing enough, there won't be scheduling
6961 info. We detect that here. */
6962 if (GET_MODE (insn
) == TImode
)
6964 if (!saw_scheduling
)
6969 rtx_insn
*core_insn
= NULL
;
6971 /* IVC2 slots are scheduled by DFA, so we just accept
6972 whatever the scheduler gives us. However, we must make
6973 sure the core insn (if any) is the first in the bundle.
6974 The IVC2 assembler can insert whatever NOPs are needed,
6975 and allows a COP insn to be first. */
6977 if (NONJUMP_INSN_P (insn
)
6978 && GET_CODE (PATTERN (insn
)) != USE
6979 && GET_MODE (insn
) == TImode
)
6983 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
6984 && NONJUMP_INSN_P (NEXT_INSN (last
));
6985 last
= NEXT_INSN (last
))
6987 if (core_insn_p (last
))
6990 if (core_insn_p (last
))
6993 if (core_insn
&& core_insn
!= insn
)
6995 /* Swap core insn to first in the bundle. */
6997 /* Remove core insn. */
6998 if (PREV_INSN (core_insn
))
6999 SET_NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
7000 if (NEXT_INSN (core_insn
))
7001 SET_PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
7003 /* Re-insert core insn. */
7004 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
7005 SET_NEXT_INSN (core_insn
) = insn
;
7007 if (PREV_INSN (core_insn
))
7008 SET_NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7009 SET_PREV_INSN (insn
) = core_insn
;
7011 PUT_MODE (core_insn
, TImode
);
7012 PUT_MODE (insn
, VOIDmode
);
7016 /* The first insn has TImode, the rest have VOIDmode */
7017 if (GET_MODE (insn
) == TImode
)
7018 PUT_MODE (insn
, VOIDmode
);
7020 PUT_MODE (insn
, BImode
);
7024 PUT_MODE (insn
, VOIDmode
);
7025 if (recog_memoized (insn
) >= 0
7026 && get_attr_slot (insn
) == SLOT_COP
)
7030 || recog_memoized (last
) < 0
7031 || get_attr_slot (last
) != SLOT_CORE
7032 || (get_attr_length (insn
)
7033 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7034 || mep_insn_dependent_p (insn
, last
))
7036 switch (get_attr_length (insn
))
7041 insn
= mep_make_bundle (gen_nop (), insn
);
7044 if (TARGET_OPT_VL64
)
7045 insn
= mep_make_bundle (gen_nop32 (), insn
);
7048 if (TARGET_OPT_VL64
)
7049 error ("2 byte cop instructions are"
7050 " not allowed in 64-bit VLIW mode");
7052 insn
= mep_make_bundle (gen_nop (), insn
);
7055 error ("unexpected %d byte cop instruction",
7056 get_attr_length (insn
));
7061 insn
= mep_make_bundle (last
, insn
);
7069 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7070 Return true on success. This function can fail if the intrinsic
7071 is unavailable or if the operands don't satisfy their predicates. */
7074 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7076 const struct cgen_insn
*cgen_insn
;
7077 const struct insn_data_d
*idata
;
7081 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7084 idata
= &insn_data
[cgen_insn
->icode
];
7085 for (i
= 0; i
< idata
->n_operands
; i
++)
7087 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7088 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7092 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7093 newop
[3], newop
[4], newop
[5],
7094 newop
[6], newop
[7], newop
[8]));
7100 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7101 OPERANDS[0]. Report an error if the instruction could not
7102 be synthesized. OPERANDS[1] is a register_operand. For sign
7103 and zero extensions, it may be smaller than SImode. */
7106 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7107 rtx
* operands ATTRIBUTE_UNUSED
)
7113 /* Likewise, but apply a binary operation to OPERANDS[1] and
7114 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7115 can be a general_operand.
7117 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7118 third operand. REG and REG3 take register operands only. */
7121 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7122 int ATTRIBUTE_UNUSED immediate3
,
7123 int ATTRIBUTE_UNUSED reg
,
7124 int ATTRIBUTE_UNUSED reg3
,
7125 rtx
* operands ATTRIBUTE_UNUSED
)
7131 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
7132 int opno ATTRIBUTE_UNUSED
, int *total
,
7133 bool ATTRIBUTE_UNUSED speed_t
)
7138 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7140 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7147 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7151 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7153 : COSTS_N_INSNS (2));
7160 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
,
7161 machine_mode mode ATTRIBUTE_UNUSED
,
7162 addr_space_t as ATTRIBUTE_UNUSED
,
7163 bool ATTRIBUTE_UNUSED speed_p
)
7169 mep_asm_init_sections (void)
7172 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7173 "\t.section .based,\"aw\"");
7176 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7177 "\t.section .sbss,\"aw\"");
7180 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7181 "\t.section .sdata,\"aw\",@progbits");
7184 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7185 "\t.section .far,\"aw\"");
7188 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7189 "\t.section .farbss,\"aw\"");
7192 = get_unnamed_section (0, output_section_asm_op
,
7193 "\t.section .frodata,\"a\"");
7196 = get_unnamed_section (0, output_section_asm_op
,
7197 "\t.section .srodata,\"a\"");
7200 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7201 "\t.section .vtext,\"axv\"\n\t.vliw");
7204 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7205 "\t.section .vftext,\"axv\"\n\t.vliw");
7208 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7209 "\t.section .ftext,\"ax\"\n\t.core");
7213 /* Initialize the GCC target structure. */
7215 #undef TARGET_ASM_FUNCTION_PROLOGUE
7216 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7217 #undef TARGET_ATTRIBUTE_TABLE
7218 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7219 #undef TARGET_COMP_TYPE_ATTRIBUTES
7220 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7221 #undef TARGET_INSERT_ATTRIBUTES
7222 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7223 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7224 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7225 #undef TARGET_CAN_INLINE_P
7226 #define TARGET_CAN_INLINE_P mep_can_inline_p
7227 #undef TARGET_SECTION_TYPE_FLAGS
7228 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7229 #undef TARGET_ASM_NAMED_SECTION
7230 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7231 #undef TARGET_INIT_BUILTINS
7232 #define TARGET_INIT_BUILTINS mep_init_builtins
7233 #undef TARGET_EXPAND_BUILTIN
7234 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7235 #undef TARGET_SCHED_ADJUST_COST
7236 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7237 #undef TARGET_SCHED_ISSUE_RATE
7238 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7239 #undef TARGET_SCHED_REORDER
7240 #define TARGET_SCHED_REORDER mep_sched_reorder
7241 #undef TARGET_STRIP_NAME_ENCODING
7242 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7243 #undef TARGET_ASM_SELECT_SECTION
7244 #define TARGET_ASM_SELECT_SECTION mep_select_section
7245 #undef TARGET_ASM_UNIQUE_SECTION
7246 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7247 #undef TARGET_ENCODE_SECTION_INFO
7248 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7249 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7250 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7251 #undef TARGET_RTX_COSTS
7252 #define TARGET_RTX_COSTS mep_rtx_cost
7253 #undef TARGET_ADDRESS_COST
7254 #define TARGET_ADDRESS_COST mep_address_cost
7255 #undef TARGET_MACHINE_DEPENDENT_REORG
7256 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7257 #undef TARGET_SETUP_INCOMING_VARARGS
7258 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7259 #undef TARGET_PASS_BY_REFERENCE
7260 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7261 #undef TARGET_FUNCTION_ARG
7262 #define TARGET_FUNCTION_ARG mep_function_arg
7263 #undef TARGET_FUNCTION_ARG_ADVANCE
7264 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7265 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7266 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7267 #undef TARGET_OPTION_OVERRIDE
7268 #define TARGET_OPTION_OVERRIDE mep_option_override
7269 #undef TARGET_ALLOCATE_INITIAL_VALUE
7270 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7271 #undef TARGET_ASM_INIT_SECTIONS
7272 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7273 #undef TARGET_RETURN_IN_MEMORY
7274 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7275 #undef TARGET_NARROW_VOLATILE_BITFIELD
7276 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7277 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7278 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7279 #undef TARGET_BUILD_BUILTIN_VA_LIST
7280 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7281 #undef TARGET_EXPAND_BUILTIN_VA_START
7282 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7283 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7284 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7285 #undef TARGET_CAN_ELIMINATE
7286 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7287 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7288 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7289 #undef TARGET_TRAMPOLINE_INIT
7290 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7291 #undef TARGET_LEGITIMATE_CONSTANT_P
7292 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7293 #undef TARGET_CAN_USE_DOLOOP_P
7294 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7296 struct gcc_target targetm
= TARGET_INITIALIZER
;