1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
37 #include "insn-attr.h"
49 #include "diagnostic-core.h"
51 #include "target-def.h"
52 #include "langhooks.h"
59 /* Structure of this file:
61 + Command Line Option Support
62 + Pattern support - constraints, predicates, expanders
65 + Functions to save and restore machine-specific function data.
66 + Frame/Epilog/Prolog Related
68 + Function args in registers
69 + Handle pipeline hazards
72 + Machine-dependent Reorg
77 Symbols are encoded as @ <char> . <name> where <char> is one of these:
85 c - cb (control bus) */
87 struct GTY(()) machine_function
89 int mep_frame_pointer_needed
;
97 /* Records __builtin_return address. */
101 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
102 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
104 /* 2 if the current function has an interrupt attribute, 1 if not, 0
105 if unknown. This is here because resource.c uses EPILOGUE_USES
107 int interrupt_handler
;
109 /* Likewise, for disinterrupt attribute. */
110 int disable_interrupts
;
112 /* Number of doloop tags used so far. */
115 /* True if the last tag was allocated to a doloop_end. */
116 bool doloop_tag_from_end
;
118 /* True if reload changes $TP. */
119 bool reload_changes_tp
;
121 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
122 We only set this if the function is an interrupt handler. */
123 int asms_without_operands
;
126 #define MEP_CONTROL_REG(x) \
127 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
129 static GTY(()) section
* based_section
;
130 static GTY(()) section
* tinybss_section
;
131 static GTY(()) section
* far_section
;
132 static GTY(()) section
* farbss_section
;
133 static GTY(()) section
* frodata_section
;
134 static GTY(()) section
* srodata_section
;
136 static GTY(()) section
* vtext_section
;
137 static GTY(()) section
* vftext_section
;
138 static GTY(()) section
* ftext_section
;
140 static void mep_set_leaf_registers (int);
141 static bool symbol_p (rtx
);
142 static bool symbolref_p (rtx
);
143 static void encode_pattern_1 (rtx
);
144 static void encode_pattern (rtx
);
145 static bool const_in_range (rtx
, int, int);
146 static void mep_rewrite_mult (rtx
, rtx
);
147 static void mep_rewrite_mulsi3 (rtx
, rtx
, rtx
, rtx
);
148 static void mep_rewrite_maddsi3 (rtx
, rtx
, rtx
, rtx
, rtx
);
149 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx
, bool);
150 static bool move_needs_splitting (rtx
, rtx
, enum machine_mode
);
151 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
152 static bool mep_nongeneral_reg (rtx
);
153 static bool mep_general_copro_reg (rtx
);
154 static bool mep_nonregister (rtx
);
155 static struct machine_function
* mep_init_machine_status (void);
156 static rtx
mep_tp_rtx (void);
157 static rtx
mep_gp_rtx (void);
158 static bool mep_interrupt_p (void);
159 static bool mep_disinterrupt_p (void);
160 static bool mep_reg_set_p (rtx
, rtx
);
161 static bool mep_reg_set_in_function (int);
162 static bool mep_interrupt_saved_reg (int);
163 static bool mep_call_saves_register (int);
165 static void add_constant (int, int, int, int);
166 static rtx
maybe_dead_move (rtx
, rtx
, bool);
167 static void mep_reload_pointer (int, const char *);
168 static void mep_start_function (FILE *, HOST_WIDE_INT
);
169 static bool mep_function_ok_for_sibcall (tree
, tree
);
170 static int unique_bit_in (HOST_WIDE_INT
);
171 static int bit_size_for_clip (HOST_WIDE_INT
);
172 static int bytesize (const_tree
, enum machine_mode
);
173 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
174 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
175 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
176 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
177 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
178 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
179 static bool mep_function_attribute_inlinable_p (const_tree
);
180 static bool mep_can_inline_p (tree
, tree
);
181 static bool mep_lookup_pragma_disinterrupt (const char *);
182 static int mep_multiple_address_regions (tree
, bool);
183 static int mep_attrlist_to_encoding (tree
, tree
);
184 static void mep_insert_attributes (tree
, tree
*);
185 static void mep_encode_section_info (tree
, rtx
, int);
186 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
187 static void mep_unique_section (tree
, int);
188 static unsigned int mep_section_type_flags (tree
, const char *, int);
189 static void mep_asm_named_section (const char *, unsigned int, tree
);
190 static bool mep_mentioned_p (rtx
, rtx
, int);
191 static void mep_reorg_regmove (rtx
);
192 static rtx
mep_insert_repeat_label_last (rtx
, rtx
, bool, bool);
193 static void mep_reorg_repeat (rtx
);
194 static bool mep_invertable_branch_p (rtx
);
195 static void mep_invert_branch (rtx
, rtx
);
196 static void mep_reorg_erepeat (rtx
);
197 static void mep_jmp_return_reorg (rtx
);
198 static void mep_reorg_addcombine (rtx
);
199 static void mep_reorg (void);
200 static void mep_init_intrinsics (void);
201 static void mep_init_builtins (void);
202 static void mep_intrinsic_unavailable (int);
203 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
204 static bool mep_get_move_insn (int, const struct cgen_insn
**);
205 static rtx
mep_convert_arg (enum machine_mode
, rtx
);
206 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
207 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
208 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
209 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
210 static int mep_adjust_cost (rtx
, rtx
, rtx
, int);
211 static int mep_issue_rate (void);
212 static rtx
mep_find_ready_insn (rtx
*, int, enum attr_slot
, int);
213 static void mep_move_ready_insn (rtx
*, int, rtx
);
214 static int mep_sched_reorder (FILE *, int, rtx
*, int *, int);
215 static rtx
mep_make_bundle (rtx
, rtx
);
216 static void mep_bundle_insns (rtx
);
217 static bool mep_rtx_cost (rtx
, int, int, int, int *, bool);
218 static int mep_address_cost (rtx
, enum machine_mode
, addr_space_t
, bool);
219 static void mep_setup_incoming_varargs (cumulative_args_t
, enum machine_mode
,
221 static bool mep_pass_by_reference (cumulative_args_t cum
, enum machine_mode
,
223 static rtx
mep_function_arg (cumulative_args_t
, enum machine_mode
,
225 static void mep_function_arg_advance (cumulative_args_t
, enum machine_mode
,
227 static bool mep_vector_mode_supported_p (enum machine_mode
);
228 static rtx
mep_allocate_initial_value (rtx
);
229 static void mep_asm_init_sections (void);
230 static int mep_comp_type_attributes (const_tree
, const_tree
);
231 static bool mep_narrow_volatile_bitfield (void);
232 static rtx
mep_expand_builtin_saveregs (void);
233 static tree
mep_build_builtin_va_list (void);
234 static void mep_expand_va_start (tree
, rtx
);
235 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
236 static bool mep_can_eliminate (const int, const int);
237 static void mep_conditional_register_usage (void);
238 static void mep_trampoline_init (rtx
, tree
, rtx
);
240 #define WANT_GCC_DEFINITIONS
241 #include "mep-intrin.h"
242 #undef WANT_GCC_DEFINITIONS
245 /* Command Line Option Support. */
247 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
249 /* True if we can use cmov instructions to move values back and forth
250 between core and coprocessor registers. */
251 bool mep_have_core_copro_moves_p
;
253 /* True if we can use cmov instructions (or a work-alike) to move
254 values between coprocessor registers. */
255 bool mep_have_copro_copro_moves_p
;
257 /* A table of all coprocessor instructions that can act like
258 a coprocessor-to-coprocessor cmov. */
259 static const int mep_cmov_insns
[] = {
274 mep_set_leaf_registers (int enable
)
278 if (mep_leaf_registers
[0] != enable
)
279 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
280 mep_leaf_registers
[i
] = enable
;
284 mep_conditional_register_usage (void)
288 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
290 fixed_regs
[HI_REGNO
] = 1;
291 fixed_regs
[LO_REGNO
] = 1;
292 call_used_regs
[HI_REGNO
] = 1;
293 call_used_regs
[LO_REGNO
] = 1;
296 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
301 mep_option_override (void)
305 cl_deferred_option
*opt
;
306 vec
<cl_deferred_option
> *v
= (vec
<cl_deferred_option
> *) mep_deferred_options
;
309 FOR_EACH_VEC_ELT (*v
, i
, opt
)
311 switch (opt
->opt_index
)
314 for (j
= 0; j
< 32; j
++)
315 fixed_regs
[j
+ 48] = 0;
316 for (j
= 0; j
< 32; j
++)
317 call_used_regs
[j
+ 48] = 1;
318 for (j
= 6; j
< 8; j
++)
319 call_used_regs
[j
+ 48] = 0;
321 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
356 warning (OPT_fpic
, "-fpic is not supported");
358 warning (OPT_fPIC
, "-fPIC is not supported");
359 if (TARGET_S
&& TARGET_M
)
360 error ("only one of -ms and -mm may be given");
361 if (TARGET_S
&& TARGET_L
)
362 error ("only one of -ms and -ml may be given");
363 if (TARGET_M
&& TARGET_L
)
364 error ("only one of -mm and -ml may be given");
365 if (TARGET_S
&& global_options_set
.x_mep_tiny_cutoff
)
366 error ("only one of -ms and -mtiny= may be given");
367 if (TARGET_M
&& global_options_set
.x_mep_tiny_cutoff
)
368 error ("only one of -mm and -mtiny= may be given");
369 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
370 warning (0, "-mclip currently has no effect without -mminmax");
372 if (mep_const_section
)
374 if (strcmp (mep_const_section
, "tiny") != 0
375 && strcmp (mep_const_section
, "near") != 0
376 && strcmp (mep_const_section
, "far") != 0)
377 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
381 mep_tiny_cutoff
= 65536;
384 if (TARGET_L
&& ! global_options_set
.x_mep_tiny_cutoff
)
387 if (TARGET_64BIT_CR_REGS
)
388 flag_split_wide_types
= 0;
390 init_machine_status
= mep_init_machine_status
;
391 mep_init_intrinsics ();
394 /* Pattern Support - constraints, predicates, expanders. */
396 /* MEP has very few instructions that can refer to the span of
397 addresses used by symbols, so it's common to check for them. */
402 int c
= GET_CODE (x
);
404 return (c
== CONST_INT
414 if (GET_CODE (x
) != MEM
)
417 c
= GET_CODE (XEXP (x
, 0));
418 return (c
== CONST_INT
423 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
425 #define GEN_REG(R, STRICT) \
428 && ((R) == ARG_POINTER_REGNUM \
429 || (R) >= FIRST_PSEUDO_REGISTER)))
431 static char pattern
[12], *patternp
;
432 static GTY(()) rtx patternr
[12];
433 #define RTX_IS(x) (strcmp (pattern, x) == 0)
436 encode_pattern_1 (rtx x
)
440 if (patternp
== pattern
+ sizeof (pattern
) - 2)
446 patternr
[patternp
-pattern
] = x
;
448 switch (GET_CODE (x
))
456 encode_pattern_1 (XEXP(x
, 0));
460 encode_pattern_1 (XEXP(x
, 0));
461 encode_pattern_1 (XEXP(x
, 1));
465 encode_pattern_1 (XEXP(x
, 0));
466 encode_pattern_1 (XEXP(x
, 1));
470 encode_pattern_1 (XEXP(x
, 0));
484 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
485 for (i
=0; i
<XVECLEN (x
, 0); i
++)
486 encode_pattern_1 (XVECEXP (x
, 0, i
));
494 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
503 encode_pattern (rtx x
)
506 encode_pattern_1 (x
);
511 mep_section_tag (rtx x
)
517 switch (GET_CODE (x
))
524 x
= XVECEXP (x
, 0, 0);
527 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
536 if (GET_CODE (x
) != SYMBOL_REF
)
539 if (name
[0] == '@' && name
[2] == '.')
541 if (name
[1] == 'i' || name
[1] == 'I')
544 return 'f'; /* near */
545 return 'n'; /* far */
553 mep_regno_reg_class (int regno
)
557 case SP_REGNO
: return SP_REGS
;
558 case TP_REGNO
: return TP_REGS
;
559 case GP_REGNO
: return GP_REGS
;
560 case 0: return R0_REGS
;
561 case HI_REGNO
: return HI_REGS
;
562 case LO_REGNO
: return LO_REGS
;
563 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
566 if (GR_REGNO_P (regno
))
567 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
568 if (CONTROL_REGNO_P (regno
))
571 if (CR_REGNO_P (regno
))
575 /* Search for the register amongst user-defined subclasses of
576 the coprocessor registers. */
577 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
579 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
581 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
583 enum reg_class sub
= reg_class_subclasses
[i
][j
];
585 if (sub
== LIM_REG_CLASSES
)
587 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
592 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
595 if (CCR_REGNO_P (regno
))
598 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
603 const_in_range (rtx x
, int minv
, int maxv
)
605 return (GET_CODE (x
) == CONST_INT
606 && INTVAL (x
) >= minv
607 && INTVAL (x
) <= maxv
);
610 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
611 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
612 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
613 at the end of the insn stream. */
616 mep_mulr_source (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
618 if (rtx_equal_p (dest
, src1
))
620 else if (rtx_equal_p (dest
, src2
))
625 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
627 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
632 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
633 Change the last element of PATTERN from (clobber (scratch:SI))
634 to (clobber (reg:SI HI_REGNO)). */
637 mep_rewrite_mult (rtx insn
, rtx pattern
)
641 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
642 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
643 PATTERN (insn
) = pattern
;
644 INSN_CODE (insn
) = -1;
647 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
648 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
649 store the result in DEST if nonnull. */
652 mep_rewrite_mulsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
656 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
658 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
659 mep_mulr_source (insn
, dest
, src1
, src2
));
661 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
662 mep_rewrite_mult (insn
, pattern
);
665 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
666 SRC3 into $lo, then use either madd or maddr. The move into $lo will
667 be deleted by a peephole2 if SRC3 is already in $lo. */
670 mep_rewrite_maddsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
674 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
675 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
677 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
678 mep_mulr_source (insn
, dest
, src1
, src2
),
681 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
682 mep_rewrite_mult (insn
, pattern
);
685 /* Return true if $lo has the same value as integer register GPR when
686 instruction INSN is reached. If necessary, rewrite the instruction
687 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
688 rtx for (reg:SI LO_REGNO).
690 This function is intended to be used by the peephole2 pass. Since
691 that pass goes from the end of a basic block to the beginning, and
692 propagates liveness information on the way, there is no need to
693 update register notes here.
695 If GPR_DEAD_P is true on entry, and this function returns true,
696 then the caller will replace _every_ use of GPR in and after INSN
697 with LO. This means that if the instruction that sets $lo is a
698 mulr- or maddr-type instruction, we can rewrite it to use mul or
699 madd instead. In combination with the copy progagation pass,
700 this allows us to replace sequences like:
709 if GPR is no longer used. */
712 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
716 insn
= PREV_INSN (insn
);
718 switch (recog_memoized (insn
))
720 case CODE_FOR_mulsi3_1
:
722 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
724 mep_rewrite_mulsi3 (insn
,
725 gpr_dead_p
? NULL
: recog_data
.operand
[0],
726 recog_data
.operand
[1],
727 recog_data
.operand
[2]);
732 case CODE_FOR_maddsi3
:
734 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
736 mep_rewrite_maddsi3 (insn
,
737 gpr_dead_p
? NULL
: recog_data
.operand
[0],
738 recog_data
.operand
[1],
739 recog_data
.operand
[2],
740 recog_data
.operand
[3]);
745 case CODE_FOR_mulsi3r
:
746 case CODE_FOR_maddsi3r
:
748 return rtx_equal_p (recog_data
.operand
[1], gpr
);
751 if (reg_set_p (lo
, insn
)
752 || reg_set_p (gpr
, insn
)
753 || volatile_insn_p (PATTERN (insn
)))
756 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
761 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
765 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
768 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
770 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
775 /* Return true if SET can be turned into a post-modify load or store
776 that adds OFFSET to GPR. In other words, return true if SET can be
779 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
781 It's OK to change SET to an equivalent operation in order to
785 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
788 unsigned int reg_bytes
, mem_bytes
;
789 enum machine_mode reg_mode
, mem_mode
;
791 /* Only simple SETs can be converted. */
792 if (GET_CODE (set
) != SET
)
795 /* Point REG to what we hope will be the register side of the set and
796 MEM to what we hope will be the memory side. */
797 if (GET_CODE (SET_DEST (set
)) == MEM
)
799 mem
= &SET_DEST (set
);
800 reg
= &SET_SRC (set
);
804 reg
= &SET_DEST (set
);
805 mem
= &SET_SRC (set
);
806 if (GET_CODE (*mem
) == SIGN_EXTEND
)
807 mem
= &XEXP (*mem
, 0);
810 /* Check that *REG is a suitable coprocessor register. */
811 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
814 /* Check that *MEM is a suitable memory reference. */
815 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
818 /* Get the number of bytes in each operand. */
819 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
820 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
822 /* Check that OFFSET is suitably aligned. */
823 if (INTVAL (offset
) & (mem_bytes
- 1))
826 /* Convert *MEM to a normal integer mode. */
827 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
828 *mem
= change_address (*mem
, mem_mode
, NULL
);
830 /* Adjust *REG as well. */
831 *reg
= shallow_copy_rtx (*reg
);
832 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
834 /* SET is a subword load. Convert it to an explicit extension. */
835 PUT_MODE (*reg
, SImode
);
836 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
840 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
841 PUT_MODE (*reg
, reg_mode
);
846 /* Return the effect of frame-related instruction INSN. */
849 mep_frame_expr (rtx insn
)
853 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
854 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
855 RTX_FRAME_RELATED_P (expr
) = 1;
859 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
860 new pattern in INSN1; INSN2 will be deleted by the caller. */
863 mep_make_parallel (rtx insn1
, rtx insn2
)
867 if (RTX_FRAME_RELATED_P (insn2
))
869 expr
= mep_frame_expr (insn2
);
870 if (RTX_FRAME_RELATED_P (insn1
))
871 expr
= gen_rtx_SEQUENCE (VOIDmode
,
872 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
873 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
874 RTX_FRAME_RELATED_P (insn1
) = 1;
877 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
878 gen_rtvec (2, PATTERN (insn1
),
880 INSN_CODE (insn1
) = -1;
883 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
884 the basic block to see if any previous load or store instruction can
885 be persuaded to do SET_INSN as a side-effect. Return true if so. */
888 mep_use_post_modify_p_1 (rtx set_insn
, rtx reg
, rtx offset
)
895 insn
= PREV_INSN (insn
);
898 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
900 mep_make_parallel (insn
, set_insn
);
904 if (reg_set_p (reg
, insn
)
905 || reg_referenced_p (reg
, PATTERN (insn
))
906 || volatile_insn_p (PATTERN (insn
)))
910 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
914 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
917 mep_use_post_modify_p (rtx insn
, rtx reg
, rtx offset
)
919 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
925 mep_allow_clip (rtx ux
, rtx lx
, int s
)
927 HOST_WIDE_INT u
= INTVAL (ux
);
928 HOST_WIDE_INT l
= INTVAL (lx
);
931 if (!TARGET_OPT_CLIP
)
936 for (i
= 0; i
< 30; i
++)
937 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
938 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
946 for (i
= 0; i
< 30; i
++)
947 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
954 mep_bit_position_p (rtx x
, bool looking_for
)
956 if (GET_CODE (x
) != CONST_INT
)
958 switch ((int) INTVAL(x
) & 0xff)
960 case 0x01: case 0x02: case 0x04: case 0x08:
961 case 0x10: case 0x20: case 0x40: case 0x80:
963 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
964 case 0xef: case 0xdf: case 0xbf: case 0x7f:
971 move_needs_splitting (rtx dest
, rtx src
,
972 enum machine_mode mode ATTRIBUTE_UNUSED
)
974 int s
= mep_section_tag (src
);
978 if (GET_CODE (src
) == CONST
979 || GET_CODE (src
) == MEM
)
981 else if (GET_CODE (src
) == SYMBOL_REF
982 || GET_CODE (src
) == LABEL_REF
983 || GET_CODE (src
) == PLUS
)
989 || (GET_CODE (src
) == PLUS
990 && GET_CODE (XEXP (src
, 1)) == CONST_INT
991 && (INTVAL (XEXP (src
, 1)) < -65536
992 || INTVAL (XEXP (src
, 1)) > 0xffffff))
993 || (GET_CODE (dest
) == REG
994 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1000 mep_split_mov (rtx
*operands
, int symbolic
)
1004 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1009 if (GET_CODE (operands
[1]) != CONST_INT
)
1012 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1013 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1014 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1017 if (((!reload_completed
&& !reload_in_progress
)
1018 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1019 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1025 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1026 it to one specific value. So the insn chosen depends on whether
1027 the source and destination modes match. */
1030 mep_vliw_mode_match (rtx tgt
)
1032 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1033 bool tgt_vliw
= INTVAL (tgt
);
1035 return src_vliw
== tgt_vliw
;
1038 /* Like the above, but also test for near/far mismatches. */
1041 mep_vliw_jmp_match (rtx tgt
)
1043 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1044 bool tgt_vliw
= INTVAL (tgt
);
1046 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1049 return src_vliw
== tgt_vliw
;
1053 mep_multi_slot (rtx x
)
1055 return get_attr_slot (x
) == SLOT_MULTI
;
1058 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1061 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1063 /* We can't convert symbol values to gp- or tp-rel values after
1064 reload, as reload might have used $gp or $tp for other
1066 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1068 char e
= mep_section_tag (x
);
1069 return (e
!= 't' && e
!= 'b');
1074 /* Be careful not to use macros that need to be compiled one way for
1075 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1078 mep_legitimate_address (enum machine_mode mode
, rtx x
, int strict
)
1082 #define DEBUG_LEGIT 0
1084 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1088 if (GET_CODE (x
) == LO_SUM
1089 && GET_CODE (XEXP (x
, 0)) == REG
1090 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1091 && CONSTANT_P (XEXP (x
, 1)))
1093 if (GET_MODE_SIZE (mode
) > 4)
1095 /* We will end up splitting this, and lo_sums are not
1096 offsettable for us. */
1098 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1103 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1108 if (GET_CODE (x
) == REG
1109 && GEN_REG (REGNO (x
), strict
))
1112 fprintf (stderr
, " - yup, [reg]\n");
1117 if (GET_CODE (x
) == PLUS
1118 && GET_CODE (XEXP (x
, 0)) == REG
1119 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1120 && const_in_range (XEXP (x
, 1), -32768, 32767))
1123 fprintf (stderr
, " - yup, [reg+const]\n");
1128 if (GET_CODE (x
) == PLUS
1129 && GET_CODE (XEXP (x
, 0)) == REG
1130 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1131 && GET_CODE (XEXP (x
, 1)) == CONST
1132 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1133 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1134 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1135 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1138 fprintf (stderr
, " - yup, [reg+unspec]\n");
1143 the_tag
= mep_section_tag (x
);
1148 fprintf (stderr
, " - nope, [far]\n");
1153 if (mode
== VOIDmode
1154 && GET_CODE (x
) == SYMBOL_REF
)
1157 fprintf (stderr
, " - yup, call [symbol]\n");
1162 if ((mode
== SImode
|| mode
== SFmode
)
1164 && mep_legitimate_constant_p (mode
, x
)
1165 && the_tag
!= 't' && the_tag
!= 'b')
1167 if (GET_CODE (x
) != CONST_INT
1168 || (INTVAL (x
) <= 0xfffff
1170 && (INTVAL (x
) % 4) == 0))
1173 fprintf (stderr
, " - yup, [const]\n");
1180 fprintf (stderr
, " - nope.\n");
1186 mep_legitimize_reload_address (rtx
*x
, enum machine_mode mode
, int opnum
,
1188 int ind_levels ATTRIBUTE_UNUSED
)
1190 enum reload_type type
= (enum reload_type
) type_i
;
1192 if (GET_CODE (*x
) == PLUS
1193 && GET_CODE (XEXP (*x
, 0)) == MEM
1194 && GET_CODE (XEXP (*x
, 1)) == REG
)
1196 /* GCC will by default copy the MEM into a REG, which results in
1197 an invalid address. For us, the best thing to do is move the
1198 whole expression to a REG. */
1199 push_reload (*x
, NULL_RTX
, x
, NULL
,
1200 GENERAL_REGS
, mode
, VOIDmode
,
1205 if (GET_CODE (*x
) == PLUS
1206 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1207 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1209 char e
= mep_section_tag (XEXP (*x
, 0));
1211 if (e
!= 't' && e
!= 'b')
1213 /* GCC thinks that (sym+const) is a valid address. Well,
1214 sometimes it is, this time it isn't. The best thing to
1215 do is reload the symbol to a register, since reg+int
1216 tends to work, and we can't just add the symbol and
1218 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1219 GENERAL_REGS
, mode
, VOIDmode
,
1228 mep_core_address_length (rtx insn
, int opn
)
1230 rtx set
= single_set (insn
);
1231 rtx mem
= XEXP (set
, opn
);
1232 rtx other
= XEXP (set
, 1-opn
);
1233 rtx addr
= XEXP (mem
, 0);
1235 if (register_operand (addr
, Pmode
))
1237 if (GET_CODE (addr
) == PLUS
)
1239 rtx addend
= XEXP (addr
, 1);
1241 gcc_assert (REG_P (XEXP (addr
, 0)));
1243 switch (REGNO (XEXP (addr
, 0)))
1245 case STACK_POINTER_REGNUM
:
1246 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1247 && mep_imm7a4_operand (addend
, VOIDmode
))
1252 gcc_assert (REG_P (other
));
1254 if (REGNO (other
) >= 8)
1257 if (GET_CODE (addend
) == CONST
1258 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1259 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1262 if (GET_CODE (addend
) == CONST_INT
1263 && INTVAL (addend
) >= 0
1264 && INTVAL (addend
) <= 127
1265 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1275 mep_cop_address_length (rtx insn
, int opn
)
1277 rtx set
= single_set (insn
);
1278 rtx mem
= XEXP (set
, opn
);
1279 rtx addr
= XEXP (mem
, 0);
1281 if (GET_CODE (mem
) != MEM
)
1283 if (register_operand (addr
, Pmode
))
1285 if (GET_CODE (addr
) == POST_INC
)
1291 #define DEBUG_EXPAND_MOV 0
1293 mep_expand_mov (rtx
*operands
, enum machine_mode mode
)
1298 int post_reload
= 0;
1300 tag
[0] = mep_section_tag (operands
[0]);
1301 tag
[1] = mep_section_tag (operands
[1]);
1303 if (!reload_in_progress
1304 && !reload_completed
1305 && GET_CODE (operands
[0]) != REG
1306 && GET_CODE (operands
[0]) != SUBREG
1307 && GET_CODE (operands
[1]) != REG
1308 && GET_CODE (operands
[1]) != SUBREG
)
1309 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1311 #if DEBUG_EXPAND_MOV
1312 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1313 reload_in_progress
|| reload_completed
);
1314 debug_rtx (operands
[0]);
1315 debug_rtx (operands
[1]);
1318 if (mode
== DImode
|| mode
== DFmode
)
1321 if (reload_in_progress
|| reload_completed
)
1325 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1326 cfun
->machine
->reload_changes_tp
= true;
1328 if (tag
[0] == 't' || tag
[1] == 't')
1330 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1331 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1334 if (tag
[0] == 'b' || tag
[1] == 'b')
1336 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1337 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1340 if (cfun
->machine
->reload_changes_tp
== true)
1347 if (symbol_p (operands
[1]))
1349 t
= mep_section_tag (operands
[1]);
1350 if (t
== 'b' || t
== 't')
1353 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1355 tpsym
= operands
[1];
1356 n
= gen_rtx_UNSPEC (mode
,
1357 gen_rtvec (1, operands
[1]),
1358 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1359 n
= gen_rtx_CONST (mode
, n
);
1361 else if (GET_CODE (operands
[1]) == CONST
1362 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1363 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1364 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1366 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1367 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1368 n
= gen_rtx_UNSPEC (mode
,
1369 gen_rtvec (1, tpsym
),
1370 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1371 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1372 n
= gen_rtx_CONST (mode
, n
);
1374 else if (GET_CODE (operands
[1]) == CONST
1375 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1379 error ("unusual TP-relative address");
1383 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1384 : mep_gp_rtx ()), n
);
1385 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1386 #if DEBUG_EXPAND_MOV
1387 fprintf(stderr
, "mep_expand_mov emitting ");
1394 for (i
=0; i
< 2; i
++)
1396 t
= mep_section_tag (operands
[i
]);
1397 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1402 sym
= XEXP (operands
[i
], 0);
1403 if (GET_CODE (sym
) == CONST
1404 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1405 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1418 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1419 n
= gen_rtx_CONST (Pmode
, n
);
1420 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1421 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1426 if ((GET_CODE (operands
[1]) != REG
1427 && MEP_CONTROL_REG (operands
[0]))
1428 || (GET_CODE (operands
[0]) != REG
1429 && MEP_CONTROL_REG (operands
[1])))
1432 #if DEBUG_EXPAND_MOV
1433 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1435 temp
= gen_reg_rtx (mode
);
1436 emit_move_insn (temp
, operands
[1]);
1440 if (symbolref_p (operands
[0])
1441 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1442 || (GET_MODE_SIZE (mode
) != 4)))
1446 gcc_assert (!reload_in_progress
&& !reload_completed
);
1448 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1449 operands
[0] = replace_equiv_address (operands
[0], temp
);
1450 emit_move_insn (operands
[0], operands
[1]);
1454 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1457 if (symbol_p (operands
[1])
1458 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1460 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1461 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1465 if (symbolref_p (operands
[1])
1466 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1470 if (reload_in_progress
|| reload_completed
)
1473 temp
= gen_reg_rtx (Pmode
);
1475 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1476 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1477 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1484 /* Cases where the pattern can't be made to use at all. */
1487 mep_mov_ok (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1491 #define DEBUG_MOV_OK 0
1493 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1494 mep_section_tag (operands
[1]));
1495 debug_rtx (operands
[0]);
1496 debug_rtx (operands
[1]);
1499 /* We want the movh patterns to get these. */
1500 if (GET_CODE (operands
[1]) == HIGH
)
1503 /* We can't store a register to a far variable without using a
1504 scratch register to hold the address. Using far variables should
1505 be split by mep_emit_mov anyway. */
1506 if (mep_section_tag (operands
[0]) == 'f'
1507 || mep_section_tag (operands
[1]) == 'f')
1510 fprintf (stderr
, " - no, f\n");
1514 i
= mep_section_tag (operands
[1]);
1515 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1516 /* These are supposed to be generated with adds of the appropriate
1517 register. During and after reload, however, we allow them to
1518 be accessed as normal symbols because adding a dependency on
1519 the base register now might cause problems. */
1522 fprintf (stderr
, " - no, bt\n");
1527 /* The only moves we can allow involve at least one general
1528 register, so require it. */
1529 for (i
= 0; i
< 2; i
++)
1531 /* Allow subregs too, before reload. */
1532 rtx x
= operands
[i
];
1534 if (GET_CODE (x
) == SUBREG
)
1536 if (GET_CODE (x
) == REG
1537 && ! MEP_CONTROL_REG (x
))
1540 fprintf (stderr
, " - ok\n");
1546 fprintf (stderr
, " - no, no gen reg\n");
1551 #define DEBUG_SPLIT_WIDE_MOVE 0
1553 mep_split_wide_move (rtx
*operands
, enum machine_mode mode
)
1557 #if DEBUG_SPLIT_WIDE_MOVE
1558 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1559 debug_rtx (operands
[0]);
1560 debug_rtx (operands
[1]);
1563 for (i
= 0; i
<= 1; i
++)
1565 rtx op
= operands
[i
], hi
, lo
;
1567 switch (GET_CODE (op
))
1571 unsigned int regno
= REGNO (op
);
1573 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1577 lo
= gen_rtx_REG (SImode
, regno
);
1579 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1580 gen_rtx_REG (DImode
, regno
),
1585 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1586 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1594 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1595 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1602 /* The high part of CR <- GPR moves must be done after the low part. */
1603 operands
[i
+ 4] = lo
;
1604 operands
[i
+ 2] = hi
;
1607 if (reg_mentioned_p (operands
[2], operands
[5])
1608 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1609 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1613 /* Overlapping register pairs -- make sure we don't
1614 early-clobber ourselves. */
1616 operands
[2] = operands
[4];
1619 operands
[3] = operands
[5];
1623 #if DEBUG_SPLIT_WIDE_MOVE
1624 fprintf(stderr
, "\033[34m");
1625 debug_rtx (operands
[2]);
1626 debug_rtx (operands
[3]);
1627 debug_rtx (operands
[4]);
1628 debug_rtx (operands
[5]);
1629 fprintf(stderr
, "\033[0m");
1633 /* Emit a setcc instruction in its entirity. */
1636 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1644 tmp
= op1
, op1
= op2
, op2
= tmp
;
1645 code
= swap_condition (code
);
1650 op1
= force_reg (SImode
, op1
);
1651 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1652 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1656 if (op2
!= const0_rtx
)
1657 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1658 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1662 /* Branchful sequence:
1664 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1667 Branchless sequence:
1668 add3 tmp, op1, -op2 32-bit (or mov + sub)
1669 sltu3 tmp, tmp, 1 16-bit
1670 xor3 dest, tmp, 1 32-bit
1672 if (optimize_size
&& op2
!= const0_rtx
)
1675 if (op2
!= const0_rtx
)
1676 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1678 op2
= gen_reg_rtx (SImode
);
1679 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1681 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1682 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1686 if (GET_CODE (op2
) != CONST_INT
1687 || INTVAL (op2
) == 0x7ffffff)
1689 op2
= GEN_INT (INTVAL (op2
) + 1);
1690 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1693 if (GET_CODE (op2
) != CONST_INT
1694 || INTVAL (op2
) == -1)
1696 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1697 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1700 if (GET_CODE (op2
) != CONST_INT
1701 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1703 op2
= GEN_INT (INTVAL (op2
) - 1);
1704 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1707 if (GET_CODE (op2
) != CONST_INT
1708 || op2
== const0_rtx
)
1710 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1711 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1719 mep_expand_setcc (rtx
*operands
)
1721 rtx dest
= operands
[0];
1722 enum rtx_code code
= GET_CODE (operands
[1]);
1723 rtx op0
= operands
[2];
1724 rtx op1
= operands
[3];
1726 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1730 mep_expand_cbranch (rtx
*operands
)
1732 enum rtx_code code
= GET_CODE (operands
[0]);
1733 rtx op0
= operands
[1];
1734 rtx op1
= operands
[2];
1741 if (mep_imm4_operand (op1
, SImode
))
1744 tmp
= gen_reg_rtx (SImode
);
1745 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1752 if (mep_imm4_operand (op1
, SImode
))
1755 tmp
= gen_reg_rtx (SImode
);
1756 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1765 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1766 op1
= force_reg (SImode
, op1
);
1771 if (GET_CODE (op1
) == CONST_INT
1772 && INTVAL (op1
) != 0x7fffffff)
1774 op1
= GEN_INT (INTVAL (op1
) + 1);
1775 code
= (code
== LE
? LT
: GE
);
1779 tmp
= gen_reg_rtx (SImode
);
1780 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1782 code
= (code
== LE
? EQ
: NE
);
1788 if (op1
== const1_rtx
)
1795 tmp
= gen_reg_rtx (SImode
);
1796 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1803 tmp
= gen_reg_rtx (SImode
);
1804 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1806 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1815 tmp
= gen_reg_rtx (SImode
);
1816 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1817 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1824 tmp
= gen_reg_rtx (SImode
);
1825 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1827 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1839 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1843 mep_emit_cbranch (rtx
*operands
, int ne
)
1845 if (GET_CODE (operands
[1]) == REG
)
1846 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1847 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1848 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1850 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1854 mep_expand_call (rtx
*operands
, int returns_value
)
1856 rtx addr
= operands
[returns_value
];
1857 rtx tp
= mep_tp_rtx ();
1858 rtx gp
= mep_gp_rtx ();
1860 gcc_assert (GET_CODE (addr
) == MEM
);
1862 addr
= XEXP (addr
, 0);
1864 if (! mep_call_address_operand (addr
, VOIDmode
))
1865 addr
= force_reg (SImode
, addr
);
1867 if (! operands
[returns_value
+2])
1868 operands
[returns_value
+2] = const0_rtx
;
1871 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1872 operands
[3], tp
, gp
));
1874 emit_call_insn (gen_call_internal (addr
, operands
[1],
1875 operands
[2], tp
, gp
));
1878 /* Aliasing Support. */
1880 /* If X is a machine specific address (i.e. a symbol or label being
1881 referenced as a displacement from the GOT implemented using an
1882 UNSPEC), then return the base term. Otherwise return X. */
1885 mep_find_base_term (rtx x
)
1890 if (GET_CODE (x
) != PLUS
)
1895 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1896 && base
== mep_tp_rtx ())
1898 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1899 && base
== mep_gp_rtx ())
1904 if (GET_CODE (term
) != CONST
)
1906 term
= XEXP (term
, 0);
1908 if (GET_CODE (term
) != UNSPEC
1909 || XINT (term
, 1) != unspec
)
1912 return XVECEXP (term
, 0, 0);
1915 /* Reload Support. */
1917 /* Return true if the registers in CLASS cannot represent the change from
1918 modes FROM to TO. */
1921 mep_cannot_change_mode_class (enum machine_mode from
, enum machine_mode to
,
1922 enum reg_class regclass
)
1927 /* 64-bit COP regs must remain 64-bit COP regs. */
1928 if (TARGET_64BIT_CR_REGS
1929 && (regclass
== CR_REGS
1930 || regclass
== LOADABLE_CR_REGS
)
1931 && (GET_MODE_SIZE (to
) < 8
1932 || GET_MODE_SIZE (from
) < 8))
1938 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1941 mep_general_reg (rtx x
)
1943 while (GET_CODE (x
) == SUBREG
)
1945 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
1949 mep_nongeneral_reg (rtx x
)
1951 while (GET_CODE (x
) == SUBREG
)
1953 return (GET_CODE (x
) == REG
1954 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
1958 mep_general_copro_reg (rtx x
)
1960 while (GET_CODE (x
) == SUBREG
)
1962 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
1966 mep_nonregister (rtx x
)
1968 while (GET_CODE (x
) == SUBREG
)
1970 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
1973 #define DEBUG_RELOAD 0
1975 /* Return the secondary reload class needed for moving value X to or
1976 from a register in coprocessor register class CLASS. */
1978 static enum reg_class
1979 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
1981 if (mep_general_reg (x
))
1982 /* We can do the move directly if mep_have_core_copro_moves_p,
1983 otherwise we need to go through memory. Either way, no secondary
1984 register is needed. */
1987 if (mep_general_copro_reg (x
))
1989 /* We can do the move directly if mep_have_copro_copro_moves_p. */
1990 if (mep_have_copro_copro_moves_p
)
1993 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
1994 if (mep_have_core_copro_moves_p
)
1995 return GENERAL_REGS
;
1997 /* Otherwise we need to do it through memory. No secondary
1998 register is needed. */
2002 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2003 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2004 /* X is a memory value that we can access directly. */
2007 /* We have to move X into a GPR first and then copy it to
2008 the coprocessor register. The move from the GPR to the
2009 coprocessor might be done directly or through memory,
2010 depending on mep_have_core_copro_moves_p. */
2011 return GENERAL_REGS
;
2014 /* Copying X to register in RCLASS. */
2017 mep_secondary_input_reload_class (enum reg_class rclass
,
2018 enum machine_mode mode ATTRIBUTE_UNUSED
,
2024 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2028 if (reg_class_subset_p (rclass
, CR_REGS
))
2029 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2030 else if (MEP_NONGENERAL_CLASS (rclass
)
2031 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2035 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2037 return (enum reg_class
) rv
;
2040 /* Copying register in RCLASS to X. */
2043 mep_secondary_output_reload_class (enum reg_class rclass
,
2044 enum machine_mode mode ATTRIBUTE_UNUSED
,
2050 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2054 if (reg_class_subset_p (rclass
, CR_REGS
))
2055 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2056 else if (MEP_NONGENERAL_CLASS (rclass
)
2057 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2061 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2064 return (enum reg_class
) rv
;
2067 /* Implement SECONDARY_MEMORY_NEEDED. */
2070 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2071 enum machine_mode mode ATTRIBUTE_UNUSED
)
2073 if (!mep_have_core_copro_moves_p
)
2075 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2076 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2078 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2079 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2081 if (!mep_have_copro_copro_moves_p
2082 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2083 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2090 mep_expand_reload (rtx
*operands
, enum machine_mode mode
)
2092 /* There are three cases for each direction:
2097 int s0
= mep_section_tag (operands
[0]) == 'f';
2098 int s1
= mep_section_tag (operands
[1]) == 'f';
2099 int c0
= mep_nongeneral_reg (operands
[0]);
2100 int c1
= mep_nongeneral_reg (operands
[1]);
2101 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2104 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2105 debug_rtx (operands
[0]);
2106 debug_rtx (operands
[1]);
2111 case 00: /* Don't know why this gets here. */
2112 case 02: /* general = far */
2113 emit_move_insn (operands
[0], operands
[1]);
2116 case 10: /* cr = mem */
2117 case 11: /* cr = cr */
2118 case 01: /* mem = cr */
2119 case 12: /* cr = far */
2120 emit_move_insn (operands
[2], operands
[1]);
2121 emit_move_insn (operands
[0], operands
[2]);
2124 case 20: /* far = general */
2125 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2126 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2129 case 21: /* far = cr */
2130 case 22: /* far = far */
2132 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2133 which
, mode_name
[mode
]);
2134 debug_rtx (operands
[0]);
2135 debug_rtx (operands
[1]);
2140 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2141 can be moved directly into registers 0 to 7, but not into the rest.
2142 If so, and if the required class includes registers 0 to 7, restrict
2143 it to those registers. */
2146 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2148 switch (GET_CODE (x
))
2151 if (INTVAL (x
) >= 0x10000
2152 && INTVAL (x
) < 0x01000000
2153 && (INTVAL (x
) & 0xffff) != 0
2154 && reg_class_subset_p (TPREL_REGS
, rclass
))
2155 rclass
= TPREL_REGS
;
2161 if (mep_section_tag (x
) != 'f'
2162 && reg_class_subset_p (TPREL_REGS
, rclass
))
2163 rclass
= TPREL_REGS
;
2172 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2173 moves, 4 for direct double-register moves, and 1000 for anything
2174 that requires a temporary register or temporary stack slot. */
2177 mep_register_move_cost (enum machine_mode mode
, enum reg_class from
, enum reg_class to
)
2179 if (mep_have_copro_copro_moves_p
2180 && reg_class_subset_p (from
, CR_REGS
)
2181 && reg_class_subset_p (to
, CR_REGS
))
2183 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2187 if (reg_class_subset_p (from
, CR_REGS
)
2188 && reg_class_subset_p (to
, CR_REGS
))
2190 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2194 if (reg_class_subset_p (from
, CR_REGS
)
2195 || reg_class_subset_p (to
, CR_REGS
))
2197 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2201 if (mep_secondary_memory_needed (from
, to
, mode
))
2203 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2206 if (GET_MODE_SIZE (mode
) > 4)
2213 /* Functions to save and restore machine-specific function data. */
2215 static struct machine_function
*
2216 mep_init_machine_status (void)
2218 return ggc_alloc_cleared_machine_function ();
2222 mep_allocate_initial_value (rtx reg
)
2226 if (GET_CODE (reg
) != REG
)
2229 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2232 /* In interrupt functions, the "initial" values of $gp and $tp are
2233 provided by the prologue. They are not necessarily the same as
2234 the values that the caller was using. */
2235 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2236 if (mep_interrupt_p ())
2239 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2241 cfun
->machine
->reg_save_size
+= 4;
2242 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2245 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2246 return gen_rtx_MEM (SImode
, plus_constant (Pmode
, arg_pointer_rtx
, -rss
));
2250 mep_return_addr_rtx (int count
)
2255 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2261 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2267 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2271 mep_interrupt_p (void)
2273 if (cfun
->machine
->interrupt_handler
== 0)
2275 int interrupt_handler
2276 = (lookup_attribute ("interrupt",
2277 DECL_ATTRIBUTES (current_function_decl
))
2279 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2281 return cfun
->machine
->interrupt_handler
== 2;
2285 mep_disinterrupt_p (void)
2287 if (cfun
->machine
->disable_interrupts
== 0)
2289 int disable_interrupts
2290 = (lookup_attribute ("disinterrupt",
2291 DECL_ATTRIBUTES (current_function_decl
))
2293 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2295 return cfun
->machine
->disable_interrupts
== 2;
2299 /* Frame/Epilog/Prolog Related. */
2302 mep_reg_set_p (rtx reg
, rtx insn
)
2304 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2307 if (FIND_REG_INC_NOTE (insn
, reg
))
2309 insn
= PATTERN (insn
);
2312 if (GET_CODE (insn
) == SET
2313 && GET_CODE (XEXP (insn
, 0)) == REG
2314 && GET_CODE (XEXP (insn
, 1)) == REG
2315 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2318 return set_of (reg
, insn
) != NULL_RTX
;
2322 #define MEP_SAVES_UNKNOWN 0
2323 #define MEP_SAVES_YES 1
2324 #define MEP_SAVES_MAYBE 2
2325 #define MEP_SAVES_NO 3
2328 mep_reg_set_in_function (int regno
)
2332 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2335 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2338 push_topmost_sequence ();
2339 insn
= get_insns ();
2340 pop_topmost_sequence ();
2345 reg
= gen_rtx_REG (SImode
, regno
);
2347 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2348 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2354 mep_asm_without_operands_p (void)
2356 if (cfun
->machine
->asms_without_operands
== 0)
2360 push_topmost_sequence ();
2361 insn
= get_insns ();
2362 pop_topmost_sequence ();
2364 cfun
->machine
->asms_without_operands
= 1;
2368 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2370 cfun
->machine
->asms_without_operands
= 2;
2373 insn
= NEXT_INSN (insn
);
2377 return cfun
->machine
->asms_without_operands
== 2;
2380 /* Interrupt functions save/restore every call-preserved register, and
2381 any call-used register it uses (or all if it calls any function,
2382 since they may get clobbered there too). Here we check to see
2383 which call-used registers need saving. */
2385 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2386 && (r == FIRST_CCR_REGNO + 1 \
2387 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2388 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2391 mep_interrupt_saved_reg (int r
)
2393 if (!mep_interrupt_p ())
2395 if (r
== REGSAVE_CONTROL_TEMP
2396 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2398 if (mep_asm_without_operands_p ()
2400 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2401 || IVC2_ISAVED_REG (r
)))
2404 /* Function calls mean we need to save $lp. */
2405 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2407 if (!crtl
->is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2408 /* The interrupt handler might use these registers for repeat blocks,
2409 or it might call a function that does so. */
2410 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2412 if (crtl
->is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2414 /* Functions we call might clobber these. */
2415 if (call_used_regs
[r
] && !fixed_regs
[r
])
2417 /* Additional registers that need to be saved for IVC2. */
2418 if (IVC2_ISAVED_REG (r
))
2425 mep_call_saves_register (int r
)
2427 if (! cfun
->machine
->frame_locked
)
2429 int rv
= MEP_SAVES_NO
;
2431 if (cfun
->machine
->reg_save_slot
[r
])
2433 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2435 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2437 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2439 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2440 /* We need these to have stack slots so that they can be set during
2443 else if (mep_interrupt_saved_reg (r
))
2445 cfun
->machine
->reg_saved
[r
] = rv
;
2447 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2450 /* Return true if epilogue uses register REGNO. */
2453 mep_epilogue_uses (int regno
)
2455 /* Since $lp is a call-saved register, the generic code will normally
2456 mark it used in the epilogue if it needs to be saved and restored.
2457 However, when profiling is enabled, the profiling code will implicitly
2458 clobber $11. This case has to be handled specially both here and in
2459 mep_call_saves_register. */
2460 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2462 /* Interrupt functions save/restore pretty much everything. */
2463 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2467 mep_reg_size (int regno
)
2469 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2474 /* Worker function for TARGET_CAN_ELIMINATE. */
2477 mep_can_eliminate (const int from
, const int to
)
2479 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2480 ? ! frame_pointer_needed
2485 mep_elimination_offset (int from
, int to
)
2489 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2492 if (!cfun
->machine
->frame_locked
)
2493 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2495 /* We don't count arg_regs_to_save in the arg pointer offset, because
2496 gcc thinks the arg pointer has moved along with the saved regs.
2497 However, we do count it when we adjust $sp in the prologue. */
2499 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2500 if (mep_call_saves_register (i
))
2501 reg_save_size
+= mep_reg_size (i
);
2503 if (reg_save_size
% 8)
2504 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2506 cfun
->machine
->regsave_filler
= 0;
2508 /* This is what our total stack adjustment looks like. */
2509 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2512 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2514 cfun
->machine
->frame_filler
= 0;
2517 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2518 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2520 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2521 return cfun
->machine
->frame_filler
+ frame_size
;
2523 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2524 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2532 RTX_FRAME_RELATED_P (x
) = 1;
2536 /* Since the prologue/epilogue code is generated after optimization,
2537 we can't rely on gcc to split constants for us. So, this code
2538 captures all the ways to add a constant to a register in one logic
2539 chunk, including optimizing away insns we just don't need. This
2540 makes the prolog/epilog code easier to follow. */
2542 add_constant (int dest
, int src
, int value
, int mark_frame
)
2547 if (src
== dest
&& value
== 0)
2552 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2553 gen_rtx_REG (SImode
, src
));
2555 RTX_FRAME_RELATED_P(insn
) = 1;
2559 if (value
>= -32768 && value
<= 32767)
2561 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2562 gen_rtx_REG (SImode
, src
),
2565 RTX_FRAME_RELATED_P(insn
) = 1;
2569 /* Big constant, need to use a temp register. We use
2570 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2571 area is always small enough to directly add to). */
2573 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2574 lo
= value
& 0xffff;
2576 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2581 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2582 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2586 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2587 gen_rtx_REG (SImode
, src
),
2588 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2591 RTX_FRAME_RELATED_P(insn
) = 1;
2592 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2593 gen_rtx_SET (SImode
,
2594 gen_rtx_REG (SImode
, dest
),
2595 gen_rtx_PLUS (SImode
,
2596 gen_rtx_REG (SImode
, dest
),
2601 /* Move SRC to DEST. Mark the move as being potentially dead if
2605 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2607 rtx insn
= emit_move_insn (dest
, src
);
2610 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2615 /* Used for interrupt functions, which can't assume that $tp and $gp
2616 contain the correct pointers. */
2619 mep_reload_pointer (int regno
, const char *symbol
)
2623 if (!df_regs_ever_live_p(regno
) && crtl
->is_leaf
)
2626 reg
= gen_rtx_REG (SImode
, regno
);
2627 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2628 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2629 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2632 /* Assign save slots for any register not already saved. DImode
2633 registers go at the end of the reg save area; the rest go at the
2634 beginning. This is for alignment purposes. Returns true if a frame
2635 is really needed. */
2637 mep_assign_save_slots (int reg_save_size
)
2639 bool really_need_stack_frame
= false;
2643 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2644 if (mep_call_saves_register(i
))
2646 int regsize
= mep_reg_size (i
);
2648 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2649 || mep_reg_set_in_function (i
))
2650 really_need_stack_frame
= true;
2652 if (cfun
->machine
->reg_save_slot
[i
])
2657 cfun
->machine
->reg_save_size
+= regsize
;
2658 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2662 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2666 cfun
->machine
->frame_locked
= 1;
2667 return really_need_stack_frame
;
2671 mep_expand_prologue (void)
2673 int i
, rss
, sp_offset
= 0;
2676 int really_need_stack_frame
;
2678 /* We must not allow register renaming in interrupt functions,
2679 because that invalidates the correctness of the set of call-used
2680 registers we're going to save/restore. */
2681 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2683 if (mep_disinterrupt_p ())
2684 emit_insn (gen_mep_disable_int ());
2686 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2688 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2689 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2690 really_need_stack_frame
= frame_size
;
2692 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2694 sp_offset
= reg_save_size
;
2695 if (sp_offset
+ frame_size
< 128)
2696 sp_offset
+= frame_size
;
2698 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2700 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2701 if (mep_call_saves_register(i
))
2705 enum machine_mode rmode
;
2707 rss
= cfun
->machine
->reg_save_slot
[i
];
2709 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2710 && (!mep_reg_set_in_function (i
)
2711 && !mep_interrupt_p ()))
2714 if (mep_reg_size (i
) == 8)
2719 /* If there is a pseudo associated with this register's initial value,
2720 reload might have already spilt it to the stack slot suggested by
2721 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2723 mem
= gen_rtx_MEM (rmode
,
2724 plus_constant (Pmode
, stack_pointer_rtx
,
2726 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2728 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2729 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2730 else if (rmode
== DImode
)
2733 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2735 mem
= gen_rtx_MEM (SImode
,
2736 plus_constant (Pmode
, stack_pointer_rtx
,
2737 sp_offset
- rss
+ be
));
2739 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2740 gen_rtx_REG (SImode
, i
),
2742 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2743 gen_rtx_ZERO_EXTRACT (SImode
,
2744 gen_rtx_REG (DImode
, i
),
2748 insn
= maybe_dead_move (mem
,
2749 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2751 RTX_FRAME_RELATED_P (insn
) = 1;
2753 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2754 gen_rtx_SET (VOIDmode
,
2756 gen_rtx_REG (rmode
, i
)));
2757 mem
= gen_rtx_MEM (SImode
,
2758 plus_constant (Pmode
, stack_pointer_rtx
,
2759 sp_offset
- rss
+ (4-be
)));
2760 insn
= maybe_dead_move (mem
,
2761 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2767 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2768 gen_rtx_REG (rmode
, i
),
2770 insn
= maybe_dead_move (mem
,
2771 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2773 RTX_FRAME_RELATED_P (insn
) = 1;
2775 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2776 gen_rtx_SET (VOIDmode
,
2778 gen_rtx_REG (rmode
, i
)));
2782 if (frame_pointer_needed
)
2784 /* We've already adjusted down by sp_offset. Total $sp change
2785 is reg_save_size + frame_size. We want a net change here of
2786 just reg_save_size. */
2787 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2790 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2792 if (mep_interrupt_p ())
2794 mep_reload_pointer(GP_REGNO
, "__sdabase");
2795 mep_reload_pointer(TP_REGNO
, "__tpbase");
2800 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2802 int local
= hwi_local
;
2803 int frame_size
= local
+ crtl
->outgoing_args_size
;
2808 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2810 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2811 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2812 sp_offset
= reg_save_size
+ frame_size
;
2814 ffill
= cfun
->machine
->frame_filler
;
2816 if (cfun
->machine
->mep_frame_pointer_needed
)
2817 reg_names
[FP_REGNO
] = "$fp";
2819 reg_names
[FP_REGNO
] = "$8";
2824 if (debug_info_level
== DINFO_LEVEL_NONE
)
2826 fprintf (file
, "\t# frame: %d", sp_offset
);
2828 fprintf (file
, " %d regs", reg_save_size
);
2830 fprintf (file
, " %d locals", local
);
2831 if (crtl
->outgoing_args_size
)
2832 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2833 fprintf (file
, "\n");
2837 fprintf (file
, "\t#\n");
2838 fprintf (file
, "\t# Initial Frame Information:\n");
2839 if (sp_offset
|| !frame_pointer_needed
)
2840 fprintf (file
, "\t# Entry ---------- 0\n");
2842 /* Sort registers by save slots, so they're printed in the order
2843 they appear in memory, not the order they're saved in. */
2844 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2846 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2847 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2848 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2849 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2851 int t
= slot_map
[si
];
2852 slot_map
[si
] = slot_map
[sj
];
2857 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2860 int r
= slot_map
[i
];
2861 int rss
= cfun
->machine
->reg_save_slot
[r
];
2863 if (!mep_call_saves_register (r
))
2866 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2867 && (!mep_reg_set_in_function (r
)
2868 && !mep_interrupt_p ()))
2871 rsize
= mep_reg_size(r
);
2872 skip
= rss
- (sp
+rsize
);
2874 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2875 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2876 rsize
, reg_names
[r
], sp_offset
- rss
);
2880 skip
= reg_save_size
- sp
;
2882 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2884 if (frame_pointer_needed
)
2885 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2887 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2889 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2890 if (crtl
->outgoing_args_size
)
2891 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2892 crtl
->outgoing_args_size
);
2893 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2894 fprintf (file
, "\t#\n");
2898 static int mep_prevent_lp_restore
= 0;
2899 static int mep_sibcall_epilogue
= 0;
2902 mep_expand_epilogue (void)
2904 int i
, sp_offset
= 0;
2905 int reg_save_size
= 0;
2907 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2908 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2909 int interrupt_handler
= mep_interrupt_p ();
2911 if (profile_arc_flag
== 2)
2912 emit_insn (gen_mep_bb_trace_ret ());
2914 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2915 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2917 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2919 if (frame_pointer_needed
)
2921 /* If we have a frame pointer, we won't have a reliable stack
2922 pointer (alloca, you know), so rebase SP from FP */
2923 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2924 gen_rtx_REG (SImode
, FP_REGNO
));
2925 sp_offset
= reg_save_size
;
2929 /* SP is right under our local variable space. Adjust it if
2931 sp_offset
= reg_save_size
+ frame_size
;
2932 if (sp_offset
>= 128)
2934 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
2935 sp_offset
-= frame_size
;
2939 /* This is backwards so that we restore the control and coprocessor
2940 registers before the temporary registers we use to restore
2942 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
2943 if (mep_call_saves_register (i
))
2945 enum machine_mode rmode
;
2946 int rss
= cfun
->machine
->reg_save_slot
[i
];
2948 if (mep_reg_size (i
) == 8)
2953 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2954 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
2956 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
2958 if (!mep_prevent_lp_restore
2959 && !interrupt_handler
2960 && (i
== 10 || i
== 11))
2963 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2964 emit_move_insn (gen_rtx_REG (rmode
, i
),
2966 plus_constant (Pmode
, stack_pointer_rtx
,
2970 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
2971 /* Defer this one so we can jump indirect rather than
2972 copying the RA to $lp and "ret". EH epilogues
2973 automatically skip this anyway. */
2974 lp_slot
= sp_offset
-rss
;
2977 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2979 plus_constant (Pmode
,
2982 emit_move_insn (gen_rtx_REG (rmode
, i
),
2983 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
2989 /* Restore this one last so we know it will be in the temp
2990 register when we return by jumping indirectly via the temp. */
2991 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2992 gen_rtx_MEM (SImode
,
2993 plus_constant (Pmode
, stack_pointer_rtx
,
2995 lp_temp
= REGSAVE_CONTROL_TEMP
;
2999 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3001 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3002 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3003 gen_rtx_REG (SImode
, SP_REGNO
),
3004 cfun
->machine
->eh_stack_adjust
));
3006 if (mep_sibcall_epilogue
)
3009 if (mep_disinterrupt_p ())
3010 emit_insn (gen_mep_enable_int ());
3012 if (mep_prevent_lp_restore
)
3014 emit_jump_insn (gen_eh_return_internal ());
3017 else if (interrupt_handler
)
3018 emit_jump_insn (gen_mep_reti ());
3020 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3024 mep_expand_eh_return (rtx
*operands
)
3026 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3028 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3029 emit_move_insn (ra
, operands
[0]);
3033 emit_insn (gen_eh_epilogue (operands
[0]));
3037 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3039 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3040 mep_prevent_lp_restore
= 1;
3041 mep_expand_epilogue ();
3042 mep_prevent_lp_restore
= 0;
3046 mep_expand_sibcall_epilogue (void)
3048 mep_sibcall_epilogue
= 1;
3049 mep_expand_epilogue ();
3050 mep_sibcall_epilogue
= 0;
3054 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3059 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3062 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3063 if (mep_interrupt_p () || mep_disinterrupt_p ())
3070 mep_return_stackadj_rtx (void)
3072 return gen_rtx_REG (SImode
, 10);
3076 mep_return_handler_rtx (void)
3078 return gen_rtx_REG (SImode
, LP_REGNO
);
/* Emit the profiling stub for -p.  It is placed at the very start of
   the function: it saves $0 and $lp to a small temporary stack frame,
   calls __mep_mcount, then restores both and releases the frame.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_stub[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n"
  };
  size_t i;

  for (i = 0; i < sizeof mcount_stub / sizeof mcount_stub[0]; i++)
    fputs (mcount_stub[i], file);
}
3098 mep_emit_bb_trace_ret (void)
3100 fprintf (asm_out_file
, "\t# end of block profiling\n");
3101 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3102 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3103 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3104 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3105 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3106 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3107 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3108 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3109 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3116 /* Operand Printing. */
3119 mep_print_operand_address (FILE *stream
, rtx address
)
3121 if (GET_CODE (address
) == MEM
)
3122 address
= XEXP (address
, 0);
3124 /* cf: gcc.dg/asm-4.c. */
3125 gcc_assert (GET_CODE (address
) == REG
);
3127 mep_print_operand (stream
, address
, 0);
3133 const char *pattern
;
3136 const conversions
[] =
3139 { 0, "m+ri", "3(2)" },
3143 { 0, "mLrs", "%lo(3)(2)" },
3144 { 0, "mLr+si", "%lo(4+5)(2)" },
3145 { 0, "m+ru2s", "%tpoff(5)(2)" },
3146 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3147 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3148 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3149 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3150 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3152 { 0, "m+si", "(2+3)" },
3153 { 0, "m+li", "(2+3)" },
3156 { 0, "+si", "1+2" },
3157 { 0, "+u2si", "%tpoff(3+4)" },
3158 { 0, "+u3si", "%sdaoff(3+4)" },
3164 { 'h', "Hs", "%hi(1)" },
3166 { 'I', "u2s", "%tpoff(2)" },
3167 { 'I', "u3s", "%sdaoff(2)" },
3168 { 'I', "+u2si", "%tpoff(3+4)" },
3169 { 'I', "+u3si", "%sdaoff(3+4)" },
3171 { 'P', "mr", "(1\\+),\\0" },
3177 unique_bit_in (HOST_WIDE_INT i
)
3181 case 0x01: case 0xfe: return 0;
3182 case 0x02: case 0xfd: return 1;
3183 case 0x04: case 0xfb: return 2;
3184 case 0x08: case 0xf7: return 3;
3185 case 0x10: case 0x7f: return 4;
3186 case 0x20: case 0xbf: return 5;
3187 case 0x40: case 0xdf: return 6;
3188 case 0x80: case 0xef: return 7;
3195 bit_size_for_clip (HOST_WIDE_INT i
)
3199 for (rv
= 0; rv
< 31; rv
++)
3200 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3205 /* Print an operand to a assembler instruction. */
3208 mep_print_operand (FILE *file
, rtx x
, int code
)
3211 const char *real_name
;
3215 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3216 we're using, then skip over the "mep_" part of its name. */
3217 const struct cgen_insn
*insn
;
3219 if (mep_get_move_insn (mep_cmov
, &insn
))
3220 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3222 mep_intrinsic_unavailable (mep_cmov
);
3227 switch (GET_CODE (x
))
3230 fputs ("clr", file
);
3233 fputs ("set", file
);
3236 fputs ("not", file
);
3239 output_operand_lossage ("invalid %%L code");
3244 /* Print the second operand of a CR <- CR move. If we're using
3245 a two-operand instruction (i.e., a real cmov), then just print
3246 the operand normally. If we're using a "reg, reg, immediate"
3247 instruction such as caddi3, print the operand followed by a
3248 zero field. If we're using a three-register instruction,
3249 print the operand twice. */
3250 const struct cgen_insn
*insn
;
3252 mep_print_operand (file
, x
, 0);
3253 if (mep_get_move_insn (mep_cmov
, &insn
)
3254 && insn_data
[insn
->icode
].n_operands
== 3)
3257 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3258 mep_print_operand (file
, x
, 0);
3260 mep_print_operand (file
, const0_rtx
, 0);
3266 for (i
= 0; conversions
[i
].pattern
; i
++)
3267 if (conversions
[i
].code
== code
3268 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3270 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3271 if (conversions
[i
].format
[j
] == '\\')
3273 fputc (conversions
[i
].format
[j
+1], file
);
3276 else if (ISDIGIT(conversions
[i
].format
[j
]))
3278 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3279 switch (GET_CODE (r
))
3282 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3288 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3291 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3294 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3297 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3300 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3303 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3304 && !(INTVAL (r
) & 0xff))
3305 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3307 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3310 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3311 && conversions
[i
].format
[j
+1] == 0)
3313 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3314 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3317 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3320 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3325 fprintf(file
, "[const_double 0x%lx]",
3326 (unsigned long) CONST_DOUBLE_HIGH(r
));
3329 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3330 assemble_name (file
, real_name
);
3333 output_asm_label (r
);
3336 fprintf (stderr
, "don't know how to print this operand:");
3343 if (conversions
[i
].format
[j
] == '+'
3344 && (!code
|| code
== 'I')
3345 && ISDIGIT (conversions
[i
].format
[j
+1])
3346 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3347 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3349 fputc(conversions
[i
].format
[j
], file
);
3353 if (!conversions
[i
].pattern
)
3355 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3363 mep_final_prescan_insn (rtx insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3364 int noperands ATTRIBUTE_UNUSED
)
3366 /* Despite the fact that MeP is perfectly capable of branching and
3367 doing something else in the same bundle, gcc does jump
3368 optimization *after* scheduling, so we cannot trust the bundling
3369 flags on jump instructions. */
3370 if (GET_MODE (insn
) == BImode
3371 && get_attr_slots (insn
) != SLOTS_CORE
)
3372 fputc ('+', asm_out_file
);
3375 /* Function args in registers. */
3378 mep_setup_incoming_varargs (cumulative_args_t cum
,
3379 enum machine_mode mode ATTRIBUTE_UNUSED
,
3380 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3381 int second_time ATTRIBUTE_UNUSED
)
3383 int nsave
= 4 - (get_cumulative_args (cum
)->nregs
+ 1);
3386 cfun
->machine
->arg_regs_to_save
= nsave
;
3387 *pretend_size
= nsave
* 4;
3391 bytesize (const_tree type
, enum machine_mode mode
)
3393 if (mode
== BLKmode
)
3394 return int_size_in_bytes (type
);
3395 return GET_MODE_SIZE (mode
);
3399 mep_expand_builtin_saveregs (void)
3404 ns
= cfun
->machine
->arg_regs_to_save
;
3407 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3408 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3413 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3416 move_block_from_reg (5-ns
, regbuf
, ns
);
3420 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3421 int ofs
= 8 * ((ns
+1)/2);
3423 for (i
=0; i
<ns
; i
++)
3425 int rn
= (4-ns
) + i
+ 49;
3428 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3429 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3433 return XEXP (regbuf
, 0);
3436 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3439 mep_build_builtin_va_list (void)
3441 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3445 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3447 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3448 get_identifier ("__va_next_gp"), ptr_type_node
);
3449 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3450 get_identifier ("__va_next_gp_limit"),
3452 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3454 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3457 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3458 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3459 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3460 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3462 TYPE_FIELDS (record
) = f_next_gp
;
3463 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3464 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3465 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3467 layout_type (record
);
3473 mep_expand_va_start (tree valist
, rtx nextarg
)
3475 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3476 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3480 ns
= cfun
->machine
->arg_regs_to_save
;
3482 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3483 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3484 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3485 f_next_stack
= DECL_CHAIN (f_next_cop
);
3487 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3489 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3490 valist
, f_next_gp_limit
, NULL_TREE
);
3491 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3493 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3494 valist
, f_next_stack
, NULL_TREE
);
3496 /* va_list.next_gp = expand_builtin_saveregs (); */
3497 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3498 u
= fold_convert (ptr_type_node
, u
);
3499 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3500 TREE_SIDE_EFFECTS (t
) = 1;
3501 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3503 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3504 u
= fold_build_pointer_plus_hwi (u
, 4 * ns
);
3505 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3506 TREE_SIDE_EFFECTS (t
) = 1;
3507 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3509 u
= fold_build_pointer_plus_hwi (u
, 8 * ((ns
+1)/2));
3510 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3511 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3512 TREE_SIDE_EFFECTS (t
) = 1;
3513 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3515 /* va_list.next_stack = nextarg; */
3516 u
= make_tree (ptr_type_node
, nextarg
);
3517 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3518 TREE_SIDE_EFFECTS (t
) = 1;
3519 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3523 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3525 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3527 HOST_WIDE_INT size
, rsize
;
3528 bool by_reference
, ivc2_vec
;
3529 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3530 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3531 tree label_sover
, label_selse
;
3534 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3536 size
= int_size_in_bytes (type
);
3537 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3541 type
= build_pointer_type (type
);
3544 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3546 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3547 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3548 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3549 f_next_stack
= DECL_CHAIN (f_next_cop
);
3551 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3553 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3554 valist
, f_next_gp_limit
, NULL_TREE
);
3555 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3557 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3558 valist
, f_next_stack
, NULL_TREE
);
3560 /* if f_next_gp < f_next_gp_limit
3561 IF (VECTOR_P && IVC2)
3569 val = *f_next_stack;
3570 f_next_stack += rsize;
3574 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3575 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3576 res_addr
= create_tmp_var (ptr_type_node
, NULL
);
3578 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3579 unshare_expr (next_gp_limit
));
3580 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3581 build1 (GOTO_EXPR
, void_type_node
,
3582 unshare_expr (label_selse
)),
3584 gimplify_and_add (tmp
, pre_p
);
3588 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3589 gimplify_and_add (tmp
, pre_p
);
3593 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3594 gimplify_and_add (tmp
, pre_p
);
3597 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_gp
), 4);
3598 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3600 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_cop
), 8);
3601 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3603 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3604 gimplify_and_add (tmp
, pre_p
);
3608 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3609 gimplify_and_add (tmp
, pre_p
);
3611 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3612 gimplify_and_add (tmp
, pre_p
);
3614 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_stack
), rsize
);
3615 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3619 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3620 gimplify_and_add (tmp
, pre_p
);
3622 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3625 res_addr
= build_va_arg_indirect_ref (res_addr
);
3627 return build_va_arg_indirect_ref (res_addr
);
3631 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3632 rtx libname ATTRIBUTE_UNUSED
,
3633 tree fndecl ATTRIBUTE_UNUSED
)
3637 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3643 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3644 larger than 4 bytes are passed indirectly. Return value in 0,
3645 unless bigger than 4 bytes, then the caller passes a pointer as the
3646 first arg. For varargs, we copy $1..$4 to the stack. */
3649 mep_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
3650 const_tree type ATTRIBUTE_UNUSED
,
3651 bool named ATTRIBUTE_UNUSED
)
3653 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3655 /* VOIDmode is a signal for the backend to pass data to the call
3656 expander via the second operand to the call pattern. We use
3657 this to determine whether to use "jsr" or "jsrv". */
3658 if (mode
== VOIDmode
)
3659 return GEN_INT (cum
->vliw
);
3661 /* If we havn't run out of argument registers, return the next. */
3664 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3665 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3667 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3670 /* Otherwise the argument goes on the stack. */
3675 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
3676 enum machine_mode mode
,
3678 bool named ATTRIBUTE_UNUSED
)
3680 int size
= bytesize (type
, mode
);
3682 /* This is non-obvious, but yes, large values passed after we've run
3683 out of registers are *still* passed by reference - we put the
3684 address of the parameter on the stack, as well as putting the
3685 parameter itself elsewhere on the stack. */
3687 if (size
<= 0 || size
> 8)
3691 if (TARGET_IVC2
&& get_cumulative_args (cum
)->nregs
< 4
3692 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3698 mep_function_arg_advance (cumulative_args_t pcum
,
3699 enum machine_mode mode ATTRIBUTE_UNUSED
,
3700 const_tree type ATTRIBUTE_UNUSED
,
3701 bool named ATTRIBUTE_UNUSED
)
3703 get_cumulative_args (pcum
)->nregs
+= 1;
3707 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3709 int size
= bytesize (type
, BLKmode
);
3710 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3711 return size
> 0 && size
<= 8 ? 0 : 1;
3712 return size
> 0 && size
<= 4 ? 0 : 1;
3716 mep_narrow_volatile_bitfield (void)
3722 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3725 mep_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
3727 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3728 return gen_rtx_REG (TYPE_MODE (type
), 48);
3729 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3732 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3735 mep_libcall_value (enum machine_mode mode
)
3737 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3740 /* Handle pipeline hazards. */
3742 typedef enum { op_none
, op_stc
, op_fsft
, op_ret
} op_num
;
3743 static const char *opnames
[] = { "", "stc", "fsft", "ret" };
3745 static int prev_opcode
= 0;
3747 /* This isn't as optimal as it could be, because we don't know what
3748 control register the STC opcode is storing in. We only need to add
3749 the nop if it's the relevant register, but we add it for irrelevant
3753 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3755 int this_opcode
= op_none
;
3756 const char *hazard
= 0;
3761 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3762 this_opcode
= op_fsft
;
3765 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3766 this_opcode
= op_ret
;
3769 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3770 this_opcode
= op_stc
;
3774 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3776 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3780 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3781 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3783 prev_opcode
= this_opcode
;
3786 /* Handle attributes. */
3789 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3790 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3792 if (TREE_CODE (*node
) != VAR_DECL
3793 && TREE_CODE (*node
) != POINTER_TYPE
3794 && TREE_CODE (*node
) != TYPE_DECL
)
3796 warning (0, "%qE attribute only applies to variables", name
);
3799 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3801 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3803 warning (0, "address region attributes not allowed with auto storage class");
3806 /* Ignore storage attribute of pointed to variable: char __far * x; */
3807 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3809 warning (0, "address region attributes on pointed-to types ignored");
3818 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3821 int count_sections
= 0;
3822 int section_attr_count
= 0;
3824 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3826 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3827 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3828 || is_attribute_p ("near", TREE_PURPOSE (a
))
3829 || is_attribute_p ("far", TREE_PURPOSE (a
))
3830 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3832 if (check_section_attr
)
3833 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3836 if (check_section_attr
)
3837 return section_attr_count
;
3839 return count_sections
;
3842 #define MEP_ATTRIBUTES(decl) \
3843 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3844 : DECL_ATTRIBUTES (decl) \
3845 ? (DECL_ATTRIBUTES (decl)) \
3846 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3849 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3850 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3852 if (TREE_CODE (*node
) != VAR_DECL
3853 && TREE_CODE (*node
) != FUNCTION_DECL
3854 && TREE_CODE (*node
) != METHOD_TYPE
3855 && TREE_CODE (*node
) != POINTER_TYPE
3856 && TREE_CODE (*node
) != TYPE_DECL
)
3858 warning (0, "%qE attribute only applies to variables and functions",
3862 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3864 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3866 warning (0, "address region attributes not allowed with auto storage class");
3869 /* Ignore storage attribute of pointed to variable: char __far * x; */
3870 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3872 warning (0, "address region attributes on pointed-to types ignored");
3876 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3878 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3879 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3880 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3886 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3887 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3889 if (TREE_CODE (*node
) != FUNCTION_DECL
3890 && TREE_CODE (*node
) != METHOD_TYPE
)
3892 warning (0, "%qE attribute only applies to functions", name
);
3899 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3900 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3904 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3906 warning (0, "%qE attribute only applies to functions", name
);
3911 if (DECL_DECLARED_INLINE_P (*node
))
3912 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3913 DECL_UNINLINABLE (*node
) = 1;
3915 function_type
= TREE_TYPE (*node
);
3917 if (TREE_TYPE (function_type
) != void_type_node
)
3918 error ("interrupt function must have return type of void");
3920 if (prototype_p (function_type
)
3921 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
3922 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
3923 error ("interrupt function must have no arguments");
3929 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
3930 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3932 if (TREE_CODE (*node
) != VAR_DECL
)
3934 warning (0, "%qE attribute only applies to variables", name
);
3938 if (args
!= NULL_TREE
)
3940 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
3941 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
3942 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
3944 warning (0, "%qE attribute allows only an integer constant argument",
3950 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
3951 TREE_THIS_VOLATILE (*node
) = 1;
3957 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3958 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3960 if (TREE_CODE (*node
) != FUNCTION_TYPE
3961 && TREE_CODE (*node
) != FUNCTION_DECL
3962 && TREE_CODE (*node
) != METHOD_TYPE
3963 && TREE_CODE (*node
) != FIELD_DECL
3964 && TREE_CODE (*node
) != TYPE_DECL
)
3966 static int gave_pointer_note
= 0;
3967 static int gave_array_note
= 0;
3968 static const char * given_type
= NULL
;
3970 given_type
= get_tree_code_name (TREE_CODE (*node
));
3971 if (TREE_CODE (*node
) == POINTER_TYPE
)
3972 given_type
= "pointers";
3973 if (TREE_CODE (*node
) == ARRAY_TYPE
)
3974 given_type
= "arrays";
3977 warning (0, "%qE attribute only applies to functions, not %s",
3980 warning (0, "%qE attribute only applies to functions",
3984 if (TREE_CODE (*node
) == POINTER_TYPE
3985 && !gave_pointer_note
)
3987 inform (input_location
,
3988 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3989 " typedef int (__vliw *vfuncptr) ();");
3990 gave_pointer_note
= 1;
3993 if (TREE_CODE (*node
) == ARRAY_TYPE
3994 && !gave_array_note
)
3996 inform (input_location
,
3997 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
3998 " typedef int (__vliw *vfuncptr[]) ();");
3999 gave_array_note
= 1;
4003 error ("VLIW functions are not allowed without a VLIW configuration");
4007 static const struct attribute_spec mep_attribute_table
[11] =
4009 /* name min max decl type func handler
4010 affects_type_identity */
4011 { "based", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4012 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4013 { "near", 0, 0, false, false, false, mep_validate_near_far
, false },
4014 { "far", 0, 0, false, false, false, mep_validate_near_far
, false },
4015 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
,
4017 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
, false },
4018 { "io", 0, 1, false, false, false, mep_validate_io_cb
, false },
4019 { "cb", 0, 1, false, false, false, mep_validate_io_cb
, false },
4020 { "vliw", 0, 0, false, true, false, mep_validate_vliw
, false },
4021 { NULL
, 0, 0, false, false, false, NULL
, false }
4025 mep_function_attribute_inlinable_p (const_tree callee
)
4027 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4028 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4029 return (lookup_attribute ("disinterrupt", attrs
) == 0
4030 && lookup_attribute ("interrupt", attrs
) == 0);
4034 mep_can_inline_p (tree caller
, tree callee
)
4036 if (TREE_CODE (callee
) == ADDR_EXPR
)
4037 callee
= TREE_OPERAND (callee
, 0);
4039 if (!mep_vliw_function_p (caller
)
4040 && mep_vliw_function_p (callee
))
4048 #define FUNC_DISINTERRUPT 2
4051 struct GTY(()) pragma_entry
{
4054 const char *funcname
;
4056 typedef struct pragma_entry pragma_entry
;
4058 /* Hash table of farcall-tagged sections. */
4059 static GTY((param_is (pragma_entry
))) htab_t pragma_htab
;
4062 pragma_entry_eq (const void *p1
, const void *p2
)
4064 const pragma_entry
*old
= (const pragma_entry
*) p1
;
4065 const char *new_name
= (const char *) p2
;
4067 return strcmp (old
->funcname
, new_name
) == 0;
4071 pragma_entry_hash (const void *p
)
4073 const pragma_entry
*old
= (const pragma_entry
*) p
;
4074 return htab_hash_string (old
->funcname
);
4078 mep_note_pragma_flag (const char *funcname
, int flag
)
4080 pragma_entry
**slot
;
4083 pragma_htab
= htab_create_ggc (31, pragma_entry_hash
,
4084 pragma_entry_eq
, NULL
);
4086 slot
= (pragma_entry
**)
4087 htab_find_slot_with_hash (pragma_htab
, funcname
,
4088 htab_hash_string (funcname
), INSERT
);
4092 *slot
= ggc_alloc_pragma_entry ();
4095 (*slot
)->funcname
= ggc_strdup (funcname
);
4097 (*slot
)->flag
|= flag
;
4101 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4103 pragma_entry
**slot
;
4108 if (funcname
[0] == '@' && funcname
[2] == '.')
4111 slot
= (pragma_entry
**)
4112 htab_find_slot_with_hash (pragma_htab
, funcname
,
4113 htab_hash_string (funcname
), NO_INSERT
);
4114 if (slot
&& *slot
&& ((*slot
)->flag
& flag
))
4116 (*slot
)->used
|= flag
;
4123 mep_lookup_pragma_call (const char *funcname
)
4125 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4129 mep_note_pragma_call (const char *funcname
)
4131 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4135 mep_lookup_pragma_disinterrupt (const char *funcname
)
4137 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4141 mep_note_pragma_disinterrupt (const char *funcname
)
4143 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4147 note_unused_pragma_disinterrupt (void **slot
, void *data ATTRIBUTE_UNUSED
)
4149 const pragma_entry
*d
= (const pragma_entry
*)(*slot
);
4151 if ((d
->flag
& FUNC_DISINTERRUPT
)
4152 && !(d
->used
& FUNC_DISINTERRUPT
))
4153 warning (0, "\"#pragma disinterrupt %s\" not used", d
->funcname
);
4158 mep_file_cleanups (void)
4161 htab_traverse (pragma_htab
, note_unused_pragma_disinterrupt
, NULL
);
4164 /* These three functions provide a bridge between the pragmas that
4165 affect register classes, and the functions that maintain them. We
4166 can't call those functions directly as pragma handling is part of
4167 the front end and doesn't have direct access to them. */
4170 mep_save_register_info (void)
4172 save_register_info ();
4176 mep_reinit_regs (void)
4182 mep_init_regs (void)
4190 mep_attrlist_to_encoding (tree list
, tree decl
)
4192 if (mep_multiple_address_regions (list
, false) > 1)
4194 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4195 TREE_PURPOSE (TREE_CHAIN (list
)),
4197 DECL_SOURCE_LINE (decl
));
4198 TREE_CHAIN (list
) = NULL_TREE
;
4203 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4205 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4207 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4209 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4211 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
4213 if (TREE_VALUE (list
)
4214 && TREE_VALUE (TREE_VALUE (list
))
4215 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4217 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4219 && location
<= 0x1000000)
4224 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4226 list
= TREE_CHAIN (list
);
4229 && TREE_CODE (decl
) == FUNCTION_DECL
4230 && DECL_SECTION_NAME (decl
) == 0)
4236 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4240 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4241 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
4250 mep_insert_attributes (tree decl
, tree
*attributes
)
4253 const char *secname
= 0;
4254 tree attrib
, attrlist
;
4257 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4259 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4261 if (mep_lookup_pragma_disinterrupt (funcname
))
4263 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4264 *attributes
= chainon (*attributes
, attrib
);
4268 if (TREE_CODE (decl
) != VAR_DECL
4269 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4272 if (TREE_READONLY (decl
) && TARGET_DC
)
4273 /* -mdc means that const variables default to the near section,
4274 regardless of the size cutoff. */
4277 /* User specified an attribute, so override the default.
4278 Ignore storage attribute of pointed to variable. char __far * x; */
4279 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4281 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4282 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4283 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4284 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
4287 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4288 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4289 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4291 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4292 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4296 /* This means that the declaration has a specific section
4297 attribute, so we should not apply the default rules. */
4299 if (encoding
== 'i' || encoding
== 'I')
4301 tree attr
= lookup_attribute ("io", attrlist
);
4303 && TREE_VALUE (attr
)
4304 && TREE_VALUE (TREE_VALUE(attr
)))
4306 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4307 static tree previous_value
= 0;
4308 static int previous_location
= 0;
4309 static tree previous_name
= 0;
4311 /* We take advantage of the fact that gcc will reuse the
4312 same tree pointer when applying an attribute to a
4313 list of decls, but produce a new tree for attributes
4314 on separate source lines, even when they're textually
4315 identical. This is the behavior we want. */
4316 if (TREE_VALUE (attr
) == previous_value
4317 && location
== previous_location
)
4319 warning(0, "__io address 0x%x is the same for %qE and %qE",
4320 location
, previous_name
, DECL_NAME (decl
));
4322 previous_name
= DECL_NAME (decl
);
4323 previous_location
= location
;
4324 previous_value
= TREE_VALUE (attr
);
4331 /* Declarations of arrays can change size. Don't trust them. */
4332 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4335 size
= int_size_in_bytes (TREE_TYPE (decl
));
4337 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4339 if (TREE_PUBLIC (decl
)
4340 || DECL_EXTERNAL (decl
)
4341 || TREE_STATIC (decl
))
4343 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4367 if (size
<= mep_based_cutoff
&& size
> 0)
4369 else if (size
<= mep_tiny_cutoff
&& size
> 0)
4375 if (mep_const_section
&& TREE_READONLY (decl
))
4377 if (strcmp (mep_const_section
, "tiny") == 0)
4379 else if (strcmp (mep_const_section
, "near") == 0)
4381 else if (strcmp (mep_const_section
, "far") == 0)
4388 if (!mep_multiple_address_regions (*attributes
, true)
4389 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4391 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4393 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4394 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4395 and mep_validate_based_tiny. */
4396 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
4401 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4404 const char *oldname
;
4405 const char *secname
;
4411 tree mep_attributes
;
4416 if (TREE_CODE (decl
) != VAR_DECL
4417 && TREE_CODE (decl
) != FUNCTION_DECL
)
4420 rtlname
= XEXP (rtl
, 0);
4421 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4422 oldname
= XSTR (rtlname
, 0);
4423 else if (GET_CODE (rtlname
) == MEM
4424 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4425 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4429 type
= TREE_TYPE (decl
);
4430 if (type
== error_mark_node
)
4432 mep_attributes
= MEP_ATTRIBUTES (decl
);
4434 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
4438 newname
= (char *) alloca (strlen (oldname
) + 4);
4439 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4440 idp
= get_identifier (newname
);
4442 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4443 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4444 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4457 maxsize
= 0x1000000;
4465 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4467 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4469 (long) int_size_in_bytes (TREE_TYPE (decl
)),
4477 mep_strip_name_encoding (const char *sym
)
4483 else if (*sym
== '@' && sym
[2] == '.')
4491 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4492 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4497 switch (TREE_CODE (decl
))
4500 if (!TREE_READONLY (decl
)
4501 || TREE_SIDE_EFFECTS (decl
)
4502 || !DECL_INITIAL (decl
)
4503 || (DECL_INITIAL (decl
) != error_mark_node
4504 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4508 if (! TREE_CONSTANT (decl
))
4516 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4518 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4520 if (name
[0] == '@' && name
[2] == '.')
4525 if (flag_function_sections
|| DECL_ONE_ONLY (decl
))
4526 mep_unique_section (decl
, 0);
4527 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4529 if (encoding
== 'f')
4530 return vftext_section
;
4532 return vtext_section
;
4534 else if (encoding
== 'f')
4535 return ftext_section
;
4537 return text_section
;
4540 if (TREE_CODE (decl
) == VAR_DECL
)
4542 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4544 if (name
[0] == '@' && name
[2] == '.')
4548 return based_section
;
4552 return srodata_section
;
4553 if (DECL_INITIAL (decl
))
4554 return sdata_section
;
4555 return tinybss_section
;
4559 return frodata_section
;
4564 error_at (DECL_SOURCE_LOCATION (decl
),
4565 "variable %D of type %<io%> must be uninitialized", decl
);
4566 return data_section
;
4569 error_at (DECL_SOURCE_LOCATION (decl
),
4570 "variable %D of type %<cb%> must be uninitialized", decl
);
4571 return data_section
;
4576 return readonly_data_section
;
4578 return data_section
;
4582 mep_unique_section (tree decl
, int reloc
)
4584 static const char *prefixes
[][2] =
4586 { ".text.", ".gnu.linkonce.t." },
4587 { ".rodata.", ".gnu.linkonce.r." },
4588 { ".data.", ".gnu.linkonce.d." },
4589 { ".based.", ".gnu.linkonce.based." },
4590 { ".sdata.", ".gnu.linkonce.s." },
4591 { ".far.", ".gnu.linkonce.far." },
4592 { ".ftext.", ".gnu.linkonce.ft." },
4593 { ".frodata.", ".gnu.linkonce.frd." },
4594 { ".srodata.", ".gnu.linkonce.srd." },
4595 { ".vtext.", ".gnu.linkonce.v." },
4596 { ".vftext.", ".gnu.linkonce.vf." }
4598 int sec
= 2; /* .data */
4600 const char *name
, *prefix
;
4603 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4604 if (DECL_RTL (decl
))
4605 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4607 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4609 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4610 sec
= 9; /* .vtext */
4612 sec
= 0; /* .text */
4614 else if (decl_readonly_section (decl
, reloc
))
4615 sec
= 1; /* .rodata */
4617 if (name
[0] == '@' && name
[2] == '.')
4622 sec
= 3; /* .based */
4626 sec
= 8; /* .srodata */
4628 sec
= 4; /* .sdata */
4632 sec
= 6; /* .ftext */
4634 sec
= 10; /* .vftext */
4636 sec
= 7; /* .frodata */
4638 sec
= 5; /* .far. */
4644 prefix
= prefixes
[sec
][DECL_ONE_ONLY(decl
)];
4645 len
= strlen (name
) + strlen (prefix
);
4646 string
= (char *) alloca (len
+ 1);
4648 sprintf (string
, "%s%s", prefix
, name
);
4650 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
4653 /* Given a decl, a section name, and whether the decl initializer
4654 has relocs, choose attributes for the section. */
4656 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4659 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4661 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4663 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4664 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4665 flags
|= SECTION_MEP_VLIW
;
4670 /* Switch to an arbitrary section NAME with attributes as specified
4671 by FLAGS. ALIGN specifies any known alignment requirements for
4672 the section; 0 if the default should be used.
4674 Differs from the standard ELF version only in support of VLIW mode. */
4677 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4679 char flagchars
[8], *f
= flagchars
;
4682 if (!(flags
& SECTION_DEBUG
))
4684 if (flags
& SECTION_WRITE
)
4686 if (flags
& SECTION_CODE
)
4688 if (flags
& SECTION_SMALL
)
4690 if (flags
& SECTION_MEP_VLIW
)
4694 if (flags
& SECTION_BSS
)
4699 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4700 name
, flagchars
, type
);
4702 if (flags
& SECTION_CODE
)
4703 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
4708 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4709 int size
, int align
, int global
)
4711 /* We intentionally don't use mep_section_tag() here. */
4713 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4717 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4718 DECL_ATTRIBUTES (decl
));
4720 && TREE_VALUE (attr
)
4721 && TREE_VALUE (TREE_VALUE(attr
)))
4722 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4727 fprintf (stream
, "\t.globl\t");
4728 assemble_name (stream
, name
);
4729 fprintf (stream
, "\n");
4731 assemble_name (stream
, name
);
4732 fprintf (stream
, " = %d\n", location
);
4735 if (name
[0] == '@' && name
[2] == '.')
4737 const char *sec
= 0;
4741 switch_to_section (based_section
);
4745 switch_to_section (tinybss_section
);
4749 switch_to_section (farbss_section
);
4758 while (align
> BITS_PER_UNIT
)
4763 name2
= targetm
.strip_name_encoding (name
);
4765 fprintf (stream
, "\t.globl\t%s\n", name2
);
4766 fprintf (stream
, "\t.p2align %d\n", p2align
);
4767 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4768 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4769 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
4776 fprintf (stream
, "\t.local\t");
4777 assemble_name (stream
, name
);
4778 fprintf (stream
, "\n");
4780 fprintf (stream
, "\t.comm\t");
4781 assemble_name (stream
, name
);
4782 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
4788 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4790 rtx addr
= XEXP (m_tramp
, 0);
4791 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4793 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4794 LCT_NORMAL
, VOIDmode
, 3,
4797 static_chain
, Pmode
);
4800 /* Experimental Reorg. */
4803 mep_mentioned_p (rtx in
,
4804 rtx reg
, /* NULL for mem */
4805 int modes_too
) /* if nonzero, modes must match also. */
4813 if (reg
&& GET_CODE (reg
) != REG
)
4816 if (GET_CODE (in
) == LABEL_REF
)
4819 code
= GET_CODE (in
);
4825 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
4831 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4833 return (REGNO (in
) == REGNO (reg
));
4846 /* Set's source should be read-only. */
4847 if (code
== SET
&& !reg
)
4848 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
4850 fmt
= GET_RTX_FORMAT (code
);
4852 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4857 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4858 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4861 else if (fmt
[i
] == 'e'
4862 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4868 #define EXPERIMENTAL_REGMOVE_REORG 1
4870 #if EXPERIMENTAL_REGMOVE_REORG
4873 mep_compatible_reg_class (int r1
, int r2
)
4875 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4877 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))
4883 mep_reorg_regmove (rtx insns
)
4885 rtx insn
, next
, pat
, follow
, *where
;
4886 int count
= 0, done
= 0, replace
, before
= 0;
4889 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4890 if (NONJUMP_INSN_P (insn
))
4893 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4894 set that uses the r2 and r2 dies there. We replace r2 with r1
4895 and see if it's still a valid insn. If so, delete the first set.
4896 Copied from reorg.c. */
4901 for (insn
= insns
; insn
; insn
= next
)
4903 next
= next_nonnote_nondebug_insn (insn
);
4904 if (! NONJUMP_INSN_P (insn
))
4906 pat
= PATTERN (insn
);
4910 if (GET_CODE (pat
) == SET
4911 && GET_CODE (SET_SRC (pat
)) == REG
4912 && GET_CODE (SET_DEST (pat
)) == REG
4913 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4914 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4916 follow
= next_nonnote_nondebug_insn (insn
);
4918 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
4920 while (follow
&& NONJUMP_INSN_P (follow
)
4921 && GET_CODE (PATTERN (follow
)) == SET
4922 && !dead_or_set_p (follow
, SET_SRC (pat
))
4923 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4924 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4927 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4928 follow
= next_nonnote_insn (follow
);
4932 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
4933 if (follow
&& NONJUMP_INSN_P (follow
)
4934 && GET_CODE (PATTERN (follow
)) == SET
4935 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
4937 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
4939 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
4942 where
= & SET_SRC (PATTERN (follow
));
4945 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
4947 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
4950 where
= & PATTERN (follow
);
4956 /* If so, follow is the corresponding insn */
4963 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
4964 for (x
= insn
; x
;x
= NEXT_INSN (x
))
4966 print_rtl_single (dump_file
, x
);
4969 fprintf (dump_file
, "\n");
4973 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
4980 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
4981 print_rtl_single (dump_file
, follow
);
4991 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
4992 fprintf (dump_file
, "=====\n");
4998 /* Figure out where to put LABEL, which is the label for a repeat loop.
4999 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5000 the loop ends just before LAST_INSN. If SHARED, insns other than the
5001 "repeat" might use LABEL to jump to the loop's continuation point.
5003 Return the last instruction in the adjusted loop. */
5006 mep_insert_repeat_label_last (rtx last_insn
, rtx label
, bool including
,
5010 int count
= 0, code
, icode
;
5013 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5014 INSN_UID (last_insn
));
5016 /* Set PREV to the last insn in the loop. */
5019 prev
= PREV_INSN (prev
);
5021 /* Set NEXT to the next insn after the repeat label. */
5026 code
= GET_CODE (prev
);
5027 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5032 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5033 prev
= XVECEXP (PATTERN (prev
), 0, 1);
5035 /* Other insns that should not be in the last two opcodes. */
5036 icode
= recog_memoized (prev
);
5038 || icode
== CODE_FOR_repeat
5039 || icode
== CODE_FOR_erepeat
5040 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5043 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5044 is the second instruction in a VLIW bundle. In that case,
5045 loop again: if the first instruction also satisfies the
5046 conditions above then we will reach here again and put
5047 both of them into the repeat epilogue. Otherwise both
5048 should remain outside. */
5049 if (GET_MODE (prev
) != BImode
)
5054 print_rtl_single (dump_file
, next
);
5059 prev
= PREV_INSN (prev
);
5062 /* See if we're adding the label immediately after the repeat insn.
5063 If so, we need to separate them with a nop. */
5064 prev
= prev_real_insn (next
);
5066 switch (recog_memoized (prev
))
5068 case CODE_FOR_repeat
:
5069 case CODE_FOR_erepeat
:
5071 fprintf (dump_file
, "Adding nop inside loop\n");
5072 emit_insn_before (gen_nop (), next
);
5079 /* Insert the label. */
5080 emit_label_before (label
, next
);
5082 /* Insert the nops. */
5083 if (dump_file
&& count
< 2)
5084 fprintf (dump_file
, "Adding %d nop%s\n\n",
5085 2 - count
, count
== 1 ? "" : "s");
5087 for (; count
< 2; count
++)
5089 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5091 emit_insn_before (gen_nop (), last_insn
);
5098 mep_emit_doloop (rtx
*operands
, int is_end
)
5102 if (cfun
->machine
->doloop_tags
== 0
5103 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5105 cfun
->machine
->doloop_tags
++;
5106 cfun
->machine
->doloop_tag_from_end
= is_end
;
5109 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5111 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[1], tag
));
5113 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5117 /* Code for converting doloop_begins and doloop_ends into valid
5118 MeP instructions. A doloop_begin is just a placeholder:
5120 $count = unspec ($count)
5122 where $count is initially the number of iterations - 1.
5123 doloop_end has the form:
5125 if ($count-- == 0) goto label
5127 The counter variable is private to the doloop insns, nothing else
5128 relies on its value.
5130 There are three cases, in decreasing order of preference:
5132 1. A loop has exactly one doloop_begin and one doloop_end.
5133 The doloop_end branches to the first instruction after
5136 In this case we can replace the doloop_begin with a repeat
5137 instruction and remove the doloop_end. I.e.:
5139 $count1 = unspec ($count1)
5144 if ($count2-- == 0) goto label
5148 repeat $count1,repeat_label
5156 2. As for (1), except there are several doloop_ends. One of them
5157 (call it X) falls through to a label L. All the others fall
5158 through to branches to L.
5160 In this case, we remove X and replace the other doloop_ends
5161 with branches to the repeat label. For example:
5163 $count1 = unspec ($count1)
5166 if ($count2-- == 0) goto label
5169 if ($count3-- == 0) goto label
5174 repeat $count1,repeat_label
5185 3. The fallback case. Replace doloop_begins with:
5189 Replace doloop_ends with the equivalent of:
5192 if ($count == 0) goto label
5194 Note that this might need a scratch register if $count
5195 is stored in memory. */
5197 /* A structure describing one doloop_begin. */
5198 struct mep_doloop_begin
{
5199 /* The next doloop_begin with the same tag. */
5200 struct mep_doloop_begin
*next
;
5202 /* The instruction itself. */
5205 /* The initial counter value. This is known to be a general register. */
5209 /* A structure describing a doloop_end. */
5210 struct mep_doloop_end
{
5211 /* The next doloop_end with the same loop tag. */
5212 struct mep_doloop_end
*next
;
5214 /* The instruction itself. */
5217 /* The first instruction after INSN when the branch isn't taken. */
5220 /* The location of the counter value. Since doloop_end_internal is a
5221 jump instruction, it has to allow the counter to be stored anywhere
5222 (any non-fixed register or memory location). */
5225 /* The target label (the place where the insn branches when the counter
5229 /* A scratch register. Only available when COUNTER isn't stored
5230 in a general register. */
5235 /* One do-while loop. */
5237 /* All the doloop_begins for this loop (in no particular order). */
5238 struct mep_doloop_begin
*begin
;
5240 /* All the doloop_ends. When there is more than one, arrange things
5241 so that the first one is the most likely to be X in case (2) above. */
5242 struct mep_doloop_end
*end
;
5246 /* Return true if LOOP can be converted into repeat/repeat_end form
5247 (that is, if it matches cases (1) or (2) above). */
5250 mep_repeat_loop_p (struct mep_doloop
*loop
)
5252 struct mep_doloop_end
*end
;
5255 /* There must be exactly one doloop_begin and at least one doloop_end. */
5256 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5259 /* The first doloop_end (X) must branch back to the insn after
5260 the doloop_begin. */
5261 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5264 /* All the other doloop_ends must branch to the same place as X.
5265 When the branch isn't taken, they must jump to the instruction
5267 fallthrough
= loop
->end
->fallthrough
;
5268 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5269 if (end
->label
!= loop
->end
->label
5270 || !simplejump_p (end
->fallthrough
)
5271 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5278 /* The main repeat reorg function. See comment above for details. */
5281 mep_reorg_repeat (rtx insns
)
5284 struct mep_doloop
*loops
, *loop
;
5285 struct mep_doloop_begin
*begin
;
5286 struct mep_doloop_end
*end
;
5288 /* Quick exit if we haven't created any loops. */
5289 if (cfun
->machine
->doloop_tags
== 0)
5292 /* Create an array of mep_doloop structures. */
5293 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5294 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5296 /* Search the function for do-while insns and group them by loop tag. */
5297 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5299 switch (recog_memoized (insn
))
5301 case CODE_FOR_doloop_begin_internal
:
5302 insn_extract (insn
);
5303 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5305 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5306 begin
->next
= loop
->begin
;
5308 begin
->counter
= recog_data
.operand
[0];
5310 loop
->begin
= begin
;
5313 case CODE_FOR_doloop_end_internal
:
5314 insn_extract (insn
);
5315 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5317 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5319 end
->fallthrough
= next_real_insn (insn
);
5320 end
->counter
= recog_data
.operand
[0];
5321 end
->label
= recog_data
.operand
[1];
5322 end
->scratch
= recog_data
.operand
[3];
5324 /* If this insn falls through to an unconditional jump,
5325 give it a lower priority than the others. */
5326 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5328 end
->next
= loop
->end
->next
;
5329 loop
->end
->next
= end
;
5333 end
->next
= loop
->end
;
5339 /* Convert the insns for each loop in turn. */
5340 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5341 if (mep_repeat_loop_p (loop
))
5343 /* Case (1) or (2). */
5344 rtx repeat_label
, label_ref
;
5346 /* Create a new label for the repeat insn. */
5347 repeat_label
= gen_label_rtx ();
5349 /* Replace the doloop_begin with a repeat. */
5350 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5351 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5353 delete_insn (loop
->begin
->insn
);
5355 /* Insert the repeat label before the first doloop_end.
5356 Fill the gap with nops if there are other doloop_ends. */
5357 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5358 false, loop
->end
->next
!= 0);
5360 /* Emit a repeat_end (to improve the readability of the output). */
5361 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5363 /* Delete the first doloop_end. */
5364 delete_insn (loop
->end
->insn
);
5366 /* Replace the others with branches to REPEAT_LABEL. */
5367 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5369 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5370 delete_insn (end
->insn
);
5371 delete_insn (end
->fallthrough
);
5376 /* Case (3). First replace all the doloop_begins with increment
5378 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5380 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5381 begin
->counter
, const1_rtx
),
5383 delete_insn (begin
->insn
);
5386 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5387 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5393 /* Load the counter value into a general register. */
5395 if (!REG_P (reg
) || REGNO (reg
) > 15)
5398 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5401 /* Decrement the counter. */
5402 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5405 /* Copy it back to its original location. */
5406 if (reg
!= end
->counter
)
5407 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5409 /* Jump back to the start label. */
5410 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5412 JUMP_LABEL (insn
) = end
->label
;
5413 LABEL_NUSES (end
->label
)++;
5415 /* Emit the whole sequence before the doloop_end. */
5416 insn
= get_insns ();
5418 emit_insn_before (insn
, end
->insn
);
5420 /* Delete the doloop_end. */
5421 delete_insn (end
->insn
);
5428 mep_invertable_branch_p (rtx insn
)
5431 enum rtx_code old_code
;
5434 set
= PATTERN (insn
);
5435 if (GET_CODE (set
) != SET
)
5437 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5439 cond
= XEXP (XEXP (set
, 1), 0);
5440 old_code
= GET_CODE (cond
);
5444 PUT_CODE (cond
, NE
);
5447 PUT_CODE (cond
, EQ
);
5450 PUT_CODE (cond
, GE
);
5453 PUT_CODE (cond
, LT
);
5458 INSN_CODE (insn
) = -1;
5459 i
= recog_memoized (insn
);
5460 PUT_CODE (cond
, old_code
);
5461 INSN_CODE (insn
) = -1;
5466 mep_invert_branch (rtx insn
, rtx after
)
5468 rtx cond
, set
, label
;
5471 set
= PATTERN (insn
);
5473 gcc_assert (GET_CODE (set
) == SET
);
5474 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5476 cond
= XEXP (XEXP (set
, 1), 0);
5477 switch (GET_CODE (cond
))
5480 PUT_CODE (cond
, NE
);
5483 PUT_CODE (cond
, EQ
);
5486 PUT_CODE (cond
, GE
);
5489 PUT_CODE (cond
, LT
);
5494 label
= gen_label_rtx ();
5495 emit_label_after (label
, after
);
5496 for (i
=1; i
<=2; i
++)
5497 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5499 rtx ref
= XEXP (XEXP (set
, 1), i
);
5500 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5501 delete_insn (XEXP (ref
, 0));
5502 XEXP (ref
, 0) = label
;
5503 LABEL_NUSES (label
) ++;
5504 JUMP_LABEL (insn
) = label
;
5506 INSN_CODE (insn
) = -1;
5507 i
= recog_memoized (insn
);
5508 gcc_assert (i
>= 0);
5512 mep_reorg_erepeat (rtx insns
)
5514 rtx insn
, prev
, l
, x
;
5517 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5519 && mep_invertable_branch_p (insn
))
5523 fprintf (dump_file
, "\n------------------------------\n");
5524 fprintf (dump_file
, "erepeat: considering this jump:\n");
5525 print_rtl_single (dump_file
, insn
);
5527 count
= simplejump_p (insn
) ? 0 : 1;
5528 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5530 if (CALL_P (prev
) || BARRIER_P (prev
))
5533 if (prev
== JUMP_LABEL (insn
))
5537 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5539 if (LABEL_NUSES (prev
) == 1)
5540 /* We're the only user, always safe */ ;
5541 else if (LABEL_NUSES (prev
) == 2)
5543 /* See if there's a barrier before this label. If
5544 so, we know nobody inside the loop uses it.
5545 But we must be careful to put the erepeat
5546 *after* the label. */
5548 for (barrier
= PREV_INSN (prev
);
5549 barrier
&& NOTE_P (barrier
);
5550 barrier
= PREV_INSN (barrier
))
5552 if (barrier
&& ! BARRIER_P (barrier
))
5557 /* We don't know who else, within or without our loop, uses this */
5559 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5563 /* Generate a label to be used by the erepat insn. */
5564 l
= gen_label_rtx ();
5566 /* Insert the erepeat after INSN's target label. */
5567 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5569 emit_insn_after (x
, prev
);
5571 /* Insert the erepeat label. */
5572 newlast
= (mep_insert_repeat_label_last
5573 (insn
, l
, !simplejump_p (insn
), false));
5574 if (simplejump_p (insn
))
5576 emit_insn_before (gen_erepeat_end (), insn
);
5581 mep_invert_branch (insn
, newlast
);
5582 emit_insn_after (gen_erepeat_end (), newlast
);
5589 /* A label is OK if there is exactly one user, and we
5590 can find that user before the next label. */
5593 if (LABEL_NUSES (prev
) == 1)
5595 for (user
= PREV_INSN (prev
);
5596 user
&& (INSN_P (user
) || NOTE_P (user
));
5597 user
= PREV_INSN (user
))
5598 if (JUMP_P (user
) && JUMP_LABEL (user
) == prev
)
5600 safe
= INSN_UID (user
);
5607 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5608 safe
, INSN_UID (prev
));
5618 fprintf (dump_file
, "\n==============================\n");
5621 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5622 always do this on its own. */
5625 mep_jmp_return_reorg (rtx insns
)
5627 rtx insn
, label
, ret
;
5630 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5631 if (simplejump_p (insn
))
5633 /* Find the fist real insn the jump jumps to. */
5634 label
= ret
= JUMP_LABEL (insn
);
5638 || GET_CODE (PATTERN (ret
)) == USE
))
5639 ret
= NEXT_INSN (ret
);
5643 /* Is it a return? */
5644 ret_code
= recog_memoized (ret
);
5645 if (ret_code
== CODE_FOR_return_internal
5646 || ret_code
== CODE_FOR_eh_return_internal
)
5648 /* It is. Replace the jump with a return. */
5649 LABEL_NUSES (label
) --;
5650 if (LABEL_NUSES (label
) == 0)
5651 delete_insn (label
);
5652 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5653 INSN_CODE (insn
) = -1;
5661 mep_reorg_addcombine (rtx insns
)
5665 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5667 && INSN_CODE (i
) == CODE_FOR_addsi3
5668 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5669 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5670 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5671 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5675 && INSN_CODE (n
) == CODE_FOR_addsi3
5676 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5677 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5678 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5679 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5681 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5682 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5683 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5685 && ic
+ nc
> -32768)
5687 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5688 NEXT_INSN (i
) = NEXT_INSN (n
);
5690 PREV_INSN (NEXT_INSN (i
)) = i
;
5696 /* If this insn adjusts the stack, return the adjustment, else return
5699 add_sp_insn_p (rtx insn
)
5703 if (! single_set (insn
))
5705 pat
= PATTERN (insn
);
5706 if (GET_CODE (SET_DEST (pat
)) != REG
)
5708 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5710 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5712 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5714 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5716 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5718 return INTVAL (XEXP (SET_SRC (pat
), 1));
5721 /* Check for trivial functions that set up an unneeded stack
5724 mep_reorg_noframe (rtx insns
)
5726 rtx start_frame_insn
;
5727 rtx end_frame_insn
= 0;
5731 /* The first insn should be $sp = $sp + N */
5732 while (insns
&& ! INSN_P (insns
))
5733 insns
= NEXT_INSN (insns
);
5737 sp_adjust
= add_sp_insn_p (insns
);
5741 start_frame_insn
= insns
;
5742 sp
= SET_DEST (PATTERN (start_frame_insn
));
5744 insns
= next_real_insn (insns
);
5748 rtx next
= next_real_insn (insns
);
5752 sp2
= add_sp_insn_p (insns
);
5757 end_frame_insn
= insns
;
5758 if (sp2
!= -sp_adjust
)
5761 else if (mep_mentioned_p (insns
, sp
, 0))
5763 else if (CALL_P (insns
))
5771 delete_insn (start_frame_insn
);
5772 delete_insn (end_frame_insn
);
5779 rtx insns
= get_insns ();
5781 /* We require accurate REG_DEAD notes. */
5782 compute_bb_for_insn ();
5783 df_note_add_problem ();
5786 mep_reorg_addcombine (insns
);
5787 #if EXPERIMENTAL_REGMOVE_REORG
5788 /* VLIW packing has been done already, so we can't just delete things. */
5789 if (!mep_vliw_function_p (cfun
->decl
))
5790 mep_reorg_regmove (insns
);
5792 mep_jmp_return_reorg (insns
);
5793 mep_bundle_insns (insns
);
5794 mep_reorg_repeat (insns
);
5797 && !profile_arc_flag
5798 && TARGET_OPT_REPEAT
5799 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5800 mep_reorg_erepeat (insns
);
5802 /* This may delete *insns so make sure it's last. */
5803 mep_reorg_noframe (insns
);
5805 df_finish_pass (false);
5810 /*----------------------------------------------------------------------*/
5812 /*----------------------------------------------------------------------*/
5814 /* Element X gives the index into cgen_insns[] of the most general
5815 implementation of intrinsic X. Unimplemented intrinsics are
5817 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5819 /* Element X gives the index of another instruction that is mapped to
5820 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5823 Things are set up so that mep_intrinsic_chain[X] < X. */
5824 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5826 /* The bitmask for the current ISA. The ISA masks are declared
5828 unsigned int mep_selected_isa
;
5831 const char *config_name
;
5835 static struct mep_config mep_configs
[] = {
5836 #ifdef COPROC_SELECTION_TABLE
5837 COPROC_SELECTION_TABLE
,
5842 /* Initialize the global intrinsics variables above. */
5845 mep_init_intrinsics (void)
5849 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5850 mep_selected_isa
= mep_configs
[0].isa
;
5851 if (mep_config_string
!= 0)
5852 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5853 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5855 mep_selected_isa
= mep_configs
[i
].isa
;
5859 /* Assume all intrinsics are unavailable. */
5860 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5861 mep_intrinsic_insn
[i
] = -1;
5863 /* Build up the global intrinsic tables. */
5864 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5865 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5867 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5868 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5870 /* See whether we can directly move values between one coprocessor
5871 register and another. */
5872 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5873 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5874 mep_have_copro_copro_moves_p
= true;
5876 /* See whether we can directly move values between core and
5877 coprocessor registers. */
5878 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5879 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5881 mep_have_core_copro_moves_p
= 1;
5884 /* Declare all available intrinsic functions. Called once only. */
5886 static tree cp_data_bus_int_type_node
;
5887 static tree opaque_vector_type_node
;
5888 static tree v8qi_type_node
;
5889 static tree v4hi_type_node
;
5890 static tree v2si_type_node
;
5891 static tree v8uqi_type_node
;
5892 static tree v4uhi_type_node
;
5893 static tree v2usi_type_node
;
5896 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5900 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5901 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5902 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5903 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5904 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5905 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5906 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5907 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5908 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5909 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5910 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5911 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5912 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5913 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5914 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5915 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5916 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5918 return void_type_node
;
5923 mep_init_builtins (void)
5927 if (TARGET_64BIT_CR_REGS
)
5928 cp_data_bus_int_type_node
= long_long_integer_type_node
;
5930 cp_data_bus_int_type_node
= long_integer_type_node
;
5932 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
5933 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
5934 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
5935 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
5936 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
5937 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
5938 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
5940 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node
);
5942 add_builtin_type ("cp_vector", opaque_vector_type_node
);
5944 add_builtin_type ("cp_v8qi", v8qi_type_node
);
5945 add_builtin_type ("cp_v4hi", v4hi_type_node
);
5946 add_builtin_type ("cp_v2si", v2si_type_node
);
5948 add_builtin_type ("cp_v8uqi", v8uqi_type_node
);
5949 add_builtin_type ("cp_v4uhi", v4uhi_type_node
);
5950 add_builtin_type ("cp_v2usi", v2usi_type_node
);
5952 /* Intrinsics like mep_cadd3 are implemented with two groups of
5953 instructions, one which uses UNSPECs and one which uses a specific
5954 rtl code such as PLUS. Instructions in the latter group belong
5955 to GROUP_KNOWN_CODE.
5957 In such cases, the intrinsic will have two entries in the global
5958 tables above. The unspec form is accessed using builtin functions
5959 while the specific form is accessed using the mep_* enum in
5962 The idea is that __cop arithmetic and builtin functions have
5963 different optimization requirements. If mep_cadd3() appears in
5964 the source code, the user will surely except gcc to use cadd3
5965 rather than a work-alike such as add3. However, if the user
5966 just writes "a + b", where a or b are __cop variables, it is
5967 reasonable for gcc to choose a core instruction rather than
5968 cadd3 if it believes that is more optimal. */
5969 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5970 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
5971 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
5973 tree ret_type
= void_type_node
;
5976 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
5979 if (cgen_insns
[i
].cret_p
)
5980 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
5982 bi_type
= build_function_type_list (ret_type
, NULL_TREE
);
5983 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
5985 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
5989 /* Report the unavailablity of the given intrinsic. */
5993 mep_intrinsic_unavailable (int intrinsic
)
5995 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
5997 if (already_reported_p
[intrinsic
])
6000 if (mep_intrinsic_insn
[intrinsic
] < 0)
6001 error ("coprocessor intrinsic %qs is not available in this configuration",
6002 cgen_intrinsics
[intrinsic
]);
6003 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6004 error ("%qs is not available in VLIW functions",
6005 cgen_intrinsics
[intrinsic
]);
6007 error ("%qs is not available in non-VLIW functions",
6008 cgen_intrinsics
[intrinsic
]);
6010 already_reported_p
[intrinsic
] = 1;
6015 /* See if any implementation of INTRINSIC is available to the
6016 current function. If so, store the most general implementation
6017 in *INSN_PTR and return true. Return false otherwise. */
6020 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6024 i
= mep_intrinsic_insn
[intrinsic
];
6025 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6026 i
= mep_intrinsic_chain
[i
];
6030 *insn_ptr
= &cgen_insns
[i
];
6037 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6038 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6039 try using a work-alike instead. In this case, the returned insn
6040 may have three operands rather than two. */
6043 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6047 if (intrinsic
== mep_cmov
)
6049 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6050 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6054 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6058 /* If ARG is a register operand that is the same size as MODE, convert it
6059 to MODE using a subreg. Otherwise return ARG as-is. */
6062 mep_convert_arg (enum machine_mode mode
, rtx arg
)
6064 if (GET_MODE (arg
) != mode
6065 && register_operand (arg
, VOIDmode
)
6066 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6067 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6072 /* Apply regnum conversions to ARG using the description given by REGNUM.
6073 Return the new argument on success and null on failure. */
6076 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6078 if (regnum
->count
== 0)
6081 if (GET_CODE (arg
) != CONST_INT
6083 || INTVAL (arg
) >= regnum
->count
)
6086 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6090 /* Try to make intrinsic argument ARG match the given operand.
6091 UNSIGNED_P is true if the argument has an unsigned type. */
6094 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6097 if (GET_CODE (arg
) == CONST_INT
)
6099 /* CONST_INTs can only be bound to integer operands. */
6100 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6103 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6104 /* These hold vector constants. */;
6105 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6107 /* If the argument is a different size from what's expected, we must
6108 have a value in the right mode class in order to convert it. */
6109 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6112 /* If the operand is an rvalue, promote or demote it to match the
6113 operand's size. This might not need extra instructions when
6114 ARG is a register value. */
6115 if (operand
->constraint
[0] != '=')
6116 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6119 /* If the operand is an lvalue, bind the operand to a new register.
6120 The caller will copy this value into ARG after the main
6121 instruction. By doing this always, we produce slightly more
6123 /* But not for control registers. */
6124 if (operand
->constraint
[0] == '='
6126 || ! (CONTROL_REGNO_P (REGNO (arg
))
6127 || CCR_REGNO_P (REGNO (arg
))
6128 || CR_REGNO_P (REGNO (arg
)))
6130 return gen_reg_rtx (operand
->mode
);
6132 /* Try simple mode punning. */
6133 arg
= mep_convert_arg (operand
->mode
, arg
);
6134 if (operand
->predicate (arg
, operand
->mode
))
6137 /* See if forcing the argument into a register will make it match. */
6138 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6139 arg
= force_reg (operand
->mode
, arg
);
6141 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6142 if (operand
->predicate (arg
, operand
->mode
))
6149 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6150 function FNNAME. OPERAND describes the operand to which ARGNUM
6154 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6155 int argnum
, tree fnname
)
6159 if (GET_CODE (arg
) == CONST_INT
)
6160 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6161 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6163 const struct cgen_immediate_predicate
*predicate
;
6164 HOST_WIDE_INT argval
;
6166 predicate
= &cgen_immediate_predicates
[i
];
6167 argval
= INTVAL (arg
);
6168 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6169 error ("argument %d of %qE must be in the range %d...%d",
6170 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6172 error ("argument %d of %qE must be a multiple of %d",
6173 argnum
, fnname
, predicate
->align
);
6177 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6181 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6182 rtx subtarget ATTRIBUTE_UNUSED
,
6183 enum machine_mode mode ATTRIBUTE_UNUSED
,
6184 int ignore ATTRIBUTE_UNUSED
)
6186 rtx pat
, op
[10], arg
[10];
6188 int opindex
, unsigned_p
[10];
6190 unsigned int n_args
;
6192 const struct cgen_insn
*cgen_insn
;
6193 const struct insn_data_d
*idata
;
6194 unsigned int first_arg
= 0;
6195 unsigned int builtin_n_args
;
6197 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6198 fnname
= DECL_NAME (fndecl
);
6200 /* Find out which instruction we should emit. Note that some coprocessor
6201 intrinsics may only be available in VLIW mode, or only in normal mode. */
6202 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6204 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6207 idata
= &insn_data
[cgen_insn
->icode
];
6209 builtin_n_args
= cgen_insn
->num_args
;
6211 if (cgen_insn
->cret_p
)
6213 if (cgen_insn
->cret_p
> 1)
6216 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6220 /* Evaluate each argument. */
6221 n_args
= call_expr_nargs (exp
);
6223 if (n_args
< builtin_n_args
)
6225 error ("too few arguments to %qE", fnname
);
6228 if (n_args
> builtin_n_args
)
6230 error ("too many arguments to %qE", fnname
);
6234 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6238 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6243 if (cgen_insn
->regnums
[a
].reference_p
)
6245 if (TREE_CODE (value
) != ADDR_EXPR
)
6248 error ("argument %d of %qE must be an address", a
+1, fnname
);
6251 value
= TREE_OPERAND (value
, 0);
6255 /* If the argument has been promoted to int, get the unpromoted
6256 value. This is necessary when sub-int memory values are bound
6257 to reference parameters. */
6258 if (TREE_CODE (value
) == NOP_EXPR
6259 && TREE_TYPE (value
) == integer_type_node
6260 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6261 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6262 < TYPE_PRECISION (TREE_TYPE (value
))))
6263 value
= TREE_OPERAND (value
, 0);
6265 /* If the argument has been promoted to double, get the unpromoted
6266 SFmode value. This is necessary for FMAX support, for example. */
6267 if (TREE_CODE (value
) == NOP_EXPR
6268 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6269 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6270 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6271 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6272 value
= TREE_OPERAND (value
, 0);
6274 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6275 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6276 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6277 if (cgen_insn
->regnums
[a
].reference_p
)
6279 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6280 enum machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6282 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6286 error ("argument %d of %qE must be in the range %d...%d",
6287 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6292 for (a
= 0; a
< first_arg
; a
++)
6294 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6297 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6300 /* Convert the arguments into a form suitable for the intrinsic.
6301 Report an error if this isn't possible. */
6302 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6304 a
= cgen_insn
->op_mapping
[opindex
];
6305 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6306 arg
[a
], unsigned_p
[a
]);
6307 if (op
[opindex
] == 0)
6309 mep_incompatible_arg (&idata
->operand
[opindex
],
6310 arg
[a
], a
+ 1 - first_arg
, fnname
);
6315 /* Emit the instruction. */
6316 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6317 op
[5], op
[6], op
[7], op
[8], op
[9]);
6319 if (GET_CODE (pat
) == SET
6320 && GET_CODE (SET_DEST (pat
)) == PC
6321 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6322 emit_jump_insn (pat
);
6326 /* Copy lvalues back to their final locations. */
6327 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6328 if (idata
->operand
[opindex
].constraint
[0] == '=')
6330 a
= cgen_insn
->op_mapping
[opindex
];
6333 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6334 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6335 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6339 /* First convert the operand to the right mode, then copy it
6340 into the destination. Doing the conversion as a separate
6341 step (rather than using convert_move) means that we can
6342 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6343 refer to the same register. */
6344 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6345 op
[opindex
], unsigned_p
[a
]);
6346 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6347 emit_move_insn (arg
[a
], op
[opindex
]);
6352 if (first_arg
> 0 && target
&& target
!= op
[0])
6354 emit_move_insn (target
, op
[0]);
6361 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED
)
6366 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6367 a global register. */
6370 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
6378 switch (GET_CODE (x
))
6381 if (REG_P (SUBREG_REG (x
)))
6383 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6384 && global_regs
[subreg_regno (x
)])
6392 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6406 /* A non-constant call might use a global register. */
6416 /* Returns nonzero if X mentions a global register. */
6419 global_reg_mentioned_p (rtx x
)
6425 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6427 x
= CALL_INSN_FUNCTION_USAGE (x
);
6435 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
6437 /* Scheduling hooks for VLIW mode.
6439 Conceptually this is very simple: we have a two-pack architecture
6440 that takes one core insn and one coprocessor insn to make up either
6441 a 32- or 64-bit instruction word (depending on the option bit set in
6442 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6443 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6444 and one 48-bit cop insn or two 32-bit core/cop insns.
6446 In practice, instruction selection will be a bear. Consider in
6447 VL64 mode the following insns
6452 these cannot pack, since the add is a 16-bit core insn and cmov
6453 is a 32-bit cop insn. However,
6458 packs just fine. For good VLIW code generation in VL64 mode, we
6459 will have to have 32-bit alternatives for many of the common core
6460 insns. Not implemented. */
6463 mep_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
6467 if (REG_NOTE_KIND (link
) != 0)
6469 /* See whether INSN and DEP_INSN are intrinsics that set the same
6470 hard register. If so, it is more important to free up DEP_INSN
6471 than it is to free up INSN.
6473 Note that intrinsics like mep_mulr are handled differently from
6474 the equivalent mep.md patterns. In mep.md, if we don't care
6475 about the value of $lo and $hi, the pattern will just clobber
6476 the registers, not set them. Since clobbers don't count as
6477 output dependencies, it is often possible to reorder two mulrs,
6480 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6481 so any pair of mep_mulr()s will be inter-dependent. We should
6482 therefore give the first mep_mulr() a higher priority. */
6483 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6484 && global_reg_mentioned_p (PATTERN (insn
))
6485 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6488 /* If the dependence is an anti or output dependence, assume it
6493 /* If we can't recognize the insns, we can't really do anything. */
6494 if (recog_memoized (dep_insn
) < 0)
6497 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6498 attribute instead. */
6501 cost_specified
= get_attr_latency (dep_insn
);
6502 if (cost_specified
!= 0)
6503 return cost_specified
;
6509 /* ??? We don't properly compute the length of a load/store insn,
6510 taking into account the addressing mode. */
6513 mep_issue_rate (void)
6515 return TARGET_IVC2
? 3 : 2;
6518 /* Return true if function DECL was declared with the vliw attribute. */
6521 mep_vliw_function_p (tree decl
)
6523 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6527 mep_find_ready_insn (rtx
*ready
, int nready
, enum attr_slot slot
, int length
)
6531 for (i
= nready
- 1; i
>= 0; --i
)
6533 rtx insn
= ready
[i
];
6534 if (recog_memoized (insn
) >= 0
6535 && get_attr_slot (insn
) == slot
6536 && get_attr_length (insn
) == length
)
6544 mep_move_ready_insn (rtx
*ready
, int nready
, rtx insn
)
6548 for (i
= 0; i
< nready
; ++i
)
6549 if (ready
[i
] == insn
)
6551 for (; i
< nready
- 1; ++i
)
6552 ready
[i
] = ready
[i
+ 1];
6561 mep_print_sched_insn (FILE *dump
, rtx insn
)
6563 const char *slots
= "none";
6564 const char *name
= NULL
;
6568 if (GET_CODE (PATTERN (insn
)) == SET
6569 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6571 switch (get_attr_slots (insn
))
6573 case SLOTS_CORE
: slots
= "core"; break;
6574 case SLOTS_C3
: slots
= "c3"; break;
6575 case SLOTS_P0
: slots
= "p0"; break;
6576 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6577 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6578 case SLOTS_P0S
: slots
= "p0s"; break;
6579 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6580 case SLOTS_P1
: slots
= "p1"; break;
6582 sprintf(buf
, "%d", get_attr_slots (insn
));
6587 if (GET_CODE (PATTERN (insn
)) == USE
)
6590 code
= INSN_CODE (insn
);
6592 name
= get_insn_name (code
);
6597 "insn %4d %4d %8s %s\n",
6605 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6606 int sched_verbose ATTRIBUTE_UNUSED
, rtx
*ready
,
6607 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6609 int nready
= *pnready
;
6610 rtx core_insn
, cop_insn
;
6613 if (dump
&& sched_verbose
> 1)
6615 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6616 for (i
=0; i
<nready
; i
++)
6617 mep_print_sched_insn (dump
, ready
[i
]);
6618 fprintf (dump
, "\n");
6621 if (!mep_vliw_function_p (cfun
->decl
))
6626 /* IVC2 uses a DFA to determine what's ready and what's not. */
6630 /* We can issue either a core or coprocessor instruction.
6631 Look for a matched pair of insns to reorder. If we don't
6632 find any, don't second-guess the scheduler's priorities. */
6634 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6635 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6636 TARGET_OPT_VL64
? 6 : 2)))
6638 else if (TARGET_OPT_VL64
6639 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6640 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6643 /* We didn't find a pair. Issue the single insn at the head
6644 of the ready list. */
6647 /* Reorder the two insns first. */
6648 mep_move_ready_insn (ready
, nready
, core_insn
);
6649 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6653 /* A for_each_rtx callback. Return true if *X is a register that is
6654 set by insn PREV. */
6657 mep_store_find_set (rtx
*x
, void *prev
)
6659 return REG_P (*x
) && reg_set_p (*x
, (const_rtx
) prev
);
6662 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6663 not the containing insn. */
6666 mep_store_data_bypass_1 (rtx prev
, rtx pat
)
6668 /* Cope with intrinsics like swcpa. */
6669 if (GET_CODE (pat
) == PARALLEL
)
6673 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6674 if (mep_store_data_bypass_p (prev
, XVECEXP (pat
, 0, i
)))
6680 /* Check for some sort of store. */
6681 if (GET_CODE (pat
) != SET
6682 || GET_CODE (SET_DEST (pat
)) != MEM
)
6685 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6686 The first operand to the unspec is the store data and the other operands
6687 are used to calculate the address. */
6688 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6693 src
= SET_SRC (pat
);
6694 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6695 if (for_each_rtx (&XVECEXP (src
, 0, i
), mep_store_find_set
, prev
))
6701 /* Otherwise just check that PREV doesn't modify any register mentioned
6702 in the memory destination. */
6703 return !for_each_rtx (&SET_DEST (pat
), mep_store_find_set
, prev
);
6706 /* Return true if INSN is a store instruction and if the store address
6707 has no true dependence on PREV. */
6710 mep_store_data_bypass_p (rtx prev
, rtx insn
)
6712 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6715 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6716 is a register other than LO or HI and if PREV sets *X. */
6719 mep_mul_hilo_bypass_1 (rtx
*x
, void *prev
)
6722 && REGNO (*x
) != LO_REGNO
6723 && REGNO (*x
) != HI_REGNO
6724 && reg_set_p (*x
, (const_rtx
) prev
));
6727 /* Return true if, apart from HI/LO, there are no true dependencies
6728 between multiplication instructions PREV and INSN. */
6731 mep_mul_hilo_bypass_p (rtx prev
, rtx insn
)
6735 pat
= PATTERN (insn
);
6736 if (GET_CODE (pat
) == PARALLEL
)
6737 pat
= XVECEXP (pat
, 0, 0);
6738 return (GET_CODE (pat
) == SET
6739 && !for_each_rtx (&SET_SRC (pat
), mep_mul_hilo_bypass_1
, prev
));
6742 /* Return true if INSN is an ldc instruction that issues to the
6743 MeP-h1 integer pipeline. This is true for instructions that
6744 read from PSW, LP, SAR, HI and LO. */
6747 mep_ipipe_ldc_p (rtx insn
)
6751 pat
= PATTERN (insn
);
6753 /* Cope with instrinsics that set both a hard register and its shadow.
6754 The set of the hard register comes first. */
6755 if (GET_CODE (pat
) == PARALLEL
)
6756 pat
= XVECEXP (pat
, 0, 0);
6758 if (GET_CODE (pat
) == SET
)
6760 src
= SET_SRC (pat
);
6762 /* Cope with intrinsics. The first operand to the unspec is
6763 the source register. */
6764 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6765 src
= XVECEXP (src
, 0, 0);
6768 switch (REGNO (src
))
6781 /* Create a VLIW bundle from core instruction CORE and coprocessor
6782 instruction COP. COP always satisfies INSN_P, but CORE can be
6783 either a new pattern or an existing instruction.
6785 Emit the bundle in place of COP and return it. */
6788 mep_make_bundle (rtx core
, rtx cop
)
6792 /* If CORE is an existing instruction, remove it, otherwise put
6793 the new pattern in an INSN harness. */
6797 core
= make_insn_raw (core
);
6799 /* Generate the bundle sequence and replace COP with it. */
6800 insn
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core
, cop
));
6801 insn
= emit_insn_after (insn
, cop
);
6804 /* Set up the links of the insns inside the SEQUENCE. */
6805 PREV_INSN (core
) = PREV_INSN (insn
);
6806 NEXT_INSN (core
) = cop
;
6807 PREV_INSN (cop
) = core
;
6808 NEXT_INSN (cop
) = NEXT_INSN (insn
);
6810 /* Set the VLIW flag for the coprocessor instruction. */
6811 PUT_MODE (core
, VOIDmode
);
6812 PUT_MODE (cop
, BImode
);
6814 /* Derive a location for the bundle. Individual instructions cannot
6815 have their own location because there can be no assembler labels
6816 between CORE and COP. */
6817 INSN_LOCATION (insn
) = INSN_LOCATION (INSN_LOCATION (core
) ? core
: cop
);
6818 INSN_LOCATION (core
) = 0;
6819 INSN_LOCATION (cop
) = 0;
6824 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6827 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6829 rtx
* pinsn
= (rtx
*) data
;
6831 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6835 /* Return true if anything in insn X is (anti,output,true) dependent on
6836 anything in insn Y. */
6839 mep_insn_dependent_p (rtx x
, rtx y
)
6843 gcc_assert (INSN_P (x
));
6844 gcc_assert (INSN_P (y
));
6847 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6848 if (tmp
== NULL_RTX
)
6852 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6853 if (tmp
== NULL_RTX
)
6860 core_insn_p (rtx insn
)
6862 if (GET_CODE (PATTERN (insn
)) == USE
)
6864 if (get_attr_slot (insn
) == SLOT_CORE
)
6869 /* Mark coprocessor instructions that can be bundled together with
6870 the immediately preceding core instruction. This is later used
6871 to emit the "+" that tells the assembler to create a VLIW insn.
6873 For unbundled insns, the assembler will automatically add coprocessor
6874 nops, and 16-bit core nops. Due to an apparent oversight in the
6875 spec, the assembler will _not_ automatically add 32-bit core nops,
6876 so we have to emit those here.
6878 Called from mep_insn_reorg. */
6881 mep_bundle_insns (rtx insns
)
6883 rtx insn
, last
= NULL_RTX
, first
= NULL_RTX
;
6884 int saw_scheduling
= 0;
6886 /* Only do bundling if we're in vliw mode. */
6887 if (!mep_vliw_function_p (cfun
->decl
))
6890 /* The first insn in a bundle are TImode, the remainder are
6891 VOIDmode. After this function, the first has VOIDmode and the
6892 rest have BImode. */
6894 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6896 /* First, move any NOTEs that are within a bundle, to the beginning
6898 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6900 if (NOTE_P (insn
) && first
)
6901 /* Don't clear FIRST. */;
6903 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
6906 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
6910 /* INSN is part of a bundle; FIRST is the first insn in that
6911 bundle. Move all intervening notes out of the bundle.
6912 In addition, since the debug pass may insert a label
6913 whenever the current line changes, set the location info
6914 for INSN to match FIRST. */
6916 INSN_LOCATION (insn
) = INSN_LOCATION (first
);
6918 note
= PREV_INSN (insn
);
6919 while (note
&& note
!= first
)
6921 prev
= PREV_INSN (note
);
6925 /* Remove NOTE from here... */
6926 PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
6927 NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
6928 /* ...and put it in here. */
6929 NEXT_INSN (note
) = first
;
6930 PREV_INSN (note
) = PREV_INSN (first
);
6931 NEXT_INSN (PREV_INSN (note
)) = note
;
6932 PREV_INSN (NEXT_INSN (note
)) = note
;
6939 else if (!NONJUMP_INSN_P (insn
))
6943 /* Now fix up the bundles. */
6944 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6949 if (!NONJUMP_INSN_P (insn
))
6955 /* If we're not optimizing enough, there won't be scheduling
6956 info. We detect that here. */
6957 if (GET_MODE (insn
) == TImode
)
6959 if (!saw_scheduling
)
6964 rtx core_insn
= NULL_RTX
;
6966 /* IVC2 slots are scheduled by DFA, so we just accept
6967 whatever the scheduler gives us. However, we must make
6968 sure the core insn (if any) is the first in the bundle.
6969 The IVC2 assembler can insert whatever NOPs are needed,
6970 and allows a COP insn to be first. */
6972 if (NONJUMP_INSN_P (insn
)
6973 && GET_CODE (PATTERN (insn
)) != USE
6974 && GET_MODE (insn
) == TImode
)
6978 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
6979 && NONJUMP_INSN_P (NEXT_INSN (last
));
6980 last
= NEXT_INSN (last
))
6982 if (core_insn_p (last
))
6985 if (core_insn_p (last
))
6988 if (core_insn
&& core_insn
!= insn
)
6990 /* Swap core insn to first in the bundle. */
6992 /* Remove core insn. */
6993 if (PREV_INSN (core_insn
))
6994 NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
6995 if (NEXT_INSN (core_insn
))
6996 PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
6998 /* Re-insert core insn. */
6999 PREV_INSN (core_insn
) = PREV_INSN (insn
);
7000 NEXT_INSN (core_insn
) = insn
;
7002 if (PREV_INSN (core_insn
))
7003 NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7004 PREV_INSN (insn
) = core_insn
;
7006 PUT_MODE (core_insn
, TImode
);
7007 PUT_MODE (insn
, VOIDmode
);
7011 /* The first insn has TImode, the rest have VOIDmode */
7012 if (GET_MODE (insn
) == TImode
)
7013 PUT_MODE (insn
, VOIDmode
);
7015 PUT_MODE (insn
, BImode
);
7019 PUT_MODE (insn
, VOIDmode
);
7020 if (recog_memoized (insn
) >= 0
7021 && get_attr_slot (insn
) == SLOT_COP
)
7025 || recog_memoized (last
) < 0
7026 || get_attr_slot (last
) != SLOT_CORE
7027 || (get_attr_length (insn
)
7028 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7029 || mep_insn_dependent_p (insn
, last
))
7031 switch (get_attr_length (insn
))
7036 insn
= mep_make_bundle (gen_nop (), insn
);
7039 if (TARGET_OPT_VL64
)
7040 insn
= mep_make_bundle (gen_nop32 (), insn
);
7043 if (TARGET_OPT_VL64
)
7044 error ("2 byte cop instructions are"
7045 " not allowed in 64-bit VLIW mode");
7047 insn
= mep_make_bundle (gen_nop (), insn
);
7050 error ("unexpected %d byte cop instruction",
7051 get_attr_length (insn
));
7056 insn
= mep_make_bundle (last
, insn
);
7064 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7065 Return true on success. This function can fail if the intrinsic
7066 is unavailable or if the operands don't satisfy their predicates. */
7069 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7071 const struct cgen_insn
*cgen_insn
;
7072 const struct insn_data_d
*idata
;
7076 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7079 idata
= &insn_data
[cgen_insn
->icode
];
7080 for (i
= 0; i
< idata
->n_operands
; i
++)
7082 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7083 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7087 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7088 newop
[3], newop
[4], newop
[5],
7089 newop
[6], newop
[7], newop
[8]));
7095 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7096 OPERANDS[0]. Report an error if the instruction could not
7097 be synthesized. OPERANDS[1] is a register_operand. For sign
7098 and zero extensions, it may be smaller than SImode. */
7101 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7102 rtx
* operands ATTRIBUTE_UNUSED
)
7108 /* Likewise, but apply a binary operation to OPERANDS[1] and
7109 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7110 can be a general_operand.
7112 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7113 third operand. REG and REG3 take register operands only. */
7116 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7117 int ATTRIBUTE_UNUSED immediate3
,
7118 int ATTRIBUTE_UNUSED reg
,
7119 int ATTRIBUTE_UNUSED reg3
,
7120 rtx
* operands ATTRIBUTE_UNUSED
)
7126 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
7127 int opno ATTRIBUTE_UNUSED
, int *total
,
7128 bool ATTRIBUTE_UNUSED speed_t
)
7133 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7135 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7142 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7146 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7148 : COSTS_N_INSNS (2));
7155 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
,
7156 enum machine_mode mode ATTRIBUTE_UNUSED
,
7157 addr_space_t as ATTRIBUTE_UNUSED
,
7158 bool ATTRIBUTE_UNUSED speed_p
)
7164 mep_asm_init_sections (void)
7167 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7168 "\t.section .based,\"aw\"");
7171 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7172 "\t.section .sbss,\"aw\"");
7175 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7176 "\t.section .sdata,\"aw\",@progbits");
7179 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7180 "\t.section .far,\"aw\"");
7183 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7184 "\t.section .farbss,\"aw\"");
7187 = get_unnamed_section (0, output_section_asm_op
,
7188 "\t.section .frodata,\"a\"");
7191 = get_unnamed_section (0, output_section_asm_op
,
7192 "\t.section .srodata,\"a\"");
7195 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7196 "\t.section .vtext,\"axv\"\n\t.vliw");
7199 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7200 "\t.section .vftext,\"axv\"\n\t.vliw");
7203 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7204 "\t.section .ftext,\"ax\"\n\t.core");
7208 /* Initialize the GCC target structure. */
7210 #undef TARGET_ASM_FUNCTION_PROLOGUE
7211 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7212 #undef TARGET_ATTRIBUTE_TABLE
7213 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7214 #undef TARGET_COMP_TYPE_ATTRIBUTES
7215 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7216 #undef TARGET_INSERT_ATTRIBUTES
7217 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7218 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7219 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7220 #undef TARGET_CAN_INLINE_P
7221 #define TARGET_CAN_INLINE_P mep_can_inline_p
7222 #undef TARGET_SECTION_TYPE_FLAGS
7223 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7224 #undef TARGET_ASM_NAMED_SECTION
7225 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7226 #undef TARGET_INIT_BUILTINS
7227 #define TARGET_INIT_BUILTINS mep_init_builtins
7228 #undef TARGET_EXPAND_BUILTIN
7229 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7230 #undef TARGET_SCHED_ADJUST_COST
7231 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7232 #undef TARGET_SCHED_ISSUE_RATE
7233 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7234 #undef TARGET_SCHED_REORDER
7235 #define TARGET_SCHED_REORDER mep_sched_reorder
7236 #undef TARGET_STRIP_NAME_ENCODING
7237 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7238 #undef TARGET_ASM_SELECT_SECTION
7239 #define TARGET_ASM_SELECT_SECTION mep_select_section
7240 #undef TARGET_ASM_UNIQUE_SECTION
7241 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7242 #undef TARGET_ENCODE_SECTION_INFO
7243 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7244 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7245 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7246 #undef TARGET_RTX_COSTS
7247 #define TARGET_RTX_COSTS mep_rtx_cost
7248 #undef TARGET_ADDRESS_COST
7249 #define TARGET_ADDRESS_COST mep_address_cost
7250 #undef TARGET_MACHINE_DEPENDENT_REORG
7251 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7252 #undef TARGET_SETUP_INCOMING_VARARGS
7253 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7254 #undef TARGET_PASS_BY_REFERENCE
7255 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7256 #undef TARGET_FUNCTION_ARG
7257 #define TARGET_FUNCTION_ARG mep_function_arg
7258 #undef TARGET_FUNCTION_ARG_ADVANCE
7259 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7260 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7261 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7262 #undef TARGET_OPTION_OVERRIDE
7263 #define TARGET_OPTION_OVERRIDE mep_option_override
7264 #undef TARGET_ALLOCATE_INITIAL_VALUE
7265 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7266 #undef TARGET_ASM_INIT_SECTIONS
7267 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7268 #undef TARGET_RETURN_IN_MEMORY
7269 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7270 #undef TARGET_NARROW_VOLATILE_BITFIELD
7271 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7272 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7273 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7274 #undef TARGET_BUILD_BUILTIN_VA_LIST
7275 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7276 #undef TARGET_EXPAND_BUILTIN_VA_START
7277 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7278 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7279 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7280 #undef TARGET_CAN_ELIMINATE
7281 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7282 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7283 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7284 #undef TARGET_TRAMPOLINE_INIT
7285 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7286 #undef TARGET_LEGITIMATE_CONSTANT_P
7287 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7288 #undef TARGET_CAN_USE_DOLOOP_P
7289 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7291 struct gcc_target targetm
= TARGET_INITIALIZER
;