1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
47 #include "diagnostic-core.h"
49 #include "target-def.h"
50 #include "langhooks.h"
56 /* Structure of this file:
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
65 + Function args in registers
66 + Handle pipeline hazards
69 + Machine-dependent Reorg
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
82 c - cb (control bus) */
/* Per-function machine-specific state for the MeP back end, allocated by
   mep_init_machine_status and GC-tracked via GTY.  NOTE(review): this is a
   fragmentary excerpt — interleaved original lines (including the closing
   brace of the struct) are elided; code tokens are preserved verbatim.  */
84 struct GTY(()) machine_function
86 int mep_frame_pointer_needed
;
94 /* Records __builtin_return address. */
/* Per-hard-register save slots and saved flags, sized by the number of
   hard registers.  */
98 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
99 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
101 /* 2 if the current function has an interrupt attribute, 1 if not, 0
102 if unknown. This is here because resource.c uses EPILOGUE_USES
104 int interrupt_handler
;
106 /* Likewise, for disinterrupt attribute. */
107 int disable_interrupts
;
109 /* Number of doloop tags used so far. */
112 /* True if the last tag was allocated to a doloop_end. */
113 bool doloop_tag_from_end
;
115 /* True if reload changes $TP. */
116 bool reload_changes_tp
;
118 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
119 We only set this if the function is an interrupt handler. */
120 int asms_without_operands
;
/* True if X is a hard REG whose register number is one of the MeP
   control registers (per ANY_CONTROL_REGNO_P).  */
123 #define MEP_CONTROL_REG(x) \
124 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
/* GC-tracked pointers to the MeP-specific output sections (based, tiny
   BSS, far, far BSS, far/small rodata, VLIW text variants, far text).
   NOTE(review): fragmentary excerpt — initialization happens elsewhere
   (presumably mep_asm_init_sections; confirm in full source).  */
126 static GTY(()) section
* based_section
;
127 static GTY(()) section
* tinybss_section
;
128 static GTY(()) section
* far_section
;
129 static GTY(()) section
* farbss_section
;
130 static GTY(()) section
* frodata_section
;
131 static GTY(()) section
* srodata_section
;
133 static GTY(()) section
* vtext_section
;
134 static GTY(()) section
* vftext_section
;
135 static GTY(()) section
* ftext_section
;
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx
);
139 static bool symbolref_p (rtx
);
140 static void encode_pattern_1 (rtx
);
141 static void encode_pattern (rtx
);
142 static bool const_in_range (rtx
, int, int);
143 static void mep_rewrite_mult (rtx
, rtx
);
144 static void mep_rewrite_mulsi3 (rtx
, rtx
, rtx
, rtx
);
145 static void mep_rewrite_maddsi3 (rtx
, rtx
, rtx
, rtx
, rtx
);
146 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx
, bool);
147 static bool move_needs_splitting (rtx
, rtx
, enum machine_mode
);
148 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
149 static bool mep_nongeneral_reg (rtx
);
150 static bool mep_general_copro_reg (rtx
);
151 static bool mep_nonregister (rtx
);
152 static struct machine_function
* mep_init_machine_status (void);
153 static rtx
mep_tp_rtx (void);
154 static rtx
mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx
, rtx
);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
162 static void add_constant (int, int, int, int);
163 static rtx
maybe_dead_move (rtx
, rtx
, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT
);
166 static bool mep_function_ok_for_sibcall (tree
, tree
);
167 static int unique_bit_in (HOST_WIDE_INT
);
168 static int bit_size_for_clip (HOST_WIDE_INT
);
169 static int bytesize (const_tree
, enum machine_mode
);
170 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
171 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
172 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
173 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
174 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
175 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree
);
177 static bool mep_can_inline_p (tree
, tree
);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree
, bool);
180 static int mep_attrlist_to_encoding (tree
, tree
);
181 static void mep_insert_attributes (tree
, tree
*);
182 static void mep_encode_section_info (tree
, rtx
, int);
183 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
184 static void mep_unique_section (tree
, int);
185 static unsigned int mep_section_type_flags (tree
, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree
);
187 static bool mep_mentioned_p (rtx
, rtx
, int);
188 static void mep_reorg_regmove (rtx
);
189 static rtx
mep_insert_repeat_label_last (rtx
, rtx
, bool, bool);
190 static void mep_reorg_repeat (rtx
);
191 static bool mep_invertable_branch_p (rtx
);
192 static void mep_invert_branch (rtx
, rtx
);
193 static void mep_reorg_erepeat (rtx
);
194 static void mep_jmp_return_reorg (rtx
);
195 static void mep_reorg_addcombine (rtx
);
196 static void mep_reorg (void);
197 static void mep_init_intrinsics (void);
198 static void mep_init_builtins (void);
199 static void mep_intrinsic_unavailable (int);
200 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
201 static bool mep_get_move_insn (int, const struct cgen_insn
**);
202 static rtx
mep_convert_arg (enum machine_mode
, rtx
);
203 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
204 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
205 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
206 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
207 static int mep_adjust_cost (rtx
, rtx
, rtx
, int);
208 static int mep_issue_rate (void);
209 static rtx
mep_find_ready_insn (rtx
*, int, enum attr_slot
, int);
210 static void mep_move_ready_insn (rtx
*, int, rtx
);
211 static int mep_sched_reorder (FILE *, int, rtx
*, int *, int);
212 static rtx
mep_make_bundle (rtx
, rtx
);
213 static void mep_bundle_insns (rtx
);
214 static bool mep_rtx_cost (rtx
, int, int, int, int *, bool);
215 static int mep_address_cost (rtx
, enum machine_mode
, addr_space_t
, bool);
216 static void mep_setup_incoming_varargs (cumulative_args_t
, enum machine_mode
,
218 static bool mep_pass_by_reference (cumulative_args_t cum
, enum machine_mode
,
220 static rtx
mep_function_arg (cumulative_args_t
, enum machine_mode
,
222 static void mep_function_arg_advance (cumulative_args_t
, enum machine_mode
,
224 static bool mep_vector_mode_supported_p (enum machine_mode
);
225 static rtx
mep_allocate_initial_value (rtx
);
226 static void mep_asm_init_sections (void);
227 static int mep_comp_type_attributes (const_tree
, const_tree
);
228 static bool mep_narrow_volatile_bitfield (void);
229 static rtx
mep_expand_builtin_saveregs (void);
230 static tree
mep_build_builtin_va_list (void);
231 static void mep_expand_va_start (tree
, rtx
);
232 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
233 static bool mep_can_eliminate (const int, const int);
234 static void mep_conditional_register_usage (void);
235 static void mep_trampoline_init (rtx
, tree
, rtx
);
237 #define WANT_GCC_DEFINITIONS
238 #include "mep-intrin.h"
239 #undef WANT_GCC_DEFINITIONS
242 /* Command Line Option Support. */
/* Per-hard-register flag, toggled in bulk by mep_set_leaf_registers.  */
244 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
246 /* True if we can use cmov instructions to move values back and forth
247 between core and coprocessor registers. */
248 bool mep_have_core_copro_moves_p
;
250 /* True if we can use cmov instructions (or a work-alike) to move
251 values between coprocessor registers. */
252 bool mep_have_copro_copro_moves_p
;
254 /* A table of all coprocessor instructions that can act like
255 a coprocessor-to-coprocessor cmov. */
/* NOTE(review): the initializer elements of this array are elided in
   this excerpt.  */
256 static const int mep_cmov_insns
[] = {
/* Set every entry of mep_leaf_registers to ENABLE, but only when the
   first entry differs (cheap no-change check).  NOTE(review): fragment —
   the return-type line and braces are elided; tokens kept verbatim.  */
271 mep_set_leaf_registers (int enable
)
275 if (mep_leaf_registers
[0] != enable
)
276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
277 mep_leaf_registers
[i
] = enable
;
/* Adjust register usage: when neither the multiply nor the divide
   optional instructions are enabled, mark $hi/$lo fixed and call-used;
   then iterate over the shadow registers (loop body elided in this
   excerpt).  NOTE(review): fragment — tokens kept verbatim.  */
281 mep_conditional_register_usage (void)
285 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
287 fixed_regs
[HI_REGNO
] = 1;
288 fixed_regs
[LO_REGNO
] = 1;
289 call_used_regs
[HI_REGNO
] = 1;
290 call_used_regs
[LO_REGNO
] = 1;
293 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
/* Option-override hook: processes deferred command-line options,
   diagnoses unsupported/conflicting option combinations (-fpic/-fPIC,
   -ms/-mm/-ml, -mtiny=, -mclip, -mc=), and installs
   mep_init_machine_status before initializing intrinsics.
   NOTE(review): fragment — many original lines elided; tokens verbatim.  */
298 mep_option_override (void)
302 cl_deferred_option
*opt
;
303 vec
<cl_deferred_option
> *v
= (vec
<cl_deferred_option
> *) mep_deferred_options
;
306 FOR_EACH_VEC_ELT (*v
, i
, opt
)
308 switch (opt
->opt_index
)
/* Registers 48..79 appear to be the coprocessor bank: un-fix all 32,
   mark them call-used, then re-mark two (offsets 6..7) call-saved.
   TODO(review): confirm against the full switch cases, which are
   elided here.  */
311 for (j
= 0; j
< 32; j
++)
312 fixed_regs
[j
+ 48] = 0;
313 for (j
= 0; j
< 32; j
++)
314 call_used_regs
[j
+ 48] = 1;
315 for (j
= 6; j
< 8; j
++)
316 call_used_regs
[j
+ 48] = 0;
318 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
/* Option-combination diagnostics.  */
353 warning (OPT_fpic
, "-fpic is not supported");
355 warning (OPT_fPIC
, "-fPIC is not supported");
356 if (TARGET_S
&& TARGET_M
)
357 error ("only one of -ms and -mm may be given");
358 if (TARGET_S
&& TARGET_L
)
359 error ("only one of -ms and -ml may be given");
360 if (TARGET_M
&& TARGET_L
)
361 error ("only one of -mm and -ml may be given");
362 if (TARGET_S
&& global_options_set
.x_mep_tiny_cutoff
)
363 error ("only one of -ms and -mtiny= may be given");
364 if (TARGET_M
&& global_options_set
.x_mep_tiny_cutoff
)
365 error ("only one of -mm and -mtiny= may be given");
366 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
367 warning (0, "-mclip currently has no effect without -mminmax");
369 if (mep_const_section
)
371 if (strcmp (mep_const_section
, "tiny") != 0
372 && strcmp (mep_const_section
, "near") != 0
373 && strcmp (mep_const_section
, "far") != 0)
374 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
378 mep_tiny_cutoff
= 65536;
381 if (TARGET_L
&& ! global_options_set
.x_mep_tiny_cutoff
)
/* 64-bit coprocessor registers cannot be usefully split.  */
384 if (TARGET_64BIT_CR_REGS
)
385 flag_split_wide_types
= 0;
387 init_machine_status
= mep_init_machine_status
;
388 mep_init_intrinsics ();
391 /* Pattern Support - constraints, predicates, expanders. */
393 /* MEP has very few instructions that can refer to the span of
394 addresses used by symbols, so it's common to check for them. */
/* Bodies of the symbol_p / symbolref_p predicates: symbol_p classifies
   X by its rtx code; symbolref_p requires a MEM and classifies its
   address.  NOTE(review): fragment — function headers and the full
   return expressions are elided; tokens verbatim.  */
399 int c
= GET_CODE (x
);
401 return (c
== CONST_INT
411 if (GET_CODE (x
) != MEM
)
414 c
= GET_CODE (XEXP (x
, 0));
415 return (c
== CONST_INT
420 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
/* GEN_REG tests whether R is usable as a general base register; in the
   non-strict case it also accepts the arg pointer and pseudos.
   NOTE(review): the macro's opening lines are elided here.  */
422 #define GEN_REG(R, STRICT) \
425 && ((R) == ARG_POINTER_REGNUM \
426 || (R) >= FIRST_PSEUDO_REGISTER)))
/* Scratch state used by encode_pattern_1: a string encoding of an rtx
   shape plus the matching rtx operands, compared with RTX_IS.  */
428 static char pattern
[12], *patternp
;
429 static GTY(()) rtx patternr
[12];
430 #define RTX_IS(x) (strcmp (pattern, x) == 0)
/* Recursively encode the shape of rtx X into the `pattern' buffer,
   recording each visited rtx in patternr[]; bails out when the buffer
   is nearly full and complains to stderr about unencodable codes.
   NOTE(review): fragment — the case labels and emitted characters for
   most rtx codes are elided; tokens verbatim.  */
433 encode_pattern_1 (rtx x
)
437 if (patternp
== pattern
+ sizeof (pattern
) - 2)
443 patternr
[patternp
-pattern
] = x
;
445 switch (GET_CODE (x
))
453 encode_pattern_1 (XEXP(x
, 0));
457 encode_pattern_1 (XEXP(x
, 0));
458 encode_pattern_1 (XEXP(x
, 1));
462 encode_pattern_1 (XEXP(x
, 0));
463 encode_pattern_1 (XEXP(x
, 1));
467 encode_pattern_1 (XEXP(x
, 0));
/* UNSPECs encode their unspec number as a digit, then each vector
   element.  */
481 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
482 for (i
=0; i
<XVECLEN (x
, 0); i
++)
483 encode_pattern_1 (XVECEXP (x
, 0, i
));
491 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
/* Entry point for pattern encoding: resets state (elided) and encodes X.
   NOTE(review): fragment — tokens verbatim.  */
500 encode_pattern (rtx x
)
503 encode_pattern_1 (x
);
/* Return the one-character section tag encoded in a symbol's name
   ("@<char>.<name>" per the file-header comment): e.g. 'f' for near,
   'n' for far.  NOTE(review): fragment — several cases and returns are
   elided; tokens verbatim.  */
508 mep_section_tag (rtx x
)
514 switch (GET_CODE (x
))
521 x
= XVECEXP (x
, 0, 0);
524 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
533 if (GET_CODE (x
) != SYMBOL_REF
)
536 if (name
[0] == '@' && name
[2] == '.')
538 if (name
[1] == 'i' || name
[1] == 'I')
541 return 'f'; /* near */
542 return 'n'; /* far */
/* Map hard register number REGNO to its (smallest) register class:
   special registers first, then general registers (first 8 GRs are
   TPREL_REGS), control registers, coprocessor registers — including a
   search through user-defined coprocessor subclasses — and finally
   CCR/shadow registers.  NOTE(review): fragment — tokens verbatim.  */
550 mep_regno_reg_class (int regno
)
554 case SP_REGNO
: return SP_REGS
;
555 case TP_REGNO
: return TP_REGS
;
556 case GP_REGNO
: return GP_REGS
;
557 case 0: return R0_REGS
;
558 case HI_REGNO
: return HI_REGS
;
559 case LO_REGNO
: return LO_REGS
;
560 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
563 if (GR_REGNO_P (regno
))
564 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
565 if (CONTROL_REGNO_P (regno
))
568 if (CR_REGNO_P (regno
))
572 /* Search for the register amongst user-defined subclasses of
573 the coprocessor registers. */
574 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
576 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
578 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
580 enum reg_class sub
= reg_class_subclasses
[i
][j
];
582 if (sub
== LIM_REG_CLASSES
)
584 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
589 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
592 if (CCR_REGNO_P (regno
))
595 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
/* True if X is a CONST_INT whose value lies in [MINV, MAXV].
   NOTE(review): fragment — braces elided; tokens verbatim.  */
600 const_in_range (rtx x
, int minv
, int maxv
)
602 return (GET_CODE (x
) == CONST_INT
603 && INTVAL (x
) >= minv
604 && INTVAL (x
) <= maxv
);
607 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
608 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
609 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
610 at the end of the insn stream. */
/* NOTE(review): fragment — return statements elided; tokens verbatim.  */
613 mep_mulr_source (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
615 if (rtx_equal_p (dest
, src1
))
617 else if (rtx_equal_p (dest
, src2
))
622 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
624 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
629 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
630 Change the last element of PATTERN from (clobber (scratch:SI))
631 to (clobber (reg:SI HI_REGNO)). */
/* Resetting INSN_CODE to -1 forces re-recognition of the new pattern.  */
634 mep_rewrite_mult (rtx insn
, rtx pattern
)
638 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
639 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
640 PATTERN (insn
) = pattern
;
641 INSN_CODE (insn
) = -1;
644 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
645 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
646 store the result in DEST if nonnull. */
/* NOTE(review): fragment — the if/else around the two gen_* calls is
   elided; tokens verbatim.  */
649 mep_rewrite_mulsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
653 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
655 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
656 mep_mulr_source (insn
, dest
, src1
, src2
));
658 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
659 mep_rewrite_mult (insn
, pattern
);
662 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
663 SRC3 into $lo, then use either madd or maddr. The move into $lo will
664 be deleted by a peephole2 if SRC3 is already in $lo. */
/* NOTE(review): fragment — the if/else around the two gen_* calls is
   elided; tokens verbatim.  */
667 mep_rewrite_maddsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
671 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
672 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
674 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
675 mep_mulr_source (insn
, dest
, src1
, src2
),
678 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
679 mep_rewrite_mult (insn
, pattern
);
682 /* Return true if $lo has the same value as integer register GPR when
683 instruction INSN is reached. If necessary, rewrite the instruction
684 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
685 rtx for (reg:SI LO_REGNO).
687 This function is intended to be used by the peephole2 pass. Since
688 that pass goes from the end of a basic block to the beginning, and
689 propagates liveness information on the way, there is no need to
690 update register notes here.
692 If GPR_DEAD_P is true on entry, and this function returns true,
693 then the caller will replace _every_ use of GPR in and after INSN
694 with LO. This means that if the instruction that sets $lo is a
695 mulr- or maddr-type instruction, we can rewrite it to use mul or
696 madd instead. In combination with the copy progagation pass,
697 this allows us to replace sequences like:
706 if GPR is no longer used. */
/* NOTE(review): fragment — the do/while loop structure is partially
   elided; tokens verbatim.  Scans backwards from INSN within the basic
   block, recognizing the insn that set $lo via recog_memoized.  */
709 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
713 insn
= PREV_INSN (insn
);
715 switch (recog_memoized (insn
))
717 case CODE_FOR_mulsi3_1
:
719 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
721 mep_rewrite_mulsi3 (insn
,
722 gpr_dead_p
? NULL
: recog_data
.operand
[0],
723 recog_data
.operand
[1],
724 recog_data
.operand
[2]);
729 case CODE_FOR_maddsi3
:
731 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
733 mep_rewrite_maddsi3 (insn
,
734 gpr_dead_p
? NULL
: recog_data
.operand
[0],
735 recog_data
.operand
[1],
736 recog_data
.operand
[2],
737 recog_data
.operand
[3]);
742 case CODE_FOR_mulsi3r
:
743 case CODE_FOR_maddsi3r
:
745 return rtx_equal_p (recog_data
.operand
[1], gpr
);
/* Any other insn that touches $lo or GPR, or is volatile, blocks the
   reuse.  */
748 if (reg_set_p (lo
, insn
)
749 || reg_set_p (gpr
, insn
)
750 || volatile_insn_p (PATTERN (insn
)))
753 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
758 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
762 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
/* NOTE(review): fragment — the save/restore of recog_data and the
   return are elided; tokens verbatim.  */
765 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
767 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
772 /* Return true if SET can be turned into a post-modify load or store
773 that adds OFFSET to GPR. In other words, return true if SET can be
776 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
778 It's OK to change SET to an equivalent operation in order to
/* NOTE(review): fragment — several return statements and the else
   branch header are elided; tokens verbatim.  On success this mutates
   SET in place (mode changes, sign-extension wrapping).  */
782 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
785 unsigned int reg_bytes
, mem_bytes
;
786 enum machine_mode reg_mode
, mem_mode
;
788 /* Only simple SETs can be converted. */
789 if (GET_CODE (set
) != SET
)
792 /* Point REG to what we hope will be the register side of the set and
793 MEM to what we hope will be the memory side. */
794 if (GET_CODE (SET_DEST (set
)) == MEM
)
796 mem
= &SET_DEST (set
);
797 reg
= &SET_SRC (set
);
801 reg
= &SET_DEST (set
);
802 mem
= &SET_SRC (set
);
803 if (GET_CODE (*mem
) == SIGN_EXTEND
)
804 mem
= &XEXP (*mem
, 0);
807 /* Check that *REG is a suitable coprocessor register. */
808 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
811 /* Check that *MEM is a suitable memory reference. */
812 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
815 /* Get the number of bytes in each operand. */
816 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
817 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
819 /* Check that OFFSET is suitably aligned. */
820 if (INTVAL (offset
) & (mem_bytes
- 1))
823 /* Convert *MEM to a normal integer mode. */
824 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
825 *mem
= change_address (*mem
, mem_mode
, NULL
);
827 /* Adjust *REG as well. */
828 *reg
= shallow_copy_rtx (*reg
);
829 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
831 /* SET is a subword load. Convert it to an explicit extension. */
832 PUT_MODE (*reg
, SImode
);
833 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
837 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
838 PUT_MODE (*reg
, reg_mode
);
843 /* Return the effect of frame-related instruction INSN. */
/* Uses the REG_FRAME_RELATED_EXPR note when present, otherwise a copy
   of INSN's own pattern; the result is marked frame-related.
   NOTE(review): fragment — the return statement is elided.  */
846 mep_frame_expr (rtx insn
)
850 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
851 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
852 RTX_FRAME_RELATED_P (expr
) = 1;
856 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
857 new pattern in INSN1; INSN2 will be deleted by the caller. */
/* Frame-related info from both insns is combined into a SEQUENCE note
   on INSN1; INSN_CODE is reset so the merged pattern is re-recognized.
   NOTE(review): fragment — tokens verbatim.  */
860 mep_make_parallel (rtx insn1
, rtx insn2
)
864 if (RTX_FRAME_RELATED_P (insn2
))
866 expr
= mep_frame_expr (insn2
);
867 if (RTX_FRAME_RELATED_P (insn1
))
868 expr
= gen_rtx_SEQUENCE (VOIDmode
,
869 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
870 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
871 RTX_FRAME_RELATED_P (insn1
) = 1;
874 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
875 gen_rtvec (2, PATTERN (insn1
),
877 INSN_CODE (insn1
) = -1;
880 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
881 the basic block to see if any previous load or store instruction can
882 be persuaded to do SET_INSN as a side-effect. Return true if so. */
/* NOTE(review): fragment — the loop header and returns are elided;
   tokens verbatim.  Stops at any insn that sets or uses REG, or is
   volatile, or at the start of the basic block.  */
885 mep_use_post_modify_p_1 (rtx set_insn
, rtx reg
, rtx offset
)
892 insn
= PREV_INSN (insn
);
895 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
897 mep_make_parallel (insn
, set_insn
);
901 if (reg_set_p (reg
, insn
)
902 || reg_referenced_p (reg
, PATTERN (insn
))
903 || volatile_insn_p (PATTERN (insn
)))
907 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
911 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
/* NOTE(review): fragment — the save/restore of recog_data and the
   return are elided; tokens verbatim.  */
914 mep_use_post_modify_p (rtx insn
, rtx reg
, rtx offset
)
916 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
/* Decide whether a clip instruction can implement a min/max of the
   bounds UX (upper) and LX (lower); only when -mclip is enabled.  The
   loops look for bounds of the form 2^i - 1 (and -2^i for the signed
   case, S presumably selecting signedness — TODO confirm).
   NOTE(review): fragment — return statements elided; tokens verbatim.  */
922 mep_allow_clip (rtx ux
, rtx lx
, int s
)
924 HOST_WIDE_INT u
= INTVAL (ux
);
925 HOST_WIDE_INT l
= INTVAL (lx
);
928 if (!TARGET_OPT_CLIP
)
933 for (i
= 0; i
< 30; i
++)
934 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
935 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
943 for (i
= 0; i
< 30; i
++)
944 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
/* True if the low byte of CONST_INT X is a single set bit (LOOKING_FOR
   presumably selects set-bit vs. clear-bit masks — the two case groups
   cover 0x01..0x80 and their complements; returns elided).
   NOTE(review): fragment — tokens verbatim.  */
951 mep_bit_position_p (rtx x
, bool looking_for
)
953 if (GET_CODE (x
) != CONST_INT
)
955 switch ((int) INTVAL(x
) & 0xff)
957 case 0x01: case 0x02: case 0x04: case 0x08:
958 case 0x10: case 0x20: case 0x40: case 0x80:
960 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
961 case 0xef: case 0xdf: case 0xbf: case 0x7f:
/* True if a move from SRC to DEST must be split into multiple insns:
   symbolic sources, large PLUS offsets (outside [-65536, 0xffffff]),
   or destinations in registers 8..FIRST_PSEUDO_REGISTER-1.
   NOTE(review): fragment — several returns and the surrounding
   condition structure are elided; tokens verbatim.  */
968 move_needs_splitting (rtx dest
, rtx src
,
969 enum machine_mode mode ATTRIBUTE_UNUSED
)
971 int s
= mep_section_tag (src
);
975 if (GET_CODE (src
) == CONST
976 || GET_CODE (src
) == MEM
)
978 else if (GET_CODE (src
) == SYMBOL_REF
979 || GET_CODE (src
) == LABEL_REF
980 || GET_CODE (src
) == PLUS
)
986 || (GET_CODE (src
) == PLUS
987 && GET_CODE (XEXP (src
, 1)) == CONST_INT
988 && (INTVAL (XEXP (src
, 1)) < -65536
989 || INTVAL (XEXP (src
, 1)) > 0xffffff))
990 || (GET_CODE (dest
) == REG
991 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
/* Decide whether the mov expander must split OPERANDS: delegates to
   move_needs_splitting, then checks which constant constraints
   (I/J/O/K) the CONST_INT source satisfies.  NOTE(review): fragment —
   return statements elided; tokens verbatim.  */
997 mep_split_mov (rtx
*operands
, int symbolic
)
1001 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1006 if (GET_CODE (operands
[1]) != CONST_INT
)
1009 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1010 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1011 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1014 if (((!reload_completed
&& !reload_in_progress
)
1015 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1016 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1022 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1023 it to one specific value. So the insn chosen depends on whether
1024 the source and destination modes match. */
/* True when the current function's VLIW-ness matches the call target's
   (TGT is a CONST_INT flag).  */
1027 mep_vliw_mode_match (rtx tgt
)
1029 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1030 bool tgt_vliw
= INTVAL (tgt
);
1032 return src_vliw
== tgt_vliw
;
1035 /* Like the above, but also test for near/far mismatches. */
/* NOTE(review): fragment — the return taken in the far ('f') case is
   elided; tokens verbatim.  */
1038 mep_vliw_jmp_match (rtx tgt
)
1040 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1041 bool tgt_vliw
= INTVAL (tgt
);
1043 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1046 return src_vliw
== tgt_vliw
;
/* True if insn X is scheduled in the multi (core+cop) slot.  */
1050 mep_multi_slot (rtx x
)
1052 return get_attr_slot (x
) == SLOT_MULTI
;
1055 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
/* Rejects tiny ('t') and based ('b') symbols during/after reload, since
   their gp-/tp-relative expansion would need $gp/$tp.
   NOTE(review): fragment — the fall-through return for non-symbols is
   elided; tokens verbatim.  */
1058 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1060 /* We can't convert symbol values to gp- or tp-rel values after
1061 reload, as reload might have used $gp or $tp for other
1063 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1065 char e
= mep_section_tag (x
);
1066 return (e
!= 't' && e
!= 'b');
1071 /* Be careful not to use macros that need to be compiled one way for
1072 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Address legitimacy check for the MeP: accepts %lo(sym)[reg] (only for
   modes <= 4 bytes), plain [reg], [reg+const] in signed 16-bit range,
   [reg+unspec] (gp-/tp-relative), call [symbol] when mode is VOIDmode,
   and certain SImode/SFmode constants.  The fprintf calls are debug
   tracing gated by DEBUG_LEGIT.  NOTE(review): fragment — the return
   statements after each accepted form are elided; tokens verbatim.  */
1075 mep_legitimate_address (enum machine_mode mode
, rtx x
, int strict
)
1079 #define DEBUG_LEGIT 0
1081 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1085 if (GET_CODE (x
) == LO_SUM
1086 && GET_CODE (XEXP (x
, 0)) == REG
1087 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1088 && CONSTANT_P (XEXP (x
, 1)))
1090 if (GET_MODE_SIZE (mode
) > 4)
1092 /* We will end up splitting this, and lo_sums are not
1093 offsettable for us. */
1095 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1100 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1105 if (GET_CODE (x
) == REG
1106 && GEN_REG (REGNO (x
), strict
))
1109 fprintf (stderr
, " - yup, [reg]\n");
1114 if (GET_CODE (x
) == PLUS
1115 && GET_CODE (XEXP (x
, 0)) == REG
1116 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1117 && const_in_range (XEXP (x
, 1), -32768, 32767))
1120 fprintf (stderr
, " - yup, [reg+const]\n");
1125 if (GET_CODE (x
) == PLUS
1126 && GET_CODE (XEXP (x
, 0)) == REG
1127 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1128 && GET_CODE (XEXP (x
, 1)) == CONST
1129 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1130 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1131 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1132 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1135 fprintf (stderr
, " - yup, [reg+unspec]\n");
1140 the_tag
= mep_section_tag (x
);
1145 fprintf (stderr
, " - nope, [far]\n");
1150 if (mode
== VOIDmode
1151 && GET_CODE (x
) == SYMBOL_REF
)
1154 fprintf (stderr
, " - yup, call [symbol]\n");
1159 if ((mode
== SImode
|| mode
== SFmode
)
1161 && mep_legitimate_constant_p (mode
, x
)
1162 && the_tag
!= 't' && the_tag
!= 'b')
1164 if (GET_CODE (x
) != CONST_INT
1165 || (INTVAL (x
) <= 0xfffff
1167 && (INTVAL (x
) % 4) == 0))
1170 fprintf (stderr
, " - yup, [const]\n");
1177 fprintf (stderr
, " - nope.\n");
/* Reload-time address fixup: pushes a reload to move (mem+reg) whole
   expressions, and non-tiny/non-based (sym+const) addresses, into a
   general register.  NOTE(review): fragment — trailing push_reload
   arguments and returns are elided; tokens verbatim.  */
1183 mep_legitimize_reload_address (rtx
*x
, enum machine_mode mode
, int opnum
,
1185 int ind_levels ATTRIBUTE_UNUSED
)
1187 enum reload_type type
= (enum reload_type
) type_i
;
1189 if (GET_CODE (*x
) == PLUS
1190 && GET_CODE (XEXP (*x
, 0)) == MEM
1191 && GET_CODE (XEXP (*x
, 1)) == REG
)
1193 /* GCC will by default copy the MEM into a REG, which results in
1194 an invalid address. For us, the best thing to do is move the
1195 whole expression to a REG. */
1196 push_reload (*x
, NULL_RTX
, x
, NULL
,
1197 GENERAL_REGS
, mode
, VOIDmode
,
1202 if (GET_CODE (*x
) == PLUS
1203 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1204 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1206 char e
= mep_section_tag (XEXP (*x
, 0));
1208 if (e
!= 't' && e
!= 'b')
1210 /* GCC thinks that (sym+const) is a valid address. Well,
1211 sometimes it is, this time it isn't. The best thing to
1212 do is reload the symbol to a register, since reg+int
1213 tends to work, and we can't just add the symbol and
1215 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1216 GENERAL_REGS
, mode
, VOIDmode
,
/* Compute the encoded length contribution of the memory operand OPN of
   core load/store INSN: short forms for plain [reg], $sp+imm7a4, small
   aligned offsets, tp-relative unspecs; the actual length values
   returned are elided in this excerpt.  NOTE(review): fragment —
   tokens verbatim.  */
1225 mep_core_address_length (rtx insn
, int opn
)
1227 rtx set
= single_set (insn
);
1228 rtx mem
= XEXP (set
, opn
);
1229 rtx other
= XEXP (set
, 1-opn
);
1230 rtx addr
= XEXP (mem
, 0);
1232 if (register_operand (addr
, Pmode
))
1234 if (GET_CODE (addr
) == PLUS
)
1236 rtx addend
= XEXP (addr
, 1);
1238 gcc_assert (REG_P (XEXP (addr
, 0)));
1240 switch (REGNO (XEXP (addr
, 0)))
1242 case STACK_POINTER_REGNUM
:
1243 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1244 && mep_imm7a4_operand (addend
, VOIDmode
))
1249 gcc_assert (REG_P (other
));
1251 if (REGNO (other
) >= 8)
1254 if (GET_CODE (addend
) == CONST
1255 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1256 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1259 if (GET_CODE (addend
) == CONST_INT
1260 && INTVAL (addend
) >= 0
1261 && INTVAL (addend
) <= 127
1262 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
/* Like mep_core_address_length, but for coprocessor load/store insns:
   distinguishes plain [reg] and post-increment addressing (returned
   length values elided).  NOTE(review): fragment — tokens verbatim.  */
1272 mep_cop_address_length (rtx insn
, int opn
)
1274 rtx set
= single_set (insn
);
1275 rtx mem
= XEXP (set
, opn
);
1276 rtx addr
= XEXP (mem
, 0);
1278 if (GET_CODE (mem
) != MEM
)
1280 if (register_operand (addr
, Pmode
))
1282 if (GET_CODE (addr
) == POST_INC
)
1288 #define DEBUG_EXPAND_MOV 0
/* Expander for the mov patterns.  Forces mem-to-mem moves through a
   register; during/after reload, rewrites tiny ('t') and based ('b')
   symbol references into $gp-/$tp-relative UNSPEC addresses, tracks
   writes to $tp in cfun->machine->reload_changes_tp, forces operands
   through registers for control-register moves and far stores, and
   splits far/tiny/based symbol loads into top/botsym pairs.
   NOTE(review): fragment — many control-flow lines (braces, else
   branches, returns) are elided in this excerpt; tokens verbatim.  */
1290 mep_expand_mov (rtx
*operands
, enum machine_mode mode
)
1295 int post_reload
= 0;
1297 tag
[0] = mep_section_tag (operands
[0]);
1298 tag
[1] = mep_section_tag (operands
[1]);
/* Before reload, a move where neither side is a (subreg of a) register
   must go through a register.  */
1300 if (!reload_in_progress
1301 && !reload_completed
1302 && GET_CODE (operands
[0]) != REG
1303 && GET_CODE (operands
[0]) != SUBREG
1304 && GET_CODE (operands
[1]) != REG
1305 && GET_CODE (operands
[1]) != SUBREG
)
1306 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1308 #if DEBUG_EXPAND_MOV
1309 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1310 reload_in_progress
|| reload_completed
);
1311 debug_rtx (operands
[0]);
1312 debug_rtx (operands
[1]);
1315 if (mode
== DImode
|| mode
== DFmode
)
1318 if (reload_in_progress
|| reload_completed
)
/* Remember that reload wrote $tp, so later code can compensate.  */
1322 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1323 cfun
->machine
->reload_changes_tp
= true;
/* Tiny/based operands require the incoming $gp/$tp values.  */
1325 if (tag
[0] == 't' || tag
[1] == 't')
1327 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1328 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1331 if (tag
[0] == 'b' || tag
[1] == 'b')
1333 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1334 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1337 if (cfun
->machine
->reload_changes_tp
== true)
/* Rewrite a tiny/based symbolic source as ($gp or $tp) + unspec.  */
1344 if (symbol_p (operands
[1]))
1346 t
= mep_section_tag (operands
[1]);
1347 if (t
== 'b' || t
== 't')
1350 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1352 tpsym
= operands
[1];
1353 n
= gen_rtx_UNSPEC (mode
,
1354 gen_rtvec (1, operands
[1]),
1355 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1356 n
= gen_rtx_CONST (mode
, n
);
1358 else if (GET_CODE (operands
[1]) == CONST
1359 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1360 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1361 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1363 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1364 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1365 n
= gen_rtx_UNSPEC (mode
,
1366 gen_rtvec (1, tpsym
),
1367 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1368 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1369 n
= gen_rtx_CONST (mode
, n
);
1371 else if (GET_CODE (operands
[1]) == CONST
1372 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1376 error ("unusual TP-relative address");
1380 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1381 : mep_gp_rtx ()), n
);
1382 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1383 #if DEBUG_EXPAND_MOV
1384 fprintf(stderr
, "mep_expand_mov emitting ");
/* Rewrite tiny/based MEM operands the same way, for both sides.  */
1391 for (i
=0; i
< 2; i
++)
1393 t
= mep_section_tag (operands
[i
]);
1394 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1399 sym
= XEXP (operands
[i
], 0);
1400 if (GET_CODE (sym
) == CONST
1401 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1402 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1415 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1416 n
= gen_rtx_CONST (Pmode
, n
);
1417 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1418 operands
[i
] = replace_equiv_address (operands
[i
], n
);
/* Control-register moves to/from memory go through a core register.  */
1423 if ((GET_CODE (operands
[1]) != REG
1424 && MEP_CONTROL_REG (operands
[0]))
1425 || (GET_CODE (operands
[0]) != REG
1426 && MEP_CONTROL_REG (operands
[1])))
1429 #if DEBUG_EXPAND_MOV
1430 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1432 temp
= gen_reg_rtx (mode
);
1433 emit_move_insn (temp
, operands
[1]);
/* Far stores (or non-word-size symbolic stores) need the address in a
   register first.  */
1437 if (symbolref_p (operands
[0])
1438 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1439 || (GET_MODE_SIZE (mode
) != 4)))
1443 gcc_assert (!reload_in_progress
&& !reload_completed
);
1445 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1446 operands
[0] = replace_equiv_address (operands
[0], temp
);
1447 emit_move_insn (operands
[0], operands
[1]);
1451 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
/* Far/tiny/based symbol loads split into top/botsym halves.  */
1454 if (symbol_p (operands
[1])
1455 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1457 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1458 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1462 if (symbolref_p (operands
[1])
1463 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1467 if (reload_in_progress
|| reload_completed
)
1470 temp
= gen_reg_rtx (Pmode
);
1472 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1473 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1474 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1481 /* Cases where the pattern can't be made to use at all. */
1484 mep_mov_ok (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1488 #define DEBUG_MOV_OK 0
1490 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1491 mep_section_tag (operands
[1]));
1492 debug_rtx (operands
[0]);
1493 debug_rtx (operands
[1]);
1496 /* We want the movh patterns to get these. */
1497 if (GET_CODE (operands
[1]) == HIGH
)
1500 /* We can't store a register to a far variable without using a
1501 scratch register to hold the address. Using far variables should
1502 be split by mep_emit_mov anyway. */
1503 if (mep_section_tag (operands
[0]) == 'f'
1504 || mep_section_tag (operands
[1]) == 'f')
1507 fprintf (stderr
, " - no, f\n");
1511 i
= mep_section_tag (operands
[1]);
1512 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1513 /* These are supposed to be generated with adds of the appropriate
1514 register. During and after reload, however, we allow them to
1515 be accessed as normal symbols because adding a dependency on
1516 the base register now might cause problems. */
1519 fprintf (stderr
, " - no, bt\n");
1524 /* The only moves we can allow involve at least one general
1525 register, so require it. */
1526 for (i
= 0; i
< 2; i
++)
1528 /* Allow subregs too, before reload. */
1529 rtx x
= operands
[i
];
1531 if (GET_CODE (x
) == SUBREG
)
1533 if (GET_CODE (x
) == REG
1534 && ! MEP_CONTROL_REG (x
))
1537 fprintf (stderr
, " - ok\n");
1543 fprintf (stderr
, " - no, no gen reg\n");
1548 #define DEBUG_SPLIT_WIDE_MOVE 0
1550 mep_split_wide_move (rtx
*operands
, enum machine_mode mode
)
1554 #if DEBUG_SPLIT_WIDE_MOVE
1555 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1556 debug_rtx (operands
[0]);
1557 debug_rtx (operands
[1]);
1560 for (i
= 0; i
<= 1; i
++)
1562 rtx op
= operands
[i
], hi
, lo
;
1564 switch (GET_CODE (op
))
1568 unsigned int regno
= REGNO (op
);
1570 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1574 lo
= gen_rtx_REG (SImode
, regno
);
1576 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1577 gen_rtx_REG (DImode
, regno
),
1582 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1583 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1591 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1592 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1599 /* The high part of CR <- GPR moves must be done after the low part. */
1600 operands
[i
+ 4] = lo
;
1601 operands
[i
+ 2] = hi
;
1604 if (reg_mentioned_p (operands
[2], operands
[5])
1605 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1606 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1610 /* Overlapping register pairs -- make sure we don't
1611 early-clobber ourselves. */
1613 operands
[2] = operands
[4];
1616 operands
[3] = operands
[5];
1620 #if DEBUG_SPLIT_WIDE_MOVE
1621 fprintf(stderr
, "\033[34m");
1622 debug_rtx (operands
[2]);
1623 debug_rtx (operands
[3]);
1624 debug_rtx (operands
[4]);
1625 debug_rtx (operands
[5]);
1626 fprintf(stderr
, "\033[0m");
1630 /* Emit a setcc instruction in its entirity. */
1633 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1641 tmp
= op1
, op1
= op2
, op2
= tmp
;
1642 code
= swap_condition (code
);
1647 op1
= force_reg (SImode
, op1
);
1648 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1649 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1653 if (op2
!= const0_rtx
)
1654 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1655 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1659 /* Branchful sequence:
1661 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1664 Branchless sequence:
1665 add3 tmp, op1, -op2 32-bit (or mov + sub)
1666 sltu3 tmp, tmp, 1 16-bit
1667 xor3 dest, tmp, 1 32-bit
1669 if (optimize_size
&& op2
!= const0_rtx
)
1672 if (op2
!= const0_rtx
)
1673 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1675 op2
= gen_reg_rtx (SImode
);
1676 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1678 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1679 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1683 if (GET_CODE (op2
) != CONST_INT
1684 || INTVAL (op2
) == 0x7ffffff)
1686 op2
= GEN_INT (INTVAL (op2
) + 1);
1687 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1690 if (GET_CODE (op2
) != CONST_INT
1691 || INTVAL (op2
) == -1)
1693 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1694 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1697 if (GET_CODE (op2
) != CONST_INT
1698 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1700 op2
= GEN_INT (INTVAL (op2
) - 1);
1701 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1704 if (GET_CODE (op2
) != CONST_INT
1705 || op2
== const0_rtx
)
1707 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1708 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1716 mep_expand_setcc (rtx
*operands
)
1718 rtx dest
= operands
[0];
1719 enum rtx_code code
= GET_CODE (operands
[1]);
1720 rtx op0
= operands
[2];
1721 rtx op1
= operands
[3];
1723 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1727 mep_expand_cbranch (rtx
*operands
)
1729 enum rtx_code code
= GET_CODE (operands
[0]);
1730 rtx op0
= operands
[1];
1731 rtx op1
= operands
[2];
1738 if (mep_imm4_operand (op1
, SImode
))
1741 tmp
= gen_reg_rtx (SImode
);
1742 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1749 if (mep_imm4_operand (op1
, SImode
))
1752 tmp
= gen_reg_rtx (SImode
);
1753 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1762 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1763 op1
= force_reg (SImode
, op1
);
1768 if (GET_CODE (op1
) == CONST_INT
1769 && INTVAL (op1
) != 0x7fffffff)
1771 op1
= GEN_INT (INTVAL (op1
) + 1);
1772 code
= (code
== LE
? LT
: GE
);
1776 tmp
= gen_reg_rtx (SImode
);
1777 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1779 code
= (code
== LE
? EQ
: NE
);
1785 if (op1
== const1_rtx
)
1792 tmp
= gen_reg_rtx (SImode
);
1793 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1800 tmp
= gen_reg_rtx (SImode
);
1801 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1803 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1812 tmp
= gen_reg_rtx (SImode
);
1813 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1814 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1821 tmp
= gen_reg_rtx (SImode
);
1822 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1824 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1836 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1840 mep_emit_cbranch (rtx
*operands
, int ne
)
1842 if (GET_CODE (operands
[1]) == REG
)
1843 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1844 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1845 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1847 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1851 mep_expand_call (rtx
*operands
, int returns_value
)
1853 rtx addr
= operands
[returns_value
];
1854 rtx tp
= mep_tp_rtx ();
1855 rtx gp
= mep_gp_rtx ();
1857 gcc_assert (GET_CODE (addr
) == MEM
);
1859 addr
= XEXP (addr
, 0);
1861 if (! mep_call_address_operand (addr
, VOIDmode
))
1862 addr
= force_reg (SImode
, addr
);
1864 if (! operands
[returns_value
+2])
1865 operands
[returns_value
+2] = const0_rtx
;
1868 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1869 operands
[3], tp
, gp
));
1871 emit_call_insn (gen_call_internal (addr
, operands
[1],
1872 operands
[2], tp
, gp
));
1875 /* Aliasing Support. */
1877 /* If X is a machine specific address (i.e. a symbol or label being
1878 referenced as a displacement from the GOT implemented using an
1879 UNSPEC), then return the base term. Otherwise return X. */
1882 mep_find_base_term (rtx x
)
1887 if (GET_CODE (x
) != PLUS
)
1892 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1893 && base
== mep_tp_rtx ())
1895 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1896 && base
== mep_gp_rtx ())
1901 if (GET_CODE (term
) != CONST
)
1903 term
= XEXP (term
, 0);
1905 if (GET_CODE (term
) != UNSPEC
1906 || XINT (term
, 1) != unspec
)
1909 return XVECEXP (term
, 0, 0);
1912 /* Reload Support. */
1914 /* Return true if the registers in CLASS cannot represent the change from
1915 modes FROM to TO. */
1918 mep_cannot_change_mode_class (enum machine_mode from
, enum machine_mode to
,
1919 enum reg_class regclass
)
1924 /* 64-bit COP regs must remain 64-bit COP regs. */
1925 if (TARGET_64BIT_CR_REGS
1926 && (regclass
== CR_REGS
1927 || regclass
== LOADABLE_CR_REGS
)
1928 && (GET_MODE_SIZE (to
) < 8
1929 || GET_MODE_SIZE (from
) < 8))
1935 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1938 mep_general_reg (rtx x
)
1940 while (GET_CODE (x
) == SUBREG
)
1942 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
1946 mep_nongeneral_reg (rtx x
)
1948 while (GET_CODE (x
) == SUBREG
)
1950 return (GET_CODE (x
) == REG
1951 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
1955 mep_general_copro_reg (rtx x
)
1957 while (GET_CODE (x
) == SUBREG
)
1959 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
1963 mep_nonregister (rtx x
)
1965 while (GET_CODE (x
) == SUBREG
)
1967 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
1970 #define DEBUG_RELOAD 0
1972 /* Return the secondary reload class needed for moving value X to or
1973 from a register in coprocessor register class CLASS. */
1975 static enum reg_class
1976 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
1978 if (mep_general_reg (x
))
1979 /* We can do the move directly if mep_have_core_copro_moves_p,
1980 otherwise we need to go through memory. Either way, no secondary
1981 register is needed. */
1984 if (mep_general_copro_reg (x
))
1986 /* We can do the move directly if mep_have_copro_copro_moves_p. */
1987 if (mep_have_copro_copro_moves_p
)
1990 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
1991 if (mep_have_core_copro_moves_p
)
1992 return GENERAL_REGS
;
1994 /* Otherwise we need to do it through memory. No secondary
1995 register is needed. */
1999 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2000 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2001 /* X is a memory value that we can access directly. */
2004 /* We have to move X into a GPR first and then copy it to
2005 the coprocessor register. The move from the GPR to the
2006 coprocessor might be done directly or through memory,
2007 depending on mep_have_core_copro_moves_p. */
2008 return GENERAL_REGS
;
2011 /* Copying X to register in RCLASS. */
2014 mep_secondary_input_reload_class (enum reg_class rclass
,
2015 enum machine_mode mode ATTRIBUTE_UNUSED
,
2021 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2025 if (reg_class_subset_p (rclass
, CR_REGS
))
2026 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2027 else if (MEP_NONGENERAL_CLASS (rclass
)
2028 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2032 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2034 return (enum reg_class
) rv
;
2037 /* Copying register in RCLASS to X. */
2040 mep_secondary_output_reload_class (enum reg_class rclass
,
2041 enum machine_mode mode ATTRIBUTE_UNUSED
,
2047 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2051 if (reg_class_subset_p (rclass
, CR_REGS
))
2052 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2053 else if (MEP_NONGENERAL_CLASS (rclass
)
2054 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2058 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2061 return (enum reg_class
) rv
;
2064 /* Implement SECONDARY_MEMORY_NEEDED. */
2067 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2068 enum machine_mode mode ATTRIBUTE_UNUSED
)
2070 if (!mep_have_core_copro_moves_p
)
2072 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2073 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2075 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2076 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2078 if (!mep_have_copro_copro_moves_p
2079 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2080 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2087 mep_expand_reload (rtx
*operands
, enum machine_mode mode
)
2089 /* There are three cases for each direction:
2094 int s0
= mep_section_tag (operands
[0]) == 'f';
2095 int s1
= mep_section_tag (operands
[1]) == 'f';
2096 int c0
= mep_nongeneral_reg (operands
[0]);
2097 int c1
= mep_nongeneral_reg (operands
[1]);
2098 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2101 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2102 debug_rtx (operands
[0]);
2103 debug_rtx (operands
[1]);
2108 case 00: /* Don't know why this gets here. */
2109 case 02: /* general = far */
2110 emit_move_insn (operands
[0], operands
[1]);
2113 case 10: /* cr = mem */
2114 case 11: /* cr = cr */
2115 case 01: /* mem = cr */
2116 case 12: /* cr = far */
2117 emit_move_insn (operands
[2], operands
[1]);
2118 emit_move_insn (operands
[0], operands
[2]);
2121 case 20: /* far = general */
2122 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2123 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2126 case 21: /* far = cr */
2127 case 22: /* far = far */
2129 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2130 which
, mode_name
[mode
]);
2131 debug_rtx (operands
[0]);
2132 debug_rtx (operands
[1]);
2137 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2138 can be moved directly into registers 0 to 7, but not into the rest.
2139 If so, and if the required class includes registers 0 to 7, restrict
2140 it to those registers. */
2143 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2145 switch (GET_CODE (x
))
2148 if (INTVAL (x
) >= 0x10000
2149 && INTVAL (x
) < 0x01000000
2150 && (INTVAL (x
) & 0xffff) != 0
2151 && reg_class_subset_p (TPREL_REGS
, rclass
))
2152 rclass
= TPREL_REGS
;
2158 if (mep_section_tag (x
) != 'f'
2159 && reg_class_subset_p (TPREL_REGS
, rclass
))
2160 rclass
= TPREL_REGS
;
2169 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2170 moves, 4 for direct double-register moves, and 1000 for anything
2171 that requires a temporary register or temporary stack slot. */
2174 mep_register_move_cost (enum machine_mode mode
, enum reg_class from
, enum reg_class to
)
2176 if (mep_have_copro_copro_moves_p
2177 && reg_class_subset_p (from
, CR_REGS
)
2178 && reg_class_subset_p (to
, CR_REGS
))
2180 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2184 if (reg_class_subset_p (from
, CR_REGS
)
2185 && reg_class_subset_p (to
, CR_REGS
))
2187 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2191 if (reg_class_subset_p (from
, CR_REGS
)
2192 || reg_class_subset_p (to
, CR_REGS
))
2194 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2198 if (mep_secondary_memory_needed (from
, to
, mode
))
2200 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2203 if (GET_MODE_SIZE (mode
) > 4)
/* Functions to save and restore machine-specific function data. */

/* Allocate a zero-initialized per-function machine_function record.  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2219 mep_allocate_initial_value (rtx reg
)
2223 if (GET_CODE (reg
) != REG
)
2226 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2229 /* In interrupt functions, the "initial" values of $gp and $tp are
2230 provided by the prologue. They are not necessarily the same as
2231 the values that the caller was using. */
2232 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2233 if (mep_interrupt_p ())
2236 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2238 cfun
->machine
->reg_save_size
+= 4;
2239 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2242 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2243 return gen_rtx_MEM (SImode
, plus_constant (Pmode
, arg_pointer_rtx
, -rss
));
2247 mep_return_addr_rtx (int count
)
2252 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2258 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2264 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2268 mep_interrupt_p (void)
2270 if (cfun
->machine
->interrupt_handler
== 0)
2272 int interrupt_handler
2273 = (lookup_attribute ("interrupt",
2274 DECL_ATTRIBUTES (current_function_decl
))
2276 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2278 return cfun
->machine
->interrupt_handler
== 2;
2282 mep_disinterrupt_p (void)
2284 if (cfun
->machine
->disable_interrupts
== 0)
2286 int disable_interrupts
2287 = (lookup_attribute ("disinterrupt",
2288 DECL_ATTRIBUTES (current_function_decl
))
2290 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2292 return cfun
->machine
->disable_interrupts
== 2;
2296 /* Frame/Epilog/Prolog Related. */
2299 mep_reg_set_p (rtx reg
, rtx insn
)
2301 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2304 if (FIND_REG_INC_NOTE (insn
, reg
))
2306 insn
= PATTERN (insn
);
2309 if (GET_CODE (insn
) == SET
2310 && GET_CODE (XEXP (insn
, 0)) == REG
2311 && GET_CODE (XEXP (insn
, 1)) == REG
2312 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2315 return set_of (reg
, insn
) != NULL_RTX
;
2319 #define MEP_SAVES_UNKNOWN 0
2320 #define MEP_SAVES_YES 1
2321 #define MEP_SAVES_MAYBE 2
2322 #define MEP_SAVES_NO 3
2325 mep_reg_set_in_function (int regno
)
2329 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2332 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2335 push_topmost_sequence ();
2336 insn
= get_insns ();
2337 pop_topmost_sequence ();
2342 reg
= gen_rtx_REG (SImode
, regno
);
2344 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2345 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2351 mep_asm_without_operands_p (void)
2353 if (cfun
->machine
->asms_without_operands
== 0)
2357 push_topmost_sequence ();
2358 insn
= get_insns ();
2359 pop_topmost_sequence ();
2361 cfun
->machine
->asms_without_operands
= 1;
2365 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2367 cfun
->machine
->asms_without_operands
= 2;
2370 insn
= NEXT_INSN (insn
);
2374 return cfun
->machine
->asms_without_operands
== 2;
2377 /* Interrupt functions save/restore every call-preserved register, and
2378 any call-used register it uses (or all if it calls any function,
2379 since they may get clobbered there too). Here we check to see
2380 which call-used registers need saving. */
2382 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2383 && (r == FIRST_CCR_REGNO + 1 \
2384 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2385 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2388 mep_interrupt_saved_reg (int r
)
2390 if (!mep_interrupt_p ())
2392 if (r
== REGSAVE_CONTROL_TEMP
2393 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2395 if (mep_asm_without_operands_p ()
2397 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2398 || IVC2_ISAVED_REG (r
)))
2401 /* Function calls mean we need to save $lp. */
2402 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2404 if (!crtl
->is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2405 /* The interrupt handler might use these registers for repeat blocks,
2406 or it might call a function that does so. */
2407 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2409 if (crtl
->is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2411 /* Functions we call might clobber these. */
2412 if (call_used_regs
[r
] && !fixed_regs
[r
])
2414 /* Additional registers that need to be saved for IVC2. */
2415 if (IVC2_ISAVED_REG (r
))
2422 mep_call_saves_register (int r
)
2424 if (! cfun
->machine
->frame_locked
)
2426 int rv
= MEP_SAVES_NO
;
2428 if (cfun
->machine
->reg_save_slot
[r
])
2430 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2432 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2434 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2436 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2437 /* We need these to have stack slots so that they can be set during
2440 else if (mep_interrupt_saved_reg (r
))
2442 cfun
->machine
->reg_saved
[r
] = rv
;
2444 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2447 /* Return true if epilogue uses register REGNO. */
2450 mep_epilogue_uses (int regno
)
2452 /* Since $lp is a call-saved register, the generic code will normally
2453 mark it used in the epilogue if it needs to be saved and restored.
2454 However, when profiling is enabled, the profiling code will implicitly
2455 clobber $11. This case has to be handled specially both here and in
2456 mep_call_saves_register. */
2457 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2459 /* Interrupt functions save/restore pretty much everything. */
2460 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2464 mep_reg_size (int regno
)
2466 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2471 /* Worker function for TARGET_CAN_ELIMINATE. */
2474 mep_can_eliminate (const int from
, const int to
)
2476 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2477 ? ! frame_pointer_needed
2482 mep_elimination_offset (int from
, int to
)
2486 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2489 if (!cfun
->machine
->frame_locked
)
2490 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2492 /* We don't count arg_regs_to_save in the arg pointer offset, because
2493 gcc thinks the arg pointer has moved along with the saved regs.
2494 However, we do count it when we adjust $sp in the prologue. */
2496 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2497 if (mep_call_saves_register (i
))
2498 reg_save_size
+= mep_reg_size (i
);
2500 if (reg_save_size
% 8)
2501 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2503 cfun
->machine
->regsave_filler
= 0;
2505 /* This is what our total stack adjustment looks like. */
2506 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2509 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2511 cfun
->machine
->frame_filler
= 0;
2514 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2515 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2517 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2518 return cfun
->machine
->frame_filler
+ frame_size
;
2520 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2521 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2529 RTX_FRAME_RELATED_P (x
) = 1;
2533 /* Since the prologue/epilogue code is generated after optimization,
2534 we can't rely on gcc to split constants for us. So, this code
2535 captures all the ways to add a constant to a register in one logic
2536 chunk, including optimizing away insns we just don't need. This
2537 makes the prolog/epilog code easier to follow. */
2539 add_constant (int dest
, int src
, int value
, int mark_frame
)
2544 if (src
== dest
&& value
== 0)
2549 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2550 gen_rtx_REG (SImode
, src
));
2552 RTX_FRAME_RELATED_P(insn
) = 1;
2556 if (value
>= -32768 && value
<= 32767)
2558 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2559 gen_rtx_REG (SImode
, src
),
2562 RTX_FRAME_RELATED_P(insn
) = 1;
2566 /* Big constant, need to use a temp register. We use
2567 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2568 area is always small enough to directly add to). */
2570 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2571 lo
= value
& 0xffff;
2573 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2578 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2579 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2583 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2584 gen_rtx_REG (SImode
, src
),
2585 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2588 RTX_FRAME_RELATED_P(insn
) = 1;
2589 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2590 gen_rtx_SET (SImode
,
2591 gen_rtx_REG (SImode
, dest
),
2592 gen_rtx_PLUS (SImode
,
2593 gen_rtx_REG (SImode
, dest
),
2598 /* Move SRC to DEST. Mark the move as being potentially dead if
2602 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2604 rtx insn
= emit_move_insn (dest
, src
);
2607 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2612 /* Used for interrupt functions, which can't assume that $tp and $gp
2613 contain the correct pointers. */
2616 mep_reload_pointer (int regno
, const char *symbol
)
2620 if (!df_regs_ever_live_p(regno
) && crtl
->is_leaf
)
2623 reg
= gen_rtx_REG (SImode
, regno
);
2624 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2625 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2626 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2629 /* Assign save slots for any register not already saved. DImode
2630 registers go at the end of the reg save area; the rest go at the
2631 beginning. This is for alignment purposes. Returns true if a frame
2632 is really needed. */
2634 mep_assign_save_slots (int reg_save_size
)
2636 bool really_need_stack_frame
= false;
2640 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2641 if (mep_call_saves_register(i
))
2643 int regsize
= mep_reg_size (i
);
2645 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2646 || mep_reg_set_in_function (i
))
2647 really_need_stack_frame
= true;
2649 if (cfun
->machine
->reg_save_slot
[i
])
2654 cfun
->machine
->reg_save_size
+= regsize
;
2655 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2659 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2663 cfun
->machine
->frame_locked
= 1;
2664 return really_need_stack_frame
;
2668 mep_expand_prologue (void)
2670 int i
, rss
, sp_offset
= 0;
2673 int really_need_stack_frame
;
2675 /* We must not allow register renaming in interrupt functions,
2676 because that invalidates the correctness of the set of call-used
2677 registers we're going to save/restore. */
2678 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2680 if (mep_disinterrupt_p ())
2681 emit_insn (gen_mep_disable_int ());
2683 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2685 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2686 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2687 really_need_stack_frame
= frame_size
;
2689 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2691 sp_offset
= reg_save_size
;
2692 if (sp_offset
+ frame_size
< 128)
2693 sp_offset
+= frame_size
;
2695 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2697 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2698 if (mep_call_saves_register(i
))
2702 enum machine_mode rmode
;
2704 rss
= cfun
->machine
->reg_save_slot
[i
];
2706 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2707 && (!mep_reg_set_in_function (i
)
2708 && !mep_interrupt_p ()))
2711 if (mep_reg_size (i
) == 8)
2716 /* If there is a pseudo associated with this register's initial value,
2717 reload might have already spilt it to the stack slot suggested by
2718 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2720 mem
= gen_rtx_MEM (rmode
,
2721 plus_constant (Pmode
, stack_pointer_rtx
,
2723 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2725 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2726 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2727 else if (rmode
== DImode
)
2730 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2732 mem
= gen_rtx_MEM (SImode
,
2733 plus_constant (Pmode
, stack_pointer_rtx
,
2734 sp_offset
- rss
+ be
));
2736 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2737 gen_rtx_REG (SImode
, i
),
2739 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2740 gen_rtx_ZERO_EXTRACT (SImode
,
2741 gen_rtx_REG (DImode
, i
),
2745 insn
= maybe_dead_move (mem
,
2746 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2748 RTX_FRAME_RELATED_P (insn
) = 1;
2750 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2751 gen_rtx_SET (VOIDmode
,
2753 gen_rtx_REG (rmode
, i
)));
2754 mem
= gen_rtx_MEM (SImode
,
2755 plus_constant (Pmode
, stack_pointer_rtx
,
2756 sp_offset
- rss
+ (4-be
)));
2757 insn
= maybe_dead_move (mem
,
2758 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2764 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2765 gen_rtx_REG (rmode
, i
),
2767 insn
= maybe_dead_move (mem
,
2768 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2770 RTX_FRAME_RELATED_P (insn
) = 1;
2772 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2773 gen_rtx_SET (VOIDmode
,
2775 gen_rtx_REG (rmode
, i
)));
2779 if (frame_pointer_needed
)
2781 /* We've already adjusted down by sp_offset. Total $sp change
2782 is reg_save_size + frame_size. We want a net change here of
2783 just reg_save_size. */
2784 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2787 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2789 if (mep_interrupt_p ())
2791 mep_reload_pointer(GP_REGNO
, "__sdabase");
2792 mep_reload_pointer(TP_REGNO
, "__tpbase");
2797 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2799 int local
= hwi_local
;
2800 int frame_size
= local
+ crtl
->outgoing_args_size
;
2805 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2807 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2808 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2809 sp_offset
= reg_save_size
+ frame_size
;
2811 ffill
= cfun
->machine
->frame_filler
;
2813 if (cfun
->machine
->mep_frame_pointer_needed
)
2814 reg_names
[FP_REGNO
] = "$fp";
2816 reg_names
[FP_REGNO
] = "$8";
2821 if (debug_info_level
== DINFO_LEVEL_NONE
)
2823 fprintf (file
, "\t# frame: %d", sp_offset
);
2825 fprintf (file
, " %d regs", reg_save_size
);
2827 fprintf (file
, " %d locals", local
);
2828 if (crtl
->outgoing_args_size
)
2829 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2830 fprintf (file
, "\n");
2834 fprintf (file
, "\t#\n");
2835 fprintf (file
, "\t# Initial Frame Information:\n");
2836 if (sp_offset
|| !frame_pointer_needed
)
2837 fprintf (file
, "\t# Entry ---------- 0\n");
2839 /* Sort registers by save slots, so they're printed in the order
2840 they appear in memory, not the order they're saved in. */
2841 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2843 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2844 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2845 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2846 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2848 int t
= slot_map
[si
];
2849 slot_map
[si
] = slot_map
[sj
];
2854 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2857 int r
= slot_map
[i
];
2858 int rss
= cfun
->machine
->reg_save_slot
[r
];
2860 if (!mep_call_saves_register (r
))
2863 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2864 && (!mep_reg_set_in_function (r
)
2865 && !mep_interrupt_p ()))
2868 rsize
= mep_reg_size(r
);
2869 skip
= rss
- (sp
+rsize
);
2871 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2872 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2873 rsize
, reg_names
[r
], sp_offset
- rss
);
2877 skip
= reg_save_size
- sp
;
2879 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2881 if (frame_pointer_needed
)
2882 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2884 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2886 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2887 if (crtl
->outgoing_args_size
)
2888 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2889 crtl
->outgoing_args_size
);
2890 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2891 fprintf (file
, "\t#\n");
/* Flags communicating epilogue context to mep_expand_epilogue.
   mep_prevent_lp_restore is set while emitting an EH-return epilogue,
   where $lp must not be reloaded from its save slot;
   mep_sibcall_epilogue is set while emitting a sibcall epilogue.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
2899 mep_expand_epilogue (void)
2901 int i
, sp_offset
= 0;
2902 int reg_save_size
= 0;
2904 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2905 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2906 int interrupt_handler
= mep_interrupt_p ();
2908 if (profile_arc_flag
== 2)
2909 emit_insn (gen_mep_bb_trace_ret ());
2911 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2912 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2914 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2916 if (frame_pointer_needed
)
2918 /* If we have a frame pointer, we won't have a reliable stack
2919 pointer (alloca, you know), so rebase SP from FP */
2920 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2921 gen_rtx_REG (SImode
, FP_REGNO
));
2922 sp_offset
= reg_save_size
;
2926 /* SP is right under our local variable space. Adjust it if
2928 sp_offset
= reg_save_size
+ frame_size
;
2929 if (sp_offset
>= 128)
2931 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
2932 sp_offset
-= frame_size
;
2936 /* This is backwards so that we restore the control and coprocessor
2937 registers before the temporary registers we use to restore
2939 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
2940 if (mep_call_saves_register (i
))
2942 enum machine_mode rmode
;
2943 int rss
= cfun
->machine
->reg_save_slot
[i
];
2945 if (mep_reg_size (i
) == 8)
2950 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2951 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
2953 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
2955 if (!mep_prevent_lp_restore
2956 && !interrupt_handler
2957 && (i
== 10 || i
== 11))
2960 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2961 emit_move_insn (gen_rtx_REG (rmode
, i
),
2963 plus_constant (Pmode
, stack_pointer_rtx
,
2967 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
2968 /* Defer this one so we can jump indirect rather than
2969 copying the RA to $lp and "ret". EH epilogues
2970 automatically skip this anyway. */
2971 lp_slot
= sp_offset
-rss
;
2974 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2976 plus_constant (Pmode
,
2979 emit_move_insn (gen_rtx_REG (rmode
, i
),
2980 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
2986 /* Restore this one last so we know it will be in the temp
2987 register when we return by jumping indirectly via the temp. */
2988 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2989 gen_rtx_MEM (SImode
,
2990 plus_constant (Pmode
, stack_pointer_rtx
,
2992 lp_temp
= REGSAVE_CONTROL_TEMP
;
2996 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
2998 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
2999 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3000 gen_rtx_REG (SImode
, SP_REGNO
),
3001 cfun
->machine
->eh_stack_adjust
));
3003 if (mep_sibcall_epilogue
)
3006 if (mep_disinterrupt_p ())
3007 emit_insn (gen_mep_enable_int ());
3009 if (mep_prevent_lp_restore
)
3011 emit_jump_insn (gen_eh_return_internal ());
3014 else if (interrupt_handler
)
3015 emit_jump_insn (gen_mep_reti ());
3017 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3021 mep_expand_eh_return (rtx
*operands
)
3023 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3025 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3026 emit_move_insn (ra
, operands
[0]);
3030 emit_insn (gen_eh_epilogue (operands
[0]));
3034 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3036 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3037 mep_prevent_lp_restore
= 1;
3038 mep_expand_epilogue ();
3039 mep_prevent_lp_restore
= 0;
3043 mep_expand_sibcall_epilogue (void)
3045 mep_sibcall_epilogue
= 1;
3046 mep_expand_epilogue ();
3047 mep_sibcall_epilogue
= 0;
3051 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3056 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3059 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3060 if (mep_interrupt_p () || mep_disinterrupt_p ())
3067 mep_return_stackadj_rtx (void)
3069 return gen_rtx_REG (SImode
, 10);
3073 mep_return_handler_rtx (void)
3075 return gen_rtx_REG (SImode
, LP_REGNO
);
/* Emit the -p profiling stub (FUNCTION_PROFILER).  Saves $0 and $lp
   on a temporary 8-byte stack frame, calls __mep_mcount, then
   restores both and pops the frame.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3095 mep_emit_bb_trace_ret (void)
3097 fprintf (asm_out_file
, "\t# end of block profiling\n");
3098 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3099 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3100 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3101 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3102 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3103 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3104 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3105 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3106 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3113 /* Operand Printing. */
3116 mep_print_operand_address (FILE *stream
, rtx address
)
3118 if (GET_CODE (address
) == MEM
)
3119 address
= XEXP (address
, 0);
3121 /* cf: gcc.dg/asm-4.c. */
3122 gcc_assert (GET_CODE (address
) == REG
);
3124 mep_print_operand (stream
, address
, 0);
3130 const char *pattern
;
3133 const conversions
[] =
3136 { 0, "m+ri", "3(2)" },
3140 { 0, "mLrs", "%lo(3)(2)" },
3141 { 0, "mLr+si", "%lo(4+5)(2)" },
3142 { 0, "m+ru2s", "%tpoff(5)(2)" },
3143 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3144 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3145 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3146 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3147 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3149 { 0, "m+si", "(2+3)" },
3150 { 0, "m+li", "(2+3)" },
3153 { 0, "+si", "1+2" },
3154 { 0, "+u2si", "%tpoff(3+4)" },
3155 { 0, "+u3si", "%sdaoff(3+4)" },
3161 { 'h', "Hs", "%hi(1)" },
3163 { 'I', "u2s", "%tpoff(2)" },
3164 { 'I', "u3s", "%sdaoff(2)" },
3165 { 'I', "+u2si", "%tpoff(3+4)" },
3166 { 'I', "+u3si", "%sdaoff(3+4)" },
3168 { 'P', "mr", "(1\\+),\\0" },
3174 unique_bit_in (HOST_WIDE_INT i
)
3178 case 0x01: case 0xfe: return 0;
3179 case 0x02: case 0xfd: return 1;
3180 case 0x04: case 0xfb: return 2;
3181 case 0x08: case 0xf7: return 3;
3182 case 0x10: case 0x7f: return 4;
3183 case 0x20: case 0xbf: return 5;
3184 case 0x40: case 0xdf: return 6;
3185 case 0x80: case 0xef: return 7;
3192 bit_size_for_clip (HOST_WIDE_INT i
)
3196 for (rv
= 0; rv
< 31; rv
++)
3197 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3202 /* Print an operand to a assembler instruction. */
3205 mep_print_operand (FILE *file
, rtx x
, int code
)
3208 const char *real_name
;
3212 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3213 we're using, then skip over the "mep_" part of its name. */
3214 const struct cgen_insn
*insn
;
3216 if (mep_get_move_insn (mep_cmov
, &insn
))
3217 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3219 mep_intrinsic_unavailable (mep_cmov
);
3224 switch (GET_CODE (x
))
3227 fputs ("clr", file
);
3230 fputs ("set", file
);
3233 fputs ("not", file
);
3236 output_operand_lossage ("invalid %%L code");
3241 /* Print the second operand of a CR <- CR move. If we're using
3242 a two-operand instruction (i.e., a real cmov), then just print
3243 the operand normally. If we're using a "reg, reg, immediate"
3244 instruction such as caddi3, print the operand followed by a
3245 zero field. If we're using a three-register instruction,
3246 print the operand twice. */
3247 const struct cgen_insn
*insn
;
3249 mep_print_operand (file
, x
, 0);
3250 if (mep_get_move_insn (mep_cmov
, &insn
)
3251 && insn_data
[insn
->icode
].n_operands
== 3)
3254 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3255 mep_print_operand (file
, x
, 0);
3257 mep_print_operand (file
, const0_rtx
, 0);
3263 for (i
= 0; conversions
[i
].pattern
; i
++)
3264 if (conversions
[i
].code
== code
3265 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3267 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3268 if (conversions
[i
].format
[j
] == '\\')
3270 fputc (conversions
[i
].format
[j
+1], file
);
3273 else if (ISDIGIT(conversions
[i
].format
[j
]))
3275 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3276 switch (GET_CODE (r
))
3279 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3285 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3288 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3291 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3294 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3297 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3300 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3301 && !(INTVAL (r
) & 0xff))
3302 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3304 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3307 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3308 && conversions
[i
].format
[j
+1] == 0)
3310 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3311 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3314 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3317 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3322 fprintf(file
, "[const_double 0x%lx]",
3323 (unsigned long) CONST_DOUBLE_HIGH(r
));
3326 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3327 assemble_name (file
, real_name
);
3330 output_asm_label (r
);
3333 fprintf (stderr
, "don't know how to print this operand:");
3340 if (conversions
[i
].format
[j
] == '+'
3341 && (!code
|| code
== 'I')
3342 && ISDIGIT (conversions
[i
].format
[j
+1])
3343 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3344 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3346 fputc(conversions
[i
].format
[j
], file
);
3350 if (!conversions
[i
].pattern
)
3352 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3360 mep_final_prescan_insn (rtx insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3361 int noperands ATTRIBUTE_UNUSED
)
3363 /* Despite the fact that MeP is perfectly capable of branching and
3364 doing something else in the same bundle, gcc does jump
3365 optimization *after* scheduling, so we cannot trust the bundling
3366 flags on jump instructions. */
3367 if (GET_MODE (insn
) == BImode
3368 && get_attr_slots (insn
) != SLOTS_CORE
)
3369 fputc ('+', asm_out_file
);
3372 /* Function args in registers. */
3375 mep_setup_incoming_varargs (cumulative_args_t cum
,
3376 enum machine_mode mode ATTRIBUTE_UNUSED
,
3377 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3378 int second_time ATTRIBUTE_UNUSED
)
3380 int nsave
= 4 - (get_cumulative_args (cum
)->nregs
+ 1);
3383 cfun
->machine
->arg_regs_to_save
= nsave
;
3384 *pretend_size
= nsave
* 4;
3388 bytesize (const_tree type
, enum machine_mode mode
)
3390 if (mode
== BLKmode
)
3391 return int_size_in_bytes (type
);
3392 return GET_MODE_SIZE (mode
);
3396 mep_expand_builtin_saveregs (void)
3401 ns
= cfun
->machine
->arg_regs_to_save
;
3404 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3405 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3410 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3413 move_block_from_reg (5-ns
, regbuf
, ns
);
3417 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3418 int ofs
= 8 * ((ns
+1)/2);
3420 for (i
=0; i
<ns
; i
++)
3422 int rn
= (4-ns
) + i
+ 49;
3425 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3426 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3430 return XEXP (regbuf
, 0);
3433 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3436 mep_build_builtin_va_list (void)
3438 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3442 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3444 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3445 get_identifier ("__va_next_gp"), ptr_type_node
);
3446 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3447 get_identifier ("__va_next_gp_limit"),
3449 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3451 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3454 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3455 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3456 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3457 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3459 TYPE_FIELDS (record
) = f_next_gp
;
3460 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3461 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3462 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3464 layout_type (record
);
3470 mep_expand_va_start (tree valist
, rtx nextarg
)
3472 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3473 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3477 ns
= cfun
->machine
->arg_regs_to_save
;
3479 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3480 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3481 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3482 f_next_stack
= DECL_CHAIN (f_next_cop
);
3484 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3486 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3487 valist
, f_next_gp_limit
, NULL_TREE
);
3488 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3490 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3491 valist
, f_next_stack
, NULL_TREE
);
3493 /* va_list.next_gp = expand_builtin_saveregs (); */
3494 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3495 u
= fold_convert (ptr_type_node
, u
);
3496 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3497 TREE_SIDE_EFFECTS (t
) = 1;
3498 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3500 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3501 u
= fold_build_pointer_plus_hwi (u
, 4 * ns
);
3502 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3503 TREE_SIDE_EFFECTS (t
) = 1;
3504 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3506 u
= fold_build_pointer_plus_hwi (u
, 8 * ((ns
+1)/2));
3507 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3508 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3509 TREE_SIDE_EFFECTS (t
) = 1;
3510 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3512 /* va_list.next_stack = nextarg; */
3513 u
= make_tree (ptr_type_node
, nextarg
);
3514 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3515 TREE_SIDE_EFFECTS (t
) = 1;
3516 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3520 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3522 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3524 HOST_WIDE_INT size
, rsize
;
3525 bool by_reference
, ivc2_vec
;
3526 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3527 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3528 tree label_sover
, label_selse
;
3531 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3533 size
= int_size_in_bytes (type
);
3534 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3538 type
= build_pointer_type (type
);
3541 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3543 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3544 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3545 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3546 f_next_stack
= DECL_CHAIN (f_next_cop
);
3548 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3550 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3551 valist
, f_next_gp_limit
, NULL_TREE
);
3552 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3554 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3555 valist
, f_next_stack
, NULL_TREE
);
3557 /* if f_next_gp < f_next_gp_limit
3558 IF (VECTOR_P && IVC2)
3566 val = *f_next_stack;
3567 f_next_stack += rsize;
3571 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3572 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3573 res_addr
= create_tmp_var (ptr_type_node
, NULL
);
3575 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3576 unshare_expr (next_gp_limit
));
3577 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3578 build1 (GOTO_EXPR
, void_type_node
,
3579 unshare_expr (label_selse
)),
3581 gimplify_and_add (tmp
, pre_p
);
3585 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3586 gimplify_and_add (tmp
, pre_p
);
3590 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3591 gimplify_and_add (tmp
, pre_p
);
3594 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_gp
), 4);
3595 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3597 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_cop
), 8);
3598 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3600 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3601 gimplify_and_add (tmp
, pre_p
);
3605 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3606 gimplify_and_add (tmp
, pre_p
);
3608 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3609 gimplify_and_add (tmp
, pre_p
);
3611 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_stack
), rsize
);
3612 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3616 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3617 gimplify_and_add (tmp
, pre_p
);
3619 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3622 res_addr
= build_va_arg_indirect_ref (res_addr
);
3624 return build_va_arg_indirect_ref (res_addr
);
3628 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3629 rtx libname ATTRIBUTE_UNUSED
,
3630 tree fndecl ATTRIBUTE_UNUSED
)
3634 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3640 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3641 larger than 4 bytes are passed indirectly. Return value in 0,
3642 unless bigger than 4 bytes, then the caller passes a pointer as the
3643 first arg. For varargs, we copy $1..$4 to the stack. */
3646 mep_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
3647 const_tree type ATTRIBUTE_UNUSED
,
3648 bool named ATTRIBUTE_UNUSED
)
3650 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3652 /* VOIDmode is a signal for the backend to pass data to the call
3653 expander via the second operand to the call pattern. We use
3654 this to determine whether to use "jsr" or "jsrv". */
3655 if (mode
== VOIDmode
)
3656 return GEN_INT (cum
->vliw
);
3658 /* If we havn't run out of argument registers, return the next. */
3661 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3662 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3664 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3667 /* Otherwise the argument goes on the stack. */
3672 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
3673 enum machine_mode mode
,
3675 bool named ATTRIBUTE_UNUSED
)
3677 int size
= bytesize (type
, mode
);
3679 /* This is non-obvious, but yes, large values passed after we've run
3680 out of registers are *still* passed by reference - we put the
3681 address of the parameter on the stack, as well as putting the
3682 parameter itself elsewhere on the stack. */
3684 if (size
<= 0 || size
> 8)
3688 if (TARGET_IVC2
&& get_cumulative_args (cum
)->nregs
< 4
3689 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3695 mep_function_arg_advance (cumulative_args_t pcum
,
3696 enum machine_mode mode ATTRIBUTE_UNUSED
,
3697 const_tree type ATTRIBUTE_UNUSED
,
3698 bool named ATTRIBUTE_UNUSED
)
3700 get_cumulative_args (pcum
)->nregs
+= 1;
3704 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3706 int size
= bytesize (type
, BLKmode
);
3707 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3708 return size
> 0 && size
<= 8 ? 0 : 1;
3709 return size
> 0 && size
<= 4 ? 0 : 1;
3713 mep_narrow_volatile_bitfield (void)
3719 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3722 mep_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
3724 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3725 return gen_rtx_REG (TYPE_MODE (type
), 48);
3726 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3729 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3732 mep_libcall_value (enum machine_mode mode
)
3734 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3737 /* Handle pipeline hazards. */
/* Pipeline-hazard bookkeeping: the opcodes we must watch for, their
   printable names, and the opcode emitted immediately before the
   current one.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };
static int prev_opcode = 0;
3744 /* This isn't as optimal as it could be, because we don't know what
3745 control register the STC opcode is storing in. We only need to add
3746 the nop if it's the relevant register, but we add it for irrelevant
3750 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3752 int this_opcode
= op_none
;
3753 const char *hazard
= 0;
3758 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3759 this_opcode
= op_fsft
;
3762 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3763 this_opcode
= op_ret
;
3766 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3767 this_opcode
= op_stc
;
3771 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3773 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3777 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3778 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3780 prev_opcode
= this_opcode
;
3783 /* Handle attributes. */
3786 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3787 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3789 if (TREE_CODE (*node
) != VAR_DECL
3790 && TREE_CODE (*node
) != POINTER_TYPE
3791 && TREE_CODE (*node
) != TYPE_DECL
)
3793 warning (0, "%qE attribute only applies to variables", name
);
3796 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3798 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3800 warning (0, "address region attributes not allowed with auto storage class");
3803 /* Ignore storage attribute of pointed to variable: char __far * x; */
3804 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3806 warning (0, "address region attributes on pointed-to types ignored");
3815 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3818 int count_sections
= 0;
3819 int section_attr_count
= 0;
3821 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3823 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3824 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3825 || is_attribute_p ("near", TREE_PURPOSE (a
))
3826 || is_attribute_p ("far", TREE_PURPOSE (a
))
3827 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3829 if (check_section_attr
)
3830 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3833 if (check_section_attr
)
3834 return section_attr_count
;
3836 return count_sections
;
3839 #define MEP_ATTRIBUTES(decl) \
3840 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3841 : DECL_ATTRIBUTES (decl) \
3842 ? (DECL_ATTRIBUTES (decl)) \
3843 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3846 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3847 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3849 if (TREE_CODE (*node
) != VAR_DECL
3850 && TREE_CODE (*node
) != FUNCTION_DECL
3851 && TREE_CODE (*node
) != METHOD_TYPE
3852 && TREE_CODE (*node
) != POINTER_TYPE
3853 && TREE_CODE (*node
) != TYPE_DECL
)
3855 warning (0, "%qE attribute only applies to variables and functions",
3859 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3861 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3863 warning (0, "address region attributes not allowed with auto storage class");
3866 /* Ignore storage attribute of pointed to variable: char __far * x; */
3867 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3869 warning (0, "address region attributes on pointed-to types ignored");
3873 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3875 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3876 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3877 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3883 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3884 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3886 if (TREE_CODE (*node
) != FUNCTION_DECL
3887 && TREE_CODE (*node
) != METHOD_TYPE
)
3889 warning (0, "%qE attribute only applies to functions", name
);
3896 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3897 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3901 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3903 warning (0, "%qE attribute only applies to functions", name
);
3908 if (DECL_DECLARED_INLINE_P (*node
))
3909 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3910 DECL_UNINLINABLE (*node
) = 1;
3912 function_type
= TREE_TYPE (*node
);
3914 if (TREE_TYPE (function_type
) != void_type_node
)
3915 error ("interrupt function must have return type of void");
3917 if (prototype_p (function_type
)
3918 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
3919 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
3920 error ("interrupt function must have no arguments");
3926 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
3927 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3929 if (TREE_CODE (*node
) != VAR_DECL
)
3931 warning (0, "%qE attribute only applies to variables", name
);
3935 if (args
!= NULL_TREE
)
3937 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
3938 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
3939 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
3941 warning (0, "%qE attribute allows only an integer constant argument",
3947 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
3948 TREE_THIS_VOLATILE (*node
) = 1;
3954 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3955 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3957 if (TREE_CODE (*node
) != FUNCTION_TYPE
3958 && TREE_CODE (*node
) != FUNCTION_DECL
3959 && TREE_CODE (*node
) != METHOD_TYPE
3960 && TREE_CODE (*node
) != FIELD_DECL
3961 && TREE_CODE (*node
) != TYPE_DECL
)
3963 static int gave_pointer_note
= 0;
3964 static int gave_array_note
= 0;
3965 static const char * given_type
= NULL
;
3967 given_type
= tree_code_name
[TREE_CODE (*node
)];
3968 if (TREE_CODE (*node
) == POINTER_TYPE
)
3969 given_type
= "pointers";
3970 if (TREE_CODE (*node
) == ARRAY_TYPE
)
3971 given_type
= "arrays";
3974 warning (0, "%qE attribute only applies to functions, not %s",
3977 warning (0, "%qE attribute only applies to functions",
3981 if (TREE_CODE (*node
) == POINTER_TYPE
3982 && !gave_pointer_note
)
3984 inform (input_location
,
3985 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3986 " typedef int (__vliw *vfuncptr) ();");
3987 gave_pointer_note
= 1;
3990 if (TREE_CODE (*node
) == ARRAY_TYPE
3991 && !gave_array_note
)
3993 inform (input_location
,
3994 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
3995 " typedef int (__vliw *vfuncptr[]) ();");
3996 gave_array_note
= 1;
4000 error ("VLIW functions are not allowed without a VLIW configuration");
4004 static const struct attribute_spec mep_attribute_table
[11] =
4006 /* name min max decl type func handler
4007 affects_type_identity */
4008 { "based", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4009 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4010 { "near", 0, 0, false, false, false, mep_validate_near_far
, false },
4011 { "far", 0, 0, false, false, false, mep_validate_near_far
, false },
4012 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
,
4014 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
, false },
4015 { "io", 0, 1, false, false, false, mep_validate_io_cb
, false },
4016 { "cb", 0, 1, false, false, false, mep_validate_io_cb
, false },
4017 { "vliw", 0, 0, false, true, false, mep_validate_vliw
, false },
4018 { NULL
, 0, 0, false, false, false, NULL
, false }
4022 mep_function_attribute_inlinable_p (const_tree callee
)
4024 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4025 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4026 return (lookup_attribute ("disinterrupt", attrs
) == 0
4027 && lookup_attribute ("interrupt", attrs
) == 0);
4031 mep_can_inline_p (tree caller
, tree callee
)
4033 if (TREE_CODE (callee
) == ADDR_EXPR
)
4034 callee
= TREE_OPERAND (callee
, 0);
4036 if (!mep_vliw_function_p (caller
)
4037 && mep_vliw_function_p (callee
))
4045 #define FUNC_DISINTERRUPT 2
4048 struct GTY(()) pragma_entry
{
4051 const char *funcname
;
4053 typedef struct pragma_entry pragma_entry
;
4055 /* Hash table of farcall-tagged sections. */
4056 static GTY((param_is (pragma_entry
))) htab_t pragma_htab
;
4059 pragma_entry_eq (const void *p1
, const void *p2
)
4061 const pragma_entry
*old
= (const pragma_entry
*) p1
;
4062 const char *new_name
= (const char *) p2
;
4064 return strcmp (old
->funcname
, new_name
) == 0;
4068 pragma_entry_hash (const void *p
)
4070 const pragma_entry
*old
= (const pragma_entry
*) p
;
4071 return htab_hash_string (old
->funcname
);
4075 mep_note_pragma_flag (const char *funcname
, int flag
)
4077 pragma_entry
**slot
;
4080 pragma_htab
= htab_create_ggc (31, pragma_entry_hash
,
4081 pragma_entry_eq
, NULL
);
4083 slot
= (pragma_entry
**)
4084 htab_find_slot_with_hash (pragma_htab
, funcname
,
4085 htab_hash_string (funcname
), INSERT
);
4089 *slot
= ggc_alloc_pragma_entry ();
4092 (*slot
)->funcname
= ggc_strdup (funcname
);
4094 (*slot
)->flag
|= flag
;
4098 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4100 pragma_entry
**slot
;
4105 if (funcname
[0] == '@' && funcname
[2] == '.')
4108 slot
= (pragma_entry
**)
4109 htab_find_slot_with_hash (pragma_htab
, funcname
,
4110 htab_hash_string (funcname
), NO_INSERT
);
4111 if (slot
&& *slot
&& ((*slot
)->flag
& flag
))
4113 (*slot
)->used
|= flag
;
4120 mep_lookup_pragma_call (const char *funcname
)
4122 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4126 mep_note_pragma_call (const char *funcname
)
4128 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4132 mep_lookup_pragma_disinterrupt (const char *funcname
)
4134 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4138 mep_note_pragma_disinterrupt (const char *funcname
)
4140 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4144 note_unused_pragma_disinterrupt (void **slot
, void *data ATTRIBUTE_UNUSED
)
4146 const pragma_entry
*d
= (const pragma_entry
*)(*slot
);
4148 if ((d
->flag
& FUNC_DISINTERRUPT
)
4149 && !(d
->used
& FUNC_DISINTERRUPT
))
4150 warning (0, "\"#pragma disinterrupt %s\" not used", d
->funcname
);
4155 mep_file_cleanups (void)
4158 htab_traverse (pragma_htab
, note_unused_pragma_disinterrupt
, NULL
);
4161 /* These three functions provide a bridge between the pragmas that
4162 affect register classes, and the functions that maintain them. We
4163 can't call those functions directly as pragma handling is part of
4164 the front end and doesn't have direct access to them. */
/* Pragma bridge: save the current register-class state.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}
/* Pragma bridge: re-initialize register info after a pragma changed
   register classes.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}
/* Pragma bridge: initialize register info.  */
void
mep_init_regs (void)
{
  init_regs ();
}
/* Map a MeP address-region attribute list (based/tiny/near/far/io/cb)
   to its one-character section-tag encoding (see the table in the file
   header: b/t/n/f/i-or-I/c).  Warns about duplicate region attributes
   and truncates the list to the first one.
   NOTE(review): this block was mangled by extraction and several
   original lines (return type, braces, the returned characters) are
   missing; the text below is kept verbatim.  */
4187 mep_attrlist_to_encoding (tree list
, tree decl
)
/* Diagnose conflicting region attributes; only the first is kept.  */
4189 if (mep_multiple_address_regions (list
, false) > 1)
4191 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4192 TREE_PURPOSE (TREE_CHAIN (list
)),
4194 DECL_SOURCE_LINE (decl
));
4195 TREE_CHAIN (list
) = NULL_TREE
;
/* Walk the attribute list looking for a region attribute.  */
4200 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4202 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4204 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4206 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4208 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
/* An "io" attribute may carry an integer address argument; presumably
   the 'i' vs. 'I' encoding depends on its range — TODO confirm.  */
4210 if (TREE_VALUE (list
)
4211 && TREE_VALUE (TREE_VALUE (list
))
4212 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4214 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4216 && location
<= 0x1000000)
4221 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4223 list
= TREE_CHAIN (list
);
/* Trailing condition fragments: the default for functions with no
   explicit section name — original context lost in extraction.  */
4226 && TREE_CODE (decl
) == FUNCTION_DECL
4227 && DECL_SECTION_NAME (decl
) == 0)
4233 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4237 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4238 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
/* TARGET_INSERT_ATTRIBUTES: apply default MeP section attributes to
   DECL.  Adds "disinterrupt" for functions named in the pragma, then
   for variables chooses a default region (based/tiny/near/far) from
   the size cutoffs and -m options, unless the user gave an explicit
   region attribute.  Also warns about duplicate __io addresses.
   NOTE(review): mangled by extraction; many original lines (braces,
   guards, secname assignments) are missing — text kept verbatim.  */
4247 mep_insert_attributes (tree decl
, tree
*attributes
)
4250 const char *secname
= 0;
4251 tree attrib
, attrlist
;
/* Functions: honour "#pragma disinterrupt <name>".  */
4254 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4256 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4258 if (mep_lookup_pragma_disinterrupt (funcname
))
4260 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4261 *attributes
= chainon (*attributes
, attrib
);
/* Only variables with linkage get default region attributes.  */
4265 if (TREE_CODE (decl
) != VAR_DECL
4266 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4269 if (TREE_READONLY (decl
) && TARGET_DC
)
4270 /* -mdc means that const variables default to the near section,
4271 regardless of the size cutoff. */
4274 /* User specified an attribute, so override the default.
4275 Ignore storage attribute of pointed to variable. char __far * x; */
4276 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4278 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4279 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4280 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4281 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
/* Work out the existing encoding, falling back to the type's
   attributes when the decl itself has none.  */
4284 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4285 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4286 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4288 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4289 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4293 /* This means that the declaration has a specific section
4294 attribute, so we should not apply the default rules. */
/* __io variables: detect two distinct decls given the same address.  */
4296 if (encoding
== 'i' || encoding
== 'I')
4298 tree attr
= lookup_attribute ("io", attrlist
);
4300 && TREE_VALUE (attr
)
4301 && TREE_VALUE (TREE_VALUE(attr
)))
4303 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4304 static tree previous_value
= 0;
4305 static int previous_location
= 0;
4306 static tree previous_name
= 0;
4308 /* We take advantage of the fact that gcc will reuse the
4309 same tree pointer when applying an attribute to a
4310 list of decls, but produce a new tree for attributes
4311 on separate source lines, even when they're textually
4312 identical. This is the behavior we want. */
4313 if (TREE_VALUE (attr
) == previous_value
4314 && location
== previous_location
)
4316 warning(0, "__io address 0x%x is the same for %qE and %qE",
4317 location
, previous_name
, DECL_NAME (decl
));
4319 previous_name
= DECL_NAME (decl
);
4320 previous_location
= location
;
4321 previous_value
= TREE_VALUE (attr
);
4328 /* Declarations of arrays can change size. Don't trust them. */
4329 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4332 size
= int_size_in_bytes (TREE_TYPE (decl
));
/* -mrand-tpgp: presumably randomizes small-data placement for
   small objects — TODO confirm against mep.opt.  */
4334 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4336 if (TREE_PUBLIC (decl
)
4337 || DECL_EXTERNAL (decl
)
4338 || TREE_STATIC (decl
))
4340 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
/* Pick a default region by the size cutoffs.  */
4364 if (size
<= mep_based_cutoff
&& size
> 0)
4366 else if (size
<= mep_tiny_cutoff
&& size
> 0)
/* -mconst-section= overrides the default for read-only data.  */
4372 if (mep_const_section
&& TREE_READONLY (decl
))
4374 if (strcmp (mep_const_section
, "tiny") == 0)
4376 else if (strcmp (mep_const_section
, "near") == 0)
4378 else if (strcmp (mep_const_section
, "far") == 0)
/* Finally attach the chosen region attribute, unless the user already
   gave one.  */
4385 if (!mep_multiple_address_regions (*attributes
, true)
4386 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4388 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4390 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4391 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4392 and mep_validate_based_tiny. */
4393 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
/* TARGET_ENCODE_SECTION_INFO: rename a decl's SYMBOL_REF to the
   "@<c>.<name>" form where <c> is the region encoding from
   mep_attrlist_to_encoding, and warn when the object is too large for
   its section.
   NOTE(review): mangled by extraction; declarations and several
   statements are missing — text kept verbatim.  */
4398 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4401 const char *oldname
;
4402 const char *secname
;
4408 tree mep_attributes
;
/* Only variables and functions get encoded names.  */
4413 if (TREE_CODE (decl
) != VAR_DECL
4414 && TREE_CODE (decl
) != FUNCTION_DECL
)
/* Find the existing assembler name inside RTL.  */
4417 rtlname
= XEXP (rtl
, 0);
4418 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4419 oldname
= XSTR (rtlname
, 0);
4420 else if (GET_CODE (rtlname
) == MEM
4421 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4422 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4426 type
= TREE_TYPE (decl
);
4427 if (type
== error_mark_node
)
4429 mep_attributes
= MEP_ATTRIBUTES (decl
);
4431 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
/* Build the new "@<c>.<old>" name and install it.  */
4435 newname
= (char *) alloca (strlen (oldname
) + 4);
4436 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4437 idp
= get_identifier (newname
);
4439 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4440 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4441 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
/* Sanity-check the object size against the section's capacity;
   0x1000000 presumably corresponds to the 24-bit far region — TODO
   confirm.  */
4454 maxsize
= 0x1000000;
4462 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4464 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4466 (long) int_size_in_bytes (TREE_TYPE (decl
)),
/* TARGET_STRIP_NAME_ENCODING: remove any leading '*' markers and
   "@<c>." section-tag prefixes from SYM and return the plain name.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      else if (*sym == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
/* TARGET_ASM_SELECT_SECTION: pick the output section for DECL based on
   its "@<c>." name encoding (based/tiny/far/io/cb) and read-only-ness.
   NOTE(review): mangled by extraction; the switch skeleton, encoding
   dispatch, and several returns are missing — text kept verbatim.  */
4488 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4489 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
/* Decide whether DECL is effectively read-only.  */
4494 switch (TREE_CODE (decl
))
4497 if (!TREE_READONLY (decl
)
4498 || TREE_SIDE_EFFECTS (decl
)
4499 || !DECL_INITIAL (decl
)
4500 || (DECL_INITIAL (decl
) != error_mark_node
4501 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4505 if (! TREE_CONSTANT (decl
))
/* Functions: choose among text/ftext/vtext/vftext.  */
4513 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4515 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4517 if (name
[0] == '@' && name
[2] == '.')
4522 if (flag_function_sections
|| DECL_ONE_ONLY (decl
))
4523 mep_unique_section (decl
, 0);
4524 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4526 if (encoding
== 'f')
4527 return vftext_section
;
4529 return vtext_section
;
4531 else if (encoding
== 'f')
4532 return ftext_section
;
4534 return text_section
;
/* Variables: dispatch on the name encoding.  */
4537 if (TREE_CODE (decl
) == VAR_DECL
)
4539 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4541 if (name
[0] == '@' && name
[2] == '.')
4545 return based_section
;
/* 't' (tiny): srodata when read-only, sdata when initialized,
   otherwise tiny BSS.  */
4549 return srodata_section
;
4550 if (DECL_INITIAL (decl
))
4551 return sdata_section
;
4552 return tinybss_section
;
4556 return frodata_section
;
/* __io and cb variables are pure address overlays and must not carry
   initializers.  */
4561 error_at (DECL_SOURCE_LOCATION (decl
),
4562 "variable %D of type %<io%> must be uninitialized", decl
);
4563 return data_section
;
4566 error_at (DECL_SOURCE_LOCATION (decl
),
4567 "variable %D of type %<cb%> must be uninitialized", decl
);
4568 return data_section
;
4573 return readonly_data_section
;
4575 return data_section
;
/* TARGET_ASM_UNIQUE_SECTION: build a per-decl section name by
   prefixing the decl's (encoding-stripped) name with the right
   ".<region>." or ".gnu.linkonce.<x>." prefix and storing it in
   DECL_SECTION_NAME.
   NOTE(review): mangled by extraction; the encoding switch that sets
   SEC for based/srodata/sdata/ftext/vftext/frodata/far is missing its
   case labels — text kept verbatim.  */
4579 mep_unique_section (tree decl
, int reloc
)
/* Index 0 = normal prefix, index 1 = linkonce prefix.  */
4581 static const char *prefixes
[][2] =
4583 { ".text.", ".gnu.linkonce.t." },
4584 { ".rodata.", ".gnu.linkonce.r." },
4585 { ".data.", ".gnu.linkonce.d." },
4586 { ".based.", ".gnu.linkonce.based." },
4587 { ".sdata.", ".gnu.linkonce.s." },
4588 { ".far.", ".gnu.linkonce.far." },
4589 { ".ftext.", ".gnu.linkonce.ft." },
4590 { ".frodata.", ".gnu.linkonce.frd." },
4591 { ".srodata.", ".gnu.linkonce.srd." },
4592 { ".vtext.", ".gnu.linkonce.v." },
4593 { ".vftext.", ".gnu.linkonce.vf." }
4595 int sec
= 2; /* .data */
4597 const char *name
, *prefix
;
/* Prefer the assembler name already attached to the RTL.  */
4600 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4601 if (DECL_RTL (decl
))
4602 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4604 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4606 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4607 sec
= 9; /* .vtext */
4609 sec
= 0; /* .text */
4611 else if (decl_readonly_section (decl
, reloc
))
4612 sec
= 1; /* .rodata */
/* Refine the choice from the "@<c>." name encoding.  */
4614 if (name
[0] == '@' && name
[2] == '.')
4619 sec
= 3; /* .based */
4623 sec
= 8; /* .srodata */
4625 sec
= 4; /* .sdata */
4629 sec
= 6; /* .ftext */
4631 sec
= 10; /* .vftext */
4633 sec
= 7; /* .frodata */
4635 sec
= 5; /* .far. */
/* Concatenate prefix and name and record the section.  */
4641 prefix
= prefixes
[sec
][DECL_ONE_ONLY(decl
)];
4642 len
= strlen (name
) + strlen (prefix
);
4643 string
= (char *) alloca (len
+ 1);
4645 sprintf (string
, "%s%s", prefix
, name
);
4647 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
4650 /* Given a decl, a section name, and whether the decl initializer
4651 has relocs, choose attributes for the section. */
4653 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4656 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4658 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4660 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4661 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4662 flags
|= SECTION_MEP_VLIW
;
4667 /* Switch to an arbitrary section NAME with attributes as specified
4668 by FLAGS. ALIGN specifies any known alignment requirements for
4669 the section; 0 if the default should be used.
4671 Differs from the standard ELF version only in support of VLIW mode. */
/* TARGET_ASM_NAMED_SECTION: emit a .section directive; differs from
   the generic ELF version only in emitting .vliw/.core after code
   sections, keyed off SECTION_MEP_VLIW.
   NOTE(review): mangled by extraction; the flag-character assignments
   (*f++ = ...) and the TYPE selection are missing — text kept
   verbatim.  */
4674 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4676 char flagchars
[8], *f
= flagchars
;
/* Translate section flags into the ELF flag-character string.  */
4679 if (!(flags
& SECTION_DEBUG
))
4681 if (flags
& SECTION_WRITE
)
4683 if (flags
& SECTION_CODE
)
4685 if (flags
& SECTION_SMALL
)
4687 if (flags
& SECTION_MEP_VLIW
)
4691 if (flags
& SECTION_BSS
)
4696 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4697 name
, flagchars
, type
);
/* Tell the assembler which instruction set follows.  */
4699 if (flags
& SECTION_CODE
)
4700 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
/* Emit an aligned common (or local common) definition for NAME.
   __io/cb objects with a fixed address become simple symbol
   assignments; based/tiny/far objects are placed in their BSS
   sections; everything else falls back to .comm/.local.
   NOTE(review): mangled by extraction; declarations (location, p2align,
   name2) and several control-flow lines are missing — text kept
   verbatim.  */
4705 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4706 int size
, int align
, int global
)
4708 /* We intentionally don't use mep_section_tag() here. */
/* "@i." / "@I." / "@c." objects may carry an explicit address.  */
4710 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4714 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4715 DECL_ATTRIBUTES (decl
));
4717 && TREE_VALUE (attr
)
4718 && TREE_VALUE (TREE_VALUE(attr
)))
4719 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
/* Emit "<name> = <address>" instead of allocating storage.  */
4724 fprintf (stream
, "\t.globl\t");
4725 assemble_name (stream
, name
);
4726 fprintf (stream
, "\n");
4728 assemble_name (stream
, name
);
4729 fprintf (stream
, " = %d\n", location
);
/* Region-encoded names go to their dedicated BSS sections.  */
4732 if (name
[0] == '@' && name
[2] == '.')
4734 const char *sec
= 0;
4738 switch_to_section (based_section
);
4742 switch_to_section (tinybss_section
);
4746 switch_to_section (farbss_section
);
/* Convert ALIGN (bits) to a power-of-two byte alignment.  */
4755 while (align
> BITS_PER_UNIT
)
4760 name2
= targetm
.strip_name_encoding (name
);
4762 fprintf (stream
, "\t.globl\t%s\n", name2
);
4763 fprintf (stream
, "\t.p2align %d\n", p2align
);
4764 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4765 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4766 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
/* Fallback: classic .local/.comm emission.  */
4773 fprintf (stream
, "\t.local\t");
4774 assemble_name (stream
, name
);
4775 fprintf (stream
, "\n");
4777 fprintf (stream
, "\t.comm\t");
4778 assemble_name (stream
, name
);
4779 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
4785 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4787 rtx addr
= XEXP (m_tramp
, 0);
4788 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4790 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4791 LCT_NORMAL
, VOIDmode
, 3,
4794 static_chain
, Pmode
);
4797 /* Experimental Reorg. */
/* Return nonzero if REG (or, when REG is NULL, any MEM) is mentioned
   anywhere inside the rtx IN.  When MODES_TOO is nonzero a register
   only matches if its machine mode matches as well.  Recurses over the
   rtx format string.
   NOTE(review): mangled by extraction; the early returns and some case
   labels are missing — text kept verbatim.  */
4800 mep_mentioned_p (rtx in
,
4801 rtx reg
, /* NULL for mem */
4802 int modes_too
) /* if nonzero, modes must match also. */
4810 if (reg
&& GET_CODE (reg
) != REG
)
4813 if (GET_CODE (in
) == LABEL_REF
)
4816 code
= GET_CODE (in
);
/* Strip one level (presumably SUBREG/STRICT_LOW_PART — original case
   labels lost) and recurse.  */
4822 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
/* REG case: compare numbers, optionally modes.  */
4828 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4830 return (REGNO (in
) == REGNO (reg
));
4843 /* Set's source should be read-only. */
4844 if (code
== SET
&& !reg
)
4845 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
/* Generic walk over the rtx's operands and vectors.  */
4847 fmt
= GET_RTX_FORMAT (code
);
4849 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4854 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4855 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4858 else if (fmt
[i
] == 'e'
4859 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4865 #define EXPERIMENTAL_REGMOVE_REORG 1
4867 #if EXPERIMENTAL_REGMOVE_REORG
/* Nonzero when registers R1 and R2 live in the same broad class
   (both general registers or both coprocessor registers), so a move
   between them may be elided by the regmove reorg pass.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
    return 1;
  if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
    return 1;
  return 0;
}
/* Machine reorg subpass: delete superfluous register moves.  Looks for
   (set r2 r1) where r1 dies, followed by an insn that uses r2 and in
   which r2 dies; replaces r2 with r1 there and, if the result still
   recognizes, deletes the move.
   NOTE(review): mangled by extraction; the counting loop body, the
   validate/delete sequence, and several braces are missing — text kept
   verbatim.  */
4880 mep_reorg_regmove (rtx insns
)
4882 rtx insn
, next
, pat
, follow
, *where
;
4883 int count
= 0, done
= 0, replace
, before
= 0;
/* First pass: count candidate insns (for dump statistics).  */
4886 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4887 if (GET_CODE (insn
) == INSN
)
4890 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4891 set that uses the r2 and r2 dies there. We replace r2 with r1
4892 and see if it's still a valid insn. If so, delete the first set.
4893 Copied from reorg.c. */
4898 for (insn
= insns
; insn
; insn
= next
)
4900 next
= next_nonnote_nondebug_insn (insn
);
4901 if (GET_CODE (insn
) != INSN
)
4903 pat
= PATTERN (insn
);
/* Candidate move: reg = reg, source dies, classes compatible.  */
4907 if (GET_CODE (pat
) == SET
4908 && GET_CODE (SET_SRC (pat
)) == REG
4909 && GET_CODE (SET_DEST (pat
)) == REG
4910 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4911 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4913 follow
= next_nonnote_nondebug_insn (insn
);
4915 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
/* Skip intervening sets that touch neither source nor dest.  */
4917 while (follow
&& GET_CODE (follow
) == INSN
4918 && GET_CODE (PATTERN (follow
)) == SET
4919 && !dead_or_set_p (follow
, SET_SRC (pat
))
4920 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4921 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4924 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4925 follow
= next_nonnote_insn (follow
);
4929 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
/* FOLLOW must be a SET in which the moved register dies.  */
4930 if (follow
&& GET_CODE (follow
) == INSN
4931 && GET_CODE (PATTERN (follow
)) == SET
4932 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
4934 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
4936 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
4939 where
= & SET_SRC (PATTERN (follow
));
4942 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
4944 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
4947 where
= & PATTERN (follow
);
4953 /* If so, follow is the corresponding insn */
4960 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
4961 for (x
= insn
; x
;x
= NEXT_INSN (x
))
4963 print_rtl_single (dump_file
, x
);
4966 fprintf (dump_file
, "\n");
/* Try the substitution; on success the original move is deleted.  */
4970 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
4977 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
4978 print_rtl_single (dump_file
, follow
);
4988 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
4989 fprintf (dump_file
, "=====\n");
4995 /* Figure out where to put LABEL, which is the label for a repeat loop.
4996 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
4997 the loop ends just before LAST_INSN. If SHARED, insns other than the
4998 "repeat" might use LABEL to jump to the loop's continuation point.
5000 Return the last instruction in the adjusted loop. */
/* Place LABEL, the label for a repeat loop ending at (or just before,
   when !INCLUDING) LAST_INSN.  The MeP repeat hardware needs the label
   two instructions before the loop end, so unsafe trailing insns are
   moved past the label and nops are added as padding.  Returns the new
   last insn of the loop.
   NOTE(review): mangled by extraction; the SHARED parameter, the
   NEXT/PREV setup, and loop braces are missing — text kept
   verbatim.  */
5003 mep_insert_repeat_label_last (rtx last_insn
, rtx label
, bool including
,
5007 int count
= 0, code
, icode
;
5010 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5011 INSN_UID (last_insn
));
5013 /* Set PREV to the last insn in the loop. */
5016 prev
= PREV_INSN (prev
);
5018 /* Set NEXT to the next insn after the repeat label. */
/* Scan backwards for insns that may not sit in the final two
   slots.  */
5023 code
= GET_CODE (prev
);
5024 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5029 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5030 prev
= XVECEXP (PATTERN (prev
), 0, 1);
5032 /* Other insns that should not be in the last two opcodes. */
5033 icode
= recog_memoized (prev
);
5035 || icode
== CODE_FOR_repeat
5036 || icode
== CODE_FOR_erepeat
5037 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5040 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5041 is the second instruction in a VLIW bundle. In that case,
5042 loop again: if the first instruction also satisfies the
5043 conditions above then we will reach here again and put
5044 both of them into the repeat epilogue. Otherwise both
5045 should remain outside. */
5046 if (GET_MODE (prev
) != BImode
)
5051 print_rtl_single (dump_file
, next
);
5056 prev
= PREV_INSN (prev
);
5059 /* See if we're adding the label immediately after the repeat insn.
5060 If so, we need to separate them with a nop. */
5061 prev
= prev_real_insn (next
);
5063 switch (recog_memoized (prev
))
5065 case CODE_FOR_repeat
:
5066 case CODE_FOR_erepeat
:
5068 fprintf (dump_file
, "Adding nop inside loop\n");
5069 emit_insn_before (gen_nop (), next
);
5076 /* Insert the label. */
5077 emit_label_before (label
, next
);
5079 /* Insert the nops. */
5080 if (dump_file
&& count
< 2)
5081 fprintf (dump_file
, "Adding %d nop%s\n\n",
5082 2 - count
, count
== 1 ? "" : "s");
5084 for (; count
< 2; count
++)
/* INCLUDING decides whether the nop extends the loop or pads just
   before its end.  */
5086 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5088 emit_insn_before (gen_nop (), last_insn
);
5095 mep_emit_doloop (rtx
*operands
, int is_end
)
5099 if (cfun
->machine
->doloop_tags
== 0
5100 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5102 cfun
->machine
->doloop_tags
++;
5103 cfun
->machine
->doloop_tag_from_end
= is_end
;
5106 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5108 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[4], tag
));
5110 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5114 /* Code for converting doloop_begins and doloop_ends into valid
5115 MeP instructions. A doloop_begin is just a placeholder:
5117 $count = unspec ($count)
5119 where $count is initially the number of iterations - 1.
5120 doloop_end has the form:
5122 if ($count-- == 0) goto label
5124 The counter variable is private to the doloop insns, nothing else
5125 relies on its value.
5127 There are three cases, in decreasing order of preference:
5129 1. A loop has exactly one doloop_begin and one doloop_end.
5130 The doloop_end branches to the first instruction after
5133 In this case we can replace the doloop_begin with a repeat
5134 instruction and remove the doloop_end. I.e.:
5136 $count1 = unspec ($count1)
5141 if ($count2-- == 0) goto label
5145 repeat $count1,repeat_label
5153 2. As for (1), except there are several doloop_ends. One of them
5154 (call it X) falls through to a label L. All the others fall
5155 through to branches to L.
5157 In this case, we remove X and replace the other doloop_ends
5158 with branches to the repeat label. For example:
5160 $count1 = unspec ($count1)
5163 if ($count2-- == 0) goto label
5166 if ($count3-- == 0) goto label
5171 repeat $count1,repeat_label
5182 3. The fallback case. Replace doloop_begins with:
5186 Replace doloop_ends with the equivalent of:
5189 if ($count == 0) goto label
5191 Note that this might need a scratch register if $count
5192 is stored in memory. */
5194 /* A structure describing one doloop_begin. */
5195 struct mep_doloop_begin
{
5196 /* The next doloop_begin with the same tag. */
5197 struct mep_doloop_begin
*next
;
5199 /* The instruction itself. */
5202 /* The initial counter value. This is known to be a general register. */
5206 /* A structure describing a doloop_end. */
5207 struct mep_doloop_end
{
5208 /* The next doloop_end with the same loop tag. */
5209 struct mep_doloop_end
*next
;
5211 /* The instruction itself. */
5214 /* The first instruction after INSN when the branch isn't taken. */
5217 /* The location of the counter value. Since doloop_end_internal is a
5218 jump instruction, it has to allow the counter to be stored anywhere
5219 (any non-fixed register or memory location). */
5222 /* The target label (the place where the insn branches when the counter
5226 /* A scratch register. Only available when COUNTER isn't stored
5227 in a general register. */
/* One do-while loop.  */
struct mep_doloop
{
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5243 /* Return true if LOOP can be converted into repeat/repeat_end form
5244 (that is, if it matches cases (1) or (2) above). */
5247 mep_repeat_loop_p (struct mep_doloop
*loop
)
5249 struct mep_doloop_end
*end
;
5252 /* There must be exactly one doloop_begin and at least one doloop_end. */
5253 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5256 /* The first doloop_end (X) must branch back to the insn after
5257 the doloop_begin. */
5258 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5261 /* All the other doloop_ends must branch to the same place as X.
5262 When the branch isn't taken, they must jump to the instruction
5264 fallthrough
= loop
->end
->fallthrough
;
5265 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5266 if (end
->label
!= loop
->end
->label
5267 || !simplejump_p (end
->fallthrough
)
5268 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5275 /* The main repeat reorg function. See comment above for details. */
5278 mep_reorg_repeat (rtx insns
)
5281 struct mep_doloop
*loops
, *loop
;
5282 struct mep_doloop_begin
*begin
;
5283 struct mep_doloop_end
*end
;
5285 /* Quick exit if we haven't created any loops. */
5286 if (cfun
->machine
->doloop_tags
== 0)
5289 /* Create an array of mep_doloop structures. */
5290 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5291 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5293 /* Search the function for do-while insns and group them by loop tag. */
5294 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5296 switch (recog_memoized (insn
))
5298 case CODE_FOR_doloop_begin_internal
:
5299 insn_extract (insn
);
5300 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5302 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5303 begin
->next
= loop
->begin
;
5305 begin
->counter
= recog_data
.operand
[0];
5307 loop
->begin
= begin
;
5310 case CODE_FOR_doloop_end_internal
:
5311 insn_extract (insn
);
5312 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5314 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5316 end
->fallthrough
= next_real_insn (insn
);
5317 end
->counter
= recog_data
.operand
[0];
5318 end
->label
= recog_data
.operand
[1];
5319 end
->scratch
= recog_data
.operand
[3];
5321 /* If this insn falls through to an unconditional jump,
5322 give it a lower priority than the others. */
5323 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5325 end
->next
= loop
->end
->next
;
5326 loop
->end
->next
= end
;
5330 end
->next
= loop
->end
;
5336 /* Convert the insns for each loop in turn. */
5337 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5338 if (mep_repeat_loop_p (loop
))
5340 /* Case (1) or (2). */
5341 rtx repeat_label
, label_ref
;
5343 /* Create a new label for the repeat insn. */
5344 repeat_label
= gen_label_rtx ();
5346 /* Replace the doloop_begin with a repeat. */
5347 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5348 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5350 delete_insn (loop
->begin
->insn
);
5352 /* Insert the repeat label before the first doloop_end.
5353 Fill the gap with nops if there are other doloop_ends. */
5354 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5355 false, loop
->end
->next
!= 0);
5357 /* Emit a repeat_end (to improve the readability of the output). */
5358 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5360 /* Delete the first doloop_end. */
5361 delete_insn (loop
->end
->insn
);
5363 /* Replace the others with branches to REPEAT_LABEL. */
5364 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5366 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5367 delete_insn (end
->insn
);
5368 delete_insn (end
->fallthrough
);
5373 /* Case (3). First replace all the doloop_begins with increment
5375 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5377 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5378 begin
->counter
, const1_rtx
),
5380 delete_insn (begin
->insn
);
5383 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5384 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5390 /* Load the counter value into a general register. */
5392 if (!REG_P (reg
) || REGNO (reg
) > 15)
5395 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5398 /* Decrement the counter. */
5399 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5402 /* Copy it back to its original location. */
5403 if (reg
!= end
->counter
)
5404 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5406 /* Jump back to the start label. */
5407 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5409 JUMP_LABEL (insn
) = end
->label
;
5410 LABEL_NUSES (end
->label
)++;
5412 /* Emit the whole sequence before the doloop_end. */
5413 insn
= get_insns ();
5415 emit_insn_before (insn
, end
->insn
);
5417 /* Delete the doloop_end. */
5418 delete_insn (end
->insn
);
5425 mep_invertable_branch_p (rtx insn
)
5428 enum rtx_code old_code
;
5431 set
= PATTERN (insn
);
5432 if (GET_CODE (set
) != SET
)
5434 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5436 cond
= XEXP (XEXP (set
, 1), 0);
5437 old_code
= GET_CODE (cond
);
5441 PUT_CODE (cond
, NE
);
5444 PUT_CODE (cond
, EQ
);
5447 PUT_CODE (cond
, GE
);
5450 PUT_CODE (cond
, LT
);
5455 INSN_CODE (insn
) = -1;
5456 i
= recog_memoized (insn
);
5457 PUT_CODE (cond
, old_code
);
5458 INSN_CODE (insn
) = -1;
5463 mep_invert_branch (rtx insn
, rtx after
)
5465 rtx cond
, set
, label
;
5468 set
= PATTERN (insn
);
5470 gcc_assert (GET_CODE (set
) == SET
);
5471 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5473 cond
= XEXP (XEXP (set
, 1), 0);
5474 switch (GET_CODE (cond
))
5477 PUT_CODE (cond
, NE
);
5480 PUT_CODE (cond
, EQ
);
5483 PUT_CODE (cond
, GE
);
5486 PUT_CODE (cond
, LT
);
5491 label
= gen_label_rtx ();
5492 emit_label_after (label
, after
);
5493 for (i
=1; i
<=2; i
++)
5494 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5496 rtx ref
= XEXP (XEXP (set
, 1), i
);
5497 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5498 delete_insn (XEXP (ref
, 0));
5499 XEXP (ref
, 0) = label
;
5500 LABEL_NUSES (label
) ++;
5501 JUMP_LABEL (insn
) = label
;
5503 INSN_CODE (insn
) = -1;
5504 i
= recog_memoized (insn
);
5505 gcc_assert (i
>= 0);
/* Machine reorg subpass: convert small backward-branch loops into
   erepeat (endless repeat) form when the loop top label's users can be
   proven safe.
   NOTE(review): mangled by extraction; loop-entry conditions, several
   declarations (count, barrier, newlast, user, safe) and braces are
   missing — text kept verbatim.  */
5509 mep_reorg_erepeat (rtx insns
)
5511 rtx insn
, prev
, l
, x
;
/* Scan for invertible conditional branches that could close an
   erepeat loop.  */
5514 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5516 && ! JUMP_TABLE_DATA_P (insn
)
5517 && mep_invertable_branch_p (insn
))
5521 fprintf (dump_file
, "\n------------------------------\n");
5522 fprintf (dump_file
, "erepeat: considering this jump:\n");
5523 print_rtl_single (dump_file
, insn
);
5525 count
= simplejump_p (insn
) ? 0 : 1;
/* Walk backwards to the branch target, giving up on calls and
   barriers.  */
5526 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5528 if (GET_CODE (prev
) == CALL_INSN
5529 || BARRIER_P (prev
))
5532 if (prev
== JUMP_LABEL (insn
))
5536 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5538 if (LABEL_NUSES (prev
) == 1)
5539 /* We're the only user, always safe */ ;
5540 else if (LABEL_NUSES (prev
) == 2)
5542 /* See if there's a barrier before this label. If
5543 so, we know nobody inside the loop uses it.
5544 But we must be careful to put the erepeat
5545 *after* the label. */
5547 for (barrier
= PREV_INSN (prev
);
5548 barrier
&& GET_CODE (barrier
) == NOTE
;
5549 barrier
= PREV_INSN (barrier
))
5551 if (barrier
&& GET_CODE (barrier
) != BARRIER
)
5556 /* We don't know who else, within or without our loop, uses this */
5558 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5562 /* Generate a label to be used by the erepeat insn. */
5563 l
= gen_label_rtx ();
5565 /* Insert the erepeat after INSN's target label. */
5566 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5568 emit_insn_after (x
, prev
);
5570 /* Insert the erepeat label. */
5571 newlast
= (mep_insert_repeat_label_last
5572 (insn
, l
, !simplejump_p (insn
), false));
5573 if (simplejump_p (insn
))
5575 emit_insn_before (gen_erepeat_end (), insn
);
/* Conditional branch: invert it so the loop falls through.  */
5580 mep_invert_branch (insn
, newlast
);
5581 emit_insn_after (gen_erepeat_end (), newlast
);
5588 /* A label is OK if there is exactly one user, and we
5589 can find that user before the next label. */
5592 if (LABEL_NUSES (prev
) == 1)
5594 for (user
= PREV_INSN (prev
);
5595 user
&& (INSN_P (user
) || GET_CODE (user
) == NOTE
);
5596 user
= PREV_INSN (user
))
5597 if (GET_CODE (user
) == JUMP_INSN
5598 && JUMP_LABEL (user
) == prev
)
5600 safe
= INSN_UID (user
);
5607 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5608 safe
, INSN_UID (prev
));
5618 fprintf (dump_file
, "\n==============================\n");
5621 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5622 always do this on its own. */
5625 mep_jmp_return_reorg (rtx insns
)
5627 rtx insn
, label
, ret
;
5630 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5631 if (simplejump_p (insn
))
5633 /* Find the fist real insn the jump jumps to. */
5634 label
= ret
= JUMP_LABEL (insn
);
5636 && (GET_CODE (ret
) == NOTE
5637 || GET_CODE (ret
) == CODE_LABEL
5638 || GET_CODE (PATTERN (ret
)) == USE
))
5639 ret
= NEXT_INSN (ret
);
5643 /* Is it a return? */
5644 ret_code
= recog_memoized (ret
);
5645 if (ret_code
== CODE_FOR_return_internal
5646 || ret_code
== CODE_FOR_eh_return_internal
)
5648 /* It is. Replace the jump with a return. */
5649 LABEL_NUSES (label
) --;
5650 if (LABEL_NUSES (label
) == 0)
5651 delete_insn (label
);
5652 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5653 INSN_CODE (insn
) = -1;
5661 mep_reorg_addcombine (rtx insns
)
5665 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5667 && INSN_CODE (i
) == CODE_FOR_addsi3
5668 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5669 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5670 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5671 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5675 && INSN_CODE (n
) == CODE_FOR_addsi3
5676 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5677 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5678 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5679 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5681 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5682 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5683 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5685 && ic
+ nc
> -32768)
5687 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5688 NEXT_INSN (i
) = NEXT_INSN (n
);
5690 PREV_INSN (NEXT_INSN (i
)) = i
;
5696 /* If this insn adjusts the stack, return the adjustment, else return
5699 add_sp_insn_p (rtx insn
)
5703 if (! single_set (insn
))
5705 pat
= PATTERN (insn
);
5706 if (GET_CODE (SET_DEST (pat
)) != REG
)
5708 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5710 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5712 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5714 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5716 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5718 return INTVAL (XEXP (SET_SRC (pat
), 1));
5721 /* Check for trivial functions that set up an unneeded stack
5724 mep_reorg_noframe (rtx insns
)
5726 rtx start_frame_insn
;
5727 rtx end_frame_insn
= 0;
5731 /* The first insn should be $sp = $sp + N */
5732 while (insns
&& ! INSN_P (insns
))
5733 insns
= NEXT_INSN (insns
);
5737 sp_adjust
= add_sp_insn_p (insns
);
5741 start_frame_insn
= insns
;
5742 sp
= SET_DEST (PATTERN (start_frame_insn
));
5744 insns
= next_real_insn (insns
);
5748 rtx next
= next_real_insn (insns
);
5752 sp2
= add_sp_insn_p (insns
);
5757 end_frame_insn
= insns
;
5758 if (sp2
!= -sp_adjust
)
5761 else if (mep_mentioned_p (insns
, sp
, 0))
5763 else if (CALL_P (insns
))
5771 delete_insn (start_frame_insn
);
5772 delete_insn (end_frame_insn
);
5779 rtx insns
= get_insns ();
5781 /* We require accurate REG_DEAD notes. */
5782 compute_bb_for_insn ();
5783 df_note_add_problem ();
5786 mep_reorg_addcombine (insns
);
5787 #if EXPERIMENTAL_REGMOVE_REORG
5788 /* VLIW packing has been done already, so we can't just delete things. */
5789 if (!mep_vliw_function_p (cfun
->decl
))
5790 mep_reorg_regmove (insns
);
5792 mep_jmp_return_reorg (insns
);
5793 mep_bundle_insns (insns
);
5794 mep_reorg_repeat (insns
);
5797 && !profile_arc_flag
5798 && TARGET_OPT_REPEAT
5799 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5800 mep_reorg_erepeat (insns
);
5802 /* This may delete *insns so make sure it's last. */
5803 mep_reorg_noframe (insns
);
5805 df_finish_pass (false);
5810 /*----------------------------------------------------------------------*/
5812 /*----------------------------------------------------------------------*/
5814 /* Element X gives the index into cgen_insns[] of the most general
5815 implementation of intrinsic X. Unimplemented intrinsics are
5817 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5819 /* Element X gives the index of another instruction that is mapped to
5820 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5823 Things are set up so that mep_intrinsic_chain[X] < X. */
5824 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5826 /* The bitmask for the current ISA. The ISA masks are declared
5828 unsigned int mep_selected_isa
;
5831 const char *config_name
;
5835 static struct mep_config mep_configs
[] = {
5836 #ifdef COPROC_SELECTION_TABLE
5837 COPROC_SELECTION_TABLE
,
5842 /* Initialize the global intrinsics variables above. */
5845 mep_init_intrinsics (void)
5849 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5850 mep_selected_isa
= mep_configs
[0].isa
;
5851 if (mep_config_string
!= 0)
5852 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5853 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5855 mep_selected_isa
= mep_configs
[i
].isa
;
5859 /* Assume all intrinsics are unavailable. */
5860 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5861 mep_intrinsic_insn
[i
] = -1;
5863 /* Build up the global intrinsic tables. */
5864 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5865 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5867 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5868 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5870 /* See whether we can directly move values between one coprocessor
5871 register and another. */
5872 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5873 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5874 mep_have_copro_copro_moves_p
= true;
5876 /* See whether we can directly move values between core and
5877 coprocessor registers. */
5878 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5879 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5881 mep_have_core_copro_moves_p
= 1;
/* Declare all available intrinsic functions.  Called once only.  */

/* Type nodes used for intrinsic builtin arguments and return values;
   initialized by mep_init_builtins below.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5896 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5900 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5901 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5902 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5903 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5904 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5905 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5906 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5907 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5908 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5909 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5910 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5911 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5912 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5913 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5914 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5915 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5916 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5918 return void_type_node
;
5923 mep_init_builtins (void)
5927 if (TARGET_64BIT_CR_REGS
)
5928 cp_data_bus_int_type_node
= long_long_integer_type_node
;
5930 cp_data_bus_int_type_node
= long_integer_type_node
;
5932 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
5933 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
5934 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
5935 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
5936 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
5937 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
5938 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
5940 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node
);
5942 add_builtin_type ("cp_vector", opaque_vector_type_node
);
5944 add_builtin_type ("cp_v8qi", v8qi_type_node
);
5945 add_builtin_type ("cp_v4hi", v4hi_type_node
);
5946 add_builtin_type ("cp_v2si", v2si_type_node
);
5948 add_builtin_type ("cp_v8uqi", v8uqi_type_node
);
5949 add_builtin_type ("cp_v4uhi", v4uhi_type_node
);
5950 add_builtin_type ("cp_v2usi", v2usi_type_node
);
5952 /* Intrinsics like mep_cadd3 are implemented with two groups of
5953 instructions, one which uses UNSPECs and one which uses a specific
5954 rtl code such as PLUS. Instructions in the latter group belong
5955 to GROUP_KNOWN_CODE.
5957 In such cases, the intrinsic will have two entries in the global
5958 tables above. The unspec form is accessed using builtin functions
5959 while the specific form is accessed using the mep_* enum in
5962 The idea is that __cop arithmetic and builtin functions have
5963 different optimization requirements. If mep_cadd3() appears in
5964 the source code, the user will surely except gcc to use cadd3
5965 rather than a work-alike such as add3. However, if the user
5966 just writes "a + b", where a or b are __cop variables, it is
5967 reasonable for gcc to choose a core instruction rather than
5968 cadd3 if it believes that is more optimal. */
5969 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5970 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
5971 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
5973 tree ret_type
= void_type_node
;
5976 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
5979 if (cgen_insns
[i
].cret_p
)
5980 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
5982 bi_type
= build_function_type_list (ret_type
, NULL_TREE
);
5983 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
5985 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
5989 /* Report the unavailablity of the given intrinsic. */
5993 mep_intrinsic_unavailable (int intrinsic
)
5995 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
5997 if (already_reported_p
[intrinsic
])
6000 if (mep_intrinsic_insn
[intrinsic
] < 0)
6001 error ("coprocessor intrinsic %qs is not available in this configuration",
6002 cgen_intrinsics
[intrinsic
]);
6003 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6004 error ("%qs is not available in VLIW functions",
6005 cgen_intrinsics
[intrinsic
]);
6007 error ("%qs is not available in non-VLIW functions",
6008 cgen_intrinsics
[intrinsic
]);
6010 already_reported_p
[intrinsic
] = 1;
6015 /* See if any implementation of INTRINSIC is available to the
6016 current function. If so, store the most general implementation
6017 in *INSN_PTR and return true. Return false otherwise. */
6020 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6024 i
= mep_intrinsic_insn
[intrinsic
];
6025 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6026 i
= mep_intrinsic_chain
[i
];
6030 *insn_ptr
= &cgen_insns
[i
];
6037 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6038 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6039 try using a work-alike instead. In this case, the returned insn
6040 may have three operands rather than two. */
6043 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6047 if (intrinsic
== mep_cmov
)
6049 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6050 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6054 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6058 /* If ARG is a register operand that is the same size as MODE, convert it
6059 to MODE using a subreg. Otherwise return ARG as-is. */
6062 mep_convert_arg (enum machine_mode mode
, rtx arg
)
6064 if (GET_MODE (arg
) != mode
6065 && register_operand (arg
, VOIDmode
)
6066 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6067 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6072 /* Apply regnum conversions to ARG using the description given by REGNUM.
6073 Return the new argument on success and null on failure. */
6076 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6078 if (regnum
->count
== 0)
6081 if (GET_CODE (arg
) != CONST_INT
6083 || INTVAL (arg
) >= regnum
->count
)
6086 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6090 /* Try to make intrinsic argument ARG match the given operand.
6091 UNSIGNED_P is true if the argument has an unsigned type. */
6094 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6097 if (GET_CODE (arg
) == CONST_INT
)
6099 /* CONST_INTs can only be bound to integer operands. */
6100 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6103 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6104 /* These hold vector constants. */;
6105 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6107 /* If the argument is a different size from what's expected, we must
6108 have a value in the right mode class in order to convert it. */
6109 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6112 /* If the operand is an rvalue, promote or demote it to match the
6113 operand's size. This might not need extra instructions when
6114 ARG is a register value. */
6115 if (operand
->constraint
[0] != '=')
6116 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6119 /* If the operand is an lvalue, bind the operand to a new register.
6120 The caller will copy this value into ARG after the main
6121 instruction. By doing this always, we produce slightly more
6123 /* But not for control registers. */
6124 if (operand
->constraint
[0] == '='
6126 || ! (CONTROL_REGNO_P (REGNO (arg
))
6127 || CCR_REGNO_P (REGNO (arg
))
6128 || CR_REGNO_P (REGNO (arg
)))
6130 return gen_reg_rtx (operand
->mode
);
6132 /* Try simple mode punning. */
6133 arg
= mep_convert_arg (operand
->mode
, arg
);
6134 if (operand
->predicate (arg
, operand
->mode
))
6137 /* See if forcing the argument into a register will make it match. */
6138 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6139 arg
= force_reg (operand
->mode
, arg
);
6141 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6142 if (operand
->predicate (arg
, operand
->mode
))
6149 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6150 function FNNAME. OPERAND describes the operand to which ARGNUM
6154 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6155 int argnum
, tree fnname
)
6159 if (GET_CODE (arg
) == CONST_INT
)
6160 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6161 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6163 const struct cgen_immediate_predicate
*predicate
;
6164 HOST_WIDE_INT argval
;
6166 predicate
= &cgen_immediate_predicates
[i
];
6167 argval
= INTVAL (arg
);
6168 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6169 error ("argument %d of %qE must be in the range %d...%d",
6170 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6172 error ("argument %d of %qE must be a multiple of %d",
6173 argnum
, fnname
, predicate
->align
);
6177 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6181 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6182 rtx subtarget ATTRIBUTE_UNUSED
,
6183 enum machine_mode mode ATTRIBUTE_UNUSED
,
6184 int ignore ATTRIBUTE_UNUSED
)
6186 rtx pat
, op
[10], arg
[10];
6188 int opindex
, unsigned_p
[10];
6190 unsigned int n_args
;
6192 const struct cgen_insn
*cgen_insn
;
6193 const struct insn_data_d
*idata
;
6194 unsigned int first_arg
= 0;
6195 unsigned int builtin_n_args
;
6197 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6198 fnname
= DECL_NAME (fndecl
);
6200 /* Find out which instruction we should emit. Note that some coprocessor
6201 intrinsics may only be available in VLIW mode, or only in normal mode. */
6202 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6204 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6207 idata
= &insn_data
[cgen_insn
->icode
];
6209 builtin_n_args
= cgen_insn
->num_args
;
6211 if (cgen_insn
->cret_p
)
6213 if (cgen_insn
->cret_p
> 1)
6216 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6220 /* Evaluate each argument. */
6221 n_args
= call_expr_nargs (exp
);
6223 if (n_args
< builtin_n_args
)
6225 error ("too few arguments to %qE", fnname
);
6228 if (n_args
> builtin_n_args
)
6230 error ("too many arguments to %qE", fnname
);
6234 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6238 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6243 if (cgen_insn
->regnums
[a
].reference_p
)
6245 if (TREE_CODE (value
) != ADDR_EXPR
)
6248 error ("argument %d of %qE must be an address", a
+1, fnname
);
6251 value
= TREE_OPERAND (value
, 0);
6255 /* If the argument has been promoted to int, get the unpromoted
6256 value. This is necessary when sub-int memory values are bound
6257 to reference parameters. */
6258 if (TREE_CODE (value
) == NOP_EXPR
6259 && TREE_TYPE (value
) == integer_type_node
6260 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6261 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6262 < TYPE_PRECISION (TREE_TYPE (value
))))
6263 value
= TREE_OPERAND (value
, 0);
6265 /* If the argument has been promoted to double, get the unpromoted
6266 SFmode value. This is necessary for FMAX support, for example. */
6267 if (TREE_CODE (value
) == NOP_EXPR
6268 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6269 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6270 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6271 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6272 value
= TREE_OPERAND (value
, 0);
6274 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6275 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6276 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6277 if (cgen_insn
->regnums
[a
].reference_p
)
6279 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6280 enum machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6282 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6286 error ("argument %d of %qE must be in the range %d...%d",
6287 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6292 for (a
= 0; a
< first_arg
; a
++)
6294 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6297 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6300 /* Convert the arguments into a form suitable for the intrinsic.
6301 Report an error if this isn't possible. */
6302 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6304 a
= cgen_insn
->op_mapping
[opindex
];
6305 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6306 arg
[a
], unsigned_p
[a
]);
6307 if (op
[opindex
] == 0)
6309 mep_incompatible_arg (&idata
->operand
[opindex
],
6310 arg
[a
], a
+ 1 - first_arg
, fnname
);
6315 /* Emit the instruction. */
6316 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6317 op
[5], op
[6], op
[7], op
[8], op
[9]);
6319 if (GET_CODE (pat
) == SET
6320 && GET_CODE (SET_DEST (pat
)) == PC
6321 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6322 emit_jump_insn (pat
);
6326 /* Copy lvalues back to their final locations. */
6327 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6328 if (idata
->operand
[opindex
].constraint
[0] == '=')
6330 a
= cgen_insn
->op_mapping
[opindex
];
6333 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6334 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6335 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6339 /* First convert the operand to the right mode, then copy it
6340 into the destination. Doing the conversion as a separate
6341 step (rather than using convert_move) means that we can
6342 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6343 refer to the same register. */
6344 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6345 op
[opindex
], unsigned_p
[a
]);
6346 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6347 emit_move_insn (arg
[a
], op
[opindex
]);
6352 if (first_arg
> 0 && target
&& target
!= op
[0])
6354 emit_move_insn (target
, op
[0]);
6361 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED
)
6366 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6367 a global register. */
6370 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
6378 switch (GET_CODE (x
))
6381 if (REG_P (SUBREG_REG (x
)))
6383 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6384 && global_regs
[subreg_regno (x
)])
6392 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6406 /* A non-constant call might use a global register. */
6416 /* Returns nonzero if X mentions a global register. */
6419 global_reg_mentioned_p (rtx x
)
6425 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6427 x
= CALL_INSN_FUNCTION_USAGE (x
);
6435 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
6437 /* Scheduling hooks for VLIW mode.
6439 Conceptually this is very simple: we have a two-pack architecture
6440 that takes one core insn and one coprocessor insn to make up either
6441 a 32- or 64-bit instruction word (depending on the option bit set in
6442 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6443 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6444 and one 48-bit cop insn or two 32-bit core/cop insns.
6446 In practice, instruction selection will be a bear. Consider in
6447 VL64 mode the following insns
6452 these cannot pack, since the add is a 16-bit core insn and cmov
6453 is a 32-bit cop insn. However,
6458 packs just fine. For good VLIW code generation in VL64 mode, we
6459 will have to have 32-bit alternatives for many of the common core
6460 insns. Not implemented. */
6463 mep_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
6467 if (REG_NOTE_KIND (link
) != 0)
6469 /* See whether INSN and DEP_INSN are intrinsics that set the same
6470 hard register. If so, it is more important to free up DEP_INSN
6471 than it is to free up INSN.
6473 Note that intrinsics like mep_mulr are handled differently from
6474 the equivalent mep.md patterns. In mep.md, if we don't care
6475 about the value of $lo and $hi, the pattern will just clobber
6476 the registers, not set them. Since clobbers don't count as
6477 output dependencies, it is often possible to reorder two mulrs,
6480 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6481 so any pair of mep_mulr()s will be inter-dependent. We should
6482 therefore give the first mep_mulr() a higher priority. */
6483 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6484 && global_reg_mentioned_p (PATTERN (insn
))
6485 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6488 /* If the dependence is an anti or output dependence, assume it
6493 /* If we can't recognize the insns, we can't really do anything. */
6494 if (recog_memoized (dep_insn
) < 0)
6497 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6498 attribute instead. */
6501 cost_specified
= get_attr_latency (dep_insn
);
6502 if (cost_specified
!= 0)
6503 return cost_specified
;
6509 /* ??? We don't properly compute the length of a load/store insn,
6510 taking into account the addressing mode. */
6513 mep_issue_rate (void)
6515 return TARGET_IVC2
? 3 : 2;
6518 /* Return true if function DECL was declared with the vliw attribute. */
6521 mep_vliw_function_p (tree decl
)
6523 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6527 mep_find_ready_insn (rtx
*ready
, int nready
, enum attr_slot slot
, int length
)
6531 for (i
= nready
- 1; i
>= 0; --i
)
6533 rtx insn
= ready
[i
];
6534 if (recog_memoized (insn
) >= 0
6535 && get_attr_slot (insn
) == slot
6536 && get_attr_length (insn
) == length
)
6544 mep_move_ready_insn (rtx
*ready
, int nready
, rtx insn
)
6548 for (i
= 0; i
< nready
; ++i
)
6549 if (ready
[i
] == insn
)
6551 for (; i
< nready
- 1; ++i
)
6552 ready
[i
] = ready
[i
+ 1];
6561 mep_print_sched_insn (FILE *dump
, rtx insn
)
6563 const char *slots
= "none";
6564 const char *name
= NULL
;
6568 if (GET_CODE (PATTERN (insn
)) == SET
6569 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6571 switch (get_attr_slots (insn
))
6573 case SLOTS_CORE
: slots
= "core"; break;
6574 case SLOTS_C3
: slots
= "c3"; break;
6575 case SLOTS_P0
: slots
= "p0"; break;
6576 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6577 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6578 case SLOTS_P0S
: slots
= "p0s"; break;
6579 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6580 case SLOTS_P1
: slots
= "p1"; break;
6582 sprintf(buf
, "%d", get_attr_slots (insn
));
6587 if (GET_CODE (PATTERN (insn
)) == USE
)
6590 code
= INSN_CODE (insn
);
6592 name
= get_insn_name (code
);
6597 "insn %4d %4d %8s %s\n",
6605 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6606 int sched_verbose ATTRIBUTE_UNUSED
, rtx
*ready
,
6607 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6609 int nready
= *pnready
;
6610 rtx core_insn
, cop_insn
;
6613 if (dump
&& sched_verbose
> 1)
6615 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6616 for (i
=0; i
<nready
; i
++)
6617 mep_print_sched_insn (dump
, ready
[i
]);
6618 fprintf (dump
, "\n");
6621 if (!mep_vliw_function_p (cfun
->decl
))
6626 /* IVC2 uses a DFA to determine what's ready and what's not. */
6630 /* We can issue either a core or coprocessor instruction.
6631 Look for a matched pair of insns to reorder. If we don't
6632 find any, don't second-guess the scheduler's priorities. */
6634 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6635 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6636 TARGET_OPT_VL64
? 6 : 2)))
6638 else if (TARGET_OPT_VL64
6639 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6640 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6643 /* We didn't find a pair. Issue the single insn at the head
6644 of the ready list. */
6647 /* Reorder the two insns first. */
6648 mep_move_ready_insn (ready
, nready
, core_insn
);
6649 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6653 /* A for_each_rtx callback. Return true if *X is a register that is
6654 set by insn PREV. */
6657 mep_store_find_set (rtx
*x
, void *prev
)
6659 return REG_P (*x
) && reg_set_p (*x
, (const_rtx
) prev
);
6662 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6663 not the containing insn. */
6666 mep_store_data_bypass_1 (rtx prev
, rtx pat
)
6668 /* Cope with intrinsics like swcpa. */
6669 if (GET_CODE (pat
) == PARALLEL
)
6673 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6674 if (mep_store_data_bypass_p (prev
, XVECEXP (pat
, 0, i
)))
6680 /* Check for some sort of store. */
6681 if (GET_CODE (pat
) != SET
6682 || GET_CODE (SET_DEST (pat
)) != MEM
)
6685 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6686 The first operand to the unspec is the store data and the other operands
6687 are used to calculate the address. */
6688 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6693 src
= SET_SRC (pat
);
6694 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6695 if (for_each_rtx (&XVECEXP (src
, 0, i
), mep_store_find_set
, prev
))
6701 /* Otherwise just check that PREV doesn't modify any register mentioned
6702 in the memory destination. */
6703 return !for_each_rtx (&SET_DEST (pat
), mep_store_find_set
, prev
);
6706 /* Return true if INSN is a store instruction and if the store address
6707 has no true dependence on PREV. */
6710 mep_store_data_bypass_p (rtx prev
, rtx insn
)
6712 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6715 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6716 is a register other than LO or HI and if PREV sets *X. */
6719 mep_mul_hilo_bypass_1 (rtx
*x
, void *prev
)
6722 && REGNO (*x
) != LO_REGNO
6723 && REGNO (*x
) != HI_REGNO
6724 && reg_set_p (*x
, (const_rtx
) prev
));
6727 /* Return true if, apart from HI/LO, there are no true dependencies
6728 between multiplication instructions PREV and INSN. */
6731 mep_mul_hilo_bypass_p (rtx prev
, rtx insn
)
6735 pat
= PATTERN (insn
);
6736 if (GET_CODE (pat
) == PARALLEL
)
6737 pat
= XVECEXP (pat
, 0, 0);
6738 return (GET_CODE (pat
) == SET
6739 && !for_each_rtx (&SET_SRC (pat
), mep_mul_hilo_bypass_1
, prev
));
6742 /* Return true if INSN is an ldc instruction that issues to the
6743 MeP-h1 integer pipeline. This is true for instructions that
6744 read from PSW, LP, SAR, HI and LO. */
6747 mep_ipipe_ldc_p (rtx insn
)
6751 pat
= PATTERN (insn
);
6753 /* Cope with instrinsics that set both a hard register and its shadow.
6754 The set of the hard register comes first. */
6755 if (GET_CODE (pat
) == PARALLEL
)
6756 pat
= XVECEXP (pat
, 0, 0);
6758 if (GET_CODE (pat
) == SET
)
6760 src
= SET_SRC (pat
);
6762 /* Cope with intrinsics. The first operand to the unspec is
6763 the source register. */
6764 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6765 src
= XVECEXP (src
, 0, 0);
6768 switch (REGNO (src
))
6781 /* Create a VLIW bundle from core instruction CORE and coprocessor
6782 instruction COP. COP always satisfies INSN_P, but CORE can be
6783 either a new pattern or an existing instruction.
6785 Emit the bundle in place of COP and return it. */
6788 mep_make_bundle (rtx core
, rtx cop
)
6792 /* If CORE is an existing instruction, remove it, otherwise put
6793 the new pattern in an INSN harness. */
6797 core
= make_insn_raw (core
);
6799 /* Generate the bundle sequence and replace COP with it. */
6800 insn
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core
, cop
));
6801 insn
= emit_insn_after (insn
, cop
);
6804 /* Set up the links of the insns inside the SEQUENCE. */
6805 PREV_INSN (core
) = PREV_INSN (insn
);
6806 NEXT_INSN (core
) = cop
;
6807 PREV_INSN (cop
) = core
;
6808 NEXT_INSN (cop
) = NEXT_INSN (insn
);
6810 /* Set the VLIW flag for the coprocessor instruction. */
6811 PUT_MODE (core
, VOIDmode
);
6812 PUT_MODE (cop
, BImode
);
6814 /* Derive a location for the bundle. Individual instructions cannot
6815 have their own location because there can be no assembler labels
6816 between CORE and COP. */
6817 INSN_LOCATION (insn
) = INSN_LOCATION (INSN_LOCATION (core
) ? core
: cop
);
6818 INSN_LOCATION (core
) = 0;
6819 INSN_LOCATION (cop
) = 0;
6824 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6827 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6829 rtx
* pinsn
= (rtx
*) data
;
6831 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6835 /* Return true if anything in insn X is (anti,output,true) dependent on
6836 anything in insn Y. */
6839 mep_insn_dependent_p (rtx x
, rtx y
)
6843 gcc_assert (INSN_P (x
));
6844 gcc_assert (INSN_P (y
));
6847 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6848 if (tmp
== NULL_RTX
)
6852 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6853 if (tmp
== NULL_RTX
)
6860 core_insn_p (rtx insn
)
6862 if (GET_CODE (PATTERN (insn
)) == USE
)
6864 if (get_attr_slot (insn
) == SLOT_CORE
)
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks the start of a new bundle.  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of its current position and re-linking it just before
	     FIRST.  The chain is edited by hand, so the order of the
	     pointer updates below matters.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Anything else (label, jump, call) ends the current bundle.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan to the end of this bundle (consecutive VOIDmode
		 nonjump insns), remembering the last core insn seen.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop above does not test the final insn.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* The moved core insn now heads the bundle.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn can only be bundled with the immediately
	     preceding core insn when the pair exactly fills the VLIW
	     word and the two insns are independent; otherwise pad
	     with the appropriately sized nop.  */
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  rtx newop[10];
  int i;

  /* Fail if the intrinsic has no insn in the current configuration.  */
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  for (i = 0; i < idata->n_operands; i++)
    {
      /* Coerce each operand to the mode the insn pattern wants, then
	 make sure the result still satisfies the operand predicate.  */
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* genfun takes a fixed maximal argument list; unused trailing slots
     are simply ignored by patterns with fewer operands.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.  */

/* NOTE(review): both parameters are ATTRIBUTE_UNUSED, so this is a
   stub; the body is not visible in this chunk — confirm against the
   original source that it simply reports failure.  */
bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.  */

/* NOTE(review): all parameters are ATTRIBUTE_UNUSED, so this is a
   stub; the body is not visible in this chunk — confirm against the
   original source that it simply reports failure.  */
bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
/* Implement TARGET_RTX_COSTS.  Store the cost of X in *TOTAL and
   return true when this function has handled CODE; return false to
   let the generic costing code take over.  */
static bool
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int *total,
	      bool ATTRIBUTE_UNUSED speed_t)
{
  switch (code)
    {
    case CONST_INT:
      /* Small constants fit in short-form immediates and are free;
	 16-bit-ish constants cost one insn; anything larger needs a
	 multi-insn sequence.  */
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
	*total = 0;
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
	*total = 1;
      else
	*total = 3;
      return true;

    case SYMBOL_REF:
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Multiply by a constant is slightly more expensive than a
	 register-register multiply.  */
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
		? COSTS_N_INSNS (3)
		: COSTS_N_INSNS (2));
      return true;
    }
  return false;
}
/* Implement TARGET_ADDRESS_COST.  All addressing modes are treated as
   equally cheap, so simply return a constant cost of 1.  */
static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections: based/tiny/far data sections, their read-only variants,
   and the VLIW vs. core text sections (".vliw"/".core" directives tell
   the assembler which instruction set each section uses).

   NOTE(review): the left-hand-side section variable names were lost in
   this extraction; the names below follow the assembler section names
   ("\t.section ..." strings, which ARE visible) — confirm against the
   original source.  */
static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");
}
/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides one hook in TARGET_INITIALIZER with the MeP-specific
   implementation defined earlier in this file.  */

/* Assembly output and attribute handling.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p

/* Section placement.  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section

/* Builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin

/* Scheduling.  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder

/* Symbol encoding and section selection.  */
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info

/* Calls, costs and machine-dependent reorg.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG	mep_reorg

/* Argument passing and varargs.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE	mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG		mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE	mep_function_arg_advance

/* Miscellaneous target properties.  */
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE	mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD	mep_narrow_volatile_bitfield
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p

/* Instantiate the target hook vector with the overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;