1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
46 #include "diagnostic-core.h"
47 #include "integrate.h"
49 #include "target-def.h"
50 #include "langhooks.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
63 + Function args in registers
64 + Handle pipeline hazards
67 + Machine-dependent Reorg
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
80 c - cb (control bus) */
82 struct GTY(()) machine_function
84 int mep_frame_pointer_needed
;
92 /* Records __builtin_return address. */
96 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
97 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
99 /* 2 if the current function has an interrupt attribute, 1 if not, 0
100 if unknown. This is here because resource.c uses EPILOGUE_USES
102 int interrupt_handler
;
104 /* Likewise, for disinterrupt attribute. */
105 int disable_interrupts
;
107 /* Number of doloop tags used so far. */
110 /* True if the last tag was allocated to a doloop_end. */
111 bool doloop_tag_from_end
;
113 /* True if reload changes $TP. */
114 bool reload_changes_tp
;
116 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
117 We only set this if the function is an interrupt handler. */
118 int asms_without_operands
;
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
124 static GTY(()) section
* based_section
;
125 static GTY(()) section
* tinybss_section
;
126 static GTY(()) section
* far_section
;
127 static GTY(()) section
* farbss_section
;
128 static GTY(()) section
* frodata_section
;
129 static GTY(()) section
* srodata_section
;
131 static GTY(()) section
* vtext_section
;
132 static GTY(()) section
* vftext_section
;
133 static GTY(()) section
* ftext_section
;
135 static void mep_set_leaf_registers (int);
136 static bool symbol_p (rtx
);
137 static bool symbolref_p (rtx
);
138 static void encode_pattern_1 (rtx
);
139 static void encode_pattern (rtx
);
140 static bool const_in_range (rtx
, int, int);
141 static void mep_rewrite_mult (rtx
, rtx
);
142 static void mep_rewrite_mulsi3 (rtx
, rtx
, rtx
, rtx
);
143 static void mep_rewrite_maddsi3 (rtx
, rtx
, rtx
, rtx
, rtx
);
144 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx
, bool);
145 static bool move_needs_splitting (rtx
, rtx
, enum machine_mode
);
146 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
147 static bool mep_nongeneral_reg (rtx
);
148 static bool mep_general_copro_reg (rtx
);
149 static bool mep_nonregister (rtx
);
150 static struct machine_function
* mep_init_machine_status (void);
151 static rtx
mep_tp_rtx (void);
152 static rtx
mep_gp_rtx (void);
153 static bool mep_interrupt_p (void);
154 static bool mep_disinterrupt_p (void);
155 static bool mep_reg_set_p (rtx
, rtx
);
156 static bool mep_reg_set_in_function (int);
157 static bool mep_interrupt_saved_reg (int);
158 static bool mep_call_saves_register (int);
160 static void add_constant (int, int, int, int);
161 static rtx
maybe_dead_move (rtx
, rtx
, bool);
162 static void mep_reload_pointer (int, const char *);
163 static void mep_start_function (FILE *, HOST_WIDE_INT
);
164 static bool mep_function_ok_for_sibcall (tree
, tree
);
165 static int unique_bit_in (HOST_WIDE_INT
);
166 static int bit_size_for_clip (HOST_WIDE_INT
);
167 static int bytesize (const_tree
, enum machine_mode
);
168 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
169 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
170 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
171 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
172 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
173 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
174 static bool mep_function_attribute_inlinable_p (const_tree
);
175 static bool mep_can_inline_p (tree
, tree
);
176 static bool mep_lookup_pragma_disinterrupt (const char *);
177 static int mep_multiple_address_regions (tree
, bool);
178 static int mep_attrlist_to_encoding (tree
, tree
);
179 static void mep_insert_attributes (tree
, tree
*);
180 static void mep_encode_section_info (tree
, rtx
, int);
181 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
182 static void mep_unique_section (tree
, int);
183 static unsigned int mep_section_type_flags (tree
, const char *, int);
184 static void mep_asm_named_section (const char *, unsigned int, tree
);
185 static bool mep_mentioned_p (rtx
, rtx
, int);
186 static void mep_reorg_regmove (rtx
);
187 static rtx
mep_insert_repeat_label_last (rtx
, rtx
, bool, bool);
188 static void mep_reorg_repeat (rtx
);
189 static bool mep_invertable_branch_p (rtx
);
190 static void mep_invert_branch (rtx
, rtx
);
191 static void mep_reorg_erepeat (rtx
);
192 static void mep_jmp_return_reorg (rtx
);
193 static void mep_reorg_addcombine (rtx
);
194 static void mep_reorg (void);
195 static void mep_init_intrinsics (void);
196 static void mep_init_builtins (void);
197 static void mep_intrinsic_unavailable (int);
198 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
199 static bool mep_get_move_insn (int, const struct cgen_insn
**);
200 static rtx
mep_convert_arg (enum machine_mode
, rtx
);
201 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
202 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
203 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
204 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
205 static int mep_adjust_cost (rtx
, rtx
, rtx
, int);
206 static int mep_issue_rate (void);
207 static rtx
mep_find_ready_insn (rtx
*, int, enum attr_slot
, int);
208 static void mep_move_ready_insn (rtx
*, int, rtx
);
209 static int mep_sched_reorder (FILE *, int, rtx
*, int *, int);
210 static rtx
mep_make_bundle (rtx
, rtx
);
211 static void mep_bundle_insns (rtx
);
212 static bool mep_rtx_cost (rtx
, int, int, int *, bool);
213 static int mep_address_cost (rtx
, bool);
214 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS
*, enum machine_mode
,
216 static bool mep_pass_by_reference (CUMULATIVE_ARGS
* cum
, enum machine_mode
,
218 static rtx
mep_function_arg (CUMULATIVE_ARGS
*, enum machine_mode
,
220 static void mep_function_arg_advance (CUMULATIVE_ARGS
*, enum machine_mode
,
222 static bool mep_vector_mode_supported_p (enum machine_mode
);
223 static bool mep_handle_option (size_t, const char *, int);
224 static rtx
mep_allocate_initial_value (rtx
);
225 static void mep_asm_init_sections (void);
226 static int mep_comp_type_attributes (const_tree
, const_tree
);
227 static bool mep_narrow_volatile_bitfield (void);
228 static rtx
mep_expand_builtin_saveregs (void);
229 static tree
mep_build_builtin_va_list (void);
230 static void mep_expand_va_start (tree
, rtx
);
231 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
232 static bool mep_can_eliminate (const int, const int);
233 static void mep_conditional_register_usage (void);
234 static void mep_trampoline_init (rtx
, tree
, rtx
);
236 #define WANT_GCC_DEFINITIONS
237 #include "mep-intrin.h"
238 #undef WANT_GCC_DEFINITIONS
241 /* Command Line Option Support. */
243 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
245 /* True if we can use cmov instructions to move values back and forth
246 between core and coprocessor registers. */
247 bool mep_have_core_copro_moves_p
;
249 /* True if we can use cmov instructions (or a work-alike) to move
250 values between coprocessor registers. */
251 bool mep_have_copro_copro_moves_p
;
253 /* A table of all coprocessor instructions that can act like
254 a coprocessor-to-coprocessor cmov. */
255 static const int mep_cmov_insns
[] = {
268 static int option_mtiny_specified
= 0;
272 mep_set_leaf_registers (int enable
)
276 if (mep_leaf_registers
[0] != enable
)
277 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
278 mep_leaf_registers
[i
] = enable
;
282 mep_conditional_register_usage (void)
286 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
288 fixed_regs
[HI_REGNO
] = 1;
289 fixed_regs
[LO_REGNO
] = 1;
290 call_used_regs
[HI_REGNO
] = 1;
291 call_used_regs
[LO_REGNO
] = 1;
294 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
299 static const struct default_options mep_option_optimization_table
[] =
301 /* The first scheduling pass often increases register pressure and
302 tends to result in more spill code. Only run it when
303 specifically asked. */
304 { OPT_LEVELS_ALL
, OPT_fschedule_insns
, NULL
, 0 },
306 /* Using $fp doesn't gain us much, even when debugging is
308 { OPT_LEVELS_ALL
, OPT_fomit_frame_pointer
, NULL
, 1 },
310 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
314 mep_option_override (void)
317 warning (OPT_fpic
, "-fpic is not supported");
319 warning (OPT_fPIC
, "-fPIC is not supported");
320 if (TARGET_S
&& TARGET_M
)
321 error ("only one of -ms and -mm may be given");
322 if (TARGET_S
&& TARGET_L
)
323 error ("only one of -ms and -ml may be given");
324 if (TARGET_M
&& TARGET_L
)
325 error ("only one of -mm and -ml may be given");
326 if (TARGET_S
&& option_mtiny_specified
)
327 error ("only one of -ms and -mtiny= may be given");
328 if (TARGET_M
&& option_mtiny_specified
)
329 error ("only one of -mm and -mtiny= may be given");
330 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
331 warning (0, "-mclip currently has no effect without -mminmax");
333 if (mep_const_section
)
335 if (strcmp (mep_const_section
, "tiny") != 0
336 && strcmp (mep_const_section
, "near") != 0
337 && strcmp (mep_const_section
, "far") != 0)
338 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
342 mep_tiny_cutoff
= 65536;
345 if (TARGET_L
&& ! option_mtiny_specified
)
348 if (TARGET_64BIT_CR_REGS
)
349 flag_split_wide_types
= 0;
351 init_machine_status
= mep_init_machine_status
;
352 mep_init_intrinsics ();
355 /* Pattern Support - constraints, predicates, expanders. */
357 /* MEP has very few instructions that can refer to the span of
358 addresses used by symbols, so it's common to check for them. */
363 int c
= GET_CODE (x
);
365 return (c
== CONST_INT
375 if (GET_CODE (x
) != MEM
)
378 c
= GET_CODE (XEXP (x
, 0));
379 return (c
== CONST_INT
384 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
386 #define GEN_REG(R, STRICT) \
389 && ((R) == ARG_POINTER_REGNUM \
390 || (R) >= FIRST_PSEUDO_REGISTER)))
392 static char pattern
[12], *patternp
;
393 static GTY(()) rtx patternr
[12];
394 #define RTX_IS(x) (strcmp (pattern, x) == 0)
397 encode_pattern_1 (rtx x
)
401 if (patternp
== pattern
+ sizeof (pattern
) - 2)
407 patternr
[patternp
-pattern
] = x
;
409 switch (GET_CODE (x
))
417 encode_pattern_1 (XEXP(x
, 0));
421 encode_pattern_1 (XEXP(x
, 0));
422 encode_pattern_1 (XEXP(x
, 1));
426 encode_pattern_1 (XEXP(x
, 0));
427 encode_pattern_1 (XEXP(x
, 1));
431 encode_pattern_1 (XEXP(x
, 0));
445 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
446 for (i
=0; i
<XVECLEN (x
, 0); i
++)
447 encode_pattern_1 (XVECEXP (x
, 0, i
));
455 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
464 encode_pattern (rtx x
)
467 encode_pattern_1 (x
);
472 mep_section_tag (rtx x
)
478 switch (GET_CODE (x
))
485 x
= XVECEXP (x
, 0, 0);
488 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
497 if (GET_CODE (x
) != SYMBOL_REF
)
500 if (name
[0] == '@' && name
[2] == '.')
502 if (name
[1] == 'i' || name
[1] == 'I')
505 return 'f'; /* near */
506 return 'n'; /* far */
514 mep_regno_reg_class (int regno
)
518 case SP_REGNO
: return SP_REGS
;
519 case TP_REGNO
: return TP_REGS
;
520 case GP_REGNO
: return GP_REGS
;
521 case 0: return R0_REGS
;
522 case HI_REGNO
: return HI_REGS
;
523 case LO_REGNO
: return LO_REGS
;
524 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
527 if (GR_REGNO_P (regno
))
528 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
529 if (CONTROL_REGNO_P (regno
))
532 if (CR_REGNO_P (regno
))
536 /* Search for the register amongst user-defined subclasses of
537 the coprocessor registers. */
538 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
540 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
542 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
544 enum reg_class sub
= reg_class_subclasses
[i
][j
];
546 if (sub
== LIM_REG_CLASSES
)
548 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
553 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
556 if (CCR_REGNO_P (regno
))
559 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
565 mep_reg_class_from_constraint (int c
, const char *str
)
582 return LOADABLE_CR_REGS
;
584 return mep_have_copro_copro_moves_p
? CR_REGS
: NO_REGS
;
586 return mep_have_core_copro_moves_p
? CR_REGS
: NO_REGS
;
613 enum reg_class which
= c
- 'A' + USER0_REGS
;
614 return (reg_class_size
[which
] > 0 ? which
: NO_REGS
);
623 mep_const_ok_for_letter_p (HOST_WIDE_INT value
, int c
)
627 case 'I': return value
>= -32768 && value
< 32768;
628 case 'J': return value
>= 0 && value
< 65536;
629 case 'K': return value
>= 0 && value
< 0x01000000;
630 case 'L': return value
>= -32 && value
< 32;
631 case 'M': return value
>= 0 && value
< 32;
632 case 'N': return value
>= 0 && value
< 16;
636 return value
>= -2147483647-1 && value
<= 2147483647;
643 mep_extra_constraint (rtx value
, int c
)
645 encode_pattern (value
);
650 /* For near symbols, like what call uses. */
651 if (GET_CODE (value
) == REG
)
653 return mep_call_address_operand (value
, GET_MODE (value
));
656 /* For signed 8-bit immediates. */
657 return (GET_CODE (value
) == CONST_INT
658 && INTVAL (value
) >= -128
659 && INTVAL (value
) <= 127);
662 /* For tp/gp relative symbol values. */
663 return (RTX_IS ("u3s") || RTX_IS ("u2s")
664 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
667 /* Non-absolute memories. */
668 return GET_CODE (value
) == MEM
&& ! CONSTANT_P (XEXP (value
, 0));
672 return RTX_IS ("Hs");
675 /* Register indirect. */
676 return RTX_IS ("mr");
679 return mep_section_tag (value
) == 'c' && RTX_IS ("ms");
690 const_in_range (rtx x
, int minv
, int maxv
)
692 return (GET_CODE (x
) == CONST_INT
693 && INTVAL (x
) >= minv
694 && INTVAL (x
) <= maxv
);
697 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
698 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
699 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
700 at the end of the insn stream. */
703 mep_mulr_source (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
705 if (rtx_equal_p (dest
, src1
))
707 else if (rtx_equal_p (dest
, src2
))
712 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
714 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
719 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
720 Change the last element of PATTERN from (clobber (scratch:SI))
721 to (clobber (reg:SI HI_REGNO)). */
724 mep_rewrite_mult (rtx insn
, rtx pattern
)
728 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
729 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
730 PATTERN (insn
) = pattern
;
731 INSN_CODE (insn
) = -1;
734 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
735 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
736 store the result in DEST if nonnull. */
739 mep_rewrite_mulsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
743 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
745 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
746 mep_mulr_source (insn
, dest
, src1
, src2
));
748 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
749 mep_rewrite_mult (insn
, pattern
);
752 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
753 SRC3 into $lo, then use either madd or maddr. The move into $lo will
754 be deleted by a peephole2 if SRC3 is already in $lo. */
757 mep_rewrite_maddsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
761 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
762 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
764 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
765 mep_mulr_source (insn
, dest
, src1
, src2
),
768 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
769 mep_rewrite_mult (insn
, pattern
);
772 /* Return true if $lo has the same value as integer register GPR when
773 instruction INSN is reached. If necessary, rewrite the instruction
774 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
775 rtx for (reg:SI LO_REGNO).
777 This function is intended to be used by the peephole2 pass. Since
778 that pass goes from the end of a basic block to the beginning, and
779 propagates liveness information on the way, there is no need to
780 update register notes here.
782 If GPR_DEAD_P is true on entry, and this function returns true,
783 then the caller will replace _every_ use of GPR in and after INSN
784 with LO. This means that if the instruction that sets $lo is a
785 mulr- or maddr-type instruction, we can rewrite it to use mul or
786 madd instead. In combination with the copy progagation pass,
787 this allows us to replace sequences like:
796 if GPR is no longer used. */
799 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
803 insn
= PREV_INSN (insn
);
805 switch (recog_memoized (insn
))
807 case CODE_FOR_mulsi3_1
:
809 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
811 mep_rewrite_mulsi3 (insn
,
812 gpr_dead_p
? NULL
: recog_data
.operand
[0],
813 recog_data
.operand
[1],
814 recog_data
.operand
[2]);
819 case CODE_FOR_maddsi3
:
821 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
823 mep_rewrite_maddsi3 (insn
,
824 gpr_dead_p
? NULL
: recog_data
.operand
[0],
825 recog_data
.operand
[1],
826 recog_data
.operand
[2],
827 recog_data
.operand
[3]);
832 case CODE_FOR_mulsi3r
:
833 case CODE_FOR_maddsi3r
:
835 return rtx_equal_p (recog_data
.operand
[1], gpr
);
838 if (reg_set_p (lo
, insn
)
839 || reg_set_p (gpr
, insn
)
840 || volatile_insn_p (PATTERN (insn
)))
843 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
848 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
852 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
855 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
857 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
862 /* Return true if SET can be turned into a post-modify load or store
863 that adds OFFSET to GPR. In other words, return true if SET can be
866 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
868 It's OK to change SET to an equivalent operation in order to
872 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
875 unsigned int reg_bytes
, mem_bytes
;
876 enum machine_mode reg_mode
, mem_mode
;
878 /* Only simple SETs can be converted. */
879 if (GET_CODE (set
) != SET
)
882 /* Point REG to what we hope will be the register side of the set and
883 MEM to what we hope will be the memory side. */
884 if (GET_CODE (SET_DEST (set
)) == MEM
)
886 mem
= &SET_DEST (set
);
887 reg
= &SET_SRC (set
);
891 reg
= &SET_DEST (set
);
892 mem
= &SET_SRC (set
);
893 if (GET_CODE (*mem
) == SIGN_EXTEND
)
894 mem
= &XEXP (*mem
, 0);
897 /* Check that *REG is a suitable coprocessor register. */
898 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
901 /* Check that *MEM is a suitable memory reference. */
902 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
905 /* Get the number of bytes in each operand. */
906 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
907 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
909 /* Check that OFFSET is suitably aligned. */
910 if (INTVAL (offset
) & (mem_bytes
- 1))
913 /* Convert *MEM to a normal integer mode. */
914 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
915 *mem
= change_address (*mem
, mem_mode
, NULL
);
917 /* Adjust *REG as well. */
918 *reg
= shallow_copy_rtx (*reg
);
919 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
921 /* SET is a subword load. Convert it to an explicit extension. */
922 PUT_MODE (*reg
, SImode
);
923 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
927 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
928 PUT_MODE (*reg
, reg_mode
);
933 /* Return the effect of frame-related instruction INSN. */
936 mep_frame_expr (rtx insn
)
940 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
941 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
942 RTX_FRAME_RELATED_P (expr
) = 1;
946 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
947 new pattern in INSN1; INSN2 will be deleted by the caller. */
950 mep_make_parallel (rtx insn1
, rtx insn2
)
954 if (RTX_FRAME_RELATED_P (insn2
))
956 expr
= mep_frame_expr (insn2
);
957 if (RTX_FRAME_RELATED_P (insn1
))
958 expr
= gen_rtx_SEQUENCE (VOIDmode
,
959 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
960 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
961 RTX_FRAME_RELATED_P (insn1
) = 1;
964 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
965 gen_rtvec (2, PATTERN (insn1
),
967 INSN_CODE (insn1
) = -1;
970 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
971 the basic block to see if any previous load or store instruction can
972 be persuaded to do SET_INSN as a side-effect. Return true if so. */
975 mep_use_post_modify_p_1 (rtx set_insn
, rtx reg
, rtx offset
)
982 insn
= PREV_INSN (insn
);
985 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
987 mep_make_parallel (insn
, set_insn
);
991 if (reg_set_p (reg
, insn
)
992 || reg_referenced_p (reg
, PATTERN (insn
))
993 || volatile_insn_p (PATTERN (insn
)))
997 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
1001 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1004 mep_use_post_modify_p (rtx insn
, rtx reg
, rtx offset
)
1006 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
1007 extract_insn (insn
);
1012 mep_allow_clip (rtx ux
, rtx lx
, int s
)
1014 HOST_WIDE_INT u
= INTVAL (ux
);
1015 HOST_WIDE_INT l
= INTVAL (lx
);
1018 if (!TARGET_OPT_CLIP
)
1023 for (i
= 0; i
< 30; i
++)
1024 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
1025 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
1033 for (i
= 0; i
< 30; i
++)
1034 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
1041 mep_bit_position_p (rtx x
, bool looking_for
)
1043 if (GET_CODE (x
) != CONST_INT
)
1045 switch ((int) INTVAL(x
) & 0xff)
1047 case 0x01: case 0x02: case 0x04: case 0x08:
1048 case 0x10: case 0x20: case 0x40: case 0x80:
1050 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1051 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1052 return !looking_for
;
1058 move_needs_splitting (rtx dest
, rtx src
,
1059 enum machine_mode mode ATTRIBUTE_UNUSED
)
1061 int s
= mep_section_tag (src
);
1065 if (GET_CODE (src
) == CONST
1066 || GET_CODE (src
) == MEM
)
1067 src
= XEXP (src
, 0);
1068 else if (GET_CODE (src
) == SYMBOL_REF
1069 || GET_CODE (src
) == LABEL_REF
1070 || GET_CODE (src
) == PLUS
)
1076 || (GET_CODE (src
) == PLUS
1077 && GET_CODE (XEXP (src
, 1)) == CONST_INT
1078 && (INTVAL (XEXP (src
, 1)) < -65536
1079 || INTVAL (XEXP (src
, 1)) > 0xffffff))
1080 || (GET_CODE (dest
) == REG
1081 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1087 mep_split_mov (rtx
*operands
, int symbolic
)
1091 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1096 if (GET_CODE (operands
[1]) != CONST_INT
)
1099 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1100 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1101 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1104 if (((!reload_completed
&& !reload_in_progress
)
1105 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1106 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1112 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1113 it to one specific value. So the insn chosen depends on whether
1114 the source and destination modes match. */
1117 mep_vliw_mode_match (rtx tgt
)
1119 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1120 bool tgt_vliw
= INTVAL (tgt
);
1122 return src_vliw
== tgt_vliw
;
1125 /* Like the above, but also test for near/far mismatches. */
1128 mep_vliw_jmp_match (rtx tgt
)
1130 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1131 bool tgt_vliw
= INTVAL (tgt
);
1133 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1136 return src_vliw
== tgt_vliw
;
1140 mep_multi_slot (rtx x
)
1142 return get_attr_slot (x
) == SLOT_MULTI
;
1147 mep_legitimate_constant_p (rtx x
)
1149 /* We can't convert symbol values to gp- or tp-rel values after
1150 reload, as reload might have used $gp or $tp for other
1152 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1154 char e
= mep_section_tag (x
);
1155 return (e
!= 't' && e
!= 'b');
1160 /* Be careful not to use macros that need to be compiled one way for
1161 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1164 mep_legitimate_address (enum machine_mode mode
, rtx x
, int strict
)
1168 #define DEBUG_LEGIT 0
1170 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1174 if (GET_CODE (x
) == LO_SUM
1175 && GET_CODE (XEXP (x
, 0)) == REG
1176 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1177 && CONSTANT_P (XEXP (x
, 1)))
1179 if (GET_MODE_SIZE (mode
) > 4)
1181 /* We will end up splitting this, and lo_sums are not
1182 offsettable for us. */
1184 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1189 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1194 if (GET_CODE (x
) == REG
1195 && GEN_REG (REGNO (x
), strict
))
1198 fprintf (stderr
, " - yup, [reg]\n");
1203 if (GET_CODE (x
) == PLUS
1204 && GET_CODE (XEXP (x
, 0)) == REG
1205 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1206 && const_in_range (XEXP (x
, 1), -32768, 32767))
1209 fprintf (stderr
, " - yup, [reg+const]\n");
1214 if (GET_CODE (x
) == PLUS
1215 && GET_CODE (XEXP (x
, 0)) == REG
1216 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1217 && GET_CODE (XEXP (x
, 1)) == CONST
1218 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1219 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1220 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1221 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1224 fprintf (stderr
, " - yup, [reg+unspec]\n");
1229 the_tag
= mep_section_tag (x
);
1234 fprintf (stderr
, " - nope, [far]\n");
1239 if (mode
== VOIDmode
1240 && GET_CODE (x
) == SYMBOL_REF
)
1243 fprintf (stderr
, " - yup, call [symbol]\n");
1248 if ((mode
== SImode
|| mode
== SFmode
)
1250 && LEGITIMATE_CONSTANT_P (x
)
1251 && the_tag
!= 't' && the_tag
!= 'b')
1253 if (GET_CODE (x
) != CONST_INT
1254 || (INTVAL (x
) <= 0xfffff
1256 && (INTVAL (x
) % 4) == 0))
1259 fprintf (stderr
, " - yup, [const]\n");
1266 fprintf (stderr
, " - nope.\n");
1272 mep_legitimize_reload_address (rtx
*x
, enum machine_mode mode
, int opnum
,
1274 int ind_levels ATTRIBUTE_UNUSED
)
1276 enum reload_type type
= (enum reload_type
) type_i
;
1278 if (GET_CODE (*x
) == PLUS
1279 && GET_CODE (XEXP (*x
, 0)) == MEM
1280 && GET_CODE (XEXP (*x
, 1)) == REG
)
1282 /* GCC will by default copy the MEM into a REG, which results in
1283 an invalid address. For us, the best thing to do is move the
1284 whole expression to a REG. */
1285 push_reload (*x
, NULL_RTX
, x
, NULL
,
1286 GENERAL_REGS
, mode
, VOIDmode
,
1291 if (GET_CODE (*x
) == PLUS
1292 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1293 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1295 char e
= mep_section_tag (XEXP (*x
, 0));
1297 if (e
!= 't' && e
!= 'b')
1299 /* GCC thinks that (sym+const) is a valid address. Well,
1300 sometimes it is, this time it isn't. The best thing to
1301 do is reload the symbol to a register, since reg+int
1302 tends to work, and we can't just add the symbol and
1304 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1305 GENERAL_REGS
, mode
, VOIDmode
,
1314 mep_core_address_length (rtx insn
, int opn
)
1316 rtx set
= single_set (insn
);
1317 rtx mem
= XEXP (set
, opn
);
1318 rtx other
= XEXP (set
, 1-opn
);
1319 rtx addr
= XEXP (mem
, 0);
1321 if (register_operand (addr
, Pmode
))
1323 if (GET_CODE (addr
) == PLUS
)
1325 rtx addend
= XEXP (addr
, 1);
1327 gcc_assert (REG_P (XEXP (addr
, 0)));
1329 switch (REGNO (XEXP (addr
, 0)))
1331 case STACK_POINTER_REGNUM
:
1332 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1333 && mep_imm7a4_operand (addend
, VOIDmode
))
1338 gcc_assert (REG_P (other
));
1340 if (REGNO (other
) >= 8)
1343 if (GET_CODE (addend
) == CONST
1344 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1345 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1348 if (GET_CODE (addend
) == CONST_INT
1349 && INTVAL (addend
) >= 0
1350 && INTVAL (addend
) <= 127
1351 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1361 mep_cop_address_length (rtx insn
, int opn
)
1363 rtx set
= single_set (insn
);
1364 rtx mem
= XEXP (set
, opn
);
1365 rtx addr
= XEXP (mem
, 0);
1367 if (GET_CODE (mem
) != MEM
)
1369 if (register_operand (addr
, Pmode
))
1371 if (GET_CODE (addr
) == POST_INC
)
1377 #define DEBUG_EXPAND_MOV 0
1379 mep_expand_mov (rtx
*operands
, enum machine_mode mode
)
1384 int post_reload
= 0;
1386 tag
[0] = mep_section_tag (operands
[0]);
1387 tag
[1] = mep_section_tag (operands
[1]);
1389 if (!reload_in_progress
1390 && !reload_completed
1391 && GET_CODE (operands
[0]) != REG
1392 && GET_CODE (operands
[0]) != SUBREG
1393 && GET_CODE (operands
[1]) != REG
1394 && GET_CODE (operands
[1]) != SUBREG
)
1395 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1397 #if DEBUG_EXPAND_MOV
1398 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1399 reload_in_progress
|| reload_completed
);
1400 debug_rtx (operands
[0]);
1401 debug_rtx (operands
[1]);
1404 if (mode
== DImode
|| mode
== DFmode
)
1407 if (reload_in_progress
|| reload_completed
)
1411 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1412 cfun
->machine
->reload_changes_tp
= true;
1414 if (tag
[0] == 't' || tag
[1] == 't')
1416 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1417 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1420 if (tag
[0] == 'b' || tag
[1] == 'b')
1422 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1423 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1426 if (cfun
->machine
->reload_changes_tp
== true)
1433 if (symbol_p (operands
[1]))
1435 t
= mep_section_tag (operands
[1]);
1436 if (t
== 'b' || t
== 't')
1439 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1441 tpsym
= operands
[1];
1442 n
= gen_rtx_UNSPEC (mode
,
1443 gen_rtvec (1, operands
[1]),
1444 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1445 n
= gen_rtx_CONST (mode
, n
);
1447 else if (GET_CODE (operands
[1]) == CONST
1448 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1449 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1450 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1452 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1453 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1454 n
= gen_rtx_UNSPEC (mode
,
1455 gen_rtvec (1, tpsym
),
1456 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1457 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1458 n
= gen_rtx_CONST (mode
, n
);
1460 else if (GET_CODE (operands
[1]) == CONST
1461 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1465 error ("unusual TP-relative address");
1469 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1470 : mep_gp_rtx ()), n
);
1471 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1472 #if DEBUG_EXPAND_MOV
1473 fprintf(stderr
, "mep_expand_mov emitting ");
1480 for (i
=0; i
< 2; i
++)
1482 t
= mep_section_tag (operands
[i
]);
1483 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1488 sym
= XEXP (operands
[i
], 0);
1489 if (GET_CODE (sym
) == CONST
1490 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1491 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1504 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1505 n
= gen_rtx_CONST (Pmode
, n
);
1506 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1507 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1512 if ((GET_CODE (operands
[1]) != REG
1513 && MEP_CONTROL_REG (operands
[0]))
1514 || (GET_CODE (operands
[0]) != REG
1515 && MEP_CONTROL_REG (operands
[1])))
1518 #if DEBUG_EXPAND_MOV
1519 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1521 temp
= gen_reg_rtx (mode
);
1522 emit_move_insn (temp
, operands
[1]);
1526 if (symbolref_p (operands
[0])
1527 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1528 || (GET_MODE_SIZE (mode
) != 4)))
1532 gcc_assert (!reload_in_progress
&& !reload_completed
);
1534 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1535 operands
[0] = replace_equiv_address (operands
[0], temp
);
1536 emit_move_insn (operands
[0], operands
[1]);
1540 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1543 if (symbol_p (operands
[1])
1544 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1546 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1547 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1551 if (symbolref_p (operands
[1])
1552 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1556 if (reload_in_progress
|| reload_completed
)
1559 temp
= gen_reg_rtx (Pmode
);
1561 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1562 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1563 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1570 /* Cases where the pattern can't be used at all. */
1573 mep_mov_ok (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1577 #define DEBUG_MOV_OK 0
1579 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1580 mep_section_tag (operands
[1]));
1581 debug_rtx (operands
[0]);
1582 debug_rtx (operands
[1]);
1585 /* We want the movh patterns to get these. */
1586 if (GET_CODE (operands
[1]) == HIGH
)
1589 /* We can't store a register to a far variable without using a
1590 scratch register to hold the address. Using far variables should
1591 be split by mep_emit_mov anyway. */
1592 if (mep_section_tag (operands
[0]) == 'f'
1593 || mep_section_tag (operands
[1]) == 'f')
1596 fprintf (stderr
, " - no, f\n");
1600 i
= mep_section_tag (operands
[1]);
1601 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1602 /* These are supposed to be generated with adds of the appropriate
1603 register. During and after reload, however, we allow them to
1604 be accessed as normal symbols because adding a dependency on
1605 the base register now might cause problems. */
1608 fprintf (stderr
, " - no, bt\n");
1613 /* The only moves we can allow involve at least one general
1614 register, so require it. */
1615 for (i
= 0; i
< 2; i
++)
1617 /* Allow subregs too, before reload. */
1618 rtx x
= operands
[i
];
1620 if (GET_CODE (x
) == SUBREG
)
1622 if (GET_CODE (x
) == REG
1623 && ! MEP_CONTROL_REG (x
))
1626 fprintf (stderr
, " - ok\n");
1632 fprintf (stderr
, " - no, no gen reg\n");
1637 #define DEBUG_SPLIT_WIDE_MOVE 0
1639 mep_split_wide_move (rtx
*operands
, enum machine_mode mode
)
1643 #if DEBUG_SPLIT_WIDE_MOVE
1644 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1645 debug_rtx (operands
[0]);
1646 debug_rtx (operands
[1]);
1649 for (i
= 0; i
<= 1; i
++)
1651 rtx op
= operands
[i
], hi
, lo
;
1653 switch (GET_CODE (op
))
1657 unsigned int regno
= REGNO (op
);
1659 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1663 lo
= gen_rtx_REG (SImode
, regno
);
1665 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1666 gen_rtx_REG (DImode
, regno
),
1671 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1672 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1680 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1681 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1688 /* The high part of CR <- GPR moves must be done after the low part. */
1689 operands
[i
+ 4] = lo
;
1690 operands
[i
+ 2] = hi
;
1693 if (reg_mentioned_p (operands
[2], operands
[5])
1694 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1695 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1699 /* Overlapping register pairs -- make sure we don't
1700 early-clobber ourselves. */
1702 operands
[2] = operands
[4];
1705 operands
[3] = operands
[5];
1709 #if DEBUG_SPLIT_WIDE_MOVE
1710 fprintf(stderr
, "\033[34m");
1711 debug_rtx (operands
[2]);
1712 debug_rtx (operands
[3]);
1713 debug_rtx (operands
[4]);
1714 debug_rtx (operands
[5]);
1715 fprintf(stderr
, "\033[0m");
1719 /* Emit a setcc instruction in its entirety. */
1722 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1730 tmp
= op1
, op1
= op2
, op2
= tmp
;
1731 code
= swap_condition (code
);
1736 op1
= force_reg (SImode
, op1
);
1737 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1738 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1742 if (op2
!= const0_rtx
)
1743 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1744 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1748 /* Branchful sequence:
1750 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1753 Branchless sequence:
1754 add3 tmp, op1, -op2 32-bit (or mov + sub)
1755 sltu3 tmp, tmp, 1 16-bit
1756 xor3 dest, tmp, 1 32-bit
1758 if (optimize_size
&& op2
!= const0_rtx
)
1761 if (op2
!= const0_rtx
)
1762 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1764 op2
= gen_reg_rtx (SImode
);
1765 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1767 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1768 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1772 if (GET_CODE (op2
) != CONST_INT
1773 || INTVAL (op2
) == 0x7ffffff)
1775 op2
= GEN_INT (INTVAL (op2
) + 1);
1776 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1779 if (GET_CODE (op2
) != CONST_INT
1780 || INTVAL (op2
) == -1)
1782 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1783 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1786 if (GET_CODE (op2
) != CONST_INT
1787 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1789 op2
= GEN_INT (INTVAL (op2
) - 1);
1790 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1793 if (GET_CODE (op2
) != CONST_INT
1794 || op2
== const0_rtx
)
1796 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1797 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1805 mep_expand_setcc (rtx
*operands
)
1807 rtx dest
= operands
[0];
1808 enum rtx_code code
= GET_CODE (operands
[1]);
1809 rtx op0
= operands
[2];
1810 rtx op1
= operands
[3];
1812 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1816 mep_expand_cbranch (rtx
*operands
)
1818 enum rtx_code code
= GET_CODE (operands
[0]);
1819 rtx op0
= operands
[1];
1820 rtx op1
= operands
[2];
1827 if (mep_imm4_operand (op1
, SImode
))
1830 tmp
= gen_reg_rtx (SImode
);
1831 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1838 if (mep_imm4_operand (op1
, SImode
))
1841 tmp
= gen_reg_rtx (SImode
);
1842 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1851 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1852 op1
= force_reg (SImode
, op1
);
1857 if (GET_CODE (op1
) == CONST_INT
1858 && INTVAL (op1
) != 0x7fffffff)
1860 op1
= GEN_INT (INTVAL (op1
) + 1);
1861 code
= (code
== LE
? LT
: GE
);
1865 tmp
= gen_reg_rtx (SImode
);
1866 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1868 code
= (code
== LE
? EQ
: NE
);
1874 if (op1
== const1_rtx
)
1881 tmp
= gen_reg_rtx (SImode
);
1882 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1889 tmp
= gen_reg_rtx (SImode
);
1890 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1892 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1901 tmp
= gen_reg_rtx (SImode
);
1902 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1903 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1910 tmp
= gen_reg_rtx (SImode
);
1911 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1913 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1925 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1929 mep_emit_cbranch (rtx
*operands
, int ne
)
1931 if (GET_CODE (operands
[1]) == REG
)
1932 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1933 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1934 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1936 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1940 mep_expand_call (rtx
*operands
, int returns_value
)
1942 rtx addr
= operands
[returns_value
];
1943 rtx tp
= mep_tp_rtx ();
1944 rtx gp
= mep_gp_rtx ();
1946 gcc_assert (GET_CODE (addr
) == MEM
);
1948 addr
= XEXP (addr
, 0);
1950 if (! mep_call_address_operand (addr
, VOIDmode
))
1951 addr
= force_reg (SImode
, addr
);
1953 if (! operands
[returns_value
+2])
1954 operands
[returns_value
+2] = const0_rtx
;
1957 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1958 operands
[3], tp
, gp
));
1960 emit_call_insn (gen_call_internal (addr
, operands
[1],
1961 operands
[2], tp
, gp
));
1964 /* Aliasing Support. */
1966 /* If X is a machine specific address (i.e. a symbol or label being
1967 referenced as a displacement from the GOT implemented using an
1968 UNSPEC), then return the base term. Otherwise return X. */
1971 mep_find_base_term (rtx x
)
1976 if (GET_CODE (x
) != PLUS
)
1981 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1982 && base
== mep_tp_rtx ())
1984 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1985 && base
== mep_gp_rtx ())
1990 if (GET_CODE (term
) != CONST
)
1992 term
= XEXP (term
, 0);
1994 if (GET_CODE (term
) != UNSPEC
1995 || XINT (term
, 1) != unspec
)
1998 return XVECEXP (term
, 0, 0);
2001 /* Reload Support. */
2003 /* Return true if the registers in CLASS cannot represent the change from
2004 modes FROM to TO. */
2007 mep_cannot_change_mode_class (enum machine_mode from
, enum machine_mode to
,
2008 enum reg_class regclass
)
2013 /* 64-bit COP regs must remain 64-bit COP regs. */
2014 if (TARGET_64BIT_CR_REGS
2015 && (regclass
== CR_REGS
2016 || regclass
== LOADABLE_CR_REGS
)
2017 && (GET_MODE_SIZE (to
) < 8
2018 || GET_MODE_SIZE (from
) < 8))
2024 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2027 mep_general_reg (rtx x
)
2029 while (GET_CODE (x
) == SUBREG
)
2031 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
2035 mep_nongeneral_reg (rtx x
)
2037 while (GET_CODE (x
) == SUBREG
)
2039 return (GET_CODE (x
) == REG
2040 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2044 mep_general_copro_reg (rtx x
)
2046 while (GET_CODE (x
) == SUBREG
)
2048 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
2052 mep_nonregister (rtx x
)
2054 while (GET_CODE (x
) == SUBREG
)
2056 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
2059 #define DEBUG_RELOAD 0
2061 /* Return the secondary reload class needed for moving value X to or
2062 from a register in coprocessor register class CLASS. */
2064 static enum reg_class
2065 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
2067 if (mep_general_reg (x
))
2068 /* We can do the move directly if mep_have_core_copro_moves_p,
2069 otherwise we need to go through memory. Either way, no secondary
2070 register is needed. */
2073 if (mep_general_copro_reg (x
))
2075 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2076 if (mep_have_copro_copro_moves_p
)
2079 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2080 if (mep_have_core_copro_moves_p
)
2081 return GENERAL_REGS
;
2083 /* Otherwise we need to do it through memory. No secondary
2084 register is needed. */
2088 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2089 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2090 /* X is a memory value that we can access directly. */
2093 /* We have to move X into a GPR first and then copy it to
2094 the coprocessor register. The move from the GPR to the
2095 coprocessor might be done directly or through memory,
2096 depending on mep_have_core_copro_moves_p. */
2097 return GENERAL_REGS
;
2100 /* Copying X to register in RCLASS. */
2103 mep_secondary_input_reload_class (enum reg_class rclass
,
2104 enum machine_mode mode ATTRIBUTE_UNUSED
,
2110 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2114 if (reg_class_subset_p (rclass
, CR_REGS
))
2115 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2116 else if (MEP_NONGENERAL_CLASS (rclass
)
2117 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2121 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2123 return (enum reg_class
) rv
;
2126 /* Copying register in RCLASS to X. */
2129 mep_secondary_output_reload_class (enum reg_class rclass
,
2130 enum machine_mode mode ATTRIBUTE_UNUSED
,
2136 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2140 if (reg_class_subset_p (rclass
, CR_REGS
))
2141 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2142 else if (MEP_NONGENERAL_CLASS (rclass
)
2143 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2147 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2150 return (enum reg_class
) rv
;
2153 /* Implement SECONDARY_MEMORY_NEEDED. */
2156 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2157 enum machine_mode mode ATTRIBUTE_UNUSED
)
2159 if (!mep_have_core_copro_moves_p
)
2161 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2162 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2164 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2165 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2167 if (!mep_have_copro_copro_moves_p
2168 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2169 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2176 mep_expand_reload (rtx
*operands
, enum machine_mode mode
)
2178 /* There are three cases for each direction:
2183 int s0
= mep_section_tag (operands
[0]) == 'f';
2184 int s1
= mep_section_tag (operands
[1]) == 'f';
2185 int c0
= mep_nongeneral_reg (operands
[0]);
2186 int c1
= mep_nongeneral_reg (operands
[1]);
2187 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2190 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2191 debug_rtx (operands
[0]);
2192 debug_rtx (operands
[1]);
2197 case 00: /* Don't know why this gets here. */
2198 case 02: /* general = far */
2199 emit_move_insn (operands
[0], operands
[1]);
2202 case 10: /* cr = mem */
2203 case 11: /* cr = cr */
2204 case 01: /* mem = cr */
2205 case 12: /* cr = far */
2206 emit_move_insn (operands
[2], operands
[1]);
2207 emit_move_insn (operands
[0], operands
[2]);
2210 case 20: /* far = general */
2211 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2212 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2215 case 21: /* far = cr */
2216 case 22: /* far = far */
2218 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2219 which
, mode_name
[mode
]);
2220 debug_rtx (operands
[0]);
2221 debug_rtx (operands
[1]);
2226 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2227 can be moved directly into registers 0 to 7, but not into the rest.
2228 If so, and if the required class includes registers 0 to 7, restrict
2229 it to those registers. */
2232 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2234 switch (GET_CODE (x
))
2237 if (INTVAL (x
) >= 0x10000
2238 && INTVAL (x
) < 0x01000000
2239 && (INTVAL (x
) & 0xffff) != 0
2240 && reg_class_subset_p (TPREL_REGS
, rclass
))
2241 rclass
= TPREL_REGS
;
2247 if (mep_section_tag (x
) != 'f'
2248 && reg_class_subset_p (TPREL_REGS
, rclass
))
2249 rclass
= TPREL_REGS
;
2258 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2259 moves, 4 for direct double-register moves, and 1000 for anything
2260 that requires a temporary register or temporary stack slot. */
2263 mep_register_move_cost (enum machine_mode mode
, enum reg_class from
, enum reg_class to
)
2265 if (mep_have_copro_copro_moves_p
2266 && reg_class_subset_p (from
, CR_REGS
)
2267 && reg_class_subset_p (to
, CR_REGS
))
2269 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2273 if (reg_class_subset_p (from
, CR_REGS
)
2274 && reg_class_subset_p (to
, CR_REGS
))
2276 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2280 if (reg_class_subset_p (from
, CR_REGS
)
2281 || reg_class_subset_p (to
, CR_REGS
))
2283 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2287 if (mep_secondary_memory_needed (from
, to
, mode
))
2289 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2292 if (GET_MODE_SIZE (mode
) > 4)
2299 /* Functions to save and restore machine-specific function data. */
/* Hook for init_machine_status: allocate a zero-initialized, GC-managed
   machine_function record for the function being compiled.  */
static struct machine_function *
mep_init_machine_status (void)
{
  struct machine_function *mf = ggc_alloc_cleared_machine_function ();
  return mf;
}
2308 mep_allocate_initial_value (rtx reg
)
2312 if (GET_CODE (reg
) != REG
)
2315 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2318 /* In interrupt functions, the "initial" values of $gp and $tp are
2319 provided by the prologue. They are not necessarily the same as
2320 the values that the caller was using. */
2321 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2322 if (mep_interrupt_p ())
2325 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2327 cfun
->machine
->reg_save_size
+= 4;
2328 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2331 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2332 return gen_rtx_MEM (SImode
, plus_constant (arg_pointer_rtx
, -rss
));
2336 mep_return_addr_rtx (int count
)
2341 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2347 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2353 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
/* True when the current function carries the "interrupt" attribute.
   The answer is cached in cfun->machine->interrupt_handler
   (0 = not yet computed, 1 = no, 2 = yes); the field lives in
   machine_function because resource.c consults it via EPILOGUE_USES.  */
2357 mep_interrupt_p (void)
/* First query for this function: look the attribute up once.  */
2359 if (cfun
->machine
->interrupt_handler
== 0)
2361 int interrupt_handler
2362 = (lookup_attribute ("interrupt",
2363 DECL_ATTRIBUTES (current_function_decl
))
/* Cache the result: 2 when the attribute is present, 1 when absent.  */
2365 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2367 return cfun
->machine
->interrupt_handler
== 2;
/* True when the current function carries the "disinterrupt" attribute.
   Cached in cfun->machine->disable_interrupts with the same encoding
   used for the interrupt attribute (0 = unknown, 1 = no, 2 = yes).  */
2371 mep_disinterrupt_p (void)
/* First query for this function: look the attribute up once.  */
2373 if (cfun
->machine
->disable_interrupts
== 0)
2375 int disable_interrupts
2376 = (lookup_attribute ("disinterrupt",
2377 DECL_ATTRIBUTES (current_function_decl
))
/* Cache the result: 2 when the attribute is present, 1 when absent.  */
2379 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2381 return cfun
->machine
->disable_interrupts
== 2;
2385 /* Frame/Epilog/Prolog Related. */
2388 mep_reg_set_p (rtx reg
, rtx insn
)
2390 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2393 if (FIND_REG_INC_NOTE (insn
, reg
))
2395 insn
= PATTERN (insn
);
2398 if (GET_CODE (insn
) == SET
2399 && GET_CODE (XEXP (insn
, 0)) == REG
2400 && GET_CODE (XEXP (insn
, 1)) == REG
2401 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2404 return set_of (reg
, insn
) != NULL_RTX
;
2408 #define MEP_SAVES_UNKNOWN 0
2409 #define MEP_SAVES_YES 1
2410 #define MEP_SAVES_MAYBE 2
2411 #define MEP_SAVES_NO 3
2414 mep_reg_set_in_function (int regno
)
2418 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2421 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2424 push_topmost_sequence ();
2425 insn
= get_insns ();
2426 pop_topmost_sequence ();
2431 reg
= gen_rtx_REG (SImode
, regno
);
2433 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2434 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2440 mep_asm_without_operands_p (void)
2442 if (cfun
->machine
->asms_without_operands
== 0)
2446 push_topmost_sequence ();
2447 insn
= get_insns ();
2448 pop_topmost_sequence ();
2450 cfun
->machine
->asms_without_operands
= 1;
2454 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2456 cfun
->machine
->asms_without_operands
= 2;
2459 insn
= NEXT_INSN (insn
);
2463 return cfun
->machine
->asms_without_operands
== 2;
2466 /* Interrupt functions save/restore every call-preserved register, and
2467 any call-used register it uses (or all if it calls any function,
2468 since they may get clobbered there too). Here we check to see
2469 which call-used registers need saving. */
2471 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2472 && (r == FIRST_CCR_REGNO + 1 \
2473 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2474 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2477 mep_interrupt_saved_reg (int r
)
2479 if (!mep_interrupt_p ())
2481 if (r
== REGSAVE_CONTROL_TEMP
2482 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2484 if (mep_asm_without_operands_p ()
2486 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2487 || IVC2_ISAVED_REG (r
)))
2489 if (!current_function_is_leaf
)
2490 /* Function calls mean we need to save $lp. */
2491 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2493 if (!current_function_is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2494 /* The interrupt handler might use these registers for repeat blocks,
2495 or it might call a function that does so. */
2496 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2498 if (current_function_is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2500 /* Functions we call might clobber these. */
2501 if (call_used_regs
[r
] && !fixed_regs
[r
])
2503 /* Additional registers that need to be saved for IVC2. */
2504 if (IVC2_ISAVED_REG (r
))
2511 mep_call_saves_register (int r
)
2513 if (! cfun
->machine
->frame_locked
)
2515 int rv
= MEP_SAVES_NO
;
2517 if (cfun
->machine
->reg_save_slot
[r
])
2519 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2521 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2523 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2525 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2526 /* We need these to have stack slots so that they can be set during
2529 else if (mep_interrupt_saved_reg (r
))
2531 cfun
->machine
->reg_saved
[r
] = rv
;
2533 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2536 /* Return true if epilogue uses register REGNO. */
/* EPILOGUE_USES worker: nonzero when the epilogue reads register REGNO.  */
2539 mep_epilogue_uses (int regno
)
2541 /* Since $lp is a call-saved register, the generic code will normally
2542 mark it used in the epilogue if it needs to be saved and restored.
2543 However, when profiling is enabled, the profiling code will implicitly
2544 clobber $11. This case has to be handled specially both here and in
2545 mep_call_saves_register. */
2546 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
/* NOTE(review): the body of this if (presumably "return 1;") appears to
   have been lost in extraction -- confirm against the original file.  */
2548 /* Interrupt functions save/restore pretty much everything. */
2549 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
/* Size in bytes of the save slot for register REGNO.  64-bit
   coprocessor registers are distinguished from everything else;
   presumably the branches return 8 and 4 respectively, but the return
   statements were lost in extraction -- TODO confirm against the
   original file.  */
2553 mep_reg_size (int regno
)
2555 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2560 /* Worker function for TARGET_CAN_ELIMINATE. */
/* Worker function for TARGET_CAN_ELIMINATE.  Eliminating the argument
   pointer straight into the stack pointer is only possible when no
   frame pointer is needed.  NOTE(review): the trailing branch of the
   conditional (presumably "true" for every other from/to pair) was
   lost in extraction -- confirm against the original file.  */
2563 mep_can_eliminate (const int from
, const int to
)
2565 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2566 ? ! frame_pointer_needed
2571 mep_elimination_offset (int from
, int to
)
2575 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2578 if (!cfun
->machine
->frame_locked
)
2579 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2581 /* We don't count arg_regs_to_save in the arg pointer offset, because
2582 gcc thinks the arg pointer has moved along with the saved regs.
2583 However, we do count it when we adjust $sp in the prologue. */
2585 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2586 if (mep_call_saves_register (i
))
2587 reg_save_size
+= mep_reg_size (i
);
2589 if (reg_save_size
% 8)
2590 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2592 cfun
->machine
->regsave_filler
= 0;
2594 /* This is what our total stack adjustment looks like. */
2595 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2598 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2600 cfun
->machine
->frame_filler
= 0;
2603 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2604 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2606 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2607 return cfun
->machine
->frame_filler
+ frame_size
;
2609 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2610 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2618 RTX_FRAME_RELATED_P (x
) = 1;
2622 /* Since the prologue/epilogue code is generated after optimization,
2623 we can't rely on gcc to split constants for us. So, this code
2624 captures all the ways to add a constant to a register in one logic
2625 chunk, including optimizing away insns we just don't need. This
2626 makes the prolog/epilog code easier to follow. */
2628 add_constant (int dest
, int src
, int value
, int mark_frame
)
2633 if (src
== dest
&& value
== 0)
2638 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2639 gen_rtx_REG (SImode
, src
));
2641 RTX_FRAME_RELATED_P(insn
) = 1;
2645 if (value
>= -32768 && value
<= 32767)
2647 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2648 gen_rtx_REG (SImode
, src
),
2651 RTX_FRAME_RELATED_P(insn
) = 1;
2655 /* Big constant, need to use a temp register. We use
2656 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2657 area is always small enough to directly add to). */
2659 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2660 lo
= value
& 0xffff;
2662 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2667 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2668 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2672 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2673 gen_rtx_REG (SImode
, src
),
2674 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2677 RTX_FRAME_RELATED_P(insn
) = 1;
2678 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2679 gen_rtx_SET (SImode
,
2680 gen_rtx_REG (SImode
, dest
),
2681 gen_rtx_PLUS (SImode
,
2682 gen_rtx_REG (SImode
, dest
),
2687 /* Move SRC to DEST. Mark the move as being potentially dead if
2691 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2693 rtx insn
= emit_move_insn (dest
, src
);
2696 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2701 /* Used for interrupt functions, which can't assume that $tp and $gp
2702 contain the correct pointers. */
/* Used by interrupt handlers, which cannot assume $tp/$gp hold the
   caller's values: reload register REGNO with the address of SYMBOL
   via the movsi_topsym_s / movsi_botsym_s hi/lo instruction pair.  */
2705 mep_reload_pointer (int regno
, const char *symbol
)
/* Nothing to do when the register is never live in a leaf function.  */
2709 if (!df_regs_ever_live_p(regno
) && current_function_is_leaf
)
2712 reg
= gen_rtx_REG (SImode
, regno
);
2713 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
/* Emit the high half, then OR in the low half of the symbol's address.  */
2714 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2715 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2718 /* Assign save slots for any register not already saved. DImode
2719 registers go at the end of the reg save area; the rest go at the
2720 beginning. This is for alignment purposes. Returns true if a frame
2721 is really needed. */
2723 mep_assign_save_slots (int reg_save_size
)
2725 bool really_need_stack_frame
= false;
2729 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2730 if (mep_call_saves_register(i
))
2732 int regsize
= mep_reg_size (i
);
2734 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2735 || mep_reg_set_in_function (i
))
2736 really_need_stack_frame
= true;
2738 if (cfun
->machine
->reg_save_slot
[i
])
2743 cfun
->machine
->reg_save_size
+= regsize
;
2744 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2748 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2752 cfun
->machine
->frame_locked
= 1;
2753 return really_need_stack_frame
;
2757 mep_expand_prologue (void)
2759 int i
, rss
, sp_offset
= 0;
2762 int really_need_stack_frame
;
2764 /* We must not allow register renaming in interrupt functions,
2765 because that invalidates the correctness of the set of call-used
2766 registers we're going to save/restore. */
2767 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2769 if (mep_disinterrupt_p ())
2770 emit_insn (gen_mep_disable_int ());
2772 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2774 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2775 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2776 really_need_stack_frame
= frame_size
;
2778 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2780 sp_offset
= reg_save_size
;
2781 if (sp_offset
+ frame_size
< 128)
2782 sp_offset
+= frame_size
;
2784 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2786 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2787 if (mep_call_saves_register(i
))
2791 enum machine_mode rmode
;
2793 rss
= cfun
->machine
->reg_save_slot
[i
];
2795 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2796 && (!mep_reg_set_in_function (i
)
2797 && !mep_interrupt_p ()))
2800 if (mep_reg_size (i
) == 8)
2805 /* If there is a pseudo associated with this register's initial value,
2806 reload might have already spilt it to the stack slot suggested by
2807 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2809 mem
= gen_rtx_MEM (rmode
,
2810 plus_constant (stack_pointer_rtx
, sp_offset
- rss
));
2811 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2813 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2814 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2815 else if (rmode
== DImode
)
2818 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2820 mem
= gen_rtx_MEM (SImode
,
2821 plus_constant (stack_pointer_rtx
, sp_offset
- rss
+ be
));
2823 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2824 gen_rtx_REG (SImode
, i
),
2826 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2827 gen_rtx_ZERO_EXTRACT (SImode
,
2828 gen_rtx_REG (DImode
, i
),
2832 insn
= maybe_dead_move (mem
,
2833 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2835 RTX_FRAME_RELATED_P (insn
) = 1;
2837 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2838 gen_rtx_SET (VOIDmode
,
2840 gen_rtx_REG (rmode
, i
)));
2841 mem
= gen_rtx_MEM (SImode
,
2842 plus_constant (stack_pointer_rtx
, sp_offset
- rss
+ (4-be
)));
2843 insn
= maybe_dead_move (mem
,
2844 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2850 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2851 gen_rtx_REG (rmode
, i
),
2853 insn
= maybe_dead_move (mem
,
2854 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2856 RTX_FRAME_RELATED_P (insn
) = 1;
2858 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2859 gen_rtx_SET (VOIDmode
,
2861 gen_rtx_REG (rmode
, i
)));
2865 if (frame_pointer_needed
)
2867 /* We've already adjusted down by sp_offset. Total $sp change
2868 is reg_save_size + frame_size. We want a net change here of
2869 just reg_save_size. */
2870 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2873 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2875 if (mep_interrupt_p ())
2877 mep_reload_pointer(GP_REGNO
, "__sdabase");
2878 mep_reload_pointer(TP_REGNO
, "__tpbase");
2883 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2885 int local
= hwi_local
;
2886 int frame_size
= local
+ crtl
->outgoing_args_size
;
2891 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2893 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2894 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2895 sp_offset
= reg_save_size
+ frame_size
;
2897 ffill
= cfun
->machine
->frame_filler
;
2899 if (cfun
->machine
->mep_frame_pointer_needed
)
2900 reg_names
[FP_REGNO
] = "$fp";
2902 reg_names
[FP_REGNO
] = "$8";
2907 if (debug_info_level
== DINFO_LEVEL_NONE
)
2909 fprintf (file
, "\t# frame: %d", sp_offset
);
2911 fprintf (file
, " %d regs", reg_save_size
);
2913 fprintf (file
, " %d locals", local
);
2914 if (crtl
->outgoing_args_size
)
2915 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2916 fprintf (file
, "\n");
2920 fprintf (file
, "\t#\n");
2921 fprintf (file
, "\t# Initial Frame Information:\n");
2922 if (sp_offset
|| !frame_pointer_needed
)
2923 fprintf (file
, "\t# Entry ---------- 0\n");
2925 /* Sort registers by save slots, so they're printed in the order
2926 they appear in memory, not the order they're saved in. */
2927 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2929 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2930 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2931 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2932 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2934 int t
= slot_map
[si
];
2935 slot_map
[si
] = slot_map
[sj
];
2940 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2943 int r
= slot_map
[i
];
2944 int rss
= cfun
->machine
->reg_save_slot
[r
];
2946 if (!mep_call_saves_register (r
))
2949 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2950 && (!mep_reg_set_in_function (r
)
2951 && !mep_interrupt_p ()))
2954 rsize
= mep_reg_size(r
);
2955 skip
= rss
- (sp
+rsize
);
2957 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2958 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2959 rsize
, reg_names
[r
], sp_offset
- rss
);
2963 skip
= reg_save_size
- sp
;
2965 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2967 if (frame_pointer_needed
)
2968 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2970 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2972 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2973 if (crtl
->outgoing_args_size
)
2974 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2975 crtl
->outgoing_args_size
);
2976 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2977 fprintf (file
, "\t#\n");
/* Nonzero while expanding an eh_return epilogue: the epilogue code
   must then leave $lp unrestored (see mep_expand_epilogue).  */
static int mep_prevent_lp_restore = 0;

/* Nonzero while expanding a sibcall epilogue.  */
static int mep_sibcall_epilogue = 0;
2985 mep_expand_epilogue (void)
2987 int i
, sp_offset
= 0;
2988 int reg_save_size
= 0;
2990 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2991 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2992 int interrupt_handler
= mep_interrupt_p ();
2994 if (profile_arc_flag
== 2)
2995 emit_insn (gen_mep_bb_trace_ret ());
2997 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2998 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
3000 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
3002 if (frame_pointer_needed
)
3004 /* If we have a frame pointer, we won't have a reliable stack
3005 pointer (alloca, you know), so rebase SP from FP */
3006 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
3007 gen_rtx_REG (SImode
, FP_REGNO
));
3008 sp_offset
= reg_save_size
;
3012 /* SP is right under our local variable space. Adjust it if
3014 sp_offset
= reg_save_size
+ frame_size
;
3015 if (sp_offset
>= 128)
3017 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
3018 sp_offset
-= frame_size
;
3022 /* This is backwards so that we restore the control and coprocessor
3023 registers before the temporary registers we use to restore
3025 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
3026 if (mep_call_saves_register (i
))
3028 enum machine_mode rmode
;
3029 int rss
= cfun
->machine
->reg_save_slot
[i
];
3031 if (mep_reg_size (i
) == 8)
3036 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
3037 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
3039 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
3041 if (!mep_prevent_lp_restore
3042 && !interrupt_handler
3043 && (i
== 10 || i
== 11))
3046 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
3047 emit_move_insn (gen_rtx_REG (rmode
, i
),
3049 plus_constant (stack_pointer_rtx
,
3053 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
3054 /* Defer this one so we can jump indirect rather than
3055 copying the RA to $lp and "ret". EH epilogues
3056 automatically skip this anyway. */
3057 lp_slot
= sp_offset
-rss
;
3060 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
3062 plus_constant (stack_pointer_rtx
,
3064 emit_move_insn (gen_rtx_REG (rmode
, i
),
3065 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
3071 /* Restore this one last so we know it will be in the temp
3072 register when we return by jumping indirectly via the temp. */
3073 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
3074 gen_rtx_MEM (SImode
,
3075 plus_constant (stack_pointer_rtx
,
3077 lp_temp
= REGSAVE_CONTROL_TEMP
;
3081 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3083 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3084 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3085 gen_rtx_REG (SImode
, SP_REGNO
),
3086 cfun
->machine
->eh_stack_adjust
));
3088 if (mep_sibcall_epilogue
)
3091 if (mep_disinterrupt_p ())
3092 emit_insn (gen_mep_enable_int ());
3094 if (mep_prevent_lp_restore
)
3096 emit_jump_insn (gen_eh_return_internal ());
3099 else if (interrupt_handler
)
3100 emit_jump_insn (gen_mep_reti ());
3102 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3106 mep_expand_eh_return (rtx
*operands
)
3108 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3110 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3111 emit_move_insn (ra
, operands
[0]);
3115 emit_insn (gen_eh_epilogue (operands
[0]));
3119 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3121 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3122 mep_prevent_lp_restore
= 1;
3123 mep_expand_epilogue ();
3124 mep_prevent_lp_restore
= 0;
3128 mep_expand_sibcall_epilogue (void)
3130 mep_sibcall_epilogue
= 1;
3131 mep_expand_epilogue ();
3132 mep_sibcall_epilogue
= 0;
/* Decide whether a sibling call to DECL is permitted.
   NOTE(review): this span was garbled by extraction -- braces and the
   return statements are missing; only the two disqualifying tests
   survive.  Restore the full body from upstream mep.c.  */
3136 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
/* Presumably 'f' is the far-call section tag -- TODO confirm against
   the symbol-encoding table at the top of this file.  */
3141 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3144 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3145 if (mep_interrupt_p () || mep_disinterrupt_p ())
3152 mep_return_stackadj_rtx (void)
3154 return gen_rtx_REG (SImode
, 10);
3158 mep_return_handler_rtx (void)
3160 return gen_rtx_REG (SImode
, LP_REGNO
);
/* Emit the per-function profiling stub to FILE.  Always right at the
   beginning of the function: save $0 and $lp on the stack, call
   __mep_mcount, then restore both and pop the frame.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const prof_asm[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n"
  };
  size_t i;

  for (i = 0; i < sizeof prof_asm / sizeof prof_asm[0]; i++)
    fputs (prof_asm[i], file);
}
3180 mep_emit_bb_trace_ret (void)
3182 fprintf (asm_out_file
, "\t# end of block profiling\n");
3183 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3184 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3185 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3186 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3187 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3188 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3189 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3190 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3191 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3198 /* Operand Printing. */
3201 mep_print_operand_address (FILE *stream
, rtx address
)
3203 if (GET_CODE (address
) == MEM
)
3204 address
= XEXP (address
, 0);
3206 /* cf: gcc.dg/asm-4.c. */
3207 gcc_assert (GET_CODE (address
) == REG
);
3209 mep_print_operand (stream
, address
, 0);
3215 const char *pattern
;
3218 const conversions
[] =
3221 { 0, "m+ri", "3(2)" },
3225 { 0, "mLrs", "%lo(3)(2)" },
3226 { 0, "mLr+si", "%lo(4+5)(2)" },
3227 { 0, "m+ru2s", "%tpoff(5)(2)" },
3228 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3229 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3230 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3231 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3232 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3234 { 0, "m+si", "(2+3)" },
3235 { 0, "m+li", "(2+3)" },
3238 { 0, "+si", "1+2" },
3239 { 0, "+u2si", "%tpoff(3+4)" },
3240 { 0, "+u3si", "%sdaoff(3+4)" },
3246 { 'h', "Hs", "%hi(1)" },
3248 { 'I', "u2s", "%tpoff(2)" },
3249 { 'I', "u3s", "%sdaoff(2)" },
3250 { 'I', "+u2si", "%tpoff(3+4)" },
3251 { 'I', "+u3si", "%sdaoff(3+4)" },
3253 { 'P', "mr", "(1\\+),\\0" },
3259 unique_bit_in (HOST_WIDE_INT i
)
3263 case 0x01: case 0xfe: return 0;
3264 case 0x02: case 0xfd: return 1;
3265 case 0x04: case 0xfb: return 2;
3266 case 0x08: case 0xf7: return 3;
3267 case 0x10: case 0x7f: return 4;
3268 case 0x20: case 0xbf: return 5;
3269 case 0x40: case 0xdf: return 6;
3270 case 0x80: case 0xef: return 7;
3277 bit_size_for_clip (HOST_WIDE_INT i
)
3281 for (rv
= 0; rv
< 31; rv
++)
3282 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3287 /* Print an operand to an assembler instruction. */
3290 mep_print_operand (FILE *file
, rtx x
, int code
)
3293 const char *real_name
;
3297 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3298 we're using, then skip over the "mep_" part of its name. */
3299 const struct cgen_insn
*insn
;
3301 if (mep_get_move_insn (mep_cmov
, &insn
))
3302 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3304 mep_intrinsic_unavailable (mep_cmov
);
3309 switch (GET_CODE (x
))
3312 fputs ("clr", file
);
3315 fputs ("set", file
);
3318 fputs ("not", file
);
3321 output_operand_lossage ("invalid %%L code");
3326 /* Print the second operand of a CR <- CR move. If we're using
3327 a two-operand instruction (i.e., a real cmov), then just print
3328 the operand normally. If we're using a "reg, reg, immediate"
3329 instruction such as caddi3, print the operand followed by a
3330 zero field. If we're using a three-register instruction,
3331 print the operand twice. */
3332 const struct cgen_insn
*insn
;
3334 mep_print_operand (file
, x
, 0);
3335 if (mep_get_move_insn (mep_cmov
, &insn
)
3336 && insn_data
[insn
->icode
].n_operands
== 3)
3339 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3340 mep_print_operand (file
, x
, 0);
3342 mep_print_operand (file
, const0_rtx
, 0);
3348 for (i
= 0; conversions
[i
].pattern
; i
++)
3349 if (conversions
[i
].code
== code
3350 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3352 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3353 if (conversions
[i
].format
[j
] == '\\')
3355 fputc (conversions
[i
].format
[j
+1], file
);
3358 else if (ISDIGIT(conversions
[i
].format
[j
]))
3360 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3361 switch (GET_CODE (r
))
3364 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3370 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3373 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3376 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3379 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3382 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3385 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3386 && !(INTVAL (r
) & 0xff))
3387 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3389 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3392 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3393 && conversions
[i
].format
[j
+1] == 0)
3395 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3396 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3399 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3402 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3407 fprintf(file
, "[const_double 0x%lx]",
3408 (unsigned long) CONST_DOUBLE_HIGH(r
));
3411 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3412 assemble_name (file
, real_name
);
3415 output_asm_label (r
);
3418 fprintf (stderr
, "don't know how to print this operand:");
3425 if (conversions
[i
].format
[j
] == '+'
3426 && (!code
|| code
== 'I')
3427 && ISDIGIT (conversions
[i
].format
[j
+1])
3428 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3429 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3431 fputc(conversions
[i
].format
[j
], file
);
3435 if (!conversions
[i
].pattern
)
3437 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3445 mep_final_prescan_insn (rtx insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3446 int noperands ATTRIBUTE_UNUSED
)
3448 /* Despite the fact that MeP is perfectly capable of branching and
3449 doing something else in the same bundle, gcc does jump
3450 optimization *after* scheduling, so we cannot trust the bundling
3451 flags on jump instructions. */
3452 if (GET_MODE (insn
) == BImode
3453 && get_attr_slots (insn
) != SLOTS_CORE
)
3454 fputc ('+', asm_out_file
);
3457 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS hook: record how many of the four
   argument registers still need dumping to the stack for a varargs
   function, and report the pretend-args size.
   NOTE(review): extraction dropped the lines between the NSAVE
   computation and the assignment (original lines 3466-3467); restore
   from upstream mep.c.  */
3460 mep_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
3461 enum machine_mode mode ATTRIBUTE_UNUSED
,
3462 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3463 int second_time ATTRIBUTE_UNUSED
)
/* Arguments travel in four registers; cum->nregs have been used.  */
3465 int nsave
= 4 - (cum
->nregs
+ 1);
3468 cfun
->machine
->arg_regs_to_save
= nsave
;
3469 *pretend_size
= nsave
* 4;
3473 bytesize (const_tree type
, enum machine_mode mode
)
3475 if (mode
== BLKmode
)
3476 return int_size_in_bytes (type
);
3477 return GET_MODE_SIZE (mode
);
3481 mep_expand_builtin_saveregs (void)
3486 ns
= cfun
->machine
->arg_regs_to_save
;
3489 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3490 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3495 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3498 move_block_from_reg (5-ns
, regbuf
, ns
);
3502 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3503 int ofs
= 8 * ((ns
+1)/2);
3505 for (i
=0; i
<ns
; i
++)
3507 int rn
= (4-ns
) + i
+ 49;
3510 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3511 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3515 return XEXP (regbuf
, 0);
3518 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3521 mep_build_builtin_va_list (void)
3523 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3527 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3529 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3530 get_identifier ("__va_next_gp"), ptr_type_node
);
3531 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3532 get_identifier ("__va_next_gp_limit"),
3534 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3536 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3539 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3540 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3541 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3542 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3544 TYPE_FIELDS (record
) = f_next_gp
;
3545 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3546 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3547 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3549 layout_type (record
);
3555 mep_expand_va_start (tree valist
, rtx nextarg
)
3557 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3558 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3562 ns
= cfun
->machine
->arg_regs_to_save
;
3564 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3565 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3566 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3567 f_next_stack
= DECL_CHAIN (f_next_cop
);
3569 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3571 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3572 valist
, f_next_gp_limit
, NULL_TREE
);
3573 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3575 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3576 valist
, f_next_stack
, NULL_TREE
);
3578 /* va_list.next_gp = expand_builtin_saveregs (); */
3579 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3580 u
= fold_convert (ptr_type_node
, u
);
3581 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3582 TREE_SIDE_EFFECTS (t
) = 1;
3583 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3585 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3586 u
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
, u
,
3588 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3589 TREE_SIDE_EFFECTS (t
) = 1;
3590 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3592 u
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
, u
,
3593 size_int (8 * ((ns
+1)/2)));
3594 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3595 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3596 TREE_SIDE_EFFECTS (t
) = 1;
3597 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3599 /* va_list.next_stack = nextarg; */
3600 u
= make_tree (ptr_type_node
, nextarg
);
3601 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3602 TREE_SIDE_EFFECTS (t
) = 1;
3603 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3607 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3609 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3611 HOST_WIDE_INT size
, rsize
;
3612 bool by_reference
, ivc2_vec
;
3613 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3614 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3615 tree label_sover
, label_selse
;
3618 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3620 size
= int_size_in_bytes (type
);
3621 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3625 type
= build_pointer_type (type
);
3628 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3630 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3631 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3632 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3633 f_next_stack
= DECL_CHAIN (f_next_cop
);
3635 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3637 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3638 valist
, f_next_gp_limit
, NULL_TREE
);
3639 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3641 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3642 valist
, f_next_stack
, NULL_TREE
);
3644 /* if f_next_gp < f_next_gp_limit
3645 IF (VECTOR_P && IVC2)
3653 val = *f_next_stack;
3654 f_next_stack += rsize;
3658 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3659 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3660 res_addr
= create_tmp_var (ptr_type_node
, NULL
);
3662 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3663 unshare_expr (next_gp_limit
));
3664 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3665 build1 (GOTO_EXPR
, void_type_node
,
3666 unshare_expr (label_selse
)),
3668 gimplify_and_add (tmp
, pre_p
);
3672 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3673 gimplify_and_add (tmp
, pre_p
);
3677 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3678 gimplify_and_add (tmp
, pre_p
);
3681 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3682 unshare_expr (next_gp
), size_int (4));
3683 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3685 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3686 unshare_expr (next_cop
), size_int (8));
3687 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3689 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3690 gimplify_and_add (tmp
, pre_p
);
3694 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3695 gimplify_and_add (tmp
, pre_p
);
3697 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3698 gimplify_and_add (tmp
, pre_p
);
3700 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3701 unshare_expr (next_stack
), size_int (rsize
));
3702 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3706 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3707 gimplify_and_add (tmp
, pre_p
);
3709 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3712 res_addr
= build_va_arg_indirect_ref (res_addr
);
3714 return build_va_arg_indirect_ref (res_addr
);
/* Initialize the argument cursor PCUM for a call to FNTYPE, noting
   whether the callee carries the "vliw" attribute (selects jsrv over
   jsr -- see mep_function_arg).
   NOTE(review): extraction dropped the field assignments that follow
   (original lines ~3721-3727); restore from upstream mep.c.  */
3718 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3719 rtx libname ATTRIBUTE_UNUSED
,
3720 tree fndecl ATTRIBUTE_UNUSED
)
3724 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3730 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3731 larger than 4 bytes are passed indirectly. Return value in 0,
3732 unless bigger than 4 bytes, then the caller passes a pointer as the
3733 first arg. For varargs, we copy $1..$4 to the stack. */
3736 mep_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
3737 const_tree type ATTRIBUTE_UNUSED
,
3738 bool named ATTRIBUTE_UNUSED
)
3740 /* VOIDmode is a signal for the backend to pass data to the call
3741 expander via the second operand to the call pattern. We use
3742 this to determine whether to use "jsr" or "jsrv". */
3743 if (mode
== VOIDmode
)
3744 return GEN_INT (cum
->vliw
);
3746 /* If we haven't run out of argument registers, return the next. */
3749 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3750 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3752 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3755 /* Otherwise the argument goes on the stack. */
3760 mep_pass_by_reference (CUMULATIVE_ARGS
* cum ATTRIBUTE_UNUSED
,
3761 enum machine_mode mode
,
3763 bool named ATTRIBUTE_UNUSED
)
3765 int size
= bytesize (type
, mode
);
3767 /* This is non-obvious, but yes, large values passed after we've run
3768 out of registers are *still* passed by reference - we put the
3769 address of the parameter on the stack, as well as putting the
3770 parameter itself elsewhere on the stack. */
3772 if (size
<= 0 || size
> 8)
3776 if (TARGET_IVC2
&& cum
->nregs
< 4 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3782 mep_function_arg_advance (CUMULATIVE_ARGS
*pcum
,
3783 enum machine_mode mode ATTRIBUTE_UNUSED
,
3784 const_tree type ATTRIBUTE_UNUSED
,
3785 bool named ATTRIBUTE_UNUSED
)
3791 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3793 int size
= bytesize (type
, BLKmode
);
3794 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3795 return size
> 0 && size
<= 8 ? 0 : 1;
3796 return size
> 0 && size
<= 4 ? 0 : 1;
3800 mep_narrow_volatile_bitfield (void)
3806 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3809 mep_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
3811 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3812 return gen_rtx_REG (TYPE_MODE (type
), 48);
3813 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3816 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3819 mep_libcall_value (enum machine_mode mode
)
3821 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3824 /* Handle pipeline hazards. */
/* Opcode classes that take part in stc/fsft and stc/ret hazards.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;

/* Printable names indexed by op_num, for hazard comments.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously emitted opcode (op_none initially).  */
static int prev_opcode = 0;
3831 /* This isn't as optimal as it could be, because we don't know what
3832 control register the STC opcode is storing in. We only need to add
3833 the nop if it's the relevant register, but we add it for irrelevant
3837 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3839 int this_opcode
= op_none
;
3840 const char *hazard
= 0;
3845 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3846 this_opcode
= op_fsft
;
3849 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3850 this_opcode
= op_ret
;
3853 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3854 this_opcode
= op_stc
;
3858 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3860 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3864 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3865 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3867 prev_opcode
= this_opcode
;
3870 /* Handle attributes. */
3873 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3874 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3876 if (TREE_CODE (*node
) != VAR_DECL
3877 && TREE_CODE (*node
) != POINTER_TYPE
3878 && TREE_CODE (*node
) != TYPE_DECL
)
3880 warning (0, "%qE attribute only applies to variables", name
);
3883 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3885 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3887 warning (0, "address region attributes not allowed with auto storage class");
3890 /* Ignore storage attribute of pointed to variable: char __far * x; */
3891 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3893 warning (0, "address region attributes on pointed-to types ignored");
3902 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3905 int count_sections
= 0;
3906 int section_attr_count
= 0;
3908 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3910 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3911 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3912 || is_attribute_p ("near", TREE_PURPOSE (a
))
3913 || is_attribute_p ("far", TREE_PURPOSE (a
))
3914 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3916 if (check_section_attr
)
3917 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3920 if (check_section_attr
)
3921 return section_attr_count
;
3923 return count_sections
;
3926 #define MEP_ATTRIBUTES(decl) \
3927 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3928 : DECL_ATTRIBUTES (decl) \
3929 ? (DECL_ATTRIBUTES (decl)) \
3930 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3933 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3934 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3936 if (TREE_CODE (*node
) != VAR_DECL
3937 && TREE_CODE (*node
) != FUNCTION_DECL
3938 && TREE_CODE (*node
) != METHOD_TYPE
3939 && TREE_CODE (*node
) != POINTER_TYPE
3940 && TREE_CODE (*node
) != TYPE_DECL
)
3942 warning (0, "%qE attribute only applies to variables and functions",
3946 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3948 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3950 warning (0, "address region attributes not allowed with auto storage class");
3953 /* Ignore storage attribute of pointed to variable: char __far * x; */
3954 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3956 warning (0, "address region attributes on pointed-to types ignored");
3960 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3962 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3963 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3964 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3970 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3971 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3973 if (TREE_CODE (*node
) != FUNCTION_DECL
3974 && TREE_CODE (*node
) != METHOD_TYPE
)
3976 warning (0, "%qE attribute only applies to functions", name
);
3983 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3984 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3988 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3990 warning (0, "%qE attribute only applies to functions", name
);
3995 if (DECL_DECLARED_INLINE_P (*node
))
3996 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3997 DECL_UNINLINABLE (*node
) = 1;
3999 function_type
= TREE_TYPE (*node
);
4001 if (TREE_TYPE (function_type
) != void_type_node
)
4002 error ("interrupt function must have return type of void");
4004 if (prototype_p (function_type
)
4005 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
4006 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
4007 error ("interrupt function must have no arguments");
4013 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
4014 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
4016 if (TREE_CODE (*node
) != VAR_DECL
)
4018 warning (0, "%qE attribute only applies to variables", name
);
4022 if (args
!= NULL_TREE
)
4024 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
4025 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
4026 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
4028 warning (0, "%qE attribute allows only an integer constant argument",
4034 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
4035 TREE_THIS_VOLATILE (*node
) = 1;
4041 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
4042 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
4044 if (TREE_CODE (*node
) != FUNCTION_TYPE
4045 && TREE_CODE (*node
) != FUNCTION_DECL
4046 && TREE_CODE (*node
) != METHOD_TYPE
4047 && TREE_CODE (*node
) != FIELD_DECL
4048 && TREE_CODE (*node
) != TYPE_DECL
)
4050 static int gave_pointer_note
= 0;
4051 static int gave_array_note
= 0;
4052 static const char * given_type
= NULL
;
4054 given_type
= tree_code_name
[TREE_CODE (*node
)];
4055 if (TREE_CODE (*node
) == POINTER_TYPE
)
4056 given_type
= "pointers";
4057 if (TREE_CODE (*node
) == ARRAY_TYPE
)
4058 given_type
= "arrays";
4061 warning (0, "%qE attribute only applies to functions, not %s",
4064 warning (0, "%qE attribute only applies to functions",
4068 if (TREE_CODE (*node
) == POINTER_TYPE
4069 && !gave_pointer_note
)
4071 inform (input_location
, "to describe a pointer to a VLIW function, use syntax like this:");
4072 inform (input_location
, " typedef int (__vliw *vfuncptr) ();");
4073 gave_pointer_note
= 1;
4076 if (TREE_CODE (*node
) == ARRAY_TYPE
4077 && !gave_array_note
)
4079 inform (input_location
, "to describe an array of VLIW function pointers, use syntax like this:");
4080 inform (input_location
, " typedef int (__vliw *vfuncptr[]) ();");
4081 gave_array_note
= 1;
4085 error ("VLIW functions are not allowed without a VLIW configuration");
4089 static const struct attribute_spec mep_attribute_table
[11] =
4091 /* name min max decl type func handler
4092 affects_type_identity */
4093 { "based", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4094 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
, false },
4095 { "near", 0, 0, false, false, false, mep_validate_near_far
, false },
4096 { "far", 0, 0, false, false, false, mep_validate_near_far
, false },
4097 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
,
4099 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
, false },
4100 { "io", 0, 1, false, false, false, mep_validate_io_cb
, false },
4101 { "cb", 0, 1, false, false, false, mep_validate_io_cb
, false },
4102 { "vliw", 0, 0, false, true, false, mep_validate_vliw
, false },
4103 { NULL
, 0, 0, false, false, false, NULL
, false }
4107 mep_function_attribute_inlinable_p (const_tree callee
)
4109 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4110 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4111 return (lookup_attribute ("disinterrupt", attrs
) == 0
4112 && lookup_attribute ("interrupt", attrs
) == 0);
4116 mep_can_inline_p (tree caller
, tree callee
)
4118 if (TREE_CODE (callee
) == ADDR_EXPR
)
4119 callee
= TREE_OPERAND (callee
, 0);
4121 if (!mep_vliw_function_p (caller
)
4122 && mep_vliw_function_p (callee
))
4130 #define FUNC_DISINTERRUPT 2
4133 struct GTY(()) pragma_entry
{
4136 const char *funcname
;
4138 typedef struct pragma_entry pragma_entry
;
4140 /* Hash table of farcall-tagged sections. */
4141 static GTY((param_is (pragma_entry
))) htab_t pragma_htab
;
/* htab equality callback for pragma_htab: P1 is a stored pragma_entry,
   P2 the bare function-name string being looked up.  Returns nonzero
   when the names match.  */
4144 pragma_entry_eq (const void *p1
, const void *p2
)
4146 const pragma_entry
*old
= (const pragma_entry
*) p1
;
4147 const char *new_name
= (const char *) p2
;
4149 return strcmp (old
->funcname
, new_name
) == 0;
/* htab hash callback for pragma_htab: hash the entry's function name
   with libiberty's htab_hash_string, matching the hash used at lookup
   time in mep_note_pragma_flag/mep_lookup_pragma_flag.  */
4153 pragma_entry_hash (const void *p
)
4155 const pragma_entry
*old
= (const pragma_entry
*) p
;
4156 return htab_hash_string (old
->funcname
);
/* Record that FLAG (FUNC_CALL or FUNC_DISINTERRUPT) was applied to
   FUNCNAME via a #pragma.  Lazily creates the GC-managed hash table,
   then finds-or-inserts the entry and ORs in FLAG.  NOTE(review): the
   extraction dropped the guard lines around table creation and entry
   allocation (original lines 4164, 4171-4176) -- presumably
   "if (!pragma_htab)" and "if (!*slot)" tests; confirm upstream.  */
4160 mep_note_pragma_flag (const char *funcname
, int flag
)
4162 pragma_entry
**slot
;
/* Create the table on first use; 31 is the initial size hint.  */
4165 pragma_htab
= htab_create_ggc (31, pragma_entry_hash
,
4166 pragma_entry_eq
, NULL
);
4168 slot
= (pragma_entry
**)
4169 htab_find_slot_with_hash (pragma_htab
, funcname
,
4170 htab_hash_string (funcname
), INSERT
);
/* Allocate a fresh GC'd entry and copy the name into GC storage.  */
4174 *slot
= ggc_alloc_pragma_entry ();
4177 (*slot
)->funcname
= ggc_strdup (funcname
);
/* Accumulate the flag; a name may carry several pragma flags.  */
4179 (*slot
)->flag
|= flag
;
/* Query whether FLAG was recorded for FUNCNAME.  Names already encoded
   as "@<char>.name" (see the encoding scheme in the file header) are
   tested for the '@'/'.'-at-index-2 pattern so the lookup uses the bare
   name.  A successful hit also marks the flag as "used" so
   note_unused_pragma_disinterrupt can warn about unused pragmas.
   NOTE(review): the early-return lines and the final return (original
   lines 4186-4191, 4199-4201) are missing from this extraction.  */
4183 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4185 pragma_entry
**slot
;
/* Skip past a "@<char>." section-encoding prefix, if present.  */
4190 if (funcname
[0] == '@' && funcname
[2] == '.')
4193 slot
= (pragma_entry
**)
4194 htab_find_slot_with_hash (pragma_htab
, funcname
,
4195 htab_hash_string (funcname
), NO_INSERT
);
4196 if (slot
&& *slot
&& ((*slot
)->flag
& flag
))
/* Remember that this pragma actually took effect.  */
4198 (*slot
)->used
|= flag
;
/* Convenience wrapper: was "#pragma call" recorded for FUNCNAME?  */
4205 mep_lookup_pragma_call (const char *funcname
)
4207 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
/* Convenience wrapper: record "#pragma call" for FUNCNAME.  */
4211 mep_note_pragma_call (const char *funcname
)
4213 mep_note_pragma_flag (funcname
, FUNC_CALL
);
/* Convenience wrapper: was "#pragma disinterrupt" recorded for
   FUNCNAME?  */
4217 mep_lookup_pragma_disinterrupt (const char *funcname
)
4219 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
/* Convenience wrapper: record "#pragma disinterrupt" for FUNCNAME.  */
4223 mep_note_pragma_disinterrupt (const char *funcname
)
4225 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
/* htab_traverse callback: warn about any "#pragma disinterrupt" whose
   flag was recorded but never consumed by a matching function
   declaration (flag set, used bit clear).  DATA is unused.
   NOTE(review): the "return 1;" needed by htab_traverse to continue
   iteration is missing from this extraction.  */
4229 note_unused_pragma_disinterrupt (void **slot
, void *data ATTRIBUTE_UNUSED
)
4231 const pragma_entry
*d
= (const pragma_entry
*)(*slot
);
4233 if ((d
->flag
& FUNC_DISINTERRUPT
)
4234 && !(d
->used
& FUNC_DISINTERRUPT
))
4235 warning (0, "\"#pragma disinterrupt %s\" not used", d
->funcname
);
/* End-of-file hook: walk the pragma table and emit an "unused
   #pragma disinterrupt" warning for each stale entry.  NOTE(review):
   the "if (pragma_htab)" guard (original line ~4242) is missing here;
   htab_traverse on a NULL table would crash, so presumably the
   original checks first.  */
4240 mep_file_cleanups (void)
4243 htab_traverse (pragma_htab
, note_unused_pragma_disinterrupt
, NULL
);
4246 /* These three functions provide a bridge between the pragmas that
4247 affect register classes, and the functions that maintain them. We
4248 can't call those functions directly as pragma handling is part of
4249 the front end and doesn't have direct access to them. */
4252 mep_save_register_info (void)
4254 save_register_info ();
4258 mep_reinit_regs (void)
4264 mep_init_regs (void)
4272 mep_attrlist_to_encoding (tree list
, tree decl
)
4274 if (mep_multiple_address_regions (list
, false) > 1)
4276 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4277 TREE_PURPOSE (TREE_CHAIN (list
)),
4279 DECL_SOURCE_LINE (decl
));
4280 TREE_CHAIN (list
) = NULL_TREE
;
4285 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4287 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4289 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4291 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4293 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
4295 if (TREE_VALUE (list
)
4296 && TREE_VALUE (TREE_VALUE (list
))
4297 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4299 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4301 && location
<= 0x1000000)
4306 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4308 list
= TREE_CHAIN (list
);
4311 && TREE_CODE (decl
) == FUNCTION_DECL
4312 && DECL_SECTION_NAME (decl
) == 0)
/* Type-compatibility hook: two function types are compatible only if
   they agree on the presence of the "vliw" attribute.  NOTE(review):
   the comparison/return lines that follow (original lines ~4324-4327)
   are missing from this extraction.  */
4318 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4322 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4323 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
4332 mep_insert_attributes (tree decl
, tree
*attributes
)
4335 const char *secname
= 0;
4336 tree attrib
, attrlist
;
4339 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4341 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4343 if (mep_lookup_pragma_disinterrupt (funcname
))
4345 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4346 *attributes
= chainon (*attributes
, attrib
);
4350 if (TREE_CODE (decl
) != VAR_DECL
4351 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4354 if (TREE_READONLY (decl
) && TARGET_DC
)
4355 /* -mdc means that const variables default to the near section,
4356 regardless of the size cutoff. */
4359 /* User specified an attribute, so override the default.
4360 Ignore storage attribute of pointed to variable. char __far * x; */
4361 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4363 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4364 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4365 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4366 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
4369 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4370 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4371 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4373 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4374 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4378 /* This means that the declaration has a specific section
4379 attribute, so we should not apply the default rules. */
4381 if (encoding
== 'i' || encoding
== 'I')
4383 tree attr
= lookup_attribute ("io", attrlist
);
4385 && TREE_VALUE (attr
)
4386 && TREE_VALUE (TREE_VALUE(attr
)))
4388 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4389 static tree previous_value
= 0;
4390 static int previous_location
= 0;
4391 static tree previous_name
= 0;
4393 /* We take advantage of the fact that gcc will reuse the
4394 same tree pointer when applying an attribute to a
4395 list of decls, but produce a new tree for attributes
4396 on separate source lines, even when they're textually
4397 identical. This is the behavior we want. */
4398 if (TREE_VALUE (attr
) == previous_value
4399 && location
== previous_location
)
4401 warning(0, "__io address 0x%x is the same for %qE and %qE",
4402 location
, previous_name
, DECL_NAME (decl
));
4404 previous_name
= DECL_NAME (decl
);
4405 previous_location
= location
;
4406 previous_value
= TREE_VALUE (attr
);
4413 /* Declarations of arrays can change size. Don't trust them. */
4414 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4417 size
= int_size_in_bytes (TREE_TYPE (decl
));
4419 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4421 if (TREE_PUBLIC (decl
)
4422 || DECL_EXTERNAL (decl
)
4423 || TREE_STATIC (decl
))
4425 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4449 if (size
<= mep_based_cutoff
&& size
> 0)
4451 else if (size
<= mep_tiny_cutoff
&& size
> 0)
4457 if (mep_const_section
&& TREE_READONLY (decl
))
4459 if (strcmp (mep_const_section
, "tiny") == 0)
4461 else if (strcmp (mep_const_section
, "near") == 0)
4463 else if (strcmp (mep_const_section
, "far") == 0)
4470 if (!mep_multiple_address_regions (*attributes
, true)
4471 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4473 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4475 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4476 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4477 and mep_validate_based_tiny. */
4478 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
4483 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4486 const char *oldname
;
4487 const char *secname
;
4493 tree mep_attributes
;
4498 if (TREE_CODE (decl
) != VAR_DECL
4499 && TREE_CODE (decl
) != FUNCTION_DECL
)
4502 rtlname
= XEXP (rtl
, 0);
4503 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4504 oldname
= XSTR (rtlname
, 0);
4505 else if (GET_CODE (rtlname
) == MEM
4506 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4507 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4511 type
= TREE_TYPE (decl
);
4512 if (type
== error_mark_node
)
4514 mep_attributes
= MEP_ATTRIBUTES (decl
);
4516 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
4520 newname
= (char *) alloca (strlen (oldname
) + 4);
4521 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4522 idp
= get_identifier (newname
);
4524 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4525 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4526 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4539 maxsize
= 0x1000000;
4547 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4549 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4551 (long) int_size_in_bytes (TREE_TYPE (decl
)),
4559 mep_strip_name_encoding (const char *sym
)
4565 else if (*sym
== '@' && sym
[2] == '.')
4573 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4574 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4579 switch (TREE_CODE (decl
))
4582 if (!TREE_READONLY (decl
)
4583 || TREE_SIDE_EFFECTS (decl
)
4584 || !DECL_INITIAL (decl
)
4585 || (DECL_INITIAL (decl
) != error_mark_node
4586 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4590 if (! TREE_CONSTANT (decl
))
4598 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4600 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4602 if (name
[0] == '@' && name
[2] == '.')
4607 if (flag_function_sections
|| DECL_ONE_ONLY (decl
))
4608 mep_unique_section (decl
, 0);
4609 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4611 if (encoding
== 'f')
4612 return vftext_section
;
4614 return vtext_section
;
4616 else if (encoding
== 'f')
4617 return ftext_section
;
4619 return text_section
;
4622 if (TREE_CODE (decl
) == VAR_DECL
)
4624 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4626 if (name
[0] == '@' && name
[2] == '.')
4630 return based_section
;
4634 return srodata_section
;
4635 if (DECL_INITIAL (decl
))
4636 return sdata_section
;
4637 return tinybss_section
;
4641 return frodata_section
;
4646 error_at (DECL_SOURCE_LOCATION (decl
),
4647 "variable %D of type %<io%> must be uninitialized", decl
);
4648 return data_section
;
4651 error_at (DECL_SOURCE_LOCATION (decl
),
4652 "variable %D of type %<cb%> must be uninitialized", decl
);
4653 return data_section
;
4658 return readonly_data_section
;
4660 return data_section
;
4664 mep_unique_section (tree decl
, int reloc
)
4666 static const char *prefixes
[][2] =
4668 { ".text.", ".gnu.linkonce.t." },
4669 { ".rodata.", ".gnu.linkonce.r." },
4670 { ".data.", ".gnu.linkonce.d." },
4671 { ".based.", ".gnu.linkonce.based." },
4672 { ".sdata.", ".gnu.linkonce.s." },
4673 { ".far.", ".gnu.linkonce.far." },
4674 { ".ftext.", ".gnu.linkonce.ft." },
4675 { ".frodata.", ".gnu.linkonce.frd." },
4676 { ".srodata.", ".gnu.linkonce.srd." },
4677 { ".vtext.", ".gnu.linkonce.v." },
4678 { ".vftext.", ".gnu.linkonce.vf." }
4680 int sec
= 2; /* .data */
4682 const char *name
, *prefix
;
4685 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4686 if (DECL_RTL (decl
))
4687 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4689 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4691 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4692 sec
= 9; /* .vtext */
4694 sec
= 0; /* .text */
4696 else if (decl_readonly_section (decl
, reloc
))
4697 sec
= 1; /* .rodata */
4699 if (name
[0] == '@' && name
[2] == '.')
4704 sec
= 3; /* .based */
4708 sec
= 8; /* .srodata */
4710 sec
= 4; /* .sdata */
4714 sec
= 6; /* .ftext */
4716 sec
= 10; /* .vftext */
4718 sec
= 7; /* .frodata */
4720 sec
= 5; /* .far. */
4726 prefix
= prefixes
[sec
][DECL_ONE_ONLY(decl
)];
4727 len
= strlen (name
) + strlen (prefix
);
4728 string
= (char *) alloca (len
+ 1);
4730 sprintf (string
, "%s%s", prefix
, name
);
4732 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
4735 /* Given a decl, a section name, and whether the decl initializer
4736 has relocs, choose attributes for the section. */
/* Machine-dependent section flag marking a section as containing VLIW
   code; consumed by mep_asm_named_section when choosing .vliw/.core.  */
4738 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* Section-flags hook: start from the generic ELF flags, then tag
   sections holding "vliw"-attributed functions with SECTION_MEP_VLIW.
   NOTE(review): the final "return flags;" is missing from this
   extraction.  */
4741 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4743 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4745 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4746 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4747 flags
|= SECTION_MEP_VLIW
;
4752 /* Switch to an arbitrary section NAME with attributes as specified
4753 by FLAGS. ALIGN specifies any known alignment requirements for
4754 the section; 0 if the default should be used.
4756 Differs from the standard ELF version only in support of VLIW mode. */
4759 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4761 char flagchars
[8], *f
= flagchars
;
4764 if (!(flags
& SECTION_DEBUG
))
4766 if (flags
& SECTION_WRITE
)
4768 if (flags
& SECTION_CODE
)
4770 if (flags
& SECTION_SMALL
)
4772 if (flags
& SECTION_MEP_VLIW
)
4776 if (flags
& SECTION_BSS
)
4781 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4782 name
, flagchars
, type
);
4784 if (flags
& SECTION_CODE
)
4785 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
4790 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4791 int size
, int align
, int global
)
4793 /* We intentionally don't use mep_section_tag() here. */
4795 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4799 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4800 DECL_ATTRIBUTES (decl
));
4802 && TREE_VALUE (attr
)
4803 && TREE_VALUE (TREE_VALUE(attr
)))
4804 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4809 fprintf (stream
, "\t.globl\t");
4810 assemble_name (stream
, name
);
4811 fprintf (stream
, "\n");
4813 assemble_name (stream
, name
);
4814 fprintf (stream
, " = %d\n", location
);
4817 if (name
[0] == '@' && name
[2] == '.')
4819 const char *sec
= 0;
4823 switch_to_section (based_section
);
4827 switch_to_section (tinybss_section
);
4831 switch_to_section (farbss_section
);
4840 while (align
> BITS_PER_UNIT
)
4845 name2
= targetm
.strip_name_encoding (name
);
4847 fprintf (stream
, "\t.globl\t%s\n", name2
);
4848 fprintf (stream
, "\t.p2align %d\n", p2align
);
4849 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4850 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4851 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
4858 fprintf (stream
, "\t.local\t");
4859 assemble_name (stream
, name
);
4860 fprintf (stream
, "\n");
4862 fprintf (stream
, "\t.comm\t");
4863 assemble_name (stream
, name
);
4864 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
4870 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4872 rtx addr
= XEXP (m_tramp
, 0);
4873 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4875 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4876 LCT_NORMAL
, VOIDmode
, 3,
4879 static_chain
, Pmode
);
4882 /* Experimental Reorg. */
4885 mep_mentioned_p (rtx in
,
4886 rtx reg
, /* NULL for mem */
4887 int modes_too
) /* if nonzero, modes must match also. */
4895 if (reg
&& GET_CODE (reg
) != REG
)
4898 if (GET_CODE (in
) == LABEL_REF
)
4901 code
= GET_CODE (in
);
4907 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
4913 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4915 return (REGNO (in
) == REGNO (reg
));
4928 /* Set's source should be read-only. */
4929 if (code
== SET
&& !reg
)
4930 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
4932 fmt
= GET_RTX_FORMAT (code
);
4934 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4939 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4940 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4943 else if (fmt
[i
] == 'e'
4944 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4950 #define EXPERIMENTAL_REGMOVE_REORG 1
4952 #if EXPERIMENTAL_REGMOVE_REORG
/* For the regmove reorg pass: registers R1 and R2 are interchangeable
   only when both are general registers or both are control registers.
   NOTE(review): the return statements following each test (and the
   final "return false;") are missing from this extraction.  */
4955 mep_compatible_reg_class (int r1
, int r2
)
4957 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4959 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))
4965 mep_reorg_regmove (rtx insns
)
4967 rtx insn
, next
, pat
, follow
, *where
;
4968 int count
= 0, done
= 0, replace
, before
= 0;
4971 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4972 if (GET_CODE (insn
) == INSN
)
4975 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4976 set that uses the r2 and r2 dies there. We replace r2 with r1
4977 and see if it's still a valid insn. If so, delete the first set.
4978 Copied from reorg.c. */
4983 for (insn
= insns
; insn
; insn
= next
)
4985 next
= NEXT_INSN (insn
);
4986 if (GET_CODE (insn
) != INSN
)
4988 pat
= PATTERN (insn
);
4992 if (GET_CODE (pat
) == SET
4993 && GET_CODE (SET_SRC (pat
)) == REG
4994 && GET_CODE (SET_DEST (pat
)) == REG
4995 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4996 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4998 follow
= next_nonnote_insn (insn
);
5000 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
5002 while (follow
&& GET_CODE (follow
) == INSN
5003 && GET_CODE (PATTERN (follow
)) == SET
5004 && !dead_or_set_p (follow
, SET_SRC (pat
))
5005 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
5006 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
5009 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
5010 follow
= next_nonnote_insn (follow
);
5014 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
5015 if (follow
&& GET_CODE (follow
) == INSN
5016 && GET_CODE (PATTERN (follow
)) == SET
5017 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
5019 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
5021 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
5024 where
= & SET_SRC (PATTERN (follow
));
5027 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
5029 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
5032 where
= & PATTERN (follow
);
5038 /* If so, follow is the corresponding insn */
5045 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
5046 for (x
= insn
; x
;x
= NEXT_INSN (x
))
5048 print_rtl_single (dump_file
, x
);
5051 fprintf (dump_file
, "\n");
5055 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
5059 next
= delete_insn (insn
);
5062 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
5063 print_rtl_single (dump_file
, follow
);
5073 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
5074 fprintf (dump_file
, "=====\n");
5080 /* Figure out where to put LABEL, which is the label for a repeat loop.
5081 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5082 the loop ends just before LAST_INSN. If SHARED, insns other than the
5083 "repeat" might use LABEL to jump to the loop's continuation point.
5085 Return the last instruction in the adjusted loop. */
5088 mep_insert_repeat_label_last (rtx last_insn
, rtx label
, bool including
,
5092 int count
= 0, code
, icode
;
5095 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5096 INSN_UID (last_insn
));
5098 /* Set PREV to the last insn in the loop. */
5101 prev
= PREV_INSN (prev
);
5103 /* Set NEXT to the next insn after the repeat label. */
5108 code
= GET_CODE (prev
);
5109 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5114 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5115 prev
= XVECEXP (PATTERN (prev
), 0, 1);
5117 /* Other insns that should not be in the last two opcodes. */
5118 icode
= recog_memoized (prev
);
5120 || icode
== CODE_FOR_repeat
5121 || icode
== CODE_FOR_erepeat
5122 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5125 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5126 is the second instruction in a VLIW bundle. In that case,
5127 loop again: if the first instruction also satisfies the
5128 conditions above then we will reach here again and put
5129 both of them into the repeat epilogue. Otherwise both
5130 should remain outside. */
5131 if (GET_MODE (prev
) != BImode
)
5136 print_rtl_single (dump_file
, next
);
5141 prev
= PREV_INSN (prev
);
5144 /* See if we're adding the label immediately after the repeat insn.
5145 If so, we need to separate them with a nop. */
5146 prev
= prev_real_insn (next
);
5148 switch (recog_memoized (prev
))
5150 case CODE_FOR_repeat
:
5151 case CODE_FOR_erepeat
:
5153 fprintf (dump_file
, "Adding nop inside loop\n");
5154 emit_insn_before (gen_nop (), next
);
5161 /* Insert the label. */
5162 emit_label_before (label
, next
);
5164 /* Insert the nops. */
5165 if (dump_file
&& count
< 2)
5166 fprintf (dump_file
, "Adding %d nop%s\n\n",
5167 2 - count
, count
== 1 ? "" : "s");
5169 for (; count
< 2; count
++)
5171 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5173 emit_insn_before (gen_nop (), last_insn
);
5180 mep_emit_doloop (rtx
*operands
, int is_end
)
5184 if (cfun
->machine
->doloop_tags
== 0
5185 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5187 cfun
->machine
->doloop_tags
++;
5188 cfun
->machine
->doloop_tag_from_end
= is_end
;
5191 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5193 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[4], tag
));
5195 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5199 /* Code for converting doloop_begins and doloop_ends into valid
5200 MeP instructions. A doloop_begin is just a placeholder:
5202 $count = unspec ($count)
5204 where $count is initially the number of iterations - 1.
5205 doloop_end has the form:
5207 if ($count-- == 0) goto label
5209 The counter variable is private to the doloop insns, nothing else
5210 relies on its value.
5212 There are three cases, in decreasing order of preference:
5214 1. A loop has exactly one doloop_begin and one doloop_end.
5215 The doloop_end branches to the first instruction after
5218 In this case we can replace the doloop_begin with a repeat
5219 instruction and remove the doloop_end. I.e.:
5221 $count1 = unspec ($count1)
5226 if ($count2-- == 0) goto label
5230 repeat $count1,repeat_label
5238 2. As for (1), except there are several doloop_ends. One of them
5239 (call it X) falls through to a label L. All the others fall
5240 through to branches to L.
5242 In this case, we remove X and replace the other doloop_ends
5243 with branches to the repeat label. For example:
5245 $count1 = unspec ($count1)
5248 if ($count2-- == 0) goto label
5251 if ($count3-- == 0) goto label
5256 repeat $count1,repeat_label
5267 3. The fallback case. Replace doloop_begins with:
5271 Replace doloop_ends with the equivalent of:
5274 if ($count == 0) goto label
5276 Note that this might need a scratch register if $count
5277 is stored in memory. */
5279 /* A structure describing one doloop_begin. */
5280 struct mep_doloop_begin
{
5281 /* The next doloop_begin with the same tag. */
5282 struct mep_doloop_begin
*next
;
5284 /* The instruction itself. */
5287 /* The initial counter value. This is known to be a general register. */
5291 /* A structure describing a doloop_end. */
5292 struct mep_doloop_end
{
5293 /* The next doloop_end with the same loop tag. */
5294 struct mep_doloop_end
*next
;
5296 /* The instruction itself. */
5299 /* The first instruction after INSN when the branch isn't taken. */
5302 /* The location of the counter value. Since doloop_end_internal is a
5303 jump instruction, it has to allow the counter to be stored anywhere
5304 (any non-fixed register or memory location). */
5307 /* The target label (the place where the insn branches when the counter
5311 /* A scratch register. Only available when COUNTER isn't stored
5312 in a general register. */
5317 /* One do-while loop. */
5319 /* All the doloop_begins for this loop (in no particular order). */
5320 struct mep_doloop_begin
*begin
;
5322 /* All the doloop_ends. When there is more than one, arrange things
5323 so that the first one is the most likely to be X in case (2) above. */
5324 struct mep_doloop_end
*end
;
5328 /* Return true if LOOP can be converted into repeat/repeat_end form
5329 (that is, if it matches cases (1) or (2) above). */
5332 mep_repeat_loop_p (struct mep_doloop
*loop
)
5334 struct mep_doloop_end
*end
;
5337 /* There must be exactly one doloop_begin and at least one doloop_end. */
5338 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5341 /* The first doloop_end (X) must branch back to the insn after
5342 the doloop_begin. */
5343 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5346 /* All the other doloop_ends must branch to the same place as X.
5347 When the branch isn't taken, they must jump to the instruction
5349 fallthrough
= loop
->end
->fallthrough
;
5350 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5351 if (end
->label
!= loop
->end
->label
5352 || !simplejump_p (end
->fallthrough
)
5353 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5360 /* The main repeat reorg function. See comment above for details. */
5363 mep_reorg_repeat (rtx insns
)
5366 struct mep_doloop
*loops
, *loop
;
5367 struct mep_doloop_begin
*begin
;
5368 struct mep_doloop_end
*end
;
5370 /* Quick exit if we haven't created any loops. */
5371 if (cfun
->machine
->doloop_tags
== 0)
5374 /* Create an array of mep_doloop structures. */
5375 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5376 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5378 /* Search the function for do-while insns and group them by loop tag. */
5379 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5381 switch (recog_memoized (insn
))
5383 case CODE_FOR_doloop_begin_internal
:
5384 insn_extract (insn
);
5385 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5387 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5388 begin
->next
= loop
->begin
;
5390 begin
->counter
= recog_data
.operand
[0];
5392 loop
->begin
= begin
;
5395 case CODE_FOR_doloop_end_internal
:
5396 insn_extract (insn
);
5397 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5399 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5401 end
->fallthrough
= next_real_insn (insn
);
5402 end
->counter
= recog_data
.operand
[0];
5403 end
->label
= recog_data
.operand
[1];
5404 end
->scratch
= recog_data
.operand
[3];
5406 /* If this insn falls through to an unconditional jump,
5407 give it a lower priority than the others. */
5408 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5410 end
->next
= loop
->end
->next
;
5411 loop
->end
->next
= end
;
5415 end
->next
= loop
->end
;
5421 /* Convert the insns for each loop in turn. */
5422 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5423 if (mep_repeat_loop_p (loop
))
5425 /* Case (1) or (2). */
5426 rtx repeat_label
, label_ref
;
5428 /* Create a new label for the repeat insn. */
5429 repeat_label
= gen_label_rtx ();
5431 /* Replace the doloop_begin with a repeat. */
5432 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5433 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5435 delete_insn (loop
->begin
->insn
);
5437 /* Insert the repeat label before the first doloop_end.
5438 Fill the gap with nops if there are other doloop_ends. */
5439 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5440 false, loop
->end
->next
!= 0);
5442 /* Emit a repeat_end (to improve the readability of the output). */
5443 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5445 /* Delete the first doloop_end. */
5446 delete_insn (loop
->end
->insn
);
5448 /* Replace the others with branches to REPEAT_LABEL. */
5449 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5451 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5452 delete_insn (end
->insn
);
5453 delete_insn (end
->fallthrough
);
5458 /* Case (3). First replace all the doloop_begins with increment
5460 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5462 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5463 begin
->counter
, const1_rtx
),
5465 delete_insn (begin
->insn
);
5468 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5469 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5475 /* Load the counter value into a general register. */
5477 if (!REG_P (reg
) || REGNO (reg
) > 15)
5480 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5483 /* Decrement the counter. */
5484 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5487 /* Copy it back to its original location. */
5488 if (reg
!= end
->counter
)
5489 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5491 /* Jump back to the start label. */
5492 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5494 JUMP_LABEL (insn
) = end
->label
;
5495 LABEL_NUSES (end
->label
)++;
5497 /* Emit the whole sequence before the doloop_end. */
5498 insn
= get_insns ();
5500 emit_insn_before (insn
, end
->insn
);
5502 /* Delete the doloop_end. */
5503 delete_insn (end
->insn
);
5510 mep_invertable_branch_p (rtx insn
)
5513 enum rtx_code old_code
;
5516 set
= PATTERN (insn
);
5517 if (GET_CODE (set
) != SET
)
5519 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5521 cond
= XEXP (XEXP (set
, 1), 0);
5522 old_code
= GET_CODE (cond
);
5526 PUT_CODE (cond
, NE
);
5529 PUT_CODE (cond
, EQ
);
5532 PUT_CODE (cond
, GE
);
5535 PUT_CODE (cond
, LT
);
5540 INSN_CODE (insn
) = -1;
5541 i
= recog_memoized (insn
);
5542 PUT_CODE (cond
, old_code
);
5543 INSN_CODE (insn
) = -1;
5548 mep_invert_branch (rtx insn
, rtx after
)
5550 rtx cond
, set
, label
;
5553 set
= PATTERN (insn
);
5555 gcc_assert (GET_CODE (set
) == SET
);
5556 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5558 cond
= XEXP (XEXP (set
, 1), 0);
5559 switch (GET_CODE (cond
))
5562 PUT_CODE (cond
, NE
);
5565 PUT_CODE (cond
, EQ
);
5568 PUT_CODE (cond
, GE
);
5571 PUT_CODE (cond
, LT
);
5576 label
= gen_label_rtx ();
5577 emit_label_after (label
, after
);
5578 for (i
=1; i
<=2; i
++)
5579 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5581 rtx ref
= XEXP (XEXP (set
, 1), i
);
5582 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5583 delete_insn (XEXP (ref
, 0));
5584 XEXP (ref
, 0) = label
;
5585 LABEL_NUSES (label
) ++;
5586 JUMP_LABEL (insn
) = label
;
5588 INSN_CODE (insn
) = -1;
5589 i
= recog_memoized (insn
);
5590 gcc_assert (i
>= 0);
5594 mep_reorg_erepeat (rtx insns
)
5596 rtx insn
, prev
, l
, x
;
5599 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5601 && ! JUMP_TABLE_DATA_P (insn
)
5602 && mep_invertable_branch_p (insn
))
5606 fprintf (dump_file
, "\n------------------------------\n");
5607 fprintf (dump_file
, "erepeat: considering this jump:\n");
5608 print_rtl_single (dump_file
, insn
);
5610 count
= simplejump_p (insn
) ? 0 : 1;
5611 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5613 if (GET_CODE (prev
) == CALL_INSN
5614 || BARRIER_P (prev
))
5617 if (prev
== JUMP_LABEL (insn
))
5621 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5623 if (LABEL_NUSES (prev
) == 1)
5624 /* We're the only user, always safe */ ;
5625 else if (LABEL_NUSES (prev
) == 2)
5627 /* See if there's a barrier before this label. If
5628 so, we know nobody inside the loop uses it.
5629 But we must be careful to put the erepeat
5630 *after* the label. */
5632 for (barrier
= PREV_INSN (prev
);
5633 barrier
&& GET_CODE (barrier
) == NOTE
;
5634 barrier
= PREV_INSN (barrier
))
5636 if (barrier
&& GET_CODE (barrier
) != BARRIER
)
5641 /* We don't know who else, within or without our loop, uses this */
5643 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5647 /* Generate a label to be used by the erepeat insn. */
5648 l
= gen_label_rtx ();
5650 /* Insert the erepeat after INSN's target label. */
5651 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5653 emit_insn_after (x
, prev
);
5655 /* Insert the erepeat label. */
5656 newlast
= (mep_insert_repeat_label_last
5657 (insn
, l
, !simplejump_p (insn
), false));
5658 if (simplejump_p (insn
))
5660 emit_insn_before (gen_erepeat_end (), insn
);
5665 mep_invert_branch (insn
, newlast
);
5666 emit_insn_after (gen_erepeat_end (), newlast
);
5673 /* A label is OK if there is exactly one user, and we
5674 can find that user before the next label. */
5677 if (LABEL_NUSES (prev
) == 1)
5679 for (user
= PREV_INSN (prev
);
5680 user
&& (INSN_P (user
) || GET_CODE (user
) == NOTE
);
5681 user
= PREV_INSN (user
))
5682 if (GET_CODE (user
) == JUMP_INSN
5683 && JUMP_LABEL (user
) == prev
)
5685 safe
= INSN_UID (user
);
5692 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5693 safe
, INSN_UID (prev
));
5703 fprintf (dump_file
, "\n==============================\n");
5706 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5707 always do this on its own. */
5710 mep_jmp_return_reorg (rtx insns
)
5712 rtx insn
, label
, ret
;
5715 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5716 if (simplejump_p (insn
))
5718 /* Find the fist real insn the jump jumps to. */
5719 label
= ret
= JUMP_LABEL (insn
);
5721 && (GET_CODE (ret
) == NOTE
5722 || GET_CODE (ret
) == CODE_LABEL
5723 || GET_CODE (PATTERN (ret
)) == USE
))
5724 ret
= NEXT_INSN (ret
);
5728 /* Is it a return? */
5729 ret_code
= recog_memoized (ret
);
5730 if (ret_code
== CODE_FOR_return_internal
5731 || ret_code
== CODE_FOR_eh_return_internal
)
5733 /* It is. Replace the jump with a return. */
5734 LABEL_NUSES (label
) --;
5735 if (LABEL_NUSES (label
) == 0)
5736 delete_insn (label
);
5737 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5738 INSN_CODE (insn
) = -1;
5746 mep_reorg_addcombine (rtx insns
)
5750 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5752 && INSN_CODE (i
) == CODE_FOR_addsi3
5753 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5754 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5755 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5756 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5760 && INSN_CODE (n
) == CODE_FOR_addsi3
5761 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5762 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5763 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5764 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5766 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5767 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5768 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5770 && ic
+ nc
> -32768)
5772 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5773 NEXT_INSN (i
) = NEXT_INSN (n
);
5775 PREV_INSN (NEXT_INSN (i
)) = i
;
5781 /* If this insn adjusts the stack, return the adjustment, else return
5784 add_sp_insn_p (rtx insn
)
5788 if (! single_set (insn
))
5790 pat
= PATTERN (insn
);
5791 if (GET_CODE (SET_DEST (pat
)) != REG
)
5793 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5795 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5797 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5799 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5801 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5803 return INTVAL (XEXP (SET_SRC (pat
), 1));
5806 /* Check for trivial functions that set up an unneeded stack
5809 mep_reorg_noframe (rtx insns
)
5811 rtx start_frame_insn
;
5812 rtx end_frame_insn
= 0;
5816 /* The first insn should be $sp = $sp + N */
5817 while (insns
&& ! INSN_P (insns
))
5818 insns
= NEXT_INSN (insns
);
5822 sp_adjust
= add_sp_insn_p (insns
);
5826 start_frame_insn
= insns
;
5827 sp
= SET_DEST (PATTERN (start_frame_insn
));
5829 insns
= next_real_insn (insns
);
5833 rtx next
= next_real_insn (insns
);
5837 sp2
= add_sp_insn_p (insns
);
5842 end_frame_insn
= insns
;
5843 if (sp2
!= -sp_adjust
)
5846 else if (mep_mentioned_p (insns
, sp
, 0))
5848 else if (CALL_P (insns
))
5856 delete_insn (start_frame_insn
);
5857 delete_insn (end_frame_insn
);
5864 rtx insns
= get_insns ();
5866 /* We require accurate REG_DEAD notes. */
5867 compute_bb_for_insn ();
5868 df_note_add_problem ();
5871 mep_reorg_addcombine (insns
);
5872 #if EXPERIMENTAL_REGMOVE_REORG
5873 /* VLIW packing has been done already, so we can't just delete things. */
5874 if (!mep_vliw_function_p (cfun
->decl
))
5875 mep_reorg_regmove (insns
);
5877 mep_jmp_return_reorg (insns
);
5878 mep_bundle_insns (insns
);
5879 mep_reorg_repeat (insns
);
5882 && !profile_arc_flag
5883 && TARGET_OPT_REPEAT
5884 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5885 mep_reorg_erepeat (insns
);
5887 /* This may delete *insns so make sure it's last. */
5888 mep_reorg_noframe (insns
);
5890 df_finish_pass (false);
5895 /*----------------------------------------------------------------------*/
5897 /*----------------------------------------------------------------------*/
5899 /* Element X gives the index into cgen_insns[] of the most general
5900 implementation of intrinsic X. Unimplemented intrinsics are
5902 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5904 /* Element X gives the index of another instruction that is mapped to
5905 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5908 Things are set up so that mep_intrinsic_chain[X] < X. */
5909 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5911 /* The bitmask for the current ISA. The ISA masks are declared
5913 unsigned int mep_selected_isa
;
5916 const char *config_name
;
5920 static struct mep_config mep_configs
[] = {
5921 #ifdef COPROC_SELECTION_TABLE
5922 COPROC_SELECTION_TABLE
,
5927 /* Initialize the global intrinsics variables above. */
5930 mep_init_intrinsics (void)
5934 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5935 mep_selected_isa
= mep_configs
[0].isa
;
5936 if (mep_config_string
!= 0)
5937 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5938 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5940 mep_selected_isa
= mep_configs
[i
].isa
;
5944 /* Assume all intrinsics are unavailable. */
5945 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5946 mep_intrinsic_insn
[i
] = -1;
5948 /* Build up the global intrinsic tables. */
5949 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5950 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5952 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5953 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5955 /* See whether we can directly move values between one coprocessor
5956 register and another. */
5957 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5958 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5959 mep_have_copro_copro_moves_p
= true;
5961 /* See whether we can directly move values between core and
5962 coprocessor registers. */
5963 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5964 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5966 mep_have_core_copro_moves_p
= 1;
5969 /* Declare all available intrinsic functions. Called once only. */
/* Tree type nodes for the coprocessor intrinsic argument/return types.
   Built once by mep_init_builtins and consulted by
   mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5981 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5985 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5986 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5987 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5988 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5989 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5990 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5991 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5992 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5993 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5994 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5995 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5996 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5997 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5998 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5999 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
6000 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
6001 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
6003 return void_type_node
;
6008 mep_init_builtins (void)
6012 if (TARGET_64BIT_CR_REGS
)
6013 cp_data_bus_int_type_node
= long_long_integer_type_node
;
6015 cp_data_bus_int_type_node
= long_integer_type_node
;
6017 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
6018 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
6019 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
6020 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
6021 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
6022 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
6023 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
6025 (*lang_hooks
.decls
.pushdecl
)
6026 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_data_bus_int"),
6027 cp_data_bus_int_type_node
));
6029 (*lang_hooks
.decls
.pushdecl
)
6030 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_vector"),
6031 opaque_vector_type_node
));
6033 (*lang_hooks
.decls
.pushdecl
)
6034 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v8qi"),
6036 (*lang_hooks
.decls
.pushdecl
)
6037 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v4hi"),
6039 (*lang_hooks
.decls
.pushdecl
)
6040 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v2si"),
6043 (*lang_hooks
.decls
.pushdecl
)
6044 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v8uqi"),
6046 (*lang_hooks
.decls
.pushdecl
)
6047 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v4uhi"),
6049 (*lang_hooks
.decls
.pushdecl
)
6050 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v2usi"),
6053 /* Intrinsics like mep_cadd3 are implemented with two groups of
6054 instructions, one which uses UNSPECs and one which uses a specific
6055 rtl code such as PLUS. Instructions in the latter group belong
6056 to GROUP_KNOWN_CODE.
6058 In such cases, the intrinsic will have two entries in the global
6059 tables above. The unspec form is accessed using builtin functions
6060 while the specific form is accessed using the mep_* enum in
6063 The idea is that __cop arithmetic and builtin functions have
6064 different optimization requirements. If mep_cadd3() appears in
6065 the source code, the user will surely except gcc to use cadd3
6066 rather than a work-alike such as add3. However, if the user
6067 just writes "a + b", where a or b are __cop variables, it is
6068 reasonable for gcc to choose a core instruction rather than
6069 cadd3 if it believes that is more optimal. */
6070 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
6071 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
6072 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
6074 tree ret_type
= void_type_node
;
6077 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
6080 if (cgen_insns
[i
].cret_p
)
6081 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
6083 bi_type
= build_function_type (ret_type
, 0);
6084 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
6086 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
6090 /* Report the unavailablity of the given intrinsic. */
6094 mep_intrinsic_unavailable (int intrinsic
)
6096 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
6098 if (already_reported_p
[intrinsic
])
6101 if (mep_intrinsic_insn
[intrinsic
] < 0)
6102 error ("coprocessor intrinsic %qs is not available in this configuration",
6103 cgen_intrinsics
[intrinsic
]);
6104 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6105 error ("%qs is not available in VLIW functions",
6106 cgen_intrinsics
[intrinsic
]);
6108 error ("%qs is not available in non-VLIW functions",
6109 cgen_intrinsics
[intrinsic
]);
6111 already_reported_p
[intrinsic
] = 1;
6116 /* See if any implementation of INTRINSIC is available to the
6117 current function. If so, store the most general implementation
6118 in *INSN_PTR and return true. Return false otherwise. */
6121 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6125 i
= mep_intrinsic_insn
[intrinsic
];
6126 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6127 i
= mep_intrinsic_chain
[i
];
6131 *insn_ptr
= &cgen_insns
[i
];
6138 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6139 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6140 try using a work-alike instead. In this case, the returned insn
6141 may have three operands rather than two. */
6144 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6148 if (intrinsic
== mep_cmov
)
6150 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6151 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6155 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6159 /* If ARG is a register operand that is the same size as MODE, convert it
6160 to MODE using a subreg. Otherwise return ARG as-is. */
6163 mep_convert_arg (enum machine_mode mode
, rtx arg
)
6165 if (GET_MODE (arg
) != mode
6166 && register_operand (arg
, VOIDmode
)
6167 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6168 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6173 /* Apply regnum conversions to ARG using the description given by REGNUM.
6174 Return the new argument on success and null on failure. */
6177 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6179 if (regnum
->count
== 0)
6182 if (GET_CODE (arg
) != CONST_INT
6184 || INTVAL (arg
) >= regnum
->count
)
6187 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6191 /* Try to make intrinsic argument ARG match the given operand.
6192 UNSIGNED_P is true if the argument has an unsigned type. */
6195 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6198 if (GET_CODE (arg
) == CONST_INT
)
6200 /* CONST_INTs can only be bound to integer operands. */
6201 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6204 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6205 /* These hold vector constants. */;
6206 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6208 /* If the argument is a different size from what's expected, we must
6209 have a value in the right mode class in order to convert it. */
6210 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6213 /* If the operand is an rvalue, promote or demote it to match the
6214 operand's size. This might not need extra instructions when
6215 ARG is a register value. */
6216 if (operand
->constraint
[0] != '=')
6217 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6220 /* If the operand is an lvalue, bind the operand to a new register.
6221 The caller will copy this value into ARG after the main
6222 instruction. By doing this always, we produce slightly more
6224 /* But not for control registers. */
6225 if (operand
->constraint
[0] == '='
6227 || ! (CONTROL_REGNO_P (REGNO (arg
))
6228 || CCR_REGNO_P (REGNO (arg
))
6229 || CR_REGNO_P (REGNO (arg
)))
6231 return gen_reg_rtx (operand
->mode
);
6233 /* Try simple mode punning. */
6234 arg
= mep_convert_arg (operand
->mode
, arg
);
6235 if (operand
->predicate (arg
, operand
->mode
))
6238 /* See if forcing the argument into a register will make it match. */
6239 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6240 arg
= force_reg (operand
->mode
, arg
);
6242 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6243 if (operand
->predicate (arg
, operand
->mode
))
6250 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6251 function FNNAME. OPERAND describes the operand to which ARGNUM
6255 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6256 int argnum
, tree fnname
)
6260 if (GET_CODE (arg
) == CONST_INT
)
6261 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6262 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6264 const struct cgen_immediate_predicate
*predicate
;
6265 HOST_WIDE_INT argval
;
6267 predicate
= &cgen_immediate_predicates
[i
];
6268 argval
= INTVAL (arg
);
6269 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6270 error ("argument %d of %qE must be in the range %d...%d",
6271 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6273 error ("argument %d of %qE must be a multiple of %d",
6274 argnum
, fnname
, predicate
->align
);
6278 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6282 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6283 rtx subtarget ATTRIBUTE_UNUSED
,
6284 enum machine_mode mode ATTRIBUTE_UNUSED
,
6285 int ignore ATTRIBUTE_UNUSED
)
6287 rtx pat
, op
[10], arg
[10];
6289 int opindex
, unsigned_p
[10];
6291 unsigned int n_args
;
6293 const struct cgen_insn
*cgen_insn
;
6294 const struct insn_data_d
*idata
;
6295 unsigned int first_arg
= 0;
6296 unsigned int builtin_n_args
;
6298 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6299 fnname
= DECL_NAME (fndecl
);
6301 /* Find out which instruction we should emit. Note that some coprocessor
6302 intrinsics may only be available in VLIW mode, or only in normal mode. */
6303 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6305 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6308 idata
= &insn_data
[cgen_insn
->icode
];
6310 builtin_n_args
= cgen_insn
->num_args
;
6312 if (cgen_insn
->cret_p
)
6314 if (cgen_insn
->cret_p
> 1)
6317 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6321 /* Evaluate each argument. */
6322 n_args
= call_expr_nargs (exp
);
6324 if (n_args
< builtin_n_args
)
6326 error ("too few arguments to %qE", fnname
);
6329 if (n_args
> builtin_n_args
)
6331 error ("too many arguments to %qE", fnname
);
6335 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6339 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6344 if (cgen_insn
->regnums
[a
].reference_p
)
6346 if (TREE_CODE (value
) != ADDR_EXPR
)
6349 error ("argument %d of %qE must be an address", a
+1, fnname
);
6352 value
= TREE_OPERAND (value
, 0);
6356 /* If the argument has been promoted to int, get the unpromoted
6357 value. This is necessary when sub-int memory values are bound
6358 to reference parameters. */
6359 if (TREE_CODE (value
) == NOP_EXPR
6360 && TREE_TYPE (value
) == integer_type_node
6361 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6362 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6363 < TYPE_PRECISION (TREE_TYPE (value
))))
6364 value
= TREE_OPERAND (value
, 0);
6366 /* If the argument has been promoted to double, get the unpromoted
6367 SFmode value. This is necessary for FMAX support, for example. */
6368 if (TREE_CODE (value
) == NOP_EXPR
6369 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6370 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6371 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6372 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6373 value
= TREE_OPERAND (value
, 0);
6375 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6376 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6377 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6378 if (cgen_insn
->regnums
[a
].reference_p
)
6380 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6381 enum machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6383 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6387 error ("argument %d of %qE must be in the range %d...%d",
6388 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6393 for (a
= 0; a
< first_arg
; a
++)
6395 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6398 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6401 /* Convert the arguments into a form suitable for the intrinsic.
6402 Report an error if this isn't possible. */
6403 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6405 a
= cgen_insn
->op_mapping
[opindex
];
6406 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6407 arg
[a
], unsigned_p
[a
]);
6408 if (op
[opindex
] == 0)
6410 mep_incompatible_arg (&idata
->operand
[opindex
],
6411 arg
[a
], a
+ 1 - first_arg
, fnname
);
6416 /* Emit the instruction. */
6417 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6418 op
[5], op
[6], op
[7], op
[8], op
[9]);
6420 if (GET_CODE (pat
) == SET
6421 && GET_CODE (SET_DEST (pat
)) == PC
6422 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6423 emit_jump_insn (pat
);
6427 /* Copy lvalues back to their final locations. */
6428 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6429 if (idata
->operand
[opindex
].constraint
[0] == '=')
6431 a
= cgen_insn
->op_mapping
[opindex
];
6434 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6435 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6436 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6440 /* First convert the operand to the right mode, then copy it
6441 into the destination. Doing the conversion as a separate
6442 step (rather than using convert_move) means that we can
6443 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6444 refer to the same register. */
6445 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6446 op
[opindex
], unsigned_p
[a
]);
6447 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6448 emit_move_insn (arg
[a
], op
[opindex
]);
6453 if (first_arg
> 0 && target
&& target
!= op
[0])
6455 emit_move_insn (target
, op
[0]);
6462 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED
)
6467 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6468 a global register. */
6471 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
6479 switch (GET_CODE (x
))
6482 if (REG_P (SUBREG_REG (x
)))
6484 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6485 && global_regs
[subreg_regno (x
)])
6493 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6507 /* A non-constant call might use a global register. */
6517 /* Returns nonzero if X mentions a global register. */
6520 global_reg_mentioned_p (rtx x
)
6526 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6528 x
= CALL_INSN_FUNCTION_USAGE (x
);
6536 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
6538 /* Scheduling hooks for VLIW mode.
6540 Conceptually this is very simple: we have a two-pack architecture
6541 that takes one core insn and one coprocessor insn to make up either
6542 a 32- or 64-bit instruction word (depending on the option bit set in
6543 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6544 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6545 and one 48-bit cop insn or two 32-bit core/cop insns.
6547 In practice, instruction selection will be a bear. Consider in
6548 VL64 mode the following insns
6553 these cannot pack, since the add is a 16-bit core insn and cmov
6554 is a 32-bit cop insn. However,
6559 packs just fine. For good VLIW code generation in VL64 mode, we
6560 will have to have 32-bit alternatives for many of the common core
6561 insns. Not implemented. */
6564 mep_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
6568 if (REG_NOTE_KIND (link
) != 0)
6570 /* See whether INSN and DEP_INSN are intrinsics that set the same
6571 hard register. If so, it is more important to free up DEP_INSN
6572 than it is to free up INSN.
6574 Note that intrinsics like mep_mulr are handled differently from
6575 the equivalent mep.md patterns. In mep.md, if we don't care
6576 about the value of $lo and $hi, the pattern will just clobber
6577 the registers, not set them. Since clobbers don't count as
6578 output dependencies, it is often possible to reorder two mulrs,
6581 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6582 so any pair of mep_mulr()s will be inter-dependent. We should
6583 therefore give the first mep_mulr() a higher priority. */
6584 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6585 && global_reg_mentioned_p (PATTERN (insn
))
6586 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6589 /* If the dependence is an anti or output dependence, assume it
6594 /* If we can't recognize the insns, we can't really do anything. */
6595 if (recog_memoized (dep_insn
) < 0)
6598 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6599 attribute instead. */
6602 cost_specified
= get_attr_latency (dep_insn
);
6603 if (cost_specified
!= 0)
6604 return cost_specified
;
6610 /* ??? We don't properly compute the length of a load/store insn,
6611 taking into account the addressing mode. */
6614 mep_issue_rate (void)
6616 return TARGET_IVC2
? 3 : 2;
6619 /* Return true if function DECL was declared with the vliw attribute. */
6622 mep_vliw_function_p (tree decl
)
6624 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6628 mep_find_ready_insn (rtx
*ready
, int nready
, enum attr_slot slot
, int length
)
6632 for (i
= nready
- 1; i
>= 0; --i
)
6634 rtx insn
= ready
[i
];
6635 if (recog_memoized (insn
) >= 0
6636 && get_attr_slot (insn
) == slot
6637 && get_attr_length (insn
) == length
)
6645 mep_move_ready_insn (rtx
*ready
, int nready
, rtx insn
)
6649 for (i
= 0; i
< nready
; ++i
)
6650 if (ready
[i
] == insn
)
6652 for (; i
< nready
- 1; ++i
)
6653 ready
[i
] = ready
[i
+ 1];
6662 mep_print_sched_insn (FILE *dump
, rtx insn
)
6664 const char *slots
= "none";
6665 const char *name
= NULL
;
6669 if (GET_CODE (PATTERN (insn
)) == SET
6670 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6672 switch (get_attr_slots (insn
))
6674 case SLOTS_CORE
: slots
= "core"; break;
6675 case SLOTS_C3
: slots
= "c3"; break;
6676 case SLOTS_P0
: slots
= "p0"; break;
6677 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6678 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6679 case SLOTS_P0S
: slots
= "p0s"; break;
6680 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6681 case SLOTS_P1
: slots
= "p1"; break;
6683 sprintf(buf
, "%d", get_attr_slots (insn
));
6688 if (GET_CODE (PATTERN (insn
)) == USE
)
6691 code
= INSN_CODE (insn
);
6693 name
= get_insn_name (code
);
6698 "insn %4d %4d %8s %s\n",
6706 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6707 int sched_verbose ATTRIBUTE_UNUSED
, rtx
*ready
,
6708 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6710 int nready
= *pnready
;
6711 rtx core_insn
, cop_insn
;
6714 if (dump
&& sched_verbose
> 1)
6716 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6717 for (i
=0; i
<nready
; i
++)
6718 mep_print_sched_insn (dump
, ready
[i
]);
6719 fprintf (dump
, "\n");
6722 if (!mep_vliw_function_p (cfun
->decl
))
6727 /* IVC2 uses a DFA to determine what's ready and what's not. */
6731 /* We can issue either a core or coprocessor instruction.
6732 Look for a matched pair of insns to reorder. If we don't
6733 find any, don't second-guess the scheduler's priorities. */
6735 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6736 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6737 TARGET_OPT_VL64
? 6 : 2)))
6739 else if (TARGET_OPT_VL64
6740 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6741 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6744 /* We didn't find a pair. Issue the single insn at the head
6745 of the ready list. */
6748 /* Reorder the two insns first. */
6749 mep_move_ready_insn (ready
, nready
, core_insn
);
6750 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6754 /* A for_each_rtx callback. Return true if *X is a register that is
6755 set by insn PREV. */
6758 mep_store_find_set (rtx
*x
, void *prev
)
6760 return REG_P (*x
) && reg_set_p (*x
, (const_rtx
) prev
);
6763 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6764 not the containing insn. */
6767 mep_store_data_bypass_1 (rtx prev
, rtx pat
)
6769 /* Cope with intrinsics like swcpa. */
6770 if (GET_CODE (pat
) == PARALLEL
)
6774 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6775 if (mep_store_data_bypass_p (prev
, XVECEXP (pat
, 0, i
)))
6781 /* Check for some sort of store. */
6782 if (GET_CODE (pat
) != SET
6783 || GET_CODE (SET_DEST (pat
)) != MEM
)
6786 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6787 The first operand to the unspec is the store data and the other operands
6788 are used to calculate the address. */
6789 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6794 src
= SET_SRC (pat
);
6795 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6796 if (for_each_rtx (&XVECEXP (src
, 0, i
), mep_store_find_set
, prev
))
6802 /* Otherwise just check that PREV doesn't modify any register mentioned
6803 in the memory destination. */
6804 return !for_each_rtx (&SET_DEST (pat
), mep_store_find_set
, prev
);
6807 /* Return true if INSN is a store instruction and if the store address
6808 has no true dependence on PREV. */
6811 mep_store_data_bypass_p (rtx prev
, rtx insn
)
6813 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6816 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6817 is a register other than LO or HI and if PREV sets *X. */
6820 mep_mul_hilo_bypass_1 (rtx
*x
, void *prev
)
6823 && REGNO (*x
) != LO_REGNO
6824 && REGNO (*x
) != HI_REGNO
6825 && reg_set_p (*x
, (const_rtx
) prev
));
6828 /* Return true if, apart from HI/LO, there are no true dependencies
6829 between multiplication instructions PREV and INSN. */
6832 mep_mul_hilo_bypass_p (rtx prev
, rtx insn
)
6836 pat
= PATTERN (insn
);
6837 if (GET_CODE (pat
) == PARALLEL
)
6838 pat
= XVECEXP (pat
, 0, 0);
6839 return (GET_CODE (pat
) == SET
6840 && !for_each_rtx (&SET_SRC (pat
), mep_mul_hilo_bypass_1
, prev
));
6843 /* Return true if INSN is an ldc instruction that issues to the
6844 MeP-h1 integer pipeline. This is true for instructions that
6845 read from PSW, LP, SAR, HI and LO. */
6848 mep_ipipe_ldc_p (rtx insn
)
6852 pat
= PATTERN (insn
);
6854 /* Cope with instrinsics that set both a hard register and its shadow.
6855 The set of the hard register comes first. */
6856 if (GET_CODE (pat
) == PARALLEL
)
6857 pat
= XVECEXP (pat
, 0, 0);
6859 if (GET_CODE (pat
) == SET
)
6861 src
= SET_SRC (pat
);
6863 /* Cope with intrinsics. The first operand to the unspec is
6864 the source register. */
6865 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6866 src
= XVECEXP (src
, 0, 0);
6869 switch (REGNO (src
))
6882 /* Create a VLIW bundle from core instruction CORE and coprocessor
6883 instruction COP. COP always satisfies INSN_P, but CORE can be
6884 either a new pattern or an existing instruction.
6886 Emit the bundle in place of COP and return it. */
6889 mep_make_bundle (rtx core
, rtx cop
)
6893 /* If CORE is an existing instruction, remove it, otherwise put
6894 the new pattern in an INSN harness. */
6898 core
= make_insn_raw (core
);
6900 /* Generate the bundle sequence and replace COP with it. */
6901 insn
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core
, cop
));
6902 insn
= emit_insn_after (insn
, cop
);
6905 /* Set up the links of the insns inside the SEQUENCE. */
6906 PREV_INSN (core
) = PREV_INSN (insn
);
6907 NEXT_INSN (core
) = cop
;
6908 PREV_INSN (cop
) = core
;
6909 NEXT_INSN (cop
) = NEXT_INSN (insn
);
6911 /* Set the VLIW flag for the coprocessor instruction. */
6912 PUT_MODE (core
, VOIDmode
);
6913 PUT_MODE (cop
, BImode
);
6915 /* Derive a location for the bundle. Individual instructions cannot
6916 have their own location because there can be no assembler labels
6917 between CORE and COP. */
6918 INSN_LOCATOR (insn
) = INSN_LOCATOR (INSN_LOCATOR (core
) ? core
: cop
);
6919 INSN_LOCATOR (core
) = 0;
6920 INSN_LOCATOR (cop
) = 0;
6925 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6928 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6930 rtx
* pinsn
= (rtx
*) data
;
6932 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6936 /* Return true if anything in insn X is (anti,output,true) dependent on
6937 anything in insn Y. */
6940 mep_insn_dependent_p (rtx x
, rtx y
)
6944 gcc_assert (INSN_P (x
));
6945 gcc_assert (INSN_P (y
));
6948 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6949 if (tmp
== NULL_RTX
)
6953 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6954 if (tmp
== NULL_RTX
)
6961 core_insn_p (rtx insn
)
6963 if (GET_CODE (PATTERN (insn
)) == USE
)
6965 if (get_attr_slot (insn
) == SLOT_CORE
)
6970 /* Mark coprocessor instructions that can be bundled together with
6971 the immediately preceeding core instruction. This is later used
6972 to emit the "+" that tells the assembler to create a VLIW insn.
6974 For unbundled insns, the assembler will automatically add coprocessor
6975 nops, and 16-bit core nops. Due to an apparent oversight in the
6976 spec, the assembler will _not_ automatically add 32-bit core nops,
6977 so we have to emit those here.
6979 Called from mep_insn_reorg. */
6982 mep_bundle_insns (rtx insns
)
6984 rtx insn
, last
= NULL_RTX
, first
= NULL_RTX
;
6985 int saw_scheduling
= 0;
6987 /* Only do bundling if we're in vliw mode. */
6988 if (!mep_vliw_function_p (cfun
->decl
))
6991 /* The first insn in a bundle are TImode, the remainder are
6992 VOIDmode. After this function, the first has VOIDmode and the
6993 rest have BImode. */
6995 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6997 /* First, move any NOTEs that are within a bundle, to the beginning
6999 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7001 if (NOTE_P (insn
) && first
)
7002 /* Don't clear FIRST. */;
7004 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
7007 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
7011 /* INSN is part of a bundle; FIRST is the first insn in that
7012 bundle. Move all intervening notes out of the bundle.
7013 In addition, since the debug pass may insert a label
7014 whenever the current line changes, set the location info
7015 for INSN to match FIRST. */
7017 INSN_LOCATOR (insn
) = INSN_LOCATOR (first
);
7019 note
= PREV_INSN (insn
);
7020 while (note
&& note
!= first
)
7022 prev
= PREV_INSN (note
);
7026 /* Remove NOTE from here... */
7027 PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
7028 NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
7029 /* ...and put it in here. */
7030 NEXT_INSN (note
) = first
;
7031 PREV_INSN (note
) = PREV_INSN (first
);
7032 NEXT_INSN (PREV_INSN (note
)) = note
;
7033 PREV_INSN (NEXT_INSN (note
)) = note
;
7040 else if (!NONJUMP_INSN_P (insn
))
7044 /* Now fix up the bundles. */
7045 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7050 if (!NONJUMP_INSN_P (insn
))
7056 /* If we're not optimizing enough, there won't be scheduling
7057 info. We detect that here. */
7058 if (GET_MODE (insn
) == TImode
)
7060 if (!saw_scheduling
)
7065 rtx core_insn
= NULL_RTX
;
7067 /* IVC2 slots are scheduled by DFA, so we just accept
7068 whatever the scheduler gives us. However, we must make
7069 sure the core insn (if any) is the first in the bundle.
7070 The IVC2 assembler can insert whatever NOPs are needed,
7071 and allows a COP insn to be first. */
7073 if (NONJUMP_INSN_P (insn
)
7074 && GET_CODE (PATTERN (insn
)) != USE
7075 && GET_MODE (insn
) == TImode
)
7079 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
7080 && NONJUMP_INSN_P (NEXT_INSN (last
));
7081 last
= NEXT_INSN (last
))
7083 if (core_insn_p (last
))
7086 if (core_insn_p (last
))
7089 if (core_insn
&& core_insn
!= insn
)
7091 /* Swap core insn to first in the bundle. */
7093 /* Remove core insn. */
7094 if (PREV_INSN (core_insn
))
7095 NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
7096 if (NEXT_INSN (core_insn
))
7097 PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
7099 /* Re-insert core insn. */
7100 PREV_INSN (core_insn
) = PREV_INSN (insn
);
7101 NEXT_INSN (core_insn
) = insn
;
7103 if (PREV_INSN (core_insn
))
7104 NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7105 PREV_INSN (insn
) = core_insn
;
7107 PUT_MODE (core_insn
, TImode
);
7108 PUT_MODE (insn
, VOIDmode
);
7112 /* The first insn has TImode, the rest have VOIDmode */
7113 if (GET_MODE (insn
) == TImode
)
7114 PUT_MODE (insn
, VOIDmode
);
7116 PUT_MODE (insn
, BImode
);
7120 PUT_MODE (insn
, VOIDmode
);
7121 if (recog_memoized (insn
) >= 0
7122 && get_attr_slot (insn
) == SLOT_COP
)
7124 if (GET_CODE (insn
) == JUMP_INSN
7126 || recog_memoized (last
) < 0
7127 || get_attr_slot (last
) != SLOT_CORE
7128 || (get_attr_length (insn
)
7129 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7130 || mep_insn_dependent_p (insn
, last
))
7132 switch (get_attr_length (insn
))
7137 insn
= mep_make_bundle (gen_nop (), insn
);
7140 if (TARGET_OPT_VL64
)
7141 insn
= mep_make_bundle (gen_nop32 (), insn
);
7144 if (TARGET_OPT_VL64
)
7145 error ("2 byte cop instructions are"
7146 " not allowed in 64-bit VLIW mode");
7148 insn
= mep_make_bundle (gen_nop (), insn
);
7151 error ("unexpected %d byte cop instruction",
7152 get_attr_length (insn
));
7157 insn
= mep_make_bundle (last
, insn
);
7165 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7166 Return true on success. This function can fail if the intrinsic
7167 is unavailable or if the operands don't satisfy their predicates. */
7170 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7172 const struct cgen_insn
*cgen_insn
;
7173 const struct insn_data_d
*idata
;
7177 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7180 idata
= &insn_data
[cgen_insn
->icode
];
7181 for (i
= 0; i
< idata
->n_operands
; i
++)
7183 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7184 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7188 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7189 newop
[3], newop
[4], newop
[5],
7190 newop
[6], newop
[7], newop
[8]));
7196 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7197 OPERANDS[0]. Report an error if the instruction could not
7198 be synthesized. OPERANDS[1] is a register_operand. For sign
7199 and zero extensions, it may be smaller than SImode. */
7202 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7203 rtx
* operands ATTRIBUTE_UNUSED
)
7209 /* Likewise, but apply a binary operation to OPERANDS[1] and
7210 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7211 can be a general_operand.
7213 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7214 third operand. REG and REG3 take register operands only. */
7217 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7218 int ATTRIBUTE_UNUSED immediate3
,
7219 int ATTRIBUTE_UNUSED reg
,
7220 int ATTRIBUTE_UNUSED reg3
,
7221 rtx
* operands ATTRIBUTE_UNUSED
)
7227 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
, bool ATTRIBUTE_UNUSED speed_t
)
7232 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7234 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7241 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7245 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7247 : COSTS_N_INSNS (2));
7254 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
, bool ATTRIBUTE_UNUSED speed_p
)
7260 mep_handle_option (size_t code
,
7261 const char *arg ATTRIBUTE_UNUSED
,
7262 int value ATTRIBUTE_UNUSED
)
7269 target_flags
|= MEP_ALL_OPTS
;
7273 target_flags
&= ~ MEP_ALL_OPTS
;
7277 target_flags
|= MASK_COP
;
7278 target_flags
|= MASK_64BIT_CR_REGS
;
7282 option_mtiny_specified
= 1;
7285 target_flags
|= MASK_COP
;
7286 target_flags
|= MASK_64BIT_CR_REGS
;
7287 target_flags
|= MASK_VLIW
;
7288 target_flags
|= MASK_OPT_VL64
;
7289 target_flags
|= MASK_IVC2
;
7291 for (i
=0; i
<32; i
++)
7292 fixed_regs
[i
+48] = 0;
7293 for (i
=0; i
<32; i
++)
7294 call_used_regs
[i
+48] = 1;
7296 call_used_regs
[i
+48] = 0;
7298 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7335 mep_asm_init_sections (void)
7338 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7339 "\t.section .based,\"aw\"");
7342 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7343 "\t.section .sbss,\"aw\"");
7346 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7347 "\t.section .sdata,\"aw\",@progbits");
7350 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7351 "\t.section .far,\"aw\"");
7354 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7355 "\t.section .farbss,\"aw\"");
7358 = get_unnamed_section (0, output_section_asm_op
,
7359 "\t.section .frodata,\"a\"");
7362 = get_unnamed_section (0, output_section_asm_op
,
7363 "\t.section .srodata,\"a\"");
7366 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7367 "\t.section .vtext,\"axv\"\n\t.vliw");
7370 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7371 "\t.section .vftext,\"axv\"\n\t.vliw");
7374 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7375 "\t.section .ftext,\"ax\"\n\t.core");
7379 /* Initialize the GCC target structure. */
7381 #undef TARGET_ASM_FUNCTION_PROLOGUE
7382 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7383 #undef TARGET_ATTRIBUTE_TABLE
7384 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7385 #undef TARGET_COMP_TYPE_ATTRIBUTES
7386 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7387 #undef TARGET_INSERT_ATTRIBUTES
7388 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7389 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7390 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7391 #undef TARGET_CAN_INLINE_P
7392 #define TARGET_CAN_INLINE_P mep_can_inline_p
7393 #undef TARGET_SECTION_TYPE_FLAGS
7394 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7395 #undef TARGET_ASM_NAMED_SECTION
7396 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7397 #undef TARGET_INIT_BUILTINS
7398 #define TARGET_INIT_BUILTINS mep_init_builtins
7399 #undef TARGET_EXPAND_BUILTIN
7400 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7401 #undef TARGET_SCHED_ADJUST_COST
7402 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7403 #undef TARGET_SCHED_ISSUE_RATE
7404 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7405 #undef TARGET_SCHED_REORDER
7406 #define TARGET_SCHED_REORDER mep_sched_reorder
7407 #undef TARGET_STRIP_NAME_ENCODING
7408 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7409 #undef TARGET_ASM_SELECT_SECTION
7410 #define TARGET_ASM_SELECT_SECTION mep_select_section
7411 #undef TARGET_ASM_UNIQUE_SECTION
7412 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7413 #undef TARGET_ENCODE_SECTION_INFO
7414 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7415 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7416 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7417 #undef TARGET_RTX_COSTS
7418 #define TARGET_RTX_COSTS mep_rtx_cost
7419 #undef TARGET_ADDRESS_COST
7420 #define TARGET_ADDRESS_COST mep_address_cost
7421 #undef TARGET_MACHINE_DEPENDENT_REORG
7422 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7423 #undef TARGET_SETUP_INCOMING_VARARGS
7424 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7425 #undef TARGET_PASS_BY_REFERENCE
7426 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7427 #undef TARGET_FUNCTION_ARG
7428 #define TARGET_FUNCTION_ARG mep_function_arg
7429 #undef TARGET_FUNCTION_ARG_ADVANCE
7430 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7431 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7432 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7433 #undef TARGET_HANDLE_OPTION
7434 #define TARGET_HANDLE_OPTION mep_handle_option
7435 #undef TARGET_OPTION_OVERRIDE
7436 #define TARGET_OPTION_OVERRIDE mep_option_override
7437 #undef TARGET_OPTION_OPTIMIZATION_TABLE
7438 #define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
7439 #undef TARGET_DEFAULT_TARGET_FLAGS
7440 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
7441 #undef TARGET_ALLOCATE_INITIAL_VALUE
7442 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7443 #undef TARGET_ASM_INIT_SECTIONS
7444 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7445 #undef TARGET_RETURN_IN_MEMORY
7446 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7447 #undef TARGET_NARROW_VOLATILE_BITFIELD
7448 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7449 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7450 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7451 #undef TARGET_BUILD_BUILTIN_VA_LIST
7452 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7453 #undef TARGET_EXPAND_BUILTIN_VA_START
7454 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7455 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7456 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7457 #undef TARGET_CAN_ELIMINATE
7458 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7459 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7460 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7461 #undef TARGET_TRAMPOLINE_INIT
7462 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7464 struct gcc_target targetm
= TARGET_INITIALIZER
;