1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "varasm.h"
31 #include "calls.h"
32 #include "stringpool.h"
33 #include "stor-layout.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "insn-flags.h"
39 #include "output.h"
40 #include "insn-attr.h"
41 #include "flags.h"
42 #include "recog.h"
43 #include "obstack.h"
44 #include "function.h"
45 #include "expmed.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "emit-rtl.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "except.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "reload.h"
55 #include "tm_p.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "target-def.h"
59 #include "langhooks.h"
60 #include "dominance.h"
61 #include "cfg.h"
62 #include "cfgrtl.h"
63 #include "cfganal.h"
64 #include "lcm.h"
65 #include "cfgbuild.h"
66 #include "cfgcleanup.h"
67 #include "predict.h"
68 #include "basic-block.h"
69 #include "df.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-fold.h"
73 #include "tree-eh.h"
74 #include "gimple-expr.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "opts.h"
78 #include "dumpfile.h"
79 #include "builtins.h"
80 #include "rtl-iter.h"
81
82 /* Structure of this file:
83
84 + Command Line Option Support
85 + Pattern support - constraints, predicates, expanders
86 + Reload Support
87 + Costs
88 + Functions to save and restore machine-specific function data.
89 + Frame/Epilog/Prolog Related
90 + Operand Printing
91 + Function args in registers
92 + Handle pipeline hazards
93 + Handle attributes
94 + Trampolines
95 + Machine-dependent Reorg
96 + Builtins. */
97
98 /* Symbol encodings:
99
100 Symbols are encoded as @ <char> . <name> where <char> is one of these:
101
102 b - based
103 t - tiny
104 n - near
105 f - far
106 i - io, near
107 I - io, far
108 c - cb (control bus) */
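/* For example, a variable "buf" given the "tiny" attribute would have
   its assembler name encoded as "@t.buf" under this scheme, and
   mep_section_tag below would return 't' for it.  (The name "buf" is
   illustrative only.)  */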
109
110 struct GTY(()) machine_function
111 {
112 int mep_frame_pointer_needed;
113
114 /* For varargs. */
115 int arg_regs_to_save;
116 int regsave_filler;
117 int frame_filler;
118 int frame_locked;
119
120 /* Records __builtin_return address. */
121 rtx eh_stack_adjust;
122
123 int reg_save_size;
124 int reg_save_slot[FIRST_PSEUDO_REGISTER];
125 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
126
127 /* 2 if the current function has an interrupt attribute, 1 if not, 0
128 if unknown. This is here because resource.c uses EPILOGUE_USES
129 which needs it. */
130 int interrupt_handler;
131
132 /* Likewise, for disinterrupt attribute. */
133 int disable_interrupts;
134
135 /* Number of doloop tags used so far. */
136 int doloop_tags;
137
138 /* True if the last tag was allocated to a doloop_end. */
139 bool doloop_tag_from_end;
140
141 /* True if reload changes $TP. */
142 bool reload_changes_tp;
143
144 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
145 We only set this if the function is an interrupt handler. */
146 int asms_without_operands;
147 };
148
149 #define MEP_CONTROL_REG(x) \
150 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
151
152 static GTY(()) section * based_section;
153 static GTY(()) section * tinybss_section;
154 static GTY(()) section * far_section;
155 static GTY(()) section * farbss_section;
156 static GTY(()) section * frodata_section;
157 static GTY(()) section * srodata_section;
158
159 static GTY(()) section * vtext_section;
160 static GTY(()) section * vftext_section;
161 static GTY(()) section * ftext_section;
162
163 static void mep_set_leaf_registers (int);
164 static bool symbol_p (rtx);
165 static bool symbolref_p (rtx);
166 static void encode_pattern_1 (rtx);
167 static void encode_pattern (rtx);
168 static bool const_in_range (rtx, int, int);
169 static void mep_rewrite_mult (rtx_insn *, rtx);
170 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
171 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
172 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
173 static bool move_needs_splitting (rtx, rtx, machine_mode);
174 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
175 static bool mep_nongeneral_reg (rtx);
176 static bool mep_general_copro_reg (rtx);
177 static bool mep_nonregister (rtx);
178 static struct machine_function* mep_init_machine_status (void);
179 static rtx mep_tp_rtx (void);
180 static rtx mep_gp_rtx (void);
181 static bool mep_interrupt_p (void);
182 static bool mep_disinterrupt_p (void);
183 static bool mep_reg_set_p (rtx, rtx);
184 static bool mep_reg_set_in_function (int);
185 static bool mep_interrupt_saved_reg (int);
186 static bool mep_call_saves_register (int);
187 static rtx_insn *F (rtx_insn *);
188 static void add_constant (int, int, int, int);
189 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
190 static void mep_reload_pointer (int, const char *);
191 static void mep_start_function (FILE *, HOST_WIDE_INT);
192 static bool mep_function_ok_for_sibcall (tree, tree);
193 static int unique_bit_in (HOST_WIDE_INT);
194 static int bit_size_for_clip (HOST_WIDE_INT);
195 static int bytesize (const_tree, machine_mode);
196 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
197 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
198 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
199 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
200 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
201 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
202 static bool mep_function_attribute_inlinable_p (const_tree);
203 static bool mep_can_inline_p (tree, tree);
204 static bool mep_lookup_pragma_disinterrupt (const char *);
205 static int mep_multiple_address_regions (tree, bool);
206 static int mep_attrlist_to_encoding (tree, tree);
207 static void mep_insert_attributes (tree, tree *);
208 static void mep_encode_section_info (tree, rtx, int);
209 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
210 static void mep_unique_section (tree, int);
211 static unsigned int mep_section_type_flags (tree, const char *, int);
212 static void mep_asm_named_section (const char *, unsigned int, tree);
213 static bool mep_mentioned_p (rtx, rtx, int);
214 static void mep_reorg_regmove (rtx_insn *);
215 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
216 bool, bool);
217 static void mep_reorg_repeat (rtx_insn *);
218 static bool mep_invertable_branch_p (rtx_insn *);
219 static void mep_invert_branch (rtx_insn *, rtx_insn *);
220 static void mep_reorg_erepeat (rtx_insn *);
221 static void mep_jmp_return_reorg (rtx_insn *);
222 static void mep_reorg_addcombine (rtx_insn *);
223 static void mep_reorg (void);
224 static void mep_init_intrinsics (void);
225 static void mep_init_builtins (void);
226 static void mep_intrinsic_unavailable (int);
227 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
228 static bool mep_get_move_insn (int, const struct cgen_insn **);
229 static rtx mep_convert_arg (machine_mode, rtx);
230 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
231 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
232 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
233 static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
234 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
235 static int mep_issue_rate (void);
236 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
237 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
238 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
239 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
240 static void mep_bundle_insns (rtx_insn *);
241 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
242 static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
243 static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
244 tree, int *, int);
245 static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
246 const_tree, bool);
247 static rtx mep_function_arg (cumulative_args_t, machine_mode,
248 const_tree, bool);
249 static void mep_function_arg_advance (cumulative_args_t, machine_mode,
250 const_tree, bool);
251 static bool mep_vector_mode_supported_p (machine_mode);
252 static rtx mep_allocate_initial_value (rtx);
253 static void mep_asm_init_sections (void);
254 static int mep_comp_type_attributes (const_tree, const_tree);
255 static bool mep_narrow_volatile_bitfield (void);
256 static rtx mep_expand_builtin_saveregs (void);
257 static tree mep_build_builtin_va_list (void);
258 static void mep_expand_va_start (tree, rtx);
259 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
260 static bool mep_can_eliminate (const int, const int);
261 static void mep_conditional_register_usage (void);
262 static void mep_trampoline_init (rtx, tree, rtx);
263 \f
264 #define WANT_GCC_DEFINITIONS
265 #include "mep-intrin.h"
266 #undef WANT_GCC_DEFINITIONS
267
268 \f
269 /* Command Line Option Support. */
270
271 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
272
273 /* True if we can use cmov instructions to move values back and forth
274 between core and coprocessor registers. */
275 bool mep_have_core_copro_moves_p;
276
277 /* True if we can use cmov instructions (or a work-alike) to move
278 values between coprocessor registers. */
279 bool mep_have_copro_copro_moves_p;
280
281 /* A table of all coprocessor instructions that can act like
282 a coprocessor-to-coprocessor cmov. */
283 static const int mep_cmov_insns[] = {
284 mep_cmov,
285 mep_cpmov,
286 mep_fmovs,
287 mep_caddi3,
288 mep_csubi3,
289 mep_candi3,
290 mep_cori3,
291 mep_cxori3,
292 mep_cand3,
293 mep_cor3
294 };
295
296 \f
297 static void
298 mep_set_leaf_registers (int enable)
299 {
300 int i;
301
302 if (mep_leaf_registers[0] != enable)
303 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
304 mep_leaf_registers[i] = enable;
305 }
306
307 static void
308 mep_conditional_register_usage (void)
309 {
310 int i;
311
312 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
313 {
314 fixed_regs[HI_REGNO] = 1;
315 fixed_regs[LO_REGNO] = 1;
316 call_used_regs[HI_REGNO] = 1;
317 call_used_regs[LO_REGNO] = 1;
318 }
319
320 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
321 global_regs[i] = 1;
322 }
323
324 static void
325 mep_option_override (void)
326 {
327 unsigned int i;
328 int j;
329 cl_deferred_option *opt;
330 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
331
332 if (v)
333 FOR_EACH_VEC_ELT (*v, i, opt)
334 {
335 switch (opt->opt_index)
336 {
337 case OPT_mivc2:
338 for (j = 0; j < 32; j++)
339 fixed_regs[j + 48] = 0;
340 for (j = 0; j < 32; j++)
341 call_used_regs[j + 48] = 1;
342 for (j = 6; j < 8; j++)
343 call_used_regs[j + 48] = 0;
344
345 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
346 RN (0, "$csar0");
347 RN (1, "$cc");
348 RN (4, "$cofr0");
349 RN (5, "$cofr1");
350 RN (6, "$cofa0");
351 RN (7, "$cofa1");
352 RN (15, "$csar1");
353
354 RN (16, "$acc0_0");
355 RN (17, "$acc0_1");
356 RN (18, "$acc0_2");
357 RN (19, "$acc0_3");
358 RN (20, "$acc0_4");
359 RN (21, "$acc0_5");
360 RN (22, "$acc0_6");
361 RN (23, "$acc0_7");
362
363 RN (24, "$acc1_0");
364 RN (25, "$acc1_1");
365 RN (26, "$acc1_2");
366 RN (27, "$acc1_3");
367 RN (28, "$acc1_4");
368 RN (29, "$acc1_5");
369 RN (30, "$acc1_6");
370 RN (31, "$acc1_7");
371 #undef RN
372 break;
373
374 default:
375 gcc_unreachable ();
376 }
377 }
378
379 if (flag_pic == 1)
380 warning (OPT_fpic, "-fpic is not supported");
381 if (flag_pic == 2)
382 warning (OPT_fPIC, "-fPIC is not supported");
383 if (TARGET_S && TARGET_M)
384 error ("only one of -ms and -mm may be given");
385 if (TARGET_S && TARGET_L)
386 error ("only one of -ms and -ml may be given");
387 if (TARGET_M && TARGET_L)
388 error ("only one of -mm and -ml may be given");
389 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
390 error ("only one of -ms and -mtiny= may be given");
391 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
392 error ("only one of -mm and -mtiny= may be given");
393 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
394 warning (0, "-mclip currently has no effect without -mminmax");
395
396 if (mep_const_section)
397 {
398 if (strcmp (mep_const_section, "tiny") != 0
399 && strcmp (mep_const_section, "near") != 0
400 && strcmp (mep_const_section, "far") != 0)
401 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
402 }
403
404 if (TARGET_S)
405 mep_tiny_cutoff = 65536;
406 if (TARGET_M)
407 mep_tiny_cutoff = 0;
408 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
409 mep_tiny_cutoff = 0;
410
411 if (TARGET_64BIT_CR_REGS)
412 flag_split_wide_types = 0;
413
414 init_machine_status = mep_init_machine_status;
415 mep_init_intrinsics ();
416 }
417
418 /* Pattern Support - constraints, predicates, expanders. */
419
420 /* MEP has very few instructions that can refer to the span of
421 addresses used by symbols, so it's common to check for them. */
422
423 static bool
424 symbol_p (rtx x)
425 {
426 int c = GET_CODE (x);
427
428 return (c == CONST_INT
429 || c == CONST
430 || c == SYMBOL_REF);
431 }
432
433 static bool
434 symbolref_p (rtx x)
435 {
436 int c;
437
438 if (GET_CODE (x) != MEM)
439 return false;
440
441 c = GET_CODE (XEXP (x, 0));
442 return (c == CONST_INT
443 || c == CONST
444 || c == SYMBOL_REF);
445 }
446
447 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
448
449 #define GEN_REG(R, STRICT) \
450 (GR_REGNO_P (R) \
451 || (!STRICT \
452 && ((R) == ARG_POINTER_REGNUM \
453 || (R) >= FIRST_PSEUDO_REGISTER)))
454
455 static char pattern[12], *patternp;
456 static GTY(()) rtx patternr[12];
457 #define RTX_IS(x) (strcmp (pattern, x) == 0)
458
459 static void
460 encode_pattern_1 (rtx x)
461 {
462 int i;
463
464 if (patternp == pattern + sizeof (pattern) - 2)
465 {
466 patternp[-1] = '?';
467 return;
468 }
469
470 patternr[patternp-pattern] = x;
471
472 switch (GET_CODE (x))
473 {
474 case REG:
475 *patternp++ = 'r';
476 break;
477 case MEM:
478 	*patternp++ = 'm'; /* FALLTHRU */
479 case CONST:
480 encode_pattern_1 (XEXP(x, 0));
481 break;
482 case PLUS:
483 *patternp++ = '+';
484 encode_pattern_1 (XEXP(x, 0));
485 encode_pattern_1 (XEXP(x, 1));
486 break;
487 case LO_SUM:
488 *patternp++ = 'L';
489 encode_pattern_1 (XEXP(x, 0));
490 encode_pattern_1 (XEXP(x, 1));
491 break;
492 case HIGH:
493 *patternp++ = 'H';
494 encode_pattern_1 (XEXP(x, 0));
495 break;
496 case SYMBOL_REF:
497 *patternp++ = 's';
498 break;
499 case LABEL_REF:
500 *patternp++ = 'l';
501 break;
502 case CONST_INT:
503 case CONST_DOUBLE:
504 *patternp++ = 'i';
505 break;
506 case UNSPEC:
507 *patternp++ = 'u';
508 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
509 for (i=0; i<XVECLEN (x, 0); i++)
510 encode_pattern_1 (XVECEXP (x, 0, i));
511 break;
512 case USE:
513 *patternp++ = 'U';
514 break;
515 default:
516 *patternp++ = '?';
517 #if 0
518 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
519 debug_rtx (x);
520 gcc_unreachable ();
521 #endif
522 break;
523 }
524 }
525
526 static void
527 encode_pattern (rtx x)
528 {
529 patternp = pattern;
530 encode_pattern_1 (x);
531 *patternp = 0;
532 }
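/* Illustrative sketch of the encoding: for the address
   (mem:SI (plus:SI (reg:SI 3) (const_int 4))), encode_pattern leaves
   "m+ri" in PATTERN, so a caller can test RTX_IS ("m+ri") and fetch
   the base register from patternr[2].  */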
533
534 int
535 mep_section_tag (rtx x)
536 {
537 const char *name;
538
539 while (1)
540 {
541 switch (GET_CODE (x))
542 {
543 case MEM:
544 case CONST:
545 x = XEXP (x, 0);
546 break;
547 case UNSPEC:
548 x = XVECEXP (x, 0, 0);
549 break;
550 case PLUS:
551 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
552 return 0;
553 x = XEXP (x, 0);
554 break;
555 default:
556 goto done;
557 }
558 }
559 done:
560 if (GET_CODE (x) != SYMBOL_REF)
561 return 0;
562 name = XSTR (x, 0);
563 if (name[0] == '@' && name[2] == '.')
564 {
565 if (name[1] == 'i' || name[1] == 'I')
566 {
567 if (name[1] == 'I')
568 	    return 'f'; /* far */
569 	  return 'n'; /* near */
570 }
571 return name[1];
572 }
573 return 0;
574 }
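/* Thus "@t.buf" yields 't', while an io-far symbol such as "@I.port"
   yields 'f' and is steered down the far-address paths.  (Symbol names
   here are illustrative.)  */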
575
576 int
577 mep_regno_reg_class (int regno)
578 {
579 switch (regno)
580 {
581 case SP_REGNO: return SP_REGS;
582 case TP_REGNO: return TP_REGS;
583 case GP_REGNO: return GP_REGS;
584 case 0: return R0_REGS;
585 case HI_REGNO: return HI_REGS;
586 case LO_REGNO: return LO_REGS;
587 case ARG_POINTER_REGNUM: return GENERAL_REGS;
588 }
589
590 if (GR_REGNO_P (regno))
591 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
592 if (CONTROL_REGNO_P (regno))
593 return CONTROL_REGS;
594
595 if (CR_REGNO_P (regno))
596 {
597 int i, j;
598
599 /* Search for the register amongst user-defined subclasses of
600 the coprocessor registers. */
601 for (i = USER0_REGS; i <= USER3_REGS; ++i)
602 {
603 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
604 continue;
605 for (j = 0; j < N_REG_CLASSES; ++j)
606 {
607 enum reg_class sub = reg_class_subclasses[i][j];
608
609 if (sub == LIM_REG_CLASSES)
610 return i;
611 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
612 break;
613 }
614 }
615
616 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
617 }
618
619 if (CCR_REGNO_P (regno))
620 return CCR_REGS;
621
622 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
623 return NO_REGS;
624 }
625
626 static bool
627 const_in_range (rtx x, int minv, int maxv)
628 {
629 return (GET_CODE (x) == CONST_INT
630 && INTVAL (x) >= minv
631 && INTVAL (x) <= maxv);
632 }
633
634 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
635 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
636 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
637 at the end of the insn stream. */
638
639 rtx
640 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
641 {
642 if (rtx_equal_p (dest, src1))
643 return src2;
644 else if (rtx_equal_p (dest, src2))
645 return src1;
646 else
647 {
648 if (insn == 0)
649 emit_insn (gen_movsi (copy_rtx (dest), src1));
650 else
651 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
652 return src2;
653 }
654 }
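/* For instance, with DEST == SRC1 == $2 and SRC2 == $3 this simply
   returns $3, since "mulr $2,$3" already computes $2 = $2 * $3; if
   DEST matches neither source, a "mov DEST,SRC1" is emitted first.
   (Register numbers are illustrative.)  */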
655
656 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
657 Change the last element of PATTERN from (clobber (scratch:SI))
658 to (clobber (reg:SI HI_REGNO)). */
659
660 static void
661 mep_rewrite_mult (rtx_insn *insn, rtx pattern)
662 {
663 rtx hi_clobber;
664
665 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
666 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
667 PATTERN (insn) = pattern;
668 INSN_CODE (insn) = -1;
669 }
670
671 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
672 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
673 store the result in DEST if nonnull. */
674
675 static void
676 mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
677 {
678 rtx lo, pattern;
679
680 lo = gen_rtx_REG (SImode, LO_REGNO);
681 if (dest)
682 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
683 mep_mulr_source (insn, dest, src1, src2));
684 else
685 pattern = gen_mulsi3_lo (lo, src1, src2);
686 mep_rewrite_mult (insn, pattern);
687 }
688
689 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
690 SRC3 into $lo, then use either madd or maddr. The move into $lo will
691 be deleted by a peephole2 if SRC3 is already in $lo. */
692
693 static void
694 mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
695 {
696 rtx lo, pattern;
697
698 lo = gen_rtx_REG (SImode, LO_REGNO);
699 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
700 if (dest)
701 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
702 mep_mulr_source (insn, dest, src1, src2),
703 copy_rtx (lo));
704 else
705 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
706 mep_rewrite_mult (insn, pattern);
707 }
708
709 /* Return true if $lo has the same value as integer register GPR when
710 instruction INSN is reached. If necessary, rewrite the instruction
711 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
712 rtx for (reg:SI LO_REGNO).
713
714 This function is intended to be used by the peephole2 pass. Since
715 that pass goes from the end of a basic block to the beginning, and
716 propagates liveness information on the way, there is no need to
717 update register notes here.
718
719 If GPR_DEAD_P is true on entry, and this function returns true,
720 then the caller will replace _every_ use of GPR in and after INSN
721 with LO. This means that if the instruction that sets $lo is a
722 mulr- or maddr-type instruction, we can rewrite it to use mul or
723 	 madd instead.  In combination with the copy propagation pass,
724 this allows us to replace sequences like:
725
726 mov GPR,R1
727 mulr GPR,R2
728
729 with:
730
731 mul R1,R2
732
733 if GPR is no longer used. */
734
735 static bool
736 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
737 {
738 do
739 {
740 insn = PREV_INSN (insn);
741 if (INSN_P (insn))
742 switch (recog_memoized (insn))
743 {
744 case CODE_FOR_mulsi3_1:
745 extract_insn (insn);
746 if (rtx_equal_p (recog_data.operand[0], gpr))
747 {
748 mep_rewrite_mulsi3 (insn,
749 gpr_dead_p ? NULL : recog_data.operand[0],
750 recog_data.operand[1],
751 recog_data.operand[2]);
752 return true;
753 }
754 return false;
755
756 case CODE_FOR_maddsi3:
757 extract_insn (insn);
758 if (rtx_equal_p (recog_data.operand[0], gpr))
759 {
760 mep_rewrite_maddsi3 (insn,
761 gpr_dead_p ? NULL : recog_data.operand[0],
762 recog_data.operand[1],
763 recog_data.operand[2],
764 recog_data.operand[3]);
765 return true;
766 }
767 return false;
768
769 case CODE_FOR_mulsi3r:
770 case CODE_FOR_maddsi3r:
771 extract_insn (insn);
772 return rtx_equal_p (recog_data.operand[1], gpr);
773
774 default:
775 if (reg_set_p (lo, insn)
776 || reg_set_p (gpr, insn)
777 || volatile_insn_p (PATTERN (insn)))
778 return false;
779
780 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
781 gpr_dead_p = false;
782 break;
783 }
784 }
785 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
786 return false;
787 }
788
789 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
790
791 bool
792 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
793 {
794 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
795 extract_insn (insn);
796 return result;
797 }
798
799 /* Return true if SET can be turned into a post-modify load or store
800 that adds OFFSET to GPR. In other words, return true if SET can be
801 changed into:
802
803 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
804
805 It's OK to change SET to an equivalent operation in order to
806 make it match. */
807
808 static bool
809 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
810 {
811 rtx *reg, *mem;
812 unsigned int reg_bytes, mem_bytes;
813 machine_mode reg_mode, mem_mode;
814
815 /* Only simple SETs can be converted. */
816 if (GET_CODE (set) != SET)
817 return false;
818
819 /* Point REG to what we hope will be the register side of the set and
820 MEM to what we hope will be the memory side. */
821 if (GET_CODE (SET_DEST (set)) == MEM)
822 {
823 mem = &SET_DEST (set);
824 reg = &SET_SRC (set);
825 }
826 else
827 {
828 reg = &SET_DEST (set);
829 mem = &SET_SRC (set);
830 if (GET_CODE (*mem) == SIGN_EXTEND)
831 mem = &XEXP (*mem, 0);
832 }
833
834 /* Check that *REG is a suitable coprocessor register. */
835 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
836 return false;
837
838 /* Check that *MEM is a suitable memory reference. */
839 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
840 return false;
841
842 /* Get the number of bytes in each operand. */
843 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
844 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
845
846 /* Check that OFFSET is suitably aligned. */
847 if (INTVAL (offset) & (mem_bytes - 1))
848 return false;
849
850 /* Convert *MEM to a normal integer mode. */
851 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
852 *mem = change_address (*mem, mem_mode, NULL);
853
854 /* Adjust *REG as well. */
855 *reg = shallow_copy_rtx (*reg);
856 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
857 {
858 /* SET is a subword load. Convert it to an explicit extension. */
859 PUT_MODE (*reg, SImode);
860 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
861 }
862 else
863 {
864 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
865 PUT_MODE (*reg, reg_mode);
866 }
867 return true;
868 }
869
870 /* Return the effect of frame-related instruction INSN. */
871
872 static rtx
873 mep_frame_expr (rtx_insn *insn)
874 {
875 rtx note, expr;
876
877 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
878 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
879 RTX_FRAME_RELATED_P (expr) = 1;
880 return expr;
881 }
882
883 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
884 new pattern in INSN1; INSN2 will be deleted by the caller. */
885
886 static void
887 mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
888 {
889 rtx expr;
890
891 if (RTX_FRAME_RELATED_P (insn2))
892 {
893 expr = mep_frame_expr (insn2);
894 if (RTX_FRAME_RELATED_P (insn1))
895 expr = gen_rtx_SEQUENCE (VOIDmode,
896 gen_rtvec (2, mep_frame_expr (insn1), expr));
897 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
898 RTX_FRAME_RELATED_P (insn1) = 1;
899 }
900
901 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
902 gen_rtvec (2, PATTERN (insn1),
903 PATTERN (insn2)));
904 INSN_CODE (insn1) = -1;
905 }
906
907 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
908 the basic block to see if any previous load or store instruction can
909 be persuaded to do SET_INSN as a side-effect. Return true if so. */
910
911 static bool
912 mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
913 {
914 rtx_insn *insn;
915
916 insn = set_insn;
917 do
918 {
919 insn = PREV_INSN (insn);
920 if (INSN_P (insn))
921 {
922 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
923 {
924 mep_make_parallel (insn, set_insn);
925 return true;
926 }
927
928 if (reg_set_p (reg, insn)
929 || reg_referenced_p (reg, PATTERN (insn))
930 || volatile_insn_p (PATTERN (insn)))
931 return false;
932 }
933 }
934 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
935 return false;
936 }
937
938 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
939
940 bool
941 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
942 {
943 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
944 extract_insn (insn);
945 return result;
946 }
947
948 bool
949 mep_allow_clip (rtx ux, rtx lx, int s)
950 {
951 HOST_WIDE_INT u = INTVAL (ux);
952 HOST_WIDE_INT l = INTVAL (lx);
953 int i;
954
955 if (!TARGET_OPT_CLIP)
956 return false;
957
958 if (s)
959 {
960 for (i = 0; i < 30; i ++)
961 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
962 && (l == - ((HOST_WIDE_INT) 1 << i)))
963 return true;
964 }
965 else
966 {
967 if (l != 0)
968 return false;
969
970 for (i = 0; i < 30; i ++)
971 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
972 return true;
973 }
974 return false;
975 }
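/* Sketch: with -mclip -mminmax, a signed range test with u == 127 and
   l == -128 matches the i == 7 step above (2^7 - 1 and -(2^7)), so the
   bounds check can be emitted as a single clip.  */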
976
977 bool
978 mep_bit_position_p (rtx x, bool looking_for)
979 {
980 if (GET_CODE (x) != CONST_INT)
981 return false;
982 switch ((int) INTVAL(x) & 0xff)
983 {
984 case 0x01: case 0x02: case 0x04: case 0x08:
985 case 0x10: case 0x20: case 0x40: case 0x80:
986 return looking_for;
987 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
988 case 0xef: case 0xdf: case 0xbf: case 0x7f:
989 return !looking_for;
990 }
991 return false;
992 }
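/* E.g. (const_int 0x08) has a single low-byte bit set, so this returns
   LOOKING_FOR; its complement 0xf7 returns !LOOKING_FOR; anything else,
   such as 0x18, returns false.  */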
993
994 static bool
995 move_needs_splitting (rtx dest, rtx src,
996 machine_mode mode ATTRIBUTE_UNUSED)
997 {
998 int s = mep_section_tag (src);
999
1000 while (1)
1001 {
1002 if (GET_CODE (src) == CONST
1003 || GET_CODE (src) == MEM)
1004 src = XEXP (src, 0);
1005 else if (GET_CODE (src) == SYMBOL_REF
1006 || GET_CODE (src) == LABEL_REF
1007 || GET_CODE (src) == PLUS)
1008 break;
1009 else
1010 return false;
1011 }
1012 if (s == 'f'
1013 || (GET_CODE (src) == PLUS
1014 && GET_CODE (XEXP (src, 1)) == CONST_INT
1015 && (INTVAL (XEXP (src, 1)) < -65536
1016 || INTVAL (XEXP (src, 1)) > 0xffffff))
1017 || (GET_CODE (dest) == REG
1018 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1019 return true;
1020 return false;
1021 }
1022
1023 bool
1024 mep_split_mov (rtx *operands, int symbolic)
1025 {
1026 if (symbolic)
1027 {
1028 if (move_needs_splitting (operands[0], operands[1], SImode))
1029 return true;
1030 return false;
1031 }
1032
1033 if (GET_CODE (operands[1]) != CONST_INT)
1034 return false;
1035
1036 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1037 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1038 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1039 return false;
1040
1041 if (((!reload_completed && !reload_in_progress)
1042 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1043 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1044 return false;
1045
1046 return true;
1047 }
1048
1049 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than setting
1050 it to one specific value. So the insn chosen depends on whether
1051 the source and destination modes match. */
1052
1053 bool
1054 mep_vliw_mode_match (rtx tgt)
1055 {
1056 bool src_vliw = mep_vliw_function_p (cfun->decl);
1057 bool tgt_vliw = INTVAL (tgt);
1058
1059 return src_vliw == tgt_vliw;
1060 }
1061
1062 /* Like the above, but also test for near/far mismatches. */
1063
1064 bool
1065 mep_vliw_jmp_match (rtx tgt)
1066 {
1067 bool src_vliw = mep_vliw_function_p (cfun->decl);
1068 bool tgt_vliw = INTVAL (tgt);
1069
1070 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1071 return false;
1072
1073 return src_vliw == tgt_vliw;
1074 }
1075
1076 bool
1077 mep_multi_slot (rtx_insn *x)
1078 {
1079 return get_attr_slot (x) == SLOT_MULTI;
1080 }
1081
1082 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1083
1084 static bool
1085 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1086 {
1087 /* We can't convert symbol values to gp- or tp-rel values after
1088 reload, as reload might have used $gp or $tp for other
1089 purposes. */
1090 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1091 {
1092 char e = mep_section_tag (x);
1093 return (e != 't' && e != 'b');
1094 }
1095   return true;
1096 }
1097
1098 /* Be careful not to use macros that need to be compiled one way for
1099 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1100
1101 bool
1102 mep_legitimate_address (machine_mode mode, rtx x, int strict)
1103 {
1104 int the_tag;
1105
1106 #define DEBUG_LEGIT 0
1107 #if DEBUG_LEGIT
1108 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1109 debug_rtx (x);
1110 #endif
1111
1112 if (GET_CODE (x) == LO_SUM
1113 && GET_CODE (XEXP (x, 0)) == REG
1114 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1115 && CONSTANT_P (XEXP (x, 1)))
1116 {
1117 if (GET_MODE_SIZE (mode) > 4)
1118 {
1119 /* We will end up splitting this, and lo_sums are not
1120 offsettable for us. */
1121 #if DEBUG_LEGIT
1122 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1123 #endif
1124 return false;
1125 }
1126 #if DEBUG_LEGIT
1127 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1128 #endif
1129 return true;
1130 }
1131
1132 if (GET_CODE (x) == REG
1133 && GEN_REG (REGNO (x), strict))
1134 {
1135 #if DEBUG_LEGIT
1136 fprintf (stderr, " - yup, [reg]\n");
1137 #endif
1138 return true;
1139 }
1140
1141 if (GET_CODE (x) == PLUS
1142 && GET_CODE (XEXP (x, 0)) == REG
1143 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1144 && const_in_range (XEXP (x, 1), -32768, 32767))
1145 {
1146 #if DEBUG_LEGIT
1147 fprintf (stderr, " - yup, [reg+const]\n");
1148 #endif
1149 return true;
1150 }
1151
1152 if (GET_CODE (x) == PLUS
1153 && GET_CODE (XEXP (x, 0)) == REG
1154 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1155 && GET_CODE (XEXP (x, 1)) == CONST
1156 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1157 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1158 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1159 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1160 {
1161 #if DEBUG_LEGIT
1162 fprintf (stderr, " - yup, [reg+unspec]\n");
1163 #endif
1164 return true;
1165 }
1166
1167 the_tag = mep_section_tag (x);
1168
1169 if (the_tag == 'f')
1170 {
1171 #if DEBUG_LEGIT
1172 fprintf (stderr, " - nope, [far]\n");
1173 #endif
1174 return false;
1175 }
1176
1177 if (mode == VOIDmode
1178 && GET_CODE (x) == SYMBOL_REF)
1179 {
1180 #if DEBUG_LEGIT
1181 fprintf (stderr, " - yup, call [symbol]\n");
1182 #endif
1183 return true;
1184 }
1185
1186 if ((mode == SImode || mode == SFmode)
1187 && CONSTANT_P (x)
1188 && mep_legitimate_constant_p (mode, x)
1189 && the_tag != 't' && the_tag != 'b')
1190 {
1191 if (GET_CODE (x) != CONST_INT
1192 || (INTVAL (x) <= 0xfffff
1193 && INTVAL (x) >= 0
1194 && (INTVAL (x) % 4) == 0))
1195 {
1196 #if DEBUG_LEGIT
1197 fprintf (stderr, " - yup, [const]\n");
1198 #endif
1199 return true;
1200 }
1201 }
1202
1203 #if DEBUG_LEGIT
1204 fprintf (stderr, " - nope.\n");
1205 #endif
1206 return false;
1207 }
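/* Example: (plus:SI (reg:SI 4) (const_int 100)) is accepted by the
   [reg+const] case above, while any address whose symbol carries the
   far ('f') tag is rejected and must be synthesized in two
   instructions instead.  */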
1208
1209 int
1210 mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
1211 int type_i,
1212 int ind_levels ATTRIBUTE_UNUSED)
1213 {
1214 enum reload_type type = (enum reload_type) type_i;
1215
1216 if (GET_CODE (*x) == PLUS
1217 && GET_CODE (XEXP (*x, 0)) == MEM
1218 && GET_CODE (XEXP (*x, 1)) == REG)
1219 {
1220 /* GCC will by default copy the MEM into a REG, which results in
1221 an invalid address. For us, the best thing to do is move the
1222 whole expression to a REG. */
1223 push_reload (*x, NULL_RTX, x, NULL,
1224 GENERAL_REGS, mode, VOIDmode,
1225 0, 0, opnum, type);
1226 return 1;
1227 }
1228
1229 if (GET_CODE (*x) == PLUS
1230 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1231 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1232 {
1233 char e = mep_section_tag (XEXP (*x, 0));
1234
1235 if (e != 't' && e != 'b')
1236 {
1237 /* GCC thinks that (sym+const) is a valid address. Well,
1238 sometimes it is, this time it isn't. The best thing to
1239 do is reload the symbol to a register, since reg+int
1240 tends to work, and we can't just add the symbol and
1241 constant anyway. */
1242 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1243 GENERAL_REGS, mode, VOIDmode,
1244 0, 0, opnum, type);
1245 return 1;
1246 }
1247 }
1248 return 0;
1249 }
1250
1251 int
1252 mep_core_address_length (rtx_insn *insn, int opn)
1253 {
1254 rtx set = single_set (insn);
1255 rtx mem = XEXP (set, opn);
1256 rtx other = XEXP (set, 1-opn);
1257 rtx addr = XEXP (mem, 0);
1258
1259 if (register_operand (addr, Pmode))
1260 return 2;
1261 if (GET_CODE (addr) == PLUS)
1262 {
1263 rtx addend = XEXP (addr, 1);
1264
1265 gcc_assert (REG_P (XEXP (addr, 0)));
1266
1267 switch (REGNO (XEXP (addr, 0)))
1268 {
1269 case STACK_POINTER_REGNUM:
1270 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1271 && mep_imm7a4_operand (addend, VOIDmode))
1272 return 2;
1273 break;
1274
1275 case 13: /* TP */
1276 gcc_assert (REG_P (other));
1277
1278 if (REGNO (other) >= 8)
1279 break;
1280
1281 if (GET_CODE (addend) == CONST
1282 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1283 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1284 return 2;
1285
1286 if (GET_CODE (addend) == CONST_INT
1287 && INTVAL (addend) >= 0
1288 && INTVAL (addend) <= 127
1289 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1290 return 2;
1291 break;
1292 }
1293 }
1294
1295 return 4;
1296 }
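/* Sketch of the two short forms above: a 4-byte-wide access at a small
   multiple-of-4 offset from $sp, or a low-register access at a 7-bit
   TP-relative offset, both encode in 2 bytes; everything else costs 4.  */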
1297
1298 int
1299 mep_cop_address_length (rtx_insn *insn, int opn)
1300 {
1301 rtx set = single_set (insn);
1302 rtx mem = XEXP (set, opn);
1303 rtx addr = XEXP (mem, 0);
1304
1305 if (GET_CODE (mem) != MEM)
1306 return 2;
1307 if (register_operand (addr, Pmode))
1308 return 2;
1309 if (GET_CODE (addr) == POST_INC)
1310 return 2;
1311
1312 return 4;
1313 }
1314
1315 #define DEBUG_EXPAND_MOV 0
1316 bool
1317 mep_expand_mov (rtx *operands, machine_mode mode)
1318 {
1319 int i, t;
1320 int tag[2];
1321 rtx tpsym, tpoffs;
1322 int post_reload = 0;
1323
1324 tag[0] = mep_section_tag (operands[0]);
1325 tag[1] = mep_section_tag (operands[1]);
1326
1327 if (!reload_in_progress
1328 && !reload_completed
1329 && GET_CODE (operands[0]) != REG
1330 && GET_CODE (operands[0]) != SUBREG
1331 && GET_CODE (operands[1]) != REG
1332 && GET_CODE (operands[1]) != SUBREG)
1333 operands[1] = copy_to_mode_reg (mode, operands[1]);
1334
1335 #if DEBUG_EXPAND_MOV
1336 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1337 reload_in_progress || reload_completed);
1338 debug_rtx (operands[0]);
1339 debug_rtx (operands[1]);
1340 #endif
1341
1342 if (mode == DImode || mode == DFmode)
1343 return false;
1344
1345 if (reload_in_progress || reload_completed)
1346 {
1347 rtx r;
1348
1349 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1350 cfun->machine->reload_changes_tp = true;
1351
1352 if (tag[0] == 't' || tag[1] == 't')
1353 {
1354 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1355 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1356 post_reload = 1;
1357 }
1358 if (tag[0] == 'b' || tag[1] == 'b')
1359 {
1360 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1361 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1362 post_reload = 1;
1363 }
1364 if (cfun->machine->reload_changes_tp == true)
1365 post_reload = 1;
1366 }
1367
1368 if (!post_reload)
1369 {
1370 rtx n;
1371 if (symbol_p (operands[1]))
1372 {
1373 t = mep_section_tag (operands[1]);
1374 if (t == 'b' || t == 't')
1375 {
1376
1377 if (GET_CODE (operands[1]) == SYMBOL_REF)
1378 {
1379 tpsym = operands[1];
1380 n = gen_rtx_UNSPEC (mode,
1381 gen_rtvec (1, operands[1]),
1382 t == 'b' ? UNS_TPREL : UNS_GPREL);
1383 n = gen_rtx_CONST (mode, n);
1384 }
1385 else if (GET_CODE (operands[1]) == CONST
1386 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1387 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1388 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1389 {
1390 tpsym = XEXP (XEXP (operands[1], 0), 0);
1391 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1392 n = gen_rtx_UNSPEC (mode,
1393 gen_rtvec (1, tpsym),
1394 t == 'b' ? UNS_TPREL : UNS_GPREL);
1395 n = gen_rtx_PLUS (mode, n, tpoffs);
1396 n = gen_rtx_CONST (mode, n);
1397 }
1398 else if (GET_CODE (operands[1]) == CONST
1399 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1400 return false;
1401 else
1402 {
1403 error ("unusual TP-relative address");
1404 return false;
1405 }
1406
1407 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1408 : mep_gp_rtx ()), n);
1409 n = emit_insn (gen_rtx_SET (operands[0], n));
1410 #if DEBUG_EXPAND_MOV
1411 fprintf(stderr, "mep_expand_mov emitting ");
1412 debug_rtx(n);
1413 #endif
1414 return true;
1415 }
1416 }
1417
1418 for (i=0; i < 2; i++)
1419 {
1420 t = mep_section_tag (operands[i]);
1421 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1422 {
1423 rtx sym, n, r;
1424 int u;
1425
1426 sym = XEXP (operands[i], 0);
1427 if (GET_CODE (sym) == CONST
1428 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1429 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1430
1431 if (t == 'b')
1432 {
1433 r = mep_tp_rtx ();
1434 u = UNS_TPREL;
1435 }
1436 else
1437 {
1438 r = mep_gp_rtx ();
1439 u = UNS_GPREL;
1440 }
1441
1442 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1443 n = gen_rtx_CONST (Pmode, n);
1444 n = gen_rtx_PLUS (Pmode, r, n);
1445 operands[i] = replace_equiv_address (operands[i], n);
1446 }
1447 }
1448 }
1449
1450 if ((GET_CODE (operands[1]) != REG
1451 && MEP_CONTROL_REG (operands[0]))
1452 || (GET_CODE (operands[0]) != REG
1453 && MEP_CONTROL_REG (operands[1])))
1454 {
1455 rtx temp;
1456 #if DEBUG_EXPAND_MOV
1457 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1458 #endif
1459 temp = gen_reg_rtx (mode);
1460 emit_move_insn (temp, operands[1]);
1461 operands[1] = temp;
1462 }
1463
1464 if (symbolref_p (operands[0])
1465 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1466 || (GET_MODE_SIZE (mode) != 4)))
1467 {
1468 rtx temp;
1469
1470 gcc_assert (!reload_in_progress && !reload_completed);
1471
1472 temp = force_reg (Pmode, XEXP (operands[0], 0));
1473 operands[0] = replace_equiv_address (operands[0], temp);
1474 emit_move_insn (operands[0], operands[1]);
1475 return true;
1476 }
1477
1478 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1479 tag[1] = 0;
1480
1481 if (symbol_p (operands[1])
1482 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1483 {
1484 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1485 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1486 return true;
1487 }
1488
1489 if (symbolref_p (operands[1])
1490 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1491 {
1492 rtx temp;
1493
1494 if (reload_in_progress || reload_completed)
1495 temp = operands[0];
1496 else
1497 temp = gen_reg_rtx (Pmode);
1498
1499 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1500 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1501 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1502 return true;
1503 }
1504
1505 return false;
1506 }
1507
1508 /* Cases where the pattern can't be used at all. */
1509
1510 bool
1511 mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
1512 {
1513 int i;
1514
1515 #define DEBUG_MOV_OK 0
1516 #if DEBUG_MOV_OK
1517 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1518 mep_section_tag (operands[1]));
1519 debug_rtx (operands[0]);
1520 debug_rtx (operands[1]);
1521 #endif
1522
1523 /* We want the movh patterns to get these. */
1524 if (GET_CODE (operands[1]) == HIGH)
1525 return false;
1526
1527 /* We can't store a register to a far variable without using a
1528 scratch register to hold the address. Using far variables should
1529 be split by mep_emit_mov anyway. */
1530 if (mep_section_tag (operands[0]) == 'f'
1531 || mep_section_tag (operands[1]) == 'f')
1532 {
1533 #if DEBUG_MOV_OK
1534 fprintf (stderr, " - no, f\n");
1535 #endif
1536 return false;
1537 }
1538 i = mep_section_tag (operands[1]);
1539 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1540 /* These are supposed to be generated with adds of the appropriate
1541 register. During and after reload, however, we allow them to
1542 be accessed as normal symbols because adding a dependency on
1543 the base register now might cause problems. */
1544 {
1545 #if DEBUG_MOV_OK
1546 fprintf (stderr, " - no, bt\n");
1547 #endif
1548 return false;
1549 }
1550
1551 /* The only moves we can allow involve at least one general
1552 register, so require it. */
1553 for (i = 0; i < 2; i ++)
1554 {
1555 /* Allow subregs too, before reload. */
1556 rtx x = operands[i];
1557
1558 if (GET_CODE (x) == SUBREG)
1559 x = XEXP (x, 0);
1560 if (GET_CODE (x) == REG
1561 && ! MEP_CONTROL_REG (x))
1562 {
1563 #if DEBUG_MOV_OK
1564 fprintf (stderr, " - ok\n");
1565 #endif
1566 return true;
1567 }
1568 }
1569 #if DEBUG_MOV_OK
1570 fprintf (stderr, " - no, no gen reg\n");
1571 #endif
1572 return false;
1573 }
1574
1575 #define DEBUG_SPLIT_WIDE_MOVE 0
1576 void
1577 mep_split_wide_move (rtx *operands, machine_mode mode)
1578 {
1579 int i;
1580
1581 #if DEBUG_SPLIT_WIDE_MOVE
1582 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1583 debug_rtx (operands[0]);
1584 debug_rtx (operands[1]);
1585 #endif
1586
1587 for (i = 0; i <= 1; i++)
1588 {
1589 rtx op = operands[i], hi, lo;
1590
1591 switch (GET_CODE (op))
1592 {
1593 case REG:
1594 {
1595 unsigned int regno = REGNO (op);
1596
1597 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1598 {
1599 rtx i32;
1600
1601 lo = gen_rtx_REG (SImode, regno);
1602 i32 = GEN_INT (32);
1603 hi = gen_rtx_ZERO_EXTRACT (SImode,
1604 gen_rtx_REG (DImode, regno),
1605 i32, i32);
1606 }
1607 else
1608 {
1609 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1610 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1611 }
1612 }
1613 break;
1614
1615 case CONST_INT:
1616 case CONST_DOUBLE:
1617 case MEM:
1618 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1619 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1620 break;
1621
1622 default:
1623 gcc_unreachable ();
1624 }
1625
1626 /* The high part of CR <- GPR moves must be done after the low part. */
1627 operands [i + 4] = lo;
1628 operands [i + 2] = hi;
1629 }
1630
1631 if (reg_mentioned_p (operands[2], operands[5])
1632 || GET_CODE (operands[2]) == ZERO_EXTRACT
1633 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1634 {
1635 rtx tmp;
1636
1637 /* Overlapping register pairs -- make sure we don't
1638 early-clobber ourselves. */
1639 tmp = operands[2];
1640 operands[2] = operands[4];
1641 operands[4] = tmp;
1642 tmp = operands[3];
1643 operands[3] = operands[5];
1644 operands[5] = tmp;
1645 }
1646
1647 #if DEBUG_SPLIT_WIDE_MOVE
1648 fprintf(stderr, "\033[34m");
1649 debug_rtx (operands[2]);
1650 debug_rtx (operands[3]);
1651 debug_rtx (operands[4]);
1652 debug_rtx (operands[5]);
1653 fprintf(stderr, "\033[0m");
1654 #endif
1655 }
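/* Sketch: splitting a DImode move between GPR pairs on a little-endian
   target puts the high word (regno + 1) into operands[2]/[3] and the
   low word (regno + 0) into operands[4]/[5]; the swap above reverses
   the order when the halves would otherwise clobber each other.  */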
1656
1657 /* Emit a setcc instruction in its entirety. */
1658
1659 static bool
1660 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1661 {
1662 rtx tmp;
1663
1664 switch (code)
1665 {
1666 case GT:
1667 case GTU:
1668 tmp = op1, op1 = op2, op2 = tmp;
1669 code = swap_condition (code);
1670 /* FALLTHRU */
1671
1672 case LT:
1673 case LTU:
1674 op1 = force_reg (SImode, op1);
1675 emit_insn (gen_rtx_SET (dest, gen_rtx_fmt_ee (code, SImode, op1, op2)));
1676 return true;
1677
1678 case EQ:
1679 if (op2 != const0_rtx)
1680 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1681 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1682 return true;
1683
1684 case NE:
1685 /* Branchful sequence:
1686 mov dest, 0 16-bit
1687 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1688 mov dest, 1 16-bit
1689
1690 Branchless sequence:
1691 add3 tmp, op1, -op2 32-bit (or mov + sub)
1692 sltu3 tmp, tmp, 1 16-bit
1693 xor3 dest, tmp, 1 32-bit
1694 */
1695 if (optimize_size && op2 != const0_rtx)
1696 return false;
1697
1698 if (op2 != const0_rtx)
1699 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1700
1701 op2 = gen_reg_rtx (SImode);
1702 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1703
1704 emit_insn (gen_rtx_SET (dest, gen_rtx_XOR (SImode, op2, const1_rtx)));
1705 return true;
1706
1707 case LE:
1708 if (GET_CODE (op2) != CONST_INT
1709 	  || INTVAL (op2) == 0x7fffffff)
1710 return false;
1711 op2 = GEN_INT (INTVAL (op2) + 1);
1712 return mep_expand_setcc_1 (LT, dest, op1, op2);
1713
1714 case LEU:
1715 if (GET_CODE (op2) != CONST_INT
1716 || INTVAL (op2) == -1)
1717 return false;
1718 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1719 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1720
1721 case GE:
1722 if (GET_CODE (op2) != CONST_INT
1723 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1724 return false;
1725 op2 = GEN_INT (INTVAL (op2) - 1);
1726 return mep_expand_setcc_1 (GT, dest, op1, op2);
1727
1728 case GEU:
1729 if (GET_CODE (op2) != CONST_INT
1730 || op2 == const0_rtx)
1731 return false;
1732 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1733 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1734
1735 default:
1736 gcc_unreachable ();
1737 }
1738 }
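/* Worked example: "dest = (op1 <= 5)" enters the LE case, becomes
   mep_expand_setcc_1 (LT, dest, op1, GEN_INT (6)), and the LT case
   then emits the strict comparison directly.  */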
1739
1740 bool
1741 mep_expand_setcc (rtx *operands)
1742 {
1743 rtx dest = operands[0];
1744 enum rtx_code code = GET_CODE (operands[1]);
1745 rtx op0 = operands[2];
1746 rtx op1 = operands[3];
1747
1748 return mep_expand_setcc_1 (code, dest, op0, op1);
1749 }
1750
1751 rtx
1752 mep_expand_cbranch (rtx *operands)
1753 {
1754 enum rtx_code code = GET_CODE (operands[0]);
1755 rtx op0 = operands[1];
1756 rtx op1 = operands[2];
1757 rtx tmp;
1758
1759 restart:
1760 switch (code)
1761 {
1762 case LT:
1763 if (mep_imm4_operand (op1, SImode))
1764 break;
1765
1766 tmp = gen_reg_rtx (SImode);
1767 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1768 code = NE;
1769 op0 = tmp;
1770 op1 = const0_rtx;
1771 break;
1772
1773 case GE:
1774 if (mep_imm4_operand (op1, SImode))
1775 break;
1776
1777 tmp = gen_reg_rtx (SImode);
1778 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1779
1780 code = EQ;
1781 op0 = tmp;
1782 op1 = const0_rtx;
1783 break;
1784
1785 case EQ:
1786 case NE:
1787 if (! mep_reg_or_imm4_operand (op1, SImode))
1788 op1 = force_reg (SImode, op1);
1789 break;
1790
1791 case LE:
1792 case GT:
1793 if (GET_CODE (op1) == CONST_INT
1794 && INTVAL (op1) != 0x7fffffff)
1795 {
1796 op1 = GEN_INT (INTVAL (op1) + 1);
1797 code = (code == LE ? LT : GE);
1798 goto restart;
1799 }
1800
1801 tmp = gen_reg_rtx (SImode);
1802 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1803
1804 code = (code == LE ? EQ : NE);
1805 op0 = tmp;
1806 op1 = const0_rtx;
1807 break;
1808
1809 case LTU:
1810 if (op1 == const1_rtx)
1811 {
1812 code = EQ;
1813 op1 = const0_rtx;
1814 break;
1815 }
1816
1817 tmp = gen_reg_rtx (SImode);
1818 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1819 code = NE;
1820 op0 = tmp;
1821 op1 = const0_rtx;
1822 break;
1823
1824 case LEU:
1825 tmp = gen_reg_rtx (SImode);
1826 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1827 code = NE;
1828 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1829 code = EQ;
1830 else
1831 gcc_unreachable ();
1832 op0 = tmp;
1833 op1 = const0_rtx;
1834 break;
1835
1836 case GTU:
1837 tmp = gen_reg_rtx (SImode);
1838 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1839 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1840 code = NE;
1841 op0 = tmp;
1842 op1 = const0_rtx;
1843 break;
1844
1845 case GEU:
1846 tmp = gen_reg_rtx (SImode);
1847 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1848 code = NE;
1849 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1850 code = EQ;
1851 else
1852 gcc_unreachable ();
1853 op0 = tmp;
1854 op1 = const0_rtx;
1855 break;
1856
1857 default:
1858 gcc_unreachable ();
1859 }
1860
1861 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1862 }
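/* Example walk-through: a branch on "a > 4" enters the LE/GT case,
   restarts as GE against 5, and for an imm4-range constant is returned
   directly; a non-constant GT instead computes an LT setcc into a
   scratch and branches on the NE of that scratch against zero.  */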
1863
1864 const char *
1865 mep_emit_cbranch (rtx *operands, int ne)
1866 {
1867 if (GET_CODE (operands[1]) == REG)
1868 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1869 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1870 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1871 else
1872 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1873 }
1874
1875 void
1876 mep_expand_call (rtx *operands, int returns_value)
1877 {
1878 rtx addr = operands[returns_value];
1879 rtx tp = mep_tp_rtx ();
1880 rtx gp = mep_gp_rtx ();
1881
1882 gcc_assert (GET_CODE (addr) == MEM);
1883
1884 addr = XEXP (addr, 0);
1885
1886 if (! mep_call_address_operand (addr, VOIDmode))
1887 addr = force_reg (SImode, addr);
1888
1889 if (! operands[returns_value+2])
1890 operands[returns_value+2] = const0_rtx;
1891
1892 if (returns_value)
1893 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1894 operands[3], tp, gp));
1895 else
1896 emit_call_insn (gen_call_internal (addr, operands[1],
1897 operands[2], tp, gp));
1898 }
1899 \f
1900 /* Aliasing Support. */
1901
1902 /* If X is a machine specific address (i.e. a symbol or label being
1903 referenced as a displacement from the GOT implemented using an
1904 UNSPEC), then return the base term. Otherwise return X. */
1905
1906 rtx
1907 mep_find_base_term (rtx x)
1908 {
1909 rtx base, term;
1910 int unspec;
1911
1912 if (GET_CODE (x) != PLUS)
1913 return x;
1914 base = XEXP (x, 0);
1915 term = XEXP (x, 1);
1916
1917 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1918 && base == mep_tp_rtx ())
1919 unspec = UNS_TPREL;
1920 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1921 && base == mep_gp_rtx ())
1922 unspec = UNS_GPREL;
1923 else
1924 return x;
1925
1926 if (GET_CODE (term) != CONST)
1927 return x;
1928 term = XEXP (term, 0);
1929
1930 if (GET_CODE (term) != UNSPEC
1931 || XINT (term, 1) != unspec)
1932 return x;
1933
1934 return XVECEXP (term, 0, 0);
1935 }
1936 \f
1937 /* Reload Support. */
1938
1939 /* Return true if the registers in CLASS cannot represent the change from
1940 modes FROM to TO. */
1941
1942 bool
1943 mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1944 enum reg_class regclass)
1945 {
1946 if (from == to)
1947 return false;
1948
1949 /* 64-bit COP regs must remain 64-bit COP regs. */
1950 if (TARGET_64BIT_CR_REGS
1951 && (regclass == CR_REGS
1952 || regclass == LOADABLE_CR_REGS)
1953 && (GET_MODE_SIZE (to) < 8
1954 || GET_MODE_SIZE (from) < 8))
1955 return true;
1956
1957 return false;
1958 }
1959
1960 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1961
1962 static bool
1963 mep_general_reg (rtx x)
1964 {
1965 while (GET_CODE (x) == SUBREG)
1966 x = XEXP (x, 0);
1967 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1968 }
1969
1970 static bool
1971 mep_nongeneral_reg (rtx x)
1972 {
1973 while (GET_CODE (x) == SUBREG)
1974 x = XEXP (x, 0);
1975 return (GET_CODE (x) == REG
1976 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1977 }
1978
1979 static bool
1980 mep_general_copro_reg (rtx x)
1981 {
1982 while (GET_CODE (x) == SUBREG)
1983 x = XEXP (x, 0);
1984 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1985 }
1986
1987 static bool
1988 mep_nonregister (rtx x)
1989 {
1990 while (GET_CODE (x) == SUBREG)
1991 x = XEXP (x, 0);
1992 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1993 }
1994
1995 #define DEBUG_RELOAD 0
1996
1997 /* Return the secondary reload class needed for moving value X to or
1998 from a register in coprocessor register class CLASS. */
1999
2000 static enum reg_class
2001 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2002 {
2003 if (mep_general_reg (x))
2004 /* We can do the move directly if mep_have_core_copro_moves_p,
2005 otherwise we need to go through memory. Either way, no secondary
2006 register is needed. */
2007 return NO_REGS;
2008
2009 if (mep_general_copro_reg (x))
2010 {
2011 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2012 if (mep_have_copro_copro_moves_p)
2013 return NO_REGS;
2014
2015 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2016 if (mep_have_core_copro_moves_p)
2017 return GENERAL_REGS;
2018
2019 /* Otherwise we need to do it through memory. No secondary
2020 register is needed. */
2021 return NO_REGS;
2022 }
2023
2024 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2025 && constraint_satisfied_p (x, CONSTRAINT_U))
2026 /* X is a memory value that we can access directly. */
2027 return NO_REGS;
2028
2029 /* We have to move X into a GPR first and then copy it to
2030 the coprocessor register. The move from the GPR to the
2031 coprocessor might be done directly or through memory,
2032 depending on mep_have_core_copro_moves_p. */
2033 return GENERAL_REGS;
2034 }
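/* Example: moving a value held in a GPR into a coprocessor register
   needs no secondary register (first test above); a copro-to-copro
   copy without direct copro moves, but with core<->copro moves,
   requests a GENERAL_REGS scratch instead.  */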
2035
2036 /* Copying X to register in RCLASS. */
2037
2038 enum reg_class
2039 mep_secondary_input_reload_class (enum reg_class rclass,
2040 machine_mode mode ATTRIBUTE_UNUSED,
2041 rtx x)
2042 {
2043 int rv = NO_REGS;
2044
2045 #if DEBUG_RELOAD
2046 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2047 debug_rtx (x);
2048 #endif
2049
2050 if (reg_class_subset_p (rclass, CR_REGS))
2051 rv = mep_secondary_copro_reload_class (rclass, x);
2052 else if (MEP_NONGENERAL_CLASS (rclass)
2053 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2054 rv = GENERAL_REGS;
2055
2056 #if DEBUG_RELOAD
2057 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2058 #endif
2059 return (enum reg_class) rv;
2060 }
2061
2062 /* Copying register in RCLASS to X. */
2063
2064 enum reg_class
2065 mep_secondary_output_reload_class (enum reg_class rclass,
2066 machine_mode mode ATTRIBUTE_UNUSED,
2067 rtx x)
2068 {
2069 int rv = NO_REGS;
2070
2071 #if DEBUG_RELOAD
2072 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2073 debug_rtx (x);
2074 #endif
2075
2076 if (reg_class_subset_p (rclass, CR_REGS))
2077 rv = mep_secondary_copro_reload_class (rclass, x);
2078 else if (MEP_NONGENERAL_CLASS (rclass)
2079 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2080 rv = GENERAL_REGS;
2081
2082 #if DEBUG_RELOAD
2083 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2084 #endif
2085
2086 return (enum reg_class) rv;
2087 }
2088
2089 /* Implement SECONDARY_MEMORY_NEEDED. */
2090
2091 bool
2092 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2093 machine_mode mode ATTRIBUTE_UNUSED)
2094 {
2095 if (!mep_have_core_copro_moves_p)
2096 {
2097 if (reg_classes_intersect_p (rclass1, CR_REGS)
2098 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2099 return true;
2100 if (reg_classes_intersect_p (rclass2, CR_REGS)
2101 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2102 return true;
2103 if (!mep_have_copro_copro_moves_p
2104 && reg_classes_intersect_p (rclass1, CR_REGS)
2105 && reg_classes_intersect_p (rclass2, CR_REGS))
2106 return true;
2107 }
2108 return false;
2109 }
2110
2111 void
2112 mep_expand_reload (rtx *operands, machine_mode mode)
2113 {
2114 /* There are three cases for each direction:
2115 register, farsym
2116 control, farsym
2117 control, nearsym */
2118
2119 int s0 = mep_section_tag (operands[0]) == 'f';
2120 int s1 = mep_section_tag (operands[1]) == 'f';
2121 int c0 = mep_nongeneral_reg (operands[0]);
2122 int c1 = mep_nongeneral_reg (operands[1]);
2123 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
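  /* WHICH is a two-digit decimal code: the tens digit classifies
     operands[0] and the ones digit operands[1], where 0 means a
     general register or plain memory, 1 a control/coprocessor
     register, and 2 a far symbol; e.g. 12 reads "cr = far".  */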
2124
2125 #if DEBUG_RELOAD
2126 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2127 debug_rtx (operands[0]);
2128 debug_rtx (operands[1]);
2129 #endif
2130
2131 switch (which)
2132 {
2133 case 00: /* Don't know why this gets here. */
2134 case 02: /* general = far */
2135 emit_move_insn (operands[0], operands[1]);
2136 return;
2137
2138 case 10: /* cr = mem */
2139 case 11: /* cr = cr */
2140 case 01: /* mem = cr */
2141 case 12: /* cr = far */
2142 emit_move_insn (operands[2], operands[1]);
2143 emit_move_insn (operands[0], operands[2]);
2144 return;
2145
2146 case 20: /* far = general */
2147 emit_move_insn (operands[2], XEXP (operands[1], 0));
2148 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2149 return;
2150
2151 case 21: /* far = cr */
2152 case 22: /* far = far */
2153 default:
2154 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2155 which, mode_name[mode]);
2156 debug_rtx (operands[0]);
2157 debug_rtx (operands[1]);
2158 gcc_unreachable ();
2159 }
2160 }
2161
2162 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2163 can be moved directly into registers 0 to 7, but not into the rest.
2164 If so, and if the required class includes registers 0 to 7, restrict
2165 it to those registers. */
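/* For example, 0x12345 passes all three CONST_INT tests below (it is
   at least 0x10000, below 0x01000000, and has nonzero low 16 bits),
   so RCLASS is narrowed to TPREL_REGS when that class is contained in
   it; 0x20000 has zero low 16 bits, so RCLASS is returned unchanged.  */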
2166
2167 enum reg_class
2168 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2169 {
2170 switch (GET_CODE (x))
2171 {
2172 case CONST_INT:
2173 if (INTVAL (x) >= 0x10000
2174 && INTVAL (x) < 0x01000000
2175 && (INTVAL (x) & 0xffff) != 0
2176 && reg_class_subset_p (TPREL_REGS, rclass))
2177 rclass = TPREL_REGS;
2178 break;
2179
2180 case CONST:
2181 case SYMBOL_REF:
2182 case LABEL_REF:
2183 if (mep_section_tag (x) != 'f'
2184 && reg_class_subset_p (TPREL_REGS, rclass))
2185 rclass = TPREL_REGS;
2186 break;
2187
2188 default:
2189 break;
2190 }
2191 return rclass;
2192 }
2193 \f
2194 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2195 moves, 4 for direct double-register moves, and 1000 for anything
2196 that requires a temporary register or temporary stack slot. */
2197
2198 int
2199 mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
2200 {
2201 if (mep_have_copro_copro_moves_p
2202 && reg_class_subset_p (from, CR_REGS)
2203 && reg_class_subset_p (to, CR_REGS))
2204 {
2205 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2206 return 4;
2207 return 2;
2208 }
2209 if (reg_class_subset_p (from, CR_REGS)
2210 && reg_class_subset_p (to, CR_REGS))
2211 {
2212 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2213 return 8;
2214 return 4;
2215 }
2216 if (reg_class_subset_p (from, CR_REGS)
2217 || reg_class_subset_p (to, CR_REGS))
2218 {
2219 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2220 return 4;
2221 return 2;
2222 }
2223 if (mep_secondary_memory_needed (from, to, mode))
2224 return 1000;
2225 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2226 return 1000;
2227
2228 if (GET_MODE_SIZE (mode) > 4)
2229 return 4;
2230
2231 return 2;
2232 }
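/* For instance, under the rules above an SImode move between general
   registers costs 2 and a DImode one costs 4, while any move between
   two non-general classes, or one that needs a secondary memory slot,
   is priced at a prohibitive 1000.  */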
2233
2234 \f
2235 /* Functions to save and restore machine-specific function data. */
2236
2237 static struct machine_function *
2238 mep_init_machine_status (void)
2239 {
2240 return ggc_cleared_alloc<machine_function> ();
2241 }
2242
2243 static rtx
2244 mep_allocate_initial_value (rtx reg)
2245 {
2246 int rss;
2247
2248 if (GET_CODE (reg) != REG)
2249 return NULL_RTX;
2250
2251 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2252 return NULL_RTX;
2253
2254 /* In interrupt functions, the "initial" values of $gp and $tp are
2255 provided by the prologue. They are not necessarily the same as
2256 the values that the caller was using. */
2257 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2258 if (mep_interrupt_p ())
2259 return NULL_RTX;
2260
2261 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2262 {
2263 cfun->machine->reg_save_size += 4;
2264 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2265 }
2266
2267 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2268 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
2269 }
2270
2271 rtx
2272 mep_return_addr_rtx (int count)
2273 {
2274 if (count != 0)
2275 return const0_rtx;
2276
2277 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2278 }
2279
2280 static rtx
2281 mep_tp_rtx (void)
2282 {
2283 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2284 }
2285
2286 static rtx
2287 mep_gp_rtx (void)
2288 {
2289 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2290 }
2291
2292 static bool
2293 mep_interrupt_p (void)
2294 {
2295 if (cfun->machine->interrupt_handler == 0)
2296 {
2297 int interrupt_handler
2298 = (lookup_attribute ("interrupt",
2299 DECL_ATTRIBUTES (current_function_decl))
2300 != NULL_TREE);
2301 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2302 }
2303 return cfun->machine->interrupt_handler == 2;
2304 }
2305
2306 static bool
2307 mep_disinterrupt_p (void)
2308 {
2309 if (cfun->machine->disable_interrupts == 0)
2310 {
2311 int disable_interrupts
2312 = (lookup_attribute ("disinterrupt",
2313 DECL_ATTRIBUTES (current_function_decl))
2314 != NULL_TREE);
2315 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2316 }
2317 return cfun->machine->disable_interrupts == 2;
2318 }
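/* A minimal usage sketch (hypothetical user code, not part of this
   file) for the two attributes queried above:

     void __attribute__((interrupt)) timer_isr (void);
     void __attribute__((disinterrupt)) no_irq_region (void);

   Both predicates cache a tristate in cfun->machine so the attribute
   list is only walked once per function.  */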
2319
2320 \f
2321 /* Frame/Epilog/Prolog Related. */
2322
2323 static bool
2324 mep_reg_set_p (rtx reg, rtx insn)
2325 {
2326 /* Similar to reg_set_p in rtlanal.c, but we ignore calls. */
2327 if (INSN_P (insn))
2328 {
2329 if (FIND_REG_INC_NOTE (insn, reg))
2330 return true;
2331 insn = PATTERN (insn);
2332 }
2333
2334 if (GET_CODE (insn) == SET
2335 && GET_CODE (XEXP (insn, 0)) == REG
2336 && GET_CODE (XEXP (insn, 1)) == REG
2337 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2338 return false;
2339
2340 return set_of (reg, insn) != NULL_RTX;
2341 }
2342
2343
2344 #define MEP_SAVES_UNKNOWN 0
2345 #define MEP_SAVES_YES 1
2346 #define MEP_SAVES_MAYBE 2
2347 #define MEP_SAVES_NO 3
2348
2349 static bool
2350 mep_reg_set_in_function (int regno)
2351 {
2352 rtx reg;
2353 rtx_insn *insn;
2354
2355 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2356 return true;
2357
2358 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2359 return true;
2360
2361 push_topmost_sequence ();
2362 insn = get_insns ();
2363 pop_topmost_sequence ();
2364
2365 if (!insn)
2366 return false;
2367
2368 reg = gen_rtx_REG (SImode, regno);
2369
2370 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2371 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2372 return true;
2373 return false;
2374 }
2375
2376 static bool
2377 mep_asm_without_operands_p (void)
2378 {
2379 if (cfun->machine->asms_without_operands == 0)
2380 {
2381 rtx_insn *insn;
2382
2383 push_topmost_sequence ();
2384 insn = get_insns ();
2385 pop_topmost_sequence ();
2386
2387 cfun->machine->asms_without_operands = 1;
2388 while (insn)
2389 {
2390 if (INSN_P (insn)
2391 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2392 {
2393 cfun->machine->asms_without_operands = 2;
2394 break;
2395 }
2396 insn = NEXT_INSN (insn);
2397 }
2398
2399 }
2400 return cfun->machine->asms_without_operands == 2;
2401 }
2402
2403 /* Interrupt functions save/restore every call-preserved register, and
2404 any call-used register they use (or all of them if they call any
2405 function, since those may get clobbered there too). Here we check
2406 which call-used registers need saving. */
2407
2408 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2409 && (r == FIRST_CCR_REGNO + 1 \
2410 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2411 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2412
2413 static bool
2414 mep_interrupt_saved_reg (int r)
2415 {
2416 if (!mep_interrupt_p ())
2417 return false;
2418 if (r == REGSAVE_CONTROL_TEMP
2419 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2420 return true;
2421 if (mep_asm_without_operands_p ()
2422 && (!fixed_regs[r]
2423 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2424 || IVC2_ISAVED_REG (r)))
2425 return true;
2426 if (!crtl->is_leaf)
2427 /* Function calls mean we need to save $lp. */
2428 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2429 return true;
2430 if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
2431 /* The interrupt handler might use these registers for repeat blocks,
2432 or it might call a function that does so. */
2433 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2434 return true;
2435 if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2436 return false;
2437 /* Functions we call might clobber these. */
2438 if (call_used_regs[r] && !fixed_regs[r])
2439 return true;
2440 /* Additional registers that need to be saved for IVC2. */
2441 if (IVC2_ISAVED_REG (r))
2442 return true;
2443
2444 return false;
2445 }
2446
2447 static bool
2448 mep_call_saves_register (int r)
2449 {
2450 if (! cfun->machine->frame_locked)
2451 {
2452 int rv = MEP_SAVES_NO;
2453
2454 if (cfun->machine->reg_save_slot[r])
2455 rv = MEP_SAVES_YES;
2456 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2457 rv = MEP_SAVES_YES;
2458 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2459 rv = MEP_SAVES_YES;
2460 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2461 rv = MEP_SAVES_YES;
2462 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2463 /* We need these to have stack slots so that they can be set during
2464 unwinding. */
2465 rv = MEP_SAVES_YES;
2466 else if (mep_interrupt_saved_reg (r))
2467 rv = MEP_SAVES_YES;
2468 cfun->machine->reg_saved[r] = rv;
2469 }
2470 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2471 }
2472
2473 /* Return true if the epilogue uses register REGNO. */
2474
2475 bool
2476 mep_epilogue_uses (int regno)
2477 {
2478 /* Since $lp is a call-saved register, the generic code will normally
2479 mark it used in the epilogue if it needs to be saved and restored.
2480 However, when profiling is enabled, the profiling code will implicitly
2481 clobber $11. This case has to be handled specially both here and in
2482 mep_call_saves_register. */
2483 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2484 return true;
2485 /* Interrupt functions save/restore pretty much everything. */
2486 return (reload_completed && mep_interrupt_saved_reg (regno));
2487 }
2488
2489 static int
2490 mep_reg_size (int regno)
2491 {
2492 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2493 return 8;
2494 return 4;
2495 }
2496
2497 /* Worker function for TARGET_CAN_ELIMINATE. */
2498
2499 bool
2500 mep_can_eliminate (const int from, const int to)
2501 {
2502 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2503 ? ! frame_pointer_needed
2504 : true);
2505 }
2506
2507 int
2508 mep_elimination_offset (int from, int to)
2509 {
2510 int reg_save_size;
2511 int i;
2512 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2513 int total_size;
2514
2515 if (!cfun->machine->frame_locked)
2516 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2517
2518 /* We don't count arg_regs_to_save in the arg pointer offset, because
2519 gcc thinks the arg pointer has moved along with the saved regs.
2520 However, we do count it when we adjust $sp in the prologue. */
2521 reg_save_size = 0;
2522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2523 if (mep_call_saves_register (i))
2524 reg_save_size += mep_reg_size (i);
2525
2526 if (reg_save_size % 8)
2527 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2528 else
2529 cfun->machine->regsave_filler = 0;
2530
2531 /* This is what our total stack adjustment looks like. */
2532 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2533
2534 if (total_size % 8)
2535 cfun->machine->frame_filler = 8 - (total_size % 8);
2536 else
2537 cfun->machine->frame_filler = 0;
2538
2539
2540 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2541 return reg_save_size + cfun->machine->regsave_filler;
2542
2543 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2544 return cfun->machine->frame_filler + frame_size;
2545
2546 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2547 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2548
2549 gcc_unreachable ();
2550 }
2551
2552 static rtx_insn *
2553 F (rtx_insn *x)
2554 {
2555 RTX_FRAME_RELATED_P (x) = 1;
2556 return x;
2557 }
2558
2559 /* Since the prologue/epilogue code is generated after optimization,
2560 we can't rely on gcc to split constants for us. So, this code
2561 captures all the ways to add a constant to a register in one logical
2562 chunk, including optimizing away insns we just don't need. This
2563 makes the prolog/epilog code easier to follow. */
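/* For example, add_constant (SP_REGNO, SP_REGNO, -16, 1) emits a
   single addsi3, whereas a large value such as 0x12345678 is split:
   move 0x12340000 into REGSAVE_CONTROL_TEMP, IOR in 0x5678, then add
   the temp to the source register.  */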
2564 static void
2565 add_constant (int dest, int src, int value, int mark_frame)
2566 {
2567 rtx_insn *insn;
2568 int hi, lo;
2569
2570 if (src == dest && value == 0)
2571 return;
2572
2573 if (value == 0)
2574 {
2575 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2576 gen_rtx_REG (SImode, src));
2577 if (mark_frame)
2578 RTX_FRAME_RELATED_P(insn) = 1;
2579 return;
2580 }
2581
2582 if (value >= -32768 && value <= 32767)
2583 {
2584 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2585 gen_rtx_REG (SImode, src),
2586 GEN_INT (value)));
2587 if (mark_frame)
2588 RTX_FRAME_RELATED_P(insn) = 1;
2589 return;
2590 }
2591
2592 /* Big constant; we need to use a temp register. We use
2593 REGSAVE_CONTROL_TEMP because it's call-clobbered (the reg save
2594 area is always small enough to add to directly). */
2595
2596 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2597 lo = value & 0xffff;
2598
2599 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2600 GEN_INT (hi));
2601
2602 if (lo)
2603 {
2604 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2605 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2606 GEN_INT (lo)));
2607 }
2608
2609 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2610 gen_rtx_REG (SImode, src),
2611 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2612 if (mark_frame)
2613 {
2614 RTX_FRAME_RELATED_P(insn) = 1;
2615 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2616 gen_rtx_SET (gen_rtx_REG (SImode, dest),
2617 gen_rtx_PLUS (SImode,
2618 gen_rtx_REG (SImode, dest),
2619 GEN_INT (value))));
2620 }
2621 }
2622
2623 /* Move SRC to DEST. Mark the move as being potentially dead if
2624 MAYBE_DEAD_P. */
2625
2626 static rtx_insn *
2627 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2628 {
2629 rtx_insn *insn = emit_move_insn (dest, src);
2630 #if 0
2631 if (maybe_dead_p)
2632 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2633 #endif
2634 return insn;
2635 }
2636
2637 /* Used for interrupt functions, which can't assume that $tp and $gp
2638 contain the correct pointers. */
2639
2640 static void
2641 mep_reload_pointer (int regno, const char *symbol)
2642 {
2643 rtx reg, sym;
2644
2645 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2646 return;
2647
2648 reg = gen_rtx_REG (SImode, regno);
2649 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2650 emit_insn (gen_movsi_topsym_s (reg, sym));
2651 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2652 }
2653
2654 /* Assign save slots for any register not already saved. DImode
2655 registers go at the end of the reg save area; the rest go at the
2656 beginning. This is for alignment purposes. Returns true if a frame
2657 is really needed. */
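/* For example, with two 4-byte registers and one 8-byte coprocessor
   register to save, the 4-byte slots get offsets 4 and 8, while the
   8-byte slot gets offset reg_save_size, placing it at the far end of
   the (8-byte padded) save area.  */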
2658 static bool
2659 mep_assign_save_slots (int reg_save_size)
2660 {
2661 bool really_need_stack_frame = false;
2662 int di_ofs = 0;
2663 int i;
2664
2665 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2666 if (mep_call_saves_register(i))
2667 {
2668 int regsize = mep_reg_size (i);
2669
2670 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2671 || mep_reg_set_in_function (i))
2672 really_need_stack_frame = true;
2673
2674 if (cfun->machine->reg_save_slot[i])
2675 continue;
2676
2677 if (regsize < 8)
2678 {
2679 cfun->machine->reg_save_size += regsize;
2680 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2681 }
2682 else
2683 {
2684 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2685 di_ofs += 8;
2686 }
2687 }
2688 cfun->machine->frame_locked = 1;
2689 return really_need_stack_frame;
2690 }
2691
2692 void
2693 mep_expand_prologue (void)
2694 {
2695 int i, rss, sp_offset = 0;
2696 int reg_save_size;
2697 int frame_size;
2698 int really_need_stack_frame;
2699
2700 /* We must not allow register renaming in interrupt functions,
2701 because that invalidates the correctness of the set of call-used
2702 registers we're going to save/restore. */
2703 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2704
2705 if (mep_disinterrupt_p ())
2706 emit_insn (gen_mep_disable_int ());
2707
2708 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2709
2710 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2711 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2712 really_need_stack_frame = frame_size;
2713
2714 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2715
2716 sp_offset = reg_save_size;
2717 if (sp_offset + frame_size < 128)
2718 sp_offset += frame_size;
2719
2720 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2721
2722 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2723 if (mep_call_saves_register(i))
2724 {
2725 rtx mem;
2726 bool maybe_dead_p;
2727 machine_mode rmode;
2728
2729 rss = cfun->machine->reg_save_slot[i];
2730
2731 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2732 && (!mep_reg_set_in_function (i)
2733 && !mep_interrupt_p ()))
2734 continue;
2735
2736 if (mep_reg_size (i) == 8)
2737 rmode = DImode;
2738 else
2739 rmode = SImode;
2740
2741 /* If there is a pseudo associated with this register's initial value,
2742 reload might have already spilt it to the stack slot suggested by
2743 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2744 deleted as dead. */
2745 mem = gen_rtx_MEM (rmode,
2746 plus_constant (Pmode, stack_pointer_rtx,
2747 sp_offset - rss));
2748 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2749
2750 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2751 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2752 else if (rmode == DImode)
2753 {
2754 rtx_insn *insn;
2755 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2756
2757 mem = gen_rtx_MEM (SImode,
2758 plus_constant (Pmode, stack_pointer_rtx,
2759 sp_offset - rss + be));
2760
2761 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2762 gen_rtx_REG (SImode, i),
2763 maybe_dead_p);
2764 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2765 gen_rtx_ZERO_EXTRACT (SImode,
2766 gen_rtx_REG (DImode, i),
2767 GEN_INT (32),
2768 GEN_INT (32)),
2769 maybe_dead_p);
2770 insn = maybe_dead_move (mem,
2771 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2772 maybe_dead_p);
2773 RTX_FRAME_RELATED_P (insn) = 1;
2774
2775 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2776 gen_rtx_SET (copy_rtx (mem),
2777 gen_rtx_REG (rmode, i)));
2778 mem = gen_rtx_MEM (SImode,
2779 plus_constant (Pmode, stack_pointer_rtx,
2780 sp_offset - rss + (4-be)));
2781 insn = maybe_dead_move (mem,
2782 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2783 maybe_dead_p);
2784 }
2785 else
2786 {
2787 rtx_insn *insn;
2788 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2789 gen_rtx_REG (rmode, i),
2790 maybe_dead_p);
2791 insn = maybe_dead_move (mem,
2792 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2793 maybe_dead_p);
2794 RTX_FRAME_RELATED_P (insn) = 1;
2795
2796 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2797 gen_rtx_SET (copy_rtx (mem),
2798 gen_rtx_REG (rmode, i)));
2799 }
2800 }
2801
2802 if (frame_pointer_needed)
2803 {
2804 /* We've already adjusted down by sp_offset. Total $sp change
2805 is reg_save_size + frame_size. We want a net change here of
2806 just reg_save_size. */
2807 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2808 }
2809
2810 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2811
2812 if (mep_interrupt_p ())
2813 {
2814 mep_reload_pointer(GP_REGNO, "__sdabase");
2815 mep_reload_pointer(TP_REGNO, "__tpbase");
2816 }
2817 }
2818
2819 static void
2820 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2821 {
2822 int local = hwi_local;
2823 int frame_size = local + crtl->outgoing_args_size;
2824 int reg_save_size;
2825 int ffill;
2826 int i, sp, skip;
2827 int sp_offset;
2828 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2829
2830 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2831 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2832 sp_offset = reg_save_size + frame_size;
2833
2834 ffill = cfun->machine->frame_filler;
2835
2836 if (cfun->machine->mep_frame_pointer_needed)
2837 reg_names[FP_REGNO] = "$fp";
2838 else
2839 reg_names[FP_REGNO] = "$8";
2840
2841 if (sp_offset == 0)
2842 return;
2843
2844 if (debug_info_level == DINFO_LEVEL_NONE)
2845 {
2846 fprintf (file, "\t# frame: %d", sp_offset);
2847 if (reg_save_size)
2848 fprintf (file, " %d regs", reg_save_size);
2849 if (local)
2850 fprintf (file, " %d locals", local);
2851 if (crtl->outgoing_args_size)
2852 fprintf (file, " %d args", crtl->outgoing_args_size);
2853 fprintf (file, "\n");
2854 return;
2855 }
2856
2857 fprintf (file, "\t#\n");
2858 fprintf (file, "\t# Initial Frame Information:\n");
2859 if (sp_offset || !frame_pointer_needed)
2860 fprintf (file, "\t# Entry ---------- 0\n");
2861
2862 /* Sort registers by save slots, so they're printed in the order
2863 they appear in memory, not the order they're saved in. */
2864 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2865 slot_map[si] = si;
2866 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2867 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2868 if (cfun->machine->reg_save_slot[slot_map[si]]
2869 > cfun->machine->reg_save_slot[slot_map[sj]])
2870 {
2871 int t = slot_map[si];
2872 slot_map[si] = slot_map[sj];
2873 slot_map[sj] = t;
2874 }
2875
2876 sp = 0;
2877 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2878 {
2879 int rsize;
2880 int r = slot_map[i];
2881 int rss = cfun->machine->reg_save_slot[r];
2882
2883 if (!mep_call_saves_register (r))
2884 continue;
2885
2886 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2887 && (!mep_reg_set_in_function (r)
2888 && !mep_interrupt_p ()))
2889 continue;
2890
2891 rsize = mep_reg_size(r);
2892 skip = rss - (sp+rsize);
2893 if (skip)
2894 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2895 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2896 rsize, reg_names[r], sp_offset - rss);
2897 sp = rss;
2898 }
2899
2900 skip = reg_save_size - sp;
2901 if (skip)
2902 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2903
2904 if (frame_pointer_needed)
2905 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2906 if (local)
2907 fprintf (file, "\t# %3d bytes for local vars\n", local);
2908 if (ffill)
2909 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2910 if (crtl->outgoing_args_size)
2911 fprintf (file, "\t# %3d bytes for outgoing args\n",
2912 crtl->outgoing_args_size);
2913 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2914 fprintf (file, "\t#\n");
2915 }
2916
2917
2918 static int mep_prevent_lp_restore = 0;
2919 static int mep_sibcall_epilogue = 0;
2920
2921 void
2922 mep_expand_epilogue (void)
2923 {
2924 int i, sp_offset = 0;
2925 int reg_save_size = 0;
2926 int frame_size;
2927 int lp_temp = LP_REGNO, lp_slot = -1;
2928 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2929 int interrupt_handler = mep_interrupt_p ();
2930
2931 if (profile_arc_flag == 2)
2932 emit_insn (gen_mep_bb_trace_ret ());
2933
2934 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2935 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2936
2937 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
2938
2939 if (frame_pointer_needed)
2940 {
2941 /* If we have a frame pointer, we won't have a reliable stack
2942 pointer (alloca, you know), so rebase SP from FP. */
2943 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2944 gen_rtx_REG (SImode, FP_REGNO));
2945 sp_offset = reg_save_size;
2946 }
2947 else
2948 {
2949 /* SP is right under our local variable space. Adjust it if
2950 needed. */
2951 sp_offset = reg_save_size + frame_size;
2952 if (sp_offset >= 128)
2953 {
2954 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2955 sp_offset -= frame_size;
2956 }
2957 }
2958
2959 /* This is backwards so that we restore the control and coprocessor
2960 registers before the temporary registers we use to restore
2961 them. */
2962 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2963 if (mep_call_saves_register (i))
2964 {
2965 machine_mode rmode;
2966 int rss = cfun->machine->reg_save_slot[i];
2967
2968 if (mep_reg_size (i) == 8)
2969 rmode = DImode;
2970 else
2971 rmode = SImode;
2972
2973 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2974 && !(mep_reg_set_in_function (i) || interrupt_handler))
2975 continue;
2976 if (mep_prevent_lp_restore && i == LP_REGNO)
2977 continue;
2978 if (!mep_prevent_lp_restore
2979 && !interrupt_handler
2980 && (i == 10 || i == 11))
2981 continue;
2982
2983 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2984 emit_move_insn (gen_rtx_REG (rmode, i),
2985 gen_rtx_MEM (rmode,
2986 plus_constant (Pmode, stack_pointer_rtx,
2987 sp_offset - rss)));
2988 else
2989 {
2990 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
2991 /* Defer this one so we can jump indirect rather than
2992 copying the RA to $lp and "ret". EH epilogues
2993 automatically skip this anyway. */
2994 lp_slot = sp_offset-rss;
2995 else
2996 {
2997 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2998 gen_rtx_MEM (rmode,
2999 plus_constant (Pmode,
3000 stack_pointer_rtx,
3001 sp_offset-rss)));
3002 emit_move_insn (gen_rtx_REG (rmode, i),
3003 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3004 }
3005 }
3006 }
3007 if (lp_slot != -1)
3008 {
3009 /* Restore this one last so we know it will be in the temp
3010 register when we return by jumping indirectly via the temp. */
3011 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3012 gen_rtx_MEM (SImode,
3013 plus_constant (Pmode, stack_pointer_rtx,
3014 lp_slot)));
3015 lp_temp = REGSAVE_CONTROL_TEMP;
3016 }
3017
3018
3019 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3020
3021 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3022 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3023 gen_rtx_REG (SImode, SP_REGNO),
3024 cfun->machine->eh_stack_adjust));
3025
3026 if (mep_sibcall_epilogue)
3027 return;
3028
3029 if (mep_disinterrupt_p ())
3030 emit_insn (gen_mep_enable_int ());
3031
3032 if (mep_prevent_lp_restore)
3033 {
3034 emit_jump_insn (gen_eh_return_internal ());
3035 emit_barrier ();
3036 }
3037 else if (interrupt_handler)
3038 emit_jump_insn (gen_mep_reti ());
3039 else
3040 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3041 }
3042
3043 void
3044 mep_expand_eh_return (rtx *operands)
3045 {
3046 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3047 {
3048 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3049 emit_move_insn (ra, operands[0]);
3050 operands[0] = ra;
3051 }
3052
3053 emit_insn (gen_eh_epilogue (operands[0]));
3054 }
3055
3056 void
3057 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3058 {
3059 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3060 mep_prevent_lp_restore = 1;
3061 mep_expand_epilogue ();
3062 mep_prevent_lp_restore = 0;
3063 }
3064
3065 void
3066 mep_expand_sibcall_epilogue (void)
3067 {
3068 mep_sibcall_epilogue = 1;
3069 mep_expand_epilogue ();
3070 mep_sibcall_epilogue = 0;
3071 }
3072
3073 static bool
3074 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3075 {
3076 if (decl == NULL)
3077 return false;
3078
3079 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3080 return false;
3081
3082 /* Can't use a sibcall from an interrupt or disinterrupt function. */
3083 if (mep_interrupt_p () || mep_disinterrupt_p ())
3084 return false;
3085
3086 return true;
3087 }
3088
3089 rtx
3090 mep_return_stackadj_rtx (void)
3091 {
3092 return gen_rtx_REG (SImode, 10);
3093 }
3094
3095 rtx
3096 mep_return_handler_rtx (void)
3097 {
3098 return gen_rtx_REG (SImode, LP_REGNO);
3099 }
3100
3101 void
3102 mep_function_profiler (FILE *file)
3103 {
3104 /* Always right at the beginning of the function. */
3105 fprintf (file, "\t# mep function profiler\n");
3106 fprintf (file, "\tadd\t$sp, -8\n");
3107 fprintf (file, "\tsw\t$0, ($sp)\n");
3108 fprintf (file, "\tldc\t$0, $lp\n");
3109 fprintf (file, "\tsw\t$0, 4($sp)\n");
3110 fprintf (file, "\tbsr\t__mep_mcount\n");
3111 fprintf (file, "\tlw\t$0, 4($sp)\n");
3112 fprintf (file, "\tstc\t$0, $lp\n");
3113 fprintf (file, "\tlw\t$0, ($sp)\n");
3114 fprintf (file, "\tadd\t$sp, 8\n\n");
3115 }
3116
3117 const char *
3118 mep_emit_bb_trace_ret (void)
3119 {
3120 fprintf (asm_out_file, "\t# end of block profiling\n");
3121 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3122 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3123 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3124 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3125 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3126 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3127 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3128 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3129 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3130 return "";
3131 }
3132
3133 #undef SAVE
3134 #undef RESTORE
3135 \f
3136 /* Operand Printing. */
3137
3138 void
3139 mep_print_operand_address (FILE *stream, rtx address)
3140 {
3141 if (GET_CODE (address) == MEM)
3142 address = XEXP (address, 0);
3143 else
3144 /* cf: gcc.dg/asm-4.c. */
3145 gcc_assert (GET_CODE (address) == REG);
3146
3147 mep_print_operand (stream, address, 0);
3148 }
3149
3150 static struct
3151 {
3152 char code;
3153 const char *pattern;
3154 const char *format;
3155 }
3156 const conversions[] =
3157 {
3158 { 0, "r", "0" },
3159 { 0, "m+ri", "3(2)" },
3160 { 0, "mr", "(1)" },
3161 { 0, "ms", "(1)" },
3162 { 0, "ml", "(1)" },
3163 { 0, "mLrs", "%lo(3)(2)" },
3164 { 0, "mLr+si", "%lo(4+5)(2)" },
3165 { 0, "m+ru2s", "%tpoff(5)(2)" },
3166 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3167 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3168 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3169 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3170 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3171 { 0, "mi", "(1)" },
3172 { 0, "m+si", "(2+3)" },
3173 { 0, "m+li", "(2+3)" },
3174 { 0, "i", "0" },
3175 { 0, "s", "0" },
3176 { 0, "+si", "1+2" },
3177 { 0, "+u2si", "%tpoff(3+4)" },
3178 { 0, "+u3si", "%sdaoff(3+4)" },
3179 { 0, "l", "0" },
3180 { 'b', "i", "0" },
3181 { 'B', "i", "0" },
3182 { 'U', "i", "0" },
3183 { 'h', "i", "0" },
3184 { 'h', "Hs", "%hi(1)" },
3185 { 'I', "i", "0" },
3186 { 'I', "u2s", "%tpoff(2)" },
3187 { 'I', "u3s", "%sdaoff(2)" },
3188 { 'I', "+u2si", "%tpoff(3+4)" },
3189 { 'I', "+u3si", "%sdaoff(3+4)" },
3190 { 'J', "i", "0" },
3191 { 'P', "mr", "(1\\+),\\0" },
3192 { 'x', "i", "0" },
3193 { 0, 0, 0 }
3194 };
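/* In the format strings above, a digit N is replaced by the operand
   captured in patternr[N] when the pattern matched, and a backslash
   escapes the next character literally; any other character is copied
   to the output verbatim, except that a '+' is suppressed before a
   negative constant.  See the interpreter loop in mep_print_operand.  */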
3195
3196 static int
3197 unique_bit_in (HOST_WIDE_INT i)
3198 {
3199 switch (i & 0xff)
3200 {
3201 case 0x01: case 0xfe: return 0;
3202 case 0x02: case 0xfd: return 1;
3203 case 0x04: case 0xfb: return 2;
3204 case 0x08: case 0xf7: return 3;
3205 case 0x10: case 0x7f: return 4;
3206 case 0x20: case 0xbf: return 5;
3207 case 0x40: case 0xdf: return 6;
3208 case 0x80: case 0xef: return 7;
3209 default:
3210 gcc_unreachable ();
3211 }
3212 }
3213
3214 static int
3215 bit_size_for_clip (HOST_WIDE_INT i)
3216 {
3217 int rv;
3218
3219 for (rv = 0; rv < 31; rv ++)
3220 if (((HOST_WIDE_INT) 1 << rv) > i)
3221 return rv + 1;
3222 gcc_unreachable ();
3223 }
3224
3225 /* Print an operand to an assembler instruction. */
3226
3227 void
3228 mep_print_operand (FILE *file, rtx x, int code)
3229 {
3230 int i, j;
3231 const char *real_name;
3232
3233 if (code == '<')
3234 {
3235 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3236 we're using, then skip over the "mep_" part of its name. */
3237 const struct cgen_insn *insn;
3238
3239 if (mep_get_move_insn (mep_cmov, &insn))
3240 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3241 else
3242 mep_intrinsic_unavailable (mep_cmov);
3243 return;
3244 }
3245 if (code == 'L')
3246 {
3247 switch (GET_CODE (x))
3248 {
3249 case AND:
3250 fputs ("clr", file);
3251 return;
3252 case IOR:
3253 fputs ("set", file);
3254 return;
3255 case XOR:
3256 fputs ("not", file);
3257 return;
3258 default:
3259 output_operand_lossage ("invalid %%L code");
3260 }
3261 }
3262 if (code == 'M')
3263 {
3264 /* Print the second operand of a CR <- CR move. If we're using
3265 a two-operand instruction (i.e., a real cmov), then just print
3266 the operand normally. If we're using a "reg, reg, immediate"
3267 instruction such as caddi3, print the operand followed by a
3268 zero field. If we're using a three-register instruction,
3269 print the operand twice. */
3270 const struct cgen_insn *insn;
3271
3272 mep_print_operand (file, x, 0);
3273 if (mep_get_move_insn (mep_cmov, &insn)
3274 && insn_data[insn->icode].n_operands == 3)
3275 {
3276 fputs (", ", file);
3277 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3278 mep_print_operand (file, x, 0);
3279 else
3280 mep_print_operand (file, const0_rtx, 0);
3281 }
3282 return;
3283 }
3284
3285 encode_pattern (x);
3286 for (i = 0; conversions[i].pattern; i++)
3287 if (conversions[i].code == code
3288 && strcmp(conversions[i].pattern, pattern) == 0)
3289 {
3290 for (j = 0; conversions[i].format[j]; j++)
3291 if (conversions[i].format[j] == '\\')
3292 {
3293 fputc (conversions[i].format[j+1], file);
3294 j++;
3295 }
3296 else if (ISDIGIT(conversions[i].format[j]))
3297 {
3298 rtx r = patternr[conversions[i].format[j] - '0'];
3299 switch (GET_CODE (r))
3300 {
3301 case REG:
3302 fprintf (file, "%s", reg_names [REGNO (r)]);
3303 break;
3304 case CONST_INT:
3305 switch (code)
3306 {
3307 case 'b':
3308 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3309 break;
3310 case 'B':
3311 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3312 break;
3313 case 'h':
3314 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3315 break;
3316 case 'U':
3317 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3318 break;
3319 case 'J':
3320 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3321 break;
3322 case 'x':
3323 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3324 && !(INTVAL (r) & 0xff))
3325 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3326 else
3327 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3328 break;
3329 case 'I':
3330 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3331 && conversions[i].format[j+1] == 0)
3332 {
3333 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3334 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3335 }
3336 else
3337 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3338 break;
3339 default:
3340 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3341 break;
3342 }
3343 break;
3344 case CONST_DOUBLE:
3345 fprintf(file, "[const_double 0x%lx]",
3346 (unsigned long) CONST_DOUBLE_HIGH(r));
3347 break;
3348 case SYMBOL_REF:
3349 real_name = targetm.strip_name_encoding (XSTR (r, 0));
3350 assemble_name (file, real_name);
3351 break;
3352 case LABEL_REF:
3353 output_asm_label (r);
3354 break;
3355 default:
3356 fprintf (stderr, "don't know how to print this operand:");
3357 debug_rtx (r);
3358 gcc_unreachable ();
3359 }
3360 }
3361 else
3362 {
3363 if (conversions[i].format[j] == '+'
3364 && (!code || code == 'I')
3365 && ISDIGIT (conversions[i].format[j+1])
3366 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3367 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3368 continue;
3369 fputc(conversions[i].format[j], file);
3370 }
3371 break;
3372 }
3373 if (!conversions[i].pattern)
3374 {
3375 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3376 debug_rtx(x);
3377 }
3378
3379 return;
3380 }
3381
3382 void
3383 mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
3384 int noperands ATTRIBUTE_UNUSED)
3385 {
3386 /* Despite the fact that MeP is perfectly capable of branching and
3387 doing something else in the same bundle, gcc does jump
3388 optimization *after* scheduling, so we cannot trust the bundling
3389 flags on jump instructions. */
3390 if (GET_MODE (insn) == BImode
3391 && get_attr_slots (insn) != SLOTS_CORE)
3392 fputc ('+', asm_out_file);
3393 }
3394
3395 /* Function args in registers. */
3396
3397 static void
3398 mep_setup_incoming_varargs (cumulative_args_t cum,
3399 machine_mode mode ATTRIBUTE_UNUSED,
3400 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3401 int second_time ATTRIBUTE_UNUSED)
3402 {
3403 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3404
3405 if (nsave > 0)
3406 cfun->machine->arg_regs_to_save = nsave;
3407 *pretend_size = nsave * 4;
3408 }
3409
3410 static int
3411 bytesize (const_tree type, machine_mode mode)
3412 {
3413 if (mode == BLKmode)
3414 return int_size_in_bytes (type);
3415 return GET_MODE_SIZE (mode);
3416 }
3417
3418 static rtx
3419 mep_expand_builtin_saveregs (void)
3420 {
3421 int bufsize, i, ns;
3422 rtx regbuf;
3423
3424 ns = cfun->machine->arg_regs_to_save;
3425 if (TARGET_IVC2)
3426 {
3427 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3428 regbuf = assign_stack_local (SImode, bufsize, 64);
3429 }
3430 else
3431 {
3432 bufsize = ns * 4;
3433 regbuf = assign_stack_local (SImode, bufsize, 32);
3434 }
3435
3436 move_block_from_reg (5-ns, regbuf, ns);
3437
3438 if (TARGET_IVC2)
3439 {
3440 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3441 int ofs = 8 * ((ns+1)/2);
3442
3443 for (i=0; i<ns; i++)
3444 {
3445 int rn = (4-ns) + i + 49;
3446 rtx ptr;
3447
3448 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3449 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3450 ofs += 8;
3451 }
3452 }
3453 return XEXP (regbuf, 0);
3454 }
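/* The IVC2 buffer laid out above is thus: the NS core argument words
   first, padded to an 8-byte boundary, followed by NS doublewords
   holding the corresponding coprocessor argument registers (the same
   hard registers, 49 and up, that mep_function_arg hands out below).  */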
3455
3456 static tree
3457 mep_build_builtin_va_list (void)
3458 {
3459 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3460 tree record;
3461
3462
3463 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3464
3465 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3466 get_identifier ("__va_next_gp"), ptr_type_node);
3467 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3468 get_identifier ("__va_next_gp_limit"),
3469 ptr_type_node);
3470 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3471 ptr_type_node);
3472 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3473 ptr_type_node);
3474
3475 DECL_FIELD_CONTEXT (f_next_gp) = record;
3476 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3477 DECL_FIELD_CONTEXT (f_next_cop) = record;
3478 DECL_FIELD_CONTEXT (f_next_stack) = record;
3479
3480 TYPE_FIELDS (record) = f_next_gp;
3481 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3482 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3483 DECL_CHAIN (f_next_cop) = f_next_stack;
3484
3485 layout_type (record);
3486
3487 return record;
3488 }
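/* Viewed from C, the record built above is roughly (illustrative
   sketch; the real type is built directly in trees):

     struct __va_list {
       void *__va_next_gp;        // next core-register argument
       void *__va_next_gp_limit;  // end of the saved core registers
       void *__va_next_cop;       // next coprocessor (IVC2) argument
       void *__va_next_stack;     // next stack-passed argument
     };  */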
3489
3490 static void
3491 mep_expand_va_start (tree valist, rtx nextarg)
3492 {
3493 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3494 tree next_gp, next_gp_limit, next_cop, next_stack;
3495 tree t, u;
3496 int ns;
3497
3498 ns = cfun->machine->arg_regs_to_save;
3499
3500 f_next_gp = TYPE_FIELDS (va_list_type_node);
3501 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3502 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3503 f_next_stack = DECL_CHAIN (f_next_cop);
3504
3505 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3506 NULL_TREE);
3507 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3508 valist, f_next_gp_limit, NULL_TREE);
3509 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3510 NULL_TREE);
3511 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3512 valist, f_next_stack, NULL_TREE);
3513
3514 /* va_list.next_gp = expand_builtin_saveregs (); */
3515 u = make_tree (sizetype, expand_builtin_saveregs ());
3516 u = fold_convert (ptr_type_node, u);
3517 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3518 TREE_SIDE_EFFECTS (t) = 1;
3519 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3520
3521 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3522 u = fold_build_pointer_plus_hwi (u, 4 * ns);
3523 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3524 TREE_SIDE_EFFECTS (t) = 1;
3525 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3526
3527 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
3528 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3529 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3530 TREE_SIDE_EFFECTS (t) = 1;
3531 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3532
3533 /* va_list.next_stack = nextarg; */
3534 u = make_tree (ptr_type_node, nextarg);
3535 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3536 TREE_SIDE_EFFECTS (t) = 1;
3537 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3538 }
3539
3540 static tree
3541 mep_gimplify_va_arg_expr (tree valist, tree type,
3542 gimple_seq *pre_p,
3543 gimple_seq *post_p ATTRIBUTE_UNUSED)
3544 {
3545 HOST_WIDE_INT size, rsize;
3546 bool by_reference, ivc2_vec;
3547 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3548 tree next_gp, next_gp_limit, next_cop, next_stack;
3549 tree label_sover, label_selse;
3550 tree tmp, res_addr;
3551
3552 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3553
3554 size = int_size_in_bytes (type);
3555 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3556
3557 if (by_reference)
3558 {
3559 type = build_pointer_type (type);
3560 size = 4;
3561 }
3562 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3563
3564 f_next_gp = TYPE_FIELDS (va_list_type_node);
3565 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3566 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3567 f_next_stack = DECL_CHAIN (f_next_cop);
3568
3569 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3570 NULL_TREE);
3571 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3572 valist, f_next_gp_limit, NULL_TREE);
3573 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3574 NULL_TREE);
3575 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3576 valist, f_next_stack, NULL_TREE);
3577
3578 /* if f_next_gp < f_next_gp_limit
3579 IF (VECTOR_P && IVC2)
3580 val = *f_next_cop;
3581 ELSE
3582 val = *f_next_gp;
3583 f_next_gp += 4;
3584 f_next_cop += 8;
3585 else
3586 label_selse:
3587 val = *f_next_stack;
3588 f_next_stack += rsize;
3589 label_sover:
3590 */
3591
3592 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3593 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3594 res_addr = create_tmp_var (ptr_type_node);
3595
3596 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3597 unshare_expr (next_gp_limit));
3598 tmp = build3 (COND_EXPR, void_type_node, tmp,
3599 build1 (GOTO_EXPR, void_type_node,
3600 unshare_expr (label_selse)),
3601 NULL_TREE);
3602 gimplify_and_add (tmp, pre_p);
3603
3604 if (ivc2_vec)
3605 {
3606 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3607 gimplify_and_add (tmp, pre_p);
3608 }
3609 else
3610 {
3611 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3612 gimplify_and_add (tmp, pre_p);
3613 }
3614
3615 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
3616 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3617
3618 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
3619 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3620
3621 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3622 gimplify_and_add (tmp, pre_p);
3623
3624 /* - - */
3625
3626 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3627 gimplify_and_add (tmp, pre_p);
3628
3629 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3630 gimplify_and_add (tmp, pre_p);
3631
3632 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
3633 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3634
3635 /* - - */
3636
3637 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3638 gimplify_and_add (tmp, pre_p);
3639
3640 res_addr = fold_convert (build_pointer_type (type), res_addr);
3641
3642 if (by_reference)
3643 res_addr = build_va_arg_indirect_ref (res_addr);
3644
3645 return build_va_arg_indirect_ref (res_addr);
3646 }
3647
3648 void
3649 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3650 rtx libname ATTRIBUTE_UNUSED,
3651 tree fndecl ATTRIBUTE_UNUSED)
3652 {
3653 pcum->nregs = 0;
3654
3655 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3656 pcum->vliw = 1;
3657 else
3658 pcum->vliw = 0;
3659 }
3660
3661 /* The ABI is thus: Arguments are in $1, $2, $3, $4, then the stack.
3662 Arguments larger than 4 bytes are passed indirectly. The return value
3663 goes in $0, unless it is bigger than 4 bytes, in which case the caller
3664 passes a pointer as the first arg. For varargs, we copy $1..$4 to the stack. */
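/* Illustrative (hypothetical user code): for
     int f (int a, long long b);
   A arrives in $1; B is 8 bytes, so it is passed by reference and its
   address occupies $2.  An 8-byte struct return value likewise turns
   into a hidden pointer argument in $1.  */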
3665
3666 static rtx
3667 mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3668 const_tree type ATTRIBUTE_UNUSED,
3669 bool named ATTRIBUTE_UNUSED)
3670 {
3671 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3672
3673 /* VOIDmode is a signal for the backend to pass data to the call
3674 expander via the second operand to the call pattern. We use
3675 this to determine whether to use "jsr" or "jsrv". */
3676 if (mode == VOIDmode)
3677 return GEN_INT (cum->vliw);
3678
3679 /* If we haven't run out of argument registers, return the next. */
3680 if (cum->nregs < 4)
3681 {
3682 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3683 return gen_rtx_REG (mode, cum->nregs + 49);
3684 else
3685 return gen_rtx_REG (mode, cum->nregs + 1);
3686 }
3687
3688 /* Otherwise the argument goes on the stack. */
3689 return NULL_RTX;
3690 }
3691
3692 static bool
3693 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3694 machine_mode mode,
3695 const_tree type,
3696 bool named ATTRIBUTE_UNUSED)
3697 {
3698 int size = bytesize (type, mode);
3699
3700 /* This is non-obvious, but yes, large values passed after we've run
3701 out of registers are *still* passed by reference - we put the
3702 address of the parameter on the stack, as well as putting the
3703 parameter itself elsewhere on the stack. */
3704
3705 if (size <= 0 || size > 8)
3706 return true;
3707 if (size <= 4)
3708 return false;
3709 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3710 && type != NULL_TREE && VECTOR_TYPE_P (type))
3711 return false;
3712 return true;
3713 }
3714
3715 static void
3716 mep_function_arg_advance (cumulative_args_t pcum,
3717 machine_mode mode ATTRIBUTE_UNUSED,
3718 const_tree type ATTRIBUTE_UNUSED,
3719 bool named ATTRIBUTE_UNUSED)
3720 {
3721 get_cumulative_args (pcum)->nregs += 1;
3722 }
3723
3724 bool
3725 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3726 {
3727 int size = bytesize (type, BLKmode);
3728 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3729 return size > 0 && size <= 8 ? 0 : 1;
3730 return size > 0 && size <= 4 ? 0 : 1;
3731 }
3732
3733 static bool
3734 mep_narrow_volatile_bitfield (void)
3735 {
3736 return true;
3738 }
3739
3740 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3741
3742 rtx
3743 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3744 {
3745 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3746 return gen_rtx_REG (TYPE_MODE (type), 48);
3747 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3748 }
3749
3750 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3751
3752 rtx
3753 mep_libcall_value (machine_mode mode)
3754 {
3755 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3756 }
3757
3758 /* Handle pipeline hazards. */
3759
3760 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3761 static const char *opnames[] = { "", "stc", "fsft", "ret" };
3762
3763 static int prev_opcode = 0;
3764
3765 /* This isn't as precise as it could be, because we don't know which
3766 control register the STC opcode is storing to. We only need to add
3767 the nop if it's the relevant register, but we add it for irrelevant
3768 registers as well. */
3769
3770 void
3771 mep_asm_output_opcode (FILE *file, const char *ptr)
3772 {
3773 int this_opcode = op_none;
3774 const char *hazard = 0;
3775
3776 switch (*ptr)
3777 {
3778 case 'f':
3779 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3780 this_opcode = op_fsft;
3781 break;
3782 case 'r':
3783 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3784 this_opcode = op_ret;
3785 break;
3786 case 's':
3787 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3788 this_opcode = op_stc;
3789 break;
3790 }
3791
3792 if (prev_opcode == op_stc && this_opcode == op_fsft)
3793 hazard = "nop";
3794 if (prev_opcode == op_stc && this_opcode == op_ret)
3795 hazard = "nop";
3796
3797 if (hazard)
3798 fprintf(file, "%s\t# %s-%s hazard\n\t",
3799 hazard, opnames[prev_opcode], opnames[this_opcode]);
3800
3801 prev_opcode = this_opcode;
3802 }
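/* For example, if an "stc" is immediately followed by a "ret", the
   code above prints
     nop	# stc-ret hazard
   before the "ret" to break the pipeline dependency.  */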
3803
3804 /* Handle attributes. */
3805
3806 static tree
3807 mep_validate_based_tiny (tree *node, tree name, tree args,
3808 int flags ATTRIBUTE_UNUSED, bool *no_add)
3809 {
3810 if (TREE_CODE (*node) != VAR_DECL
3811 && TREE_CODE (*node) != POINTER_TYPE
3812 && TREE_CODE (*node) != TYPE_DECL)
3813 {
3814 warning (0, "%qE attribute only applies to variables", name);
3815 *no_add = true;
3816 }
3817 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3818 {
3819 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3820 {
3821 warning (0, "address region attributes not allowed with auto storage class");
3822 *no_add = true;
3823 }
3824 /* Ignore storage attribute of pointed-to variable: char __far * x; */
3825 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3826 {
3827 warning (0, "address region attributes on pointed-to types ignored");
3828 *no_add = true;
3829 }
3830 }
3831
3832 return NULL_TREE;
3833 }
3834
3835 static int
3836 mep_multiple_address_regions (tree list, bool check_section_attr)
3837 {
3838 tree a;
3839 int count_sections = 0;
3840 int section_attr_count = 0;
3841
3842 for (a = list; a; a = TREE_CHAIN (a))
3843 {
3844 if (is_attribute_p ("based", TREE_PURPOSE (a))
3845 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3846 || is_attribute_p ("near", TREE_PURPOSE (a))
3847 || is_attribute_p ("far", TREE_PURPOSE (a))
3848 || is_attribute_p ("io", TREE_PURPOSE (a)))
3849 count_sections ++;
3850 if (check_section_attr)
3851 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3852 }
3853
3854 if (check_section_attr)
3855 return section_attr_count;
3856 else
3857 return count_sections;
3858 }
3859
3860 #define MEP_ATTRIBUTES(decl) \
3861 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3862 : DECL_ATTRIBUTES (decl) \
3863 ? (DECL_ATTRIBUTES (decl)) \
3864 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3865
3866 static tree
3867 mep_validate_near_far (tree *node, tree name, tree args,
3868 int flags ATTRIBUTE_UNUSED, bool *no_add)
3869 {
3870 if (TREE_CODE (*node) != VAR_DECL
3871 && TREE_CODE (*node) != FUNCTION_DECL
3872 && TREE_CODE (*node) != METHOD_TYPE
3873 && TREE_CODE (*node) != POINTER_TYPE
3874 && TREE_CODE (*node) != TYPE_DECL)
3875 {
3876 warning (0, "%qE attribute only applies to variables and functions",
3877 name);
3878 *no_add = true;
3879 }
3880 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3881 {
3882 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3883 {
3884 warning (0, "address region attributes not allowed with auto storage class");
3885 *no_add = true;
3886 }
3887 /* Ignore storage attribute of pointed-to variable: char __far * x; */
3888 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3889 {
3890 warning (0, "address region attributes on pointed-to types ignored");
3891 *no_add = true;
3892 }
3893 }
3894 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3895 {
3896 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3897 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3898 DECL_ATTRIBUTES (*node) = NULL_TREE;
3899 }
3900 return NULL_TREE;
3901 }
3902
3903 static tree
3904 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3905 int flags ATTRIBUTE_UNUSED, bool *no_add)
3906 {
3907 if (TREE_CODE (*node) != FUNCTION_DECL
3908 && TREE_CODE (*node) != METHOD_TYPE)
3909 {
3910 warning (0, "%qE attribute only applies to functions", name);
3911 *no_add = true;
3912 }
3913 return NULL_TREE;
3914 }
3915
3916 static tree
3917 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3918 int flags ATTRIBUTE_UNUSED, bool *no_add)
3919 {
3920 tree function_type;
3921
3922 if (TREE_CODE (*node) != FUNCTION_DECL)
3923 {
3924 warning (0, "%qE attribute only applies to functions", name);
3925 *no_add = true;
3926 return NULL_TREE;
3927 }
3928
3929 if (DECL_DECLARED_INLINE_P (*node))
3930 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3931 DECL_UNINLINABLE (*node) = 1;
3932
3933 function_type = TREE_TYPE (*node);
3934
3935 if (TREE_TYPE (function_type) != void_type_node)
3936 error ("interrupt function must have return type of void");
3937
3938 if (prototype_p (function_type)
3939 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3940 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3941 error ("interrupt function must have no arguments");
3942
3943 return NULL_TREE;
3944 }
3945
3946 static tree
3947 mep_validate_io_cb (tree *node, tree name, tree args,
3948 int flags ATTRIBUTE_UNUSED, bool *no_add)
3949 {
3950 if (TREE_CODE (*node) != VAR_DECL)
3951 {
3952 warning (0, "%qE attribute only applies to variables", name);
3953 *no_add = true;
3954 }
3955
3956 if (args != NULL_TREE)
3957 {
3958 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3959 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3960 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3961 {
3962 warning (0, "%qE attribute allows only an integer constant argument",
3963 name);
3964 *no_add = true;
3965 }
3966 }
3967
3968 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3969 TREE_THIS_VOLATILE (*node) = 1;
3970
3971 return NULL_TREE;
3972 }
3973
3974 static tree
3975 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3976 int flags ATTRIBUTE_UNUSED, bool *no_add)
3977 {
3978 if (TREE_CODE (*node) != FUNCTION_TYPE
3979 && TREE_CODE (*node) != FUNCTION_DECL
3980 && TREE_CODE (*node) != METHOD_TYPE
3981 && TREE_CODE (*node) != FIELD_DECL
3982 && TREE_CODE (*node) != TYPE_DECL)
3983 {
3984 static int gave_pointer_note = 0;
3985 static int gave_array_note = 0;
3986 static const char * given_type = NULL;
3987
3988 given_type = get_tree_code_name (TREE_CODE (*node));
3989 if (TREE_CODE (*node) == POINTER_TYPE)
3990 given_type = "pointers";
3991 if (TREE_CODE (*node) == ARRAY_TYPE)
3992 given_type = "arrays";
3993
3994 if (given_type)
3995 warning (0, "%qE attribute only applies to functions, not %s",
3996 name, given_type);
3997 else
3998 warning (0, "%qE attribute only applies to functions",
3999 name);
4000 *no_add = true;
4001
4002 if (TREE_CODE (*node) == POINTER_TYPE
4003 && !gave_pointer_note)
4004 {
4005 inform (input_location,
4006 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4007 " typedef int (__vliw *vfuncptr) ();");
4008 gave_pointer_note = 1;
4009 }
4010
4011 if (TREE_CODE (*node) == ARRAY_TYPE
4012 && !gave_array_note)
4013 {
4014 inform (input_location,
4015 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4016 " typedef int (__vliw *vfuncptr[]) ();");
4017 gave_array_note = 1;
4018 }
4019 }
4020 if (!TARGET_VLIW)
4021 error ("VLIW functions are not allowed without a VLIW configuration");
4022 return NULL_TREE;
4023 }
4024
4025 static const struct attribute_spec mep_attribute_table[11] =
4026 {
4027 /* name min max decl type func handler
4028 affects_type_identity */
4029 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4030 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4031 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4032 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4033 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4034 false },
4035 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4036 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4037 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4038 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4039 { NULL, 0, 0, false, false, false, NULL, false }
4040 };
4041
4042 static bool
4043 mep_function_attribute_inlinable_p (const_tree callee)
4044 {
4045 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4046 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4047 return (lookup_attribute ("disinterrupt", attrs) == 0
4048 && lookup_attribute ("interrupt", attrs) == 0);
4049 }
4050
4051 static bool
4052 mep_can_inline_p (tree caller, tree callee)
4053 {
4054 if (TREE_CODE (callee) == ADDR_EXPR)
4055 callee = TREE_OPERAND (callee, 0);
4056
4057 if (!mep_vliw_function_p (caller)
4058 && mep_vliw_function_p (callee))
4059 {
4060 return false;
4061 }
4062 return true;
4063 }
4064
4065 #define FUNC_CALL 1
4066 #define FUNC_DISINTERRUPT 2
4067
4068
4069 struct GTY(()) pragma_entry {
4070 int used;
4071 int flag;
4072 };
4073
4074 struct pragma_traits : default_hashmap_traits
4075 {
4076 static hashval_t hash (const char *s) { return htab_hash_string (s); }
4077 static bool
4078 equal_keys (const char *a, const char *b)
4079 {
4080 return strcmp (a, b) == 0;
4081 }
4082 };
4083
4084 /* Hash table of farcall-tagged sections. */
4085 static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
4086 pragma_htab;
4087
4088 static void
4089 mep_note_pragma_flag (const char *funcname, int flag)
4090 {
4091 if (!pragma_htab)
4092 pragma_htab
4093 = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
4094
4095 bool existed;
4096 const char *name = ggc_strdup (funcname);
4097 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4098 if (!existed)
4099 {
4100 slot->flag = 0;
4101 slot->used = 0;
4102 }
4103 slot->flag |= flag;
4104 }
4105
4106 static bool
4107 mep_lookup_pragma_flag (const char *funcname, int flag)
4108 {
4109 if (!pragma_htab)
4110 return false;
4111
4112 if (funcname[0] == '@' && funcname[2] == '.')
4113 funcname += 3;
4114
4115 pragma_entry *slot = pragma_htab->get (funcname);
4116 if (slot && (slot->flag & flag))
4117 {
4118 slot->used |= flag;
4119 return true;
4120 }
4121 return false;
4122 }
4123
4124 bool
4125 mep_lookup_pragma_call (const char *funcname)
4126 {
4127 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4128 }
4129
4130 void
4131 mep_note_pragma_call (const char *funcname)
4132 {
4133 mep_note_pragma_flag (funcname, FUNC_CALL);
4134 }
4135
4136 bool
4137 mep_lookup_pragma_disinterrupt (const char *funcname)
4138 {
4139 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4140 }
4141
4142 void
4143 mep_note_pragma_disinterrupt (const char *funcname)
4144 {
4145 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4146 }
4147
4148 bool
4149 note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4150 void *)
4151 {
4152 if ((e.flag & FUNC_DISINTERRUPT)
4153 && !(e.used & FUNC_DISINTERRUPT))
4154 warning (0, "\"#pragma disinterrupt %s\" not used", s);
4155 return 1;
4156 }
4157
4158 void
4159 mep_file_cleanups (void)
4160 {
4161 if (pragma_htab)
4162 pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
4163 }
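
/* Usage sketch for the machinery above, assuming the pragma spellings
   handled by mep-pragma.c:

   #pragma call helper
   #pragma disinterrupt isr_helper

   The notes are recorded per function name; a "#pragma disinterrupt"
   whose name is never looked up again is diagnosed by mep_file_cleanups
   at the end of the translation unit.  */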
4164
4165 /* These three functions provide a bridge between the pragmas that
4166 affect register classes, and the functions that maintain them. We
4167 can't call those functions directly as pragma handling is part of
4168 the front end and doesn't have direct access to them. */
4169
4170 void
4171 mep_save_register_info (void)
4172 {
4173 save_register_info ();
4174 }
4175
4176 void
4177 mep_reinit_regs (void)
4178 {
4179 reinit_regs ();
4180 }
4181
4182 void
4183 mep_init_regs (void)
4184 {
4185 init_regs ();
4186 }
4187
4188
4189
4190 static int
4191 mep_attrlist_to_encoding (tree list, tree decl)
4192 {
4193 if (mep_multiple_address_regions (list, false) > 1)
4194 {
4195 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4196 TREE_PURPOSE (TREE_CHAIN (list)),
4197 DECL_NAME (decl),
4198 DECL_SOURCE_LINE (decl));
4199 TREE_CHAIN (list) = NULL_TREE;
4200 }
4201
4202 while (list)
4203 {
4204 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4205 return 'b';
4206 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4207 return 't';
4208 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4209 return 'n';
4210 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4211 return 'f';
4212 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4213 {
4214 if (TREE_VALUE (list)
4215 && TREE_VALUE (TREE_VALUE (list))
4216 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4217 {
4218 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4219 if (location >= 0
4220 && location <= 0x1000000)
4221 return 'i';
4222 }
4223 return 'I';
4224 }
4225 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4226 return 'c';
4227 list = TREE_CHAIN (list);
4228 }
4229 if (TARGET_TF
4230 && TREE_CODE (decl) == FUNCTION_DECL
4231 && DECL_SECTION_NAME (decl) == 0)
4232 return 'f';
4233 return 0;
4234 }
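
/* For example, a declaration carrying __attribute__ ((io (0x123)))
   encodes as 'i' because the address lies in the 0..0x1000000 range,
   while a bare __attribute__ ((io)) encodes as 'I' (io, far).  With
   TARGET_TF, a function with no section name defaults to 'f'.  */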
4235
4236 static int
4237 mep_comp_type_attributes (const_tree t1, const_tree t2)
4238 {
4239 int vliw1, vliw2;
4240
4241 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4242 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4243
4244 if (vliw1 != vliw2)
4245 return 0;
4246
4247 return 1;
4248 }
4249
4250 static void
4251 mep_insert_attributes (tree decl, tree *attributes)
4252 {
4253 int size;
4254 const char *secname = 0;
4255 tree attrib, attrlist;
4256 char encoding;
4257
4258 if (TREE_CODE (decl) == FUNCTION_DECL)
4259 {
4260 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4261
4262 if (mep_lookup_pragma_disinterrupt (funcname))
4263 {
4264 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4265 *attributes = chainon (*attributes, attrib);
4266 }
4267 }
4268
4269 if (TREE_CODE (decl) != VAR_DECL
4270 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4271 return;
4272
4273 if (TREE_READONLY (decl) && TARGET_DC)
4274 /* -mdc means that const variables default to the near section,
4275 regardless of the size cutoff. */
4276 return;
4277
4278 /* The user specified an attribute, so override the default.
4279 Ignore a storage attribute that applies to the pointed-to type: char __far * x; */
4280 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4281 {
4282 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4283 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4284 else if (DECL_ATTRIBUTES (decl) && *attributes)
4285 DECL_ATTRIBUTES (decl) = NULL_TREE;
4286 }
4287
4288 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4289 encoding = mep_attrlist_to_encoding (attrlist, decl);
4290 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4291 {
4292 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4293 encoding = mep_attrlist_to_encoding (attrlist, decl);
4294 }
4295 if (encoding)
4296 {
4297 /* This means that the declaration has a specific section
4298 attribute, so we should not apply the default rules. */
4299
4300 if (encoding == 'i' || encoding == 'I')
4301 {
4302 tree attr = lookup_attribute ("io", attrlist);
4303 if (attr
4304 && TREE_VALUE (attr)
4305 && TREE_VALUE (TREE_VALUE(attr)))
4306 {
4307 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4308 static tree previous_value = 0;
4309 static int previous_location = 0;
4310 static tree previous_name = 0;
4311
4312 /* We take advantage of the fact that gcc will reuse the
4313 same tree pointer when applying an attribute to a
4314 list of decls, but produce a new tree for attributes
4315 on separate source lines, even when they're textually
4316 identical. This is the behavior we want. */
4317 if (TREE_VALUE (attr) == previous_value
4318 && location == previous_location)
4319 {
4320 warning(0, "__io address 0x%x is the same for %qE and %qE",
4321 location, previous_name, DECL_NAME (decl));
4322 }
4323 previous_name = DECL_NAME (decl);
4324 previous_location = location;
4325 previous_value = TREE_VALUE (attr);
4326 }
4327 }
4328 return;
4329 }
4330
4331
4332 /* Declarations of arrays can change size. Don't trust them. */
4333 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4334 size = 0;
4335 else
4336 size = int_size_in_bytes (TREE_TYPE (decl));
4337
4338 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4339 {
4340 if (TREE_PUBLIC (decl)
4341 || DECL_EXTERNAL (decl)
4342 || TREE_STATIC (decl))
4343 {
4344 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4345 int key = 0;
4346
4347 while (*name)
4348 key += *name++;
4349
4350 switch (key & 3)
4351 {
4352 case 0:
4353 secname = "based";
4354 break;
4355 case 1:
4356 secname = "tiny";
4357 break;
4358 case 2:
4359 secname = "far";
4360 break;
4361 default:
4362 ;
4363 }
4364 }
4365 }
4366 else
4367 {
4368 if (size <= mep_based_cutoff && size > 0)
4369 secname = "based";
4370 else if (size <= mep_tiny_cutoff && size > 0)
4371 secname = "tiny";
4372 else if (TARGET_L)
4373 secname = "far";
4374 }
4375
4376 if (mep_const_section && TREE_READONLY (decl))
4377 {
4378 if (strcmp (mep_const_section, "tiny") == 0)
4379 secname = "tiny";
4380 else if (strcmp (mep_const_section, "near") == 0)
4381 return;
4382 else if (strcmp (mep_const_section, "far") == 0)
4383 secname = "far";
4384 }
4385
4386 if (!secname)
4387 return;
4388
4389 if (!mep_multiple_address_regions (*attributes, true)
4390 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4391 {
4392 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4393
4394 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4395 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4396 and mep_validate_based_tiny. */
4397 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4398 }
4399 }
4400
4401 static void
4402 mep_encode_section_info (tree decl, rtx rtl, int first)
4403 {
4404 rtx rtlname;
4405 const char *oldname;
4406 const char *secname;
4407 char encoding;
4408 char *newname;
4409 tree idp;
4410 int maxsize;
4411 tree type;
4412 tree mep_attributes;
4413
4414 if (! first)
4415 return;
4416
4417 if (TREE_CODE (decl) != VAR_DECL
4418 && TREE_CODE (decl) != FUNCTION_DECL)
4419 return;
4420
4421 rtlname = XEXP (rtl, 0);
4422 if (GET_CODE (rtlname) == SYMBOL_REF)
4423 oldname = XSTR (rtlname, 0);
4424 else if (GET_CODE (rtlname) == MEM
4425 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4426 oldname = XSTR (XEXP (rtlname, 0), 0);
4427 else
4428 gcc_unreachable ();
4429
4430 type = TREE_TYPE (decl);
4431 if (type == error_mark_node)
4432 return;
4433 mep_attributes = MEP_ATTRIBUTES (decl);
4434
4435 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4436
4437 if (encoding)
4438 {
4439 newname = (char *) alloca (strlen (oldname) + 4);
4440 sprintf (newname, "@%c.%s", encoding, oldname);
4441 idp = get_identifier (newname);
4442 XEXP (rtl, 0) =
4443 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4444 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4445 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4446
4447 switch (encoding)
4448 {
4449 case 'b':
4450 maxsize = 128;
4451 secname = "based";
4452 break;
4453 case 't':
4454 maxsize = 65536;
4455 secname = "tiny";
4456 break;
4457 case 'n':
4458 maxsize = 0x1000000;
4459 secname = "near";
4460 break;
4461 default:
4462 maxsize = 0;
4463 secname = 0;
4464 break;
4465 }
4466 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4467 {
4468 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4469 oldname,
4470 (long) int_size_in_bytes (TREE_TYPE (decl)),
4471 secname,
4472 maxsize);
4473 }
4474 }
4475 }
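
/* For example, "int x[200] __attribute__ ((based));" renames the symbol
   to "@b.x" and, at 800 bytes (assuming 4-byte int), draws the warning
   above because the based section is limited to 128 bytes.  */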
4476
4477 const char *
4478 mep_strip_name_encoding (const char *sym)
4479 {
4480 while (1)
4481 {
4482 if (*sym == '*')
4483 sym++;
4484 else if (*sym == '@' && sym[2] == '.')
4485 sym += 3;
4486 else
4487 return sym;
4488 }
4489 }
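
/* For example, both "@t.counter" and "*@f.foo" strip down to the bare
   names "counter" and "foo".  */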
4490
4491 static section *
4492 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4493 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4494 {
4495 int readonly = 1;
4496 int encoding;
4497
4498 switch (TREE_CODE (decl))
4499 {
4500 case VAR_DECL:
4501 if (!TREE_READONLY (decl)
4502 || TREE_SIDE_EFFECTS (decl)
4503 || !DECL_INITIAL (decl)
4504 || (DECL_INITIAL (decl) != error_mark_node
4505 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4506 readonly = 0;
4507 break;
4508 case CONSTRUCTOR:
4509 if (! TREE_CONSTANT (decl))
4510 readonly = 0;
4511 break;
4512
4513 default:
4514 break;
4515 }
4516
4517 if (TREE_CODE (decl) == FUNCTION_DECL)
4518 {
4519 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4520
4521 if (name[0] == '@' && name[2] == '.')
4522 encoding = name[1];
4523 else
4524 encoding = 0;
4525
4526 if (flag_function_sections || DECL_COMDAT_GROUP (decl))
4527 mep_unique_section (decl, 0);
4528 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4529 {
4530 if (encoding == 'f')
4531 return vftext_section;
4532 else
4533 return vtext_section;
4534 }
4535 else if (encoding == 'f')
4536 return ftext_section;
4537 else
4538 return text_section;
4539 }
4540
4541 if (TREE_CODE (decl) == VAR_DECL)
4542 {
4543 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4544
4545 if (name[0] == '@' && name[2] == '.')
4546 switch (name[1])
4547 {
4548 case 'b':
4549 return based_section;
4550
4551 case 't':
4552 if (readonly)
4553 return srodata_section;
4554 if (DECL_INITIAL (decl))
4555 return sdata_section;
4556 return tinybss_section;
4557
4558 case 'f':
4559 if (readonly)
4560 return frodata_section;
4561 return far_section;
4562
4563 case 'i':
4564 case 'I':
4565 error_at (DECL_SOURCE_LOCATION (decl),
4566 "variable %D of type %<io%> must be uninitialized", decl);
4567 return data_section;
4568
4569 case 'c':
4570 error_at (DECL_SOURCE_LOCATION (decl),
4571 "variable %D of type %<cb%> must be uninitialized", decl);
4572 return data_section;
4573 }
4574 }
4575
4576 if (readonly)
4577 return readonly_data_section;
4578
4579 return data_section;
4580 }
4581
4582 static void
4583 mep_unique_section (tree decl, int reloc)
4584 {
4585 static const char *prefixes[][2] =
4586 {
4587 { ".text.", ".gnu.linkonce.t." },
4588 { ".rodata.", ".gnu.linkonce.r." },
4589 { ".data.", ".gnu.linkonce.d." },
4590 { ".based.", ".gnu.linkonce.based." },
4591 { ".sdata.", ".gnu.linkonce.s." },
4592 { ".far.", ".gnu.linkonce.far." },
4593 { ".ftext.", ".gnu.linkonce.ft." },
4594 { ".frodata.", ".gnu.linkonce.frd." },
4595 { ".srodata.", ".gnu.linkonce.srd." },
4596 { ".vtext.", ".gnu.linkonce.v." },
4597 { ".vftext.", ".gnu.linkonce.vf." }
4598 };
4599 int sec = 2; /* .data */
4600 int len;
4601 const char *name, *prefix;
4602 char *string;
4603
4604 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4605 if (DECL_RTL (decl))
4606 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4607
4608 if (TREE_CODE (decl) == FUNCTION_DECL)
4609 {
4610 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4611 sec = 9; /* .vtext */
4612 else
4613 sec = 0; /* .text */
4614 }
4615 else if (decl_readonly_section (decl, reloc))
4616 sec = 1; /* .rodata */
4617
4618 if (name[0] == '@' && name[2] == '.')
4619 {
4620 switch (name[1])
4621 {
4622 case 'b':
4623 sec = 3; /* .based */
4624 break;
4625 case 't':
4626 if (sec == 1)
4627 sec = 8; /* .srodata */
4628 else
4629 sec = 4; /* .sdata */
4630 break;
4631 case 'f':
4632 if (sec == 0)
4633 sec = 6; /* .ftext */
4634 else if (sec == 9)
4635 sec = 10; /* .vftext */
4636 else if (sec == 1)
4637 sec = 7; /* .frodata */
4638 else
4639 sec = 5; /* .far. */
4640 break;
4641 }
4642 name += 3;
4643 }
4644
4645 prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
4646 len = strlen (name) + strlen (prefix);
4647 string = (char *) alloca (len + 1);
4648
4649 sprintf (string, "%s%s", prefix, name);
4650
4651 set_decl_section_name (decl, string);
4652 }
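
/* For example, a non-COMDAT VLIW function whose symbol is "@f.f" lands
   in ".vftext.f"; in a COMDAT group it would use ".gnu.linkonce.vf.f"
   instead.  */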
4653
4654 /* Given a decl, a section name, and whether the decl initializer
4655 has relocs, choose attributes for the section. */
4656
4657 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4658
4659 static unsigned int
4660 mep_section_type_flags (tree decl, const char *name, int reloc)
4661 {
4662 unsigned int flags = default_section_type_flags (decl, name, reloc);
4663
4664 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4665 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4666 flags |= SECTION_MEP_VLIW;
4667
4668 return flags;
4669 }
4670
4671 /* Switch to an arbitrary section NAME with attributes as specified
4672 by FLAGS. ALIGN specifies any known alignment requirements for
4673 the section; 0 if the default should be used.
4674
4675 Differs from the standard ELF version only in support of VLIW mode. */
4676
4677 static void
4678 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4679 {
4680 char flagchars[8], *f = flagchars;
4681 const char *type;
4682
4683 if (!(flags & SECTION_DEBUG))
4684 *f++ = 'a';
4685 if (flags & SECTION_WRITE)
4686 *f++ = 'w';
4687 if (flags & SECTION_CODE)
4688 *f++ = 'x';
4689 if (flags & SECTION_SMALL)
4690 *f++ = 's';
4691 if (flags & SECTION_MEP_VLIW)
4692 *f++ = 'v';
4693 *f = '\0';
4694
4695 if (flags & SECTION_BSS)
4696 type = "nobits";
4697 else
4698 type = "progbits";
4699
4700 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4701 name, flagchars, type);
4702
4703 if (flags & SECTION_CODE)
4704 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4705 asm_out_file);
4706 }
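
/* For example, a VLIW code section named ".vtext.f" comes out roughly as:

   .section .vtext.f,"axv",@progbits
   .vliw
   */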
4707
4708 void
4709 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4710 int size, int align, int global)
4711 {
4712 /* We intentionally don't use mep_section_tag() here. */
4713 if (name[0] == '@'
4714 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4715 && name[2] == '.')
4716 {
4717 int location = -1;
4718 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4719 DECL_ATTRIBUTES (decl));
4720 if (attr
4721 && TREE_VALUE (attr)
4722 && TREE_VALUE (TREE_VALUE(attr)))
4723 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4724 if (location == -1)
4725 return;
4726 if (global)
4727 {
4728 fprintf (stream, "\t.globl\t");
4729 assemble_name (stream, name);
4730 fprintf (stream, "\n");
4731 }
4732 assemble_name (stream, name);
4733 fprintf (stream, " = %d\n", location);
4734 return;
4735 }
4736 if (name[0] == '@' && name[2] == '.')
4737 {
4738 const char *sec = 0;
4739 switch (name[1])
4740 {
4741 case 'b':
4742 switch_to_section (based_section);
4743 sec = ".based";
4744 break;
4745 case 't':
4746 switch_to_section (tinybss_section);
4747 sec = ".sbss";
4748 break;
4749 case 'f':
4750 switch_to_section (farbss_section);
4751 sec = ".farbss";
4752 break;
4753 }
4754 if (sec)
4755 {
4756 const char *name2;
4757 int p2align = 0;
4758
4759 while (align > BITS_PER_UNIT)
4760 {
4761 align /= 2;
4762 p2align ++;
4763 }
4764 name2 = targetm.strip_name_encoding (name);
4765 if (global)
4766 fprintf (stream, "\t.globl\t%s\n", name2);
4767 fprintf (stream, "\t.p2align %d\n", p2align);
4768 fprintf (stream, "\t.type\t%s,@object\n", name2);
4769 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4770 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4771 return;
4772 }
4773 }
4774
4775 if (!global)
4776 {
4777 fprintf (stream, "\t.local\t");
4778 assemble_name (stream, name);
4779 fprintf (stream, "\n");
4780 }
4781 fprintf (stream, "\t.comm\t");
4782 assemble_name (stream, name);
4783 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4784 }
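
/* For example, a global "@i.status" declared with io (0x100) is emitted
   roughly as:

   .globl @i.status
   @i.status = 256

   whereas "@t.buf" is placed in the tiny bss section with explicit
   .p2align/.type/.size/.zero directives, and anything unencoded falls
   back to .comm.  */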
4785
4786 /* Trampolines. */
4787
4788 static void
4789 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4790 {
4791 rtx addr = XEXP (m_tramp, 0);
4792 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4793
4794 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4795 LCT_NORMAL, VOIDmode, 3,
4796 addr, Pmode,
4797 fnaddr, Pmode,
4798 static_chain, Pmode);
4799 }
4800
4801 /* Experimental Reorg. */
4802
4803 static bool
4804 mep_mentioned_p (rtx in,
4805 rtx reg, /* NULL for mem */
4806 int modes_too) /* if nonzero, modes must match also. */
4807 {
4808 const char *fmt;
4809 int i;
4810 enum rtx_code code;
4811
4812 if (in == 0)
4813 return false;
4814 if (reg && GET_CODE (reg) != REG)
4815 return false;
4816
4817 if (GET_CODE (in) == LABEL_REF)
4818 return (reg == 0);
4819
4820 code = GET_CODE (in);
4821
4822 switch (code)
4823 {
4824 case MEM:
4825 if (reg)
4826 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4827 return true;
4828
4829 case REG:
4830 if (!reg)
4831 return false;
4832 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4833 return false;
4834 return (REGNO (in) == REGNO (reg));
4835
4836 case SCRATCH:
4837 case CC0:
4838 case PC:
4839 case CONST_INT:
4840 case CONST_DOUBLE:
4841 return false;
4842
4843 default:
4844 break;
4845 }
4846
4847 /* A SET's source is only read; when looking for memory writes (reg is null), check only the destination. */
4848 if (code == SET && !reg)
4849 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4850
4851 fmt = GET_RTX_FORMAT (code);
4852
4853 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4854 {
4855 if (fmt[i] == 'E')
4856 {
4857 register int j;
4858 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4859 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4860 return true;
4861 }
4862 else if (fmt[i] == 'e'
4863 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4864 return true;
4865 }
4866 return false;
4867 }
4868
4869 #define EXPERIMENTAL_REGMOVE_REORG 1
4870
4871 #if EXPERIMENTAL_REGMOVE_REORG
4872
4873 static int
4874 mep_compatible_reg_class (int r1, int r2)
4875 {
4876 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4877 return 1;
4878 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4879 return 1;
4880 return 0;
4881 }
4882
4883 static void
4884 mep_reorg_regmove (rtx_insn *insns)
4885 {
4886 rtx_insn *insn, *next, *follow;
4887 rtx pat, *where;
4888 int count = 0, done = 0, replace, before = 0;
4889
4890 if (dump_file)
4891 for (insn = insns; insn; insn = NEXT_INSN (insn))
4892 if (NONJUMP_INSN_P (insn))
4893 before++;
4894
4895 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4896 set that uses r2 and in which r2 dies. We replace r2 with r1
4897 and see if it's still a valid insn. If so, delete the first set.
4898 Copied from reorg.c. */
4899
4900 while (!done)
4901 {
4902 done = 1;
4903 for (insn = insns; insn; insn = next)
4904 {
4905 next = next_nonnote_nondebug_insn (insn);
4906 if (! NONJUMP_INSN_P (insn))
4907 continue;
4908 pat = PATTERN (insn);
4909
4910 replace = 0;
4911
4912 if (GET_CODE (pat) == SET
4913 && GET_CODE (SET_SRC (pat)) == REG
4914 && GET_CODE (SET_DEST (pat)) == REG
4915 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4916 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4917 {
4918 follow = next_nonnote_nondebug_insn (insn);
4919 if (dump_file)
4920 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4921
4922 while (follow && NONJUMP_INSN_P (follow)
4923 && GET_CODE (PATTERN (follow)) == SET
4924 && !dead_or_set_p (follow, SET_SRC (pat))
4925 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4926 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4927 {
4928 if (dump_file)
4929 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4930 follow = next_nonnote_insn (follow);
4931 }
4932
4933 if (dump_file && follow)
4934 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4935 if (follow && NONJUMP_INSN_P (follow)
4936 && GET_CODE (PATTERN (follow)) == SET
4937 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4938 {
4939 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4940 {
4941 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4942 {
4943 replace = 1;
4944 where = & SET_SRC (PATTERN (follow));
4945 }
4946 }
4947 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4948 {
4949 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4950 {
4951 replace = 1;
4952 where = & PATTERN (follow);
4953 }
4954 }
4955 }
4956 }
4957
4958 /* If so, FOLLOW is the corresponding insn. */
4959 if (replace)
4960 {
4961 if (dump_file)
4962 {
4963 rtx_insn *x;
4964
4965 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4966 for (x = insn; x ;x = NEXT_INSN (x))
4967 {
4968 print_rtl_single (dump_file, x);
4969 if (x == follow)
4970 break;
4971 fprintf (dump_file, "\n");
4972 }
4973 }
4974
4975 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4976 follow, where))
4977 {
4978 count ++;
4979 delete_insn (insn);
4980 if (dump_file)
4981 {
4982 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4983 print_rtl_single (dump_file, follow);
4984 }
4985 done = 0;
4986 }
4987 }
4988 }
4989 }
4990
4991 if (dump_file)
4992 {
4993 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4994 fprintf (dump_file, "=====\n");
4995 }
4996 }
4997 #endif
4998
4999
5000 /* Figure out where to put LABEL, which is the label for a repeat loop.
5001 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5002 the loop ends just before LAST_INSN. If SHARED, insns other than the
5003 "repeat" might use LABEL to jump to the loop's continuation point.
5004
5005 Return the last instruction in the adjusted loop. */
5006
5007 static rtx_insn *
5008 mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
5009 bool including, bool shared)
5010 {
5011 rtx_insn *next, *prev;
5012 int count = 0, code, icode;
5013
5014 if (dump_file)
5015 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5016 INSN_UID (last_insn));
5017
5018 /* Set PREV to the last insn in the loop. */
5019 prev = last_insn;
5020 if (!including)
5021 prev = PREV_INSN (prev);
5022
5023 /* Set NEXT to the next insn after the repeat label. */
5024 next = last_insn;
5025 if (!shared)
5026 while (prev != 0)
5027 {
5028 code = GET_CODE (prev);
5029 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5030 break;
5031
5032 if (INSN_P (prev))
5033 {
5034 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5035 prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));
5036
5037 /* Other insns that should not be in the last two opcodes. */
5038 icode = recog_memoized (prev);
5039 if (icode < 0
5040 || icode == CODE_FOR_repeat
5041 || icode == CODE_FOR_erepeat
5042 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5043 break;
5044
5045 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5046 is the second instruction in a VLIW bundle. In that case,
5047 loop again: if the first instruction also satisfies the
5048 conditions above then we will reach here again and put
5049 both of them into the repeat epilogue. Otherwise both
5050 should remain outside. */
5051 if (GET_MODE (prev) != BImode)
5052 {
5053 count++;
5054 next = prev;
5055 if (dump_file)
5056 print_rtl_single (dump_file, next);
5057 if (count == 2)
5058 break;
5059 }
5060 }
5061 prev = PREV_INSN (prev);
5062 }
5063
5064 /* See if we're adding the label immediately after the repeat insn.
5065 If so, we need to separate them with a nop. */
5066 prev = prev_real_insn (next);
5067 if (prev)
5068 switch (recog_memoized (prev))
5069 {
5070 case CODE_FOR_repeat:
5071 case CODE_FOR_erepeat:
5072 if (dump_file)
5073 fprintf (dump_file, "Adding nop inside loop\n");
5074 emit_insn_before (gen_nop (), next);
5075 break;
5076
5077 default:
5078 break;
5079 }
5080
5081 /* Insert the label. */
5082 emit_label_before (label, next);
5083
5084 /* Insert the nops. */
5085 if (dump_file && count < 2)
5086 fprintf (dump_file, "Adding %d nop%s\n\n",
5087 2 - count, count == 1 ? "" : "s");
5088
5089 for (; count < 2; count++)
5090 if (including)
5091 last_insn = emit_insn_after (gen_nop (), last_insn);
5092 else
5093 emit_insn_before (gen_nop (), last_insn);
5094
5095 return last_insn;
5096 }
5097
5098
5099 void
5100 mep_emit_doloop (rtx *operands, int is_end)
5101 {
5102 rtx tag;
5103
5104 if (cfun->machine->doloop_tags == 0
5105 || cfun->machine->doloop_tag_from_end == is_end)
5106 {
5107 cfun->machine->doloop_tags++;
5108 cfun->machine->doloop_tag_from_end = is_end;
5109 }
5110
5111 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5112 if (is_end)
5113 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
5114 else
5115 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5116 }
5117
5118
5119 /* Code for converting doloop_begins and doloop_ends into valid
5120 MeP instructions. A doloop_begin is just a placeholder:
5121
5122 $count = unspec ($count)
5123
5124 where $count is initially the number of iterations - 1.
5125 doloop_end has the form:
5126
5127 if ($count-- == 0) goto label
5128
5129 The counter variable is private to the doloop insns, nothing else
5130 relies on its value.
5131
5132 There are three cases, in decreasing order of preference:
5133
5134 1. A loop has exactly one doloop_begin and one doloop_end.
5135 The doloop_end branches to the first instruction after
5136 the doloop_begin.
5137
5138 In this case we can replace the doloop_begin with a repeat
5139 instruction and remove the doloop_end. I.e.:
5140
5141 $count1 = unspec ($count1)
5142 label:
5143 ...
5144 insn1
5145 insn2
5146 if ($count2-- == 0) goto label
5147
5148 becomes:
5149
5150 repeat $count1,repeat_label
5151 label:
5152 ...
5153 repeat_label:
5154 insn1
5155 insn2
5156 # end repeat
5157
5158 2. As for (1), except there are several doloop_ends. One of them
5159 (call it X) falls through to a label L. All the others fall
5160 through to branches to L.
5161
5162 In this case, we remove X and replace the other doloop_ends
5163 with branches to the repeat label. For example:
5164
5165 $count1 = unspec ($count1)
5166 start:
5167 ...
5168 if ($count2-- == 0) goto label
5169 end:
5170 ...
5171 if ($count3-- == 0) goto label
5172 goto end
5173
5174 becomes:
5175
5176 repeat $count1,repeat_label
5177 start:
5178 ...
5179 repeat_label:
5180 nop
5181 nop
5182 # end repeat
5183 end:
5184 ...
5185 goto repeat_label
5186
5187 3. The fallback case. Replace doloop_begins with:
5188
5189 $count = $count + 1
5190
5191 Replace doloop_ends with the equivalent of:
5192
5193 $count = $count - 1
5194 if ($count == 0) goto label
5195
5196 Note that this might need a scratch register if $count
5197 is stored in memory. */
5198
5199 /* A structure describing one doloop_begin. */
5200 struct mep_doloop_begin {
5201 /* The next doloop_begin with the same tag. */
5202 struct mep_doloop_begin *next;
5203
5204 /* The instruction itself. */
5205 rtx_insn *insn;
5206
5207 /* The initial counter value. This is known to be a general register. */
5208 rtx counter;
5209 };
5210
5211 /* A structure describing a doloop_end. */
5212 struct mep_doloop_end {
5213 /* The next doloop_end with the same loop tag. */
5214 struct mep_doloop_end *next;
5215
5216 /* The instruction itself. */
5217 rtx_insn *insn;
5218
5219 /* The first instruction after INSN when the branch isn't taken. */
5220 rtx_insn *fallthrough;
5221
5222 /* The location of the counter value. Since doloop_end_internal is a
5223 jump instruction, it has to allow the counter to be stored anywhere
5224 (any non-fixed register or memory location). */
5225 rtx counter;
5226
5227 /* The target label (the place where the insn branches when the counter
5228 isn't zero). */
5229 rtx label;
5230
5231 /* A scratch register. Only available when COUNTER isn't stored
5232 in a general register. */
5233 rtx scratch;
5234 };
5235
5236
5237 /* One do-while loop. */
5238 struct mep_doloop {
5239 /* All the doloop_begins for this loop (in no particular order). */
5240 struct mep_doloop_begin *begin;
5241
5242 /* All the doloop_ends. When there is more than one, arrange things
5243 so that the first one is the most likely to be X in case (2) above. */
5244 struct mep_doloop_end *end;
5245 };
5246
5247
5248 /* Return true if LOOP can be converted into repeat/repeat_end form
5249 (that is, if it matches cases (1) or (2) above). */
5250
5251 static bool
5252 mep_repeat_loop_p (struct mep_doloop *loop)
5253 {
5254 struct mep_doloop_end *end;
5255 rtx fallthrough;
5256
5257 /* There must be exactly one doloop_begin and at least one doloop_end. */
5258 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5259 return false;
5260
5261 /* The first doloop_end (X) must branch back to the insn after
5262 the doloop_begin. */
5263 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5264 return false;
5265
5266 /* All the other doloop_ends must branch to the same place as X.
5267 When the branch isn't taken, they must jump to the instruction
5268 after X. */
5269 fallthrough = loop->end->fallthrough;
5270 for (end = loop->end->next; end != 0; end = end->next)
5271 if (end->label != loop->end->label
5272 || !simplejump_p (end->fallthrough)
5273 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5274 return false;
5275
5276 return true;
5277 }
5278
5279
5280 /* The main repeat reorg function. See comment above for details. */
5281
5282 static void
5283 mep_reorg_repeat (rtx_insn *insns)
5284 {
5285 rtx_insn *insn;
5286 struct mep_doloop *loops, *loop;
5287 struct mep_doloop_begin *begin;
5288 struct mep_doloop_end *end;
5289
5290 /* Quick exit if we haven't created any loops. */
5291 if (cfun->machine->doloop_tags == 0)
5292 return;
5293
5294 /* Create an array of mep_doloop structures. */
5295 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5296 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5297
5298 /* Search the function for do-while insns and group them by loop tag. */
5299 for (insn = insns; insn; insn = NEXT_INSN (insn))
5300 if (INSN_P (insn))
5301 switch (recog_memoized (insn))
5302 {
5303 case CODE_FOR_doloop_begin_internal:
5304 insn_extract (insn);
5305 loop = &loops[INTVAL (recog_data.operand[2])];
5306
5307 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5308 begin->next = loop->begin;
5309 begin->insn = insn;
5310 begin->counter = recog_data.operand[0];
5311
5312 loop->begin = begin;
5313 break;
5314
5315 case CODE_FOR_doloop_end_internal:
5316 insn_extract (insn);
5317 loop = &loops[INTVAL (recog_data.operand[2])];
5318
5319 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5320 end->insn = insn;
5321 end->fallthrough = next_real_insn (insn);
5322 end->counter = recog_data.operand[0];
5323 end->label = recog_data.operand[1];
5324 end->scratch = recog_data.operand[3];
5325
5326 /* If this insn falls through to an unconditional jump,
5327 give it a lower priority than the others. */
5328 if (loop->end != 0 && simplejump_p (end->fallthrough))
5329 {
5330 end->next = loop->end->next;
5331 loop->end->next = end;
5332 }
5333 else
5334 {
5335 end->next = loop->end;
5336 loop->end = end;
5337 }
5338 break;
5339 }
5340
5341 /* Convert the insns for each loop in turn. */
5342 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5343 if (mep_repeat_loop_p (loop))
5344 {
5345 /* Case (1) or (2). */
5346 rtx_code_label *repeat_label;
5347 rtx label_ref;
5348
5349 /* Create a new label for the repeat insn. */
5350 repeat_label = gen_label_rtx ();
5351
5352 /* Replace the doloop_begin with a repeat. */
5353 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5354 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5355 loop->begin->insn);
5356 delete_insn (loop->begin->insn);
5357
5358 /* Insert the repeat label before the first doloop_end.
5359 Fill the gap with nops if there are other doloop_ends. */
5360 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5361 false, loop->end->next != 0);
5362
5363 /* Emit a repeat_end (to improve the readability of the output). */
5364 emit_insn_before (gen_repeat_end (), loop->end->insn);
5365
5366 /* Delete the first doloop_end. */
5367 delete_insn (loop->end->insn);
5368
5369 /* Replace the others with branches to REPEAT_LABEL. */
5370 for (end = loop->end->next; end != 0; end = end->next)
5371 {
5372 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5373 delete_insn (end->insn);
5374 delete_insn (end->fallthrough);
5375 }
5376 }
5377 else
5378 {
5379 /* Case (3). First replace all the doloop_begins with increment
5380 instructions. */
5381 for (begin = loop->begin; begin != 0; begin = begin->next)
5382 {
5383 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5384 begin->counter, const1_rtx),
5385 begin->insn);
5386 delete_insn (begin->insn);
5387 }
5388
5389 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5390 for (end = loop->end; end != 0; end = end->next)
5391 {
5392 rtx reg;
5393
5394 start_sequence ();
5395
5396 /* Load the counter value into a general register. */
5397 reg = end->counter;
5398 if (!REG_P (reg) || REGNO (reg) > 15)
5399 {
5400 reg = end->scratch;
5401 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5402 }
5403
5404 /* Decrement the counter. */
5405 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5406 constm1_rtx));
5407
5408 /* Copy it back to its original location. */
5409 if (reg != end->counter)
5410 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5411
5412 /* Jump back to the start label. */
5413 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5414 end->label));
5415 JUMP_LABEL (insn) = end->label;
5416 LABEL_NUSES (end->label)++;
5417
5418 /* Emit the whole sequence before the doloop_end. */
5419 insn = get_insns ();
5420 end_sequence ();
5421 emit_insn_before (insn, end->insn);
5422
5423 /* Delete the doloop_end. */
5424 delete_insn (end->insn);
5425 }
5426 }
5427 }
5428
5429
5430 static bool
5431 mep_invertable_branch_p (rtx_insn *insn)
5432 {
5433 rtx cond, set;
5434 enum rtx_code old_code;
5435 int i;
5436
5437 set = PATTERN (insn);
5438 if (GET_CODE (set) != SET)
5439 return false;
5440 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5441 return false;
5442 cond = XEXP (XEXP (set, 1), 0);
5443 old_code = GET_CODE (cond);
5444 switch (old_code)
5445 {
5446 case EQ:
5447 PUT_CODE (cond, NE);
5448 break;
5449 case NE:
5450 PUT_CODE (cond, EQ);
5451 break;
5452 case LT:
5453 PUT_CODE (cond, GE);
5454 break;
5455 case GE:
5456 PUT_CODE (cond, LT);
5457 break;
5458 default:
5459 return false;
5460 }
5461 INSN_CODE (insn) = -1;
5462 i = recog_memoized (insn);
5463 PUT_CODE (cond, old_code);
5464 INSN_CODE (insn) = -1;
5465 return i >= 0;
5466 }
5467
5468 static void
5469 mep_invert_branch (rtx_insn *insn, rtx_insn *after)
5470 {
5471 rtx cond, set, label;
5472 int i;
5473
5474 set = PATTERN (insn);
5475
5476 gcc_assert (GET_CODE (set) == SET);
5477 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5478
5479 cond = XEXP (XEXP (set, 1), 0);
5480 switch (GET_CODE (cond))
5481 {
5482 case EQ:
5483 PUT_CODE (cond, NE);
5484 break;
5485 case NE:
5486 PUT_CODE (cond, EQ);
5487 break;
5488 case LT:
5489 PUT_CODE (cond, GE);
5490 break;
5491 case GE:
5492 PUT_CODE (cond, LT);
5493 break;
5494 default:
5495 gcc_unreachable ();
5496 }
5497 label = gen_label_rtx ();
5498 emit_label_after (label, after);
5499 for (i=1; i<=2; i++)
5500 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5501 {
5502 rtx ref = XEXP (XEXP (set, 1), i);
5503 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5504 delete_insn (XEXP (ref, 0));
5505 XEXP (ref, 0) = label;
5506 LABEL_NUSES (label) ++;
5507 JUMP_LABEL (insn) = label;
5508 }
5509 INSN_CODE (insn) = -1;
5510 i = recog_memoized (insn);
5511 gcc_assert (i >= 0);
5512 }
5513
5514 static void
5515 mep_reorg_erepeat (rtx_insn *insns)
5516 {
5517 rtx_insn *insn, *prev;
5518 rtx_code_label *l;
5519 rtx x;
5520 int count;
5521
5522 for (insn = insns; insn; insn = NEXT_INSN (insn))
5523 if (JUMP_P (insn)
5524 && mep_invertable_branch_p (insn))
5525 {
5526 if (dump_file)
5527 {
5528 fprintf (dump_file, "\n------------------------------\n");
5529 fprintf (dump_file, "erepeat: considering this jump:\n");
5530 print_rtl_single (dump_file, insn);
5531 }
5532 count = simplejump_p (insn) ? 0 : 1;
5533 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5534 {
5535 if (CALL_P (prev) || BARRIER_P (prev))
5536 break;
5537
5538 if (prev == JUMP_LABEL (insn))
5539 {
5540 rtx_insn *newlast;
5541 if (dump_file)
5542 fprintf (dump_file, "found loop top, %d insns\n", count);
5543
5544 if (LABEL_NUSES (prev) == 1)
5545 /* We're the only user, always safe */ ;
5546 else if (LABEL_NUSES (prev) == 2)
5547 {
5548 /* See if there's a barrier before this label. If
5549 so, we know nobody inside the loop uses it.
5550 But we must be careful to put the erepeat
5551 *after* the label. */
5552 rtx_insn *barrier;
5553 for (barrier = PREV_INSN (prev);
5554 barrier && NOTE_P (barrier);
5555 barrier = PREV_INSN (barrier))
5556 ;
5557 if (barrier && ! BARRIER_P (barrier))
5558 break;
5559 }
5560 else
5561 {
5562 /* We don't know who else, inside or outside our loop, uses this label. */
5563 if (dump_file)
5564 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5565 break;
5566 }
5567
5568 /* Generate a label to be used by the erepeat insn. */
5569 l = gen_label_rtx ();
5570
5571 /* Insert the erepeat after INSN's target label. */
5572 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5573 LABEL_NUSES (l)++;
5574 emit_insn_after (x, prev);
5575
5576 /* Insert the erepeat label. */
5577 newlast = (mep_insert_repeat_label_last
5578 (insn, l, !simplejump_p (insn), false));
5579 if (simplejump_p (insn))
5580 {
5581 emit_insn_before (gen_erepeat_end (), insn);
5582 delete_insn (insn);
5583 }
5584 else
5585 {
5586 mep_invert_branch (insn, newlast);
5587 emit_insn_after (gen_erepeat_end (), newlast);
5588 }
5589 break;
5590 }
5591
5592 if (LABEL_P (prev))
5593 {
5594 /* A label is OK if there is exactly one user, and we
5595 can find that user before the next label. */
5596 rtx_insn *user = 0;
5597 int safe = 0;
5598 if (LABEL_NUSES (prev) == 1)
5599 {
5600 for (user = PREV_INSN (prev);
5601 user && (INSN_P (user) || NOTE_P (user));
5602 user = PREV_INSN (user))
5603 if (JUMP_P (user) && JUMP_LABEL (user) == prev)
5604 {
5605 safe = INSN_UID (user);
5606 break;
5607 }
5608 }
5609 if (!safe)
5610 break;
5611 if (dump_file)
5612 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5613 safe, INSN_UID (prev));
5614 }
5615
5616 if (INSN_P (prev))
5617 {
5618 count ++;
5619 }
5620 }
5621 }
5622 if (dump_file)
5623 fprintf (dump_file, "\n==============================\n");
5624 }
5625
5626 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5627 always do this on its own. */
5628
5629 static void
5630 mep_jmp_return_reorg (rtx_insn *insns)
5631 {
5632 rtx_insn *insn, *label, *ret;
5633 int ret_code;
5634
5635 for (insn = insns; insn; insn = NEXT_INSN (insn))
5636 if (simplejump_p (insn))
5637 {
5638 /* Find the first real insn the jump jumps to. */
5639 label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
5640 while (ret
5641 && (NOTE_P (ret)
5642 || LABEL_P (ret)
5643 || GET_CODE (PATTERN (ret)) == USE))
5644 ret = NEXT_INSN (ret);
5645
5646 if (ret)
5647 {
5648 /* Is it a return? */
5649 ret_code = recog_memoized (ret);
5650 if (ret_code == CODE_FOR_return_internal
5651 || ret_code == CODE_FOR_eh_return_internal)
5652 {
5653 /* It is. Replace the jump with a return. */
5654 LABEL_NUSES (label) --;
5655 if (LABEL_NUSES (label) == 0)
5656 delete_insn (label);
5657 PATTERN (insn) = copy_rtx (PATTERN (ret));
5658 INSN_CODE (insn) = -1;
5659 }
5660 }
5661 }
5662 }
5663
5664
5665 static void
5666 mep_reorg_addcombine (rtx_insn *insns)
5667 {
5668 rtx_insn *i, *n;
5669
5670 for (i = insns; i; i = NEXT_INSN (i))
5671 if (INSN_P (i)
5672 && INSN_CODE (i) == CODE_FOR_addsi3
5673 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5674 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5675 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5676 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5677 {
5678 n = NEXT_INSN (i);
5679 if (INSN_P (n)
5680 && INSN_CODE (n) == CODE_FOR_addsi3
5681 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5682 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5683 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5684 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5685 {
5686 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5687 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5688 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5689 && ic + nc < 32767
5690 && ic + nc > -32768)
5691 {
5692 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5693 SET_NEXT_INSN (i) = NEXT_INSN (n);
5694 if (NEXT_INSN (i))
5695 SET_PREV_INSN (NEXT_INSN (i)) = i;
5696 }
5697 }
5698 }
5699 }
5700
5701 /* If this insn adjusts the stack, return the adjustment, else return
5702 zero. */
5703 static int
5704 add_sp_insn_p (rtx_insn *insn)
5705 {
5706 rtx pat;
5707
5708 if (! single_set (insn))
5709 return 0;
5710 pat = PATTERN (insn);
5711 if (GET_CODE (SET_DEST (pat)) != REG)
5712 return 0;
5713 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5714 return 0;
5715 if (GET_CODE (SET_SRC (pat)) != PLUS)
5716 return 0;
5717 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5718 return 0;
5719 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5720 return 0;
5721 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5722 return 0;
5723 return INTVAL (XEXP (SET_SRC (pat), 1));
5724 }
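
/* For example, the prologue insn (set (reg sp) (plus (reg sp)
   (const_int -16))) yields -16; any insn that is not a single
   $sp = $sp + constant set yields 0.  */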
5725
5726 /* Check for trivial functions that set up an unneeded stack
5727 frame. */
5728 static void
5729 mep_reorg_noframe (rtx_insn *insns)
5730 {
5731 rtx_insn *start_frame_insn;
5732 rtx_insn *end_frame_insn = 0;
5733 int sp_adjust, sp2;
5734 rtx sp;
5735
5736 /* The first insn should be $sp = $sp + N */
5737 while (insns && ! INSN_P (insns))
5738 insns = NEXT_INSN (insns);
5739 if (!insns)
5740 return;
5741
5742 sp_adjust = add_sp_insn_p (insns);
5743 if (sp_adjust == 0)
5744 return;
5745
5746 start_frame_insn = insns;
5747 sp = SET_DEST (PATTERN (start_frame_insn));
5748
5749 insns = next_real_insn (insns);
5750
5751 while (insns)
5752 {
5753 rtx_insn *next = next_real_insn (insns);
5754 if (!next)
5755 break;
5756
5757 sp2 = add_sp_insn_p (insns);
5758 if (sp2)
5759 {
5760 if (end_frame_insn)
5761 return;
5762 end_frame_insn = insns;
5763 if (sp2 != -sp_adjust)
5764 return;
5765 }
5766 else if (mep_mentioned_p (insns, sp, 0))
5767 return;
5768 else if (CALL_P (insns))
5769 return;
5770
5771 insns = next;
5772 }
5773
5774 if (end_frame_insn)
5775 {
5776 delete_insn (start_frame_insn);
5777 delete_insn (end_frame_insn);
5778 }
5779 }
5780
5781 static void
5782 mep_reorg (void)
5783 {
5784 rtx_insn *insns = get_insns ();
5785
5786 /* We require accurate REG_DEAD notes. */
5787 compute_bb_for_insn ();
5788 df_note_add_problem ();
5789 df_analyze ();
5790
5791 mep_reorg_addcombine (insns);
5792 #if EXPERIMENTAL_REGMOVE_REORG
5793 /* VLIW packing has been done already, so we can't just delete things. */
5794 if (!mep_vliw_function_p (cfun->decl))
5795 mep_reorg_regmove (insns);
5796 #endif
5797 mep_jmp_return_reorg (insns);
5798 mep_bundle_insns (insns);
5799 mep_reorg_repeat (insns);
5800 if (optimize
5801 && !profile_flag
5802 && !profile_arc_flag
5803 && TARGET_OPT_REPEAT
5804 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5805 mep_reorg_erepeat (insns);
5806
5807 /* This may delete the insn that INSNS points to, so make sure it runs last. */
5808 mep_reorg_noframe (insns);
5809
5810 df_finish_pass (false);
5811 }
5812
5813 \f
5814
5815 /*----------------------------------------------------------------------*/
5816 /* Builtins */
5817 /*----------------------------------------------------------------------*/
5818
5819 /* Element X gives the index into cgen_insns[] of the most general
5820 implementation of intrinsic X. Unimplemented intrinsics are
5821 mapped to -1. */
5822 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5823
5824 /* Element X gives the index of another instruction that is mapped to
5825 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5826 instruction.
5827
5828 Things are set up so that mep_intrinsic_chain[X] < X. */
5829 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5830
5831 /* The bitmask for the current ISA. The ISA masks are declared
5832 in mep-intrin.h. */
5833 unsigned int mep_selected_isa;
5834
5835 struct mep_config {
5836 const char *config_name;
5837 unsigned int isa;
5838 };
5839
5840 static struct mep_config mep_configs[] = {
5841 #ifdef COPROC_SELECTION_TABLE
5842 COPROC_SELECTION_TABLE,
5843 #endif
5844 { 0, 0 }
5845 };
5846
5847 /* Initialize the global intrinsics variables above. */
5848
5849 static void
5850 mep_init_intrinsics (void)
5851 {
5852 size_t i;
5853
5854 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5855 mep_selected_isa = mep_configs[0].isa;
5856 if (mep_config_string != 0)
5857 for (i = 0; mep_configs[i].config_name; i++)
5858 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5859 {
5860 mep_selected_isa = mep_configs[i].isa;
5861 break;
5862 }
5863
5864 /* Assume all intrinsics are unavailable. */
5865 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5866 mep_intrinsic_insn[i] = -1;
5867
5868 /* Build up the global intrinsic tables. */
5869 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5870 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5871 {
5872 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5873 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5874 }
5875 /* See whether we can directly move values between one coprocessor
5876 register and another. */
5877 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5878 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5879 mep_have_copro_copro_moves_p = true;
5880
5881 /* See whether we can directly move values between core and
5882 coprocessor registers. */
5883 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5884 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5885
5886 mep_have_core_copro_moves_p = 1;
5887 }
5888
5889 /* Declare all available intrinsic functions. Called once only. */
5890
5891 static tree cp_data_bus_int_type_node;
5892 static tree opaque_vector_type_node;
5893 static tree v8qi_type_node;
5894 static tree v4hi_type_node;
5895 static tree v2si_type_node;
5896 static tree v8uqi_type_node;
5897 static tree v4uhi_type_node;
5898 static tree v2usi_type_node;
5899
5900 static tree
5901 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5902 {
5903 switch (cr)
5904 {
5905 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5906 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5907 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5908 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5909 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5910 case cgen_regnum_operand_type_CHAR: return char_type_node;
5911 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5912 case cgen_regnum_operand_type_SI: return intSI_type_node;
5913 case cgen_regnum_operand_type_DI: return intDI_type_node;
5914 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5915 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5916 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5917 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5918 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5919 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5920 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5921 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5922 default:
5923 return void_type_node;
5924 }
5925 }
5926
5927 static void
5928 mep_init_builtins (void)
5929 {
5930 size_t i;
5931
5932 if (TARGET_64BIT_CR_REGS)
5933 cp_data_bus_int_type_node = long_long_integer_type_node;
5934 else
5935 cp_data_bus_int_type_node = long_integer_type_node;
5936
5937 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5938 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5939 v4hi_type_node = build_vector_type (intHI_type_node, 4);
5940 v2si_type_node = build_vector_type (intSI_type_node, 2);
5941 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5942 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5943 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
5944
5945 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);
5946
5947 add_builtin_type ("cp_vector", opaque_vector_type_node);
5948
5949 add_builtin_type ("cp_v8qi", v8qi_type_node);
5950 add_builtin_type ("cp_v4hi", v4hi_type_node);
5951 add_builtin_type ("cp_v2si", v2si_type_node);
5952
5953 add_builtin_type ("cp_v8uqi", v8uqi_type_node);
5954 add_builtin_type ("cp_v4uhi", v4uhi_type_node);
5955 add_builtin_type ("cp_v2usi", v2usi_type_node);
5956
5957 /* Intrinsics like mep_cadd3 are implemented with two groups of
5958 instructions, one which uses UNSPECs and one which uses a specific
5959 rtl code such as PLUS. Instructions in the latter group belong
5960 to GROUP_KNOWN_CODE.
5961
5962 In such cases, the intrinsic will have two entries in the global
5963 tables above. The unspec form is accessed using builtin functions
5964 while the specific form is accessed using the mep_* enum in
5965 mep-intrin.h.
5966
5967 The idea is that __cop arithmetic and builtin functions have
5968 different optimization requirements. If mep_cadd3() appears in
5969 the source code, the user will surely expect gcc to use cadd3
5970 rather than a work-alike such as add3. However, if the user
5971 just writes "a + b", where a or b are __cop variables, it is
5972 reasonable for gcc to choose a core instruction rather than
5973 cadd3 if it believes that is more optimal. */
5974 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5975 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
5976 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
5977 {
5978 tree ret_type = void_type_node;
5979 tree bi_type;
5980
5981 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
5982 continue;
5983
5984 if (cgen_insns[i].cret_p)
5985 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
5986
5987 bi_type = build_function_type_list (ret_type, NULL_TREE);
5988 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
5989 bi_type,
5990 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
5991 }
5992 }
5993
5994 /* Report the unavailability of the given intrinsic. */
5995
5996 #if 1
5997 static void
5998 mep_intrinsic_unavailable (int intrinsic)
5999 {
6000 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6001
6002 if (already_reported_p[intrinsic])
6003 return;
6004
6005 if (mep_intrinsic_insn[intrinsic] < 0)
6006 error ("coprocessor intrinsic %qs is not available in this configuration",
6007 cgen_intrinsics[intrinsic]);
6008 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6009 error ("%qs is not available in VLIW functions",
6010 cgen_intrinsics[intrinsic]);
6011 else
6012 error ("%qs is not available in non-VLIW functions",
6013 cgen_intrinsics[intrinsic]);
6014
6015 already_reported_p[intrinsic] = 1;
6016 }
6018
6019
6020 /* See if any implementation of INTRINSIC is available to the
6021 current function. If so, store the most general implementation
6022 in *INSN_PTR and return true. Return false otherwise. */
6023
6024 static bool
6025 mep_get_intrinsic_insn (int intrinsic, const struct cgen_insn **insn_ptr)
6026 {
6027 int i;
6028
6029 i = mep_intrinsic_insn[intrinsic];
6030 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6031 i = mep_intrinsic_chain[i];
6032
6033 if (i >= 0)
6034 {
6035 *insn_ptr = &cgen_insns[i];
6036 return true;
6037 }
6038 return false;
6039 }
6040
6041
6042 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6043 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6044 try using a work-alike instead. In this case, the returned insn
6045 may have three operands rather than two. */
6046
6047 static bool
6048 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6049 {
6050 size_t i;
6051
6052 if (intrinsic == mep_cmov)
6053 {
6054 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6055 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6056 return true;
6057 return false;
6058 }
6059 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6060 }
6061
6062
6063 /* If ARG is a register operand that is the same size as MODE, convert it
6064 to MODE using a subreg. Otherwise return ARG as-is. */
6065
6066 static rtx
6067 mep_convert_arg (machine_mode mode, rtx arg)
6068 {
6069 if (GET_MODE (arg) != mode
6070 && register_operand (arg, VOIDmode)
6071 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6072 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6073 return arg;
6074 }
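
/* Editorial example (assumed, not from the original source): mode
   punning only applies to equal-sized register operands, e.g.

     rtx r  = gen_reg_rtx (SImode);
     rtx sf = mep_convert_arg (SFmode, r);  // (subreg:SF (reg:SI N) 0)
     rtx di = mep_convert_arg (DImode, r);  // sizes differ: returns r

   so a DImode request on a 4-byte value falls through unchanged.  */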
6075
6076
6077 /* Apply regnum conversions to ARG using the description given by REGNUM.
6078 Return the new argument on success and null on failure. */
6079
6080 static rtx
6081 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6082 {
6083 if (regnum->count == 0)
6084 return arg;
6085
6086 if (GET_CODE (arg) != CONST_INT
6087 || INTVAL (arg) < 0
6088 || INTVAL (arg) >= regnum->count)
6089 return 0;
6090
6091 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6092 }
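
/* Editorial example with hypothetical numbers: for a REGNUM describing
   a bank of 16 registers based at hard register 48,

     mep_convert_regnum (regnum, GEN_INT (3));   // (reg:SI 51)
     mep_convert_regnum (regnum, GEN_INT (99));  // 0: out of range

   and when REGNUM->count is zero the argument is passed through.  */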
6093
6094
6095 /* Try to make intrinsic argument ARG match the given operand.
6096 UNSIGNED_P is true if the argument has an unsigned type. */
6097
6098 static rtx
6099 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6100 int unsigned_p)
6101 {
6102 if (GET_CODE (arg) == CONST_INT)
6103 {
6104 /* CONST_INTs can only be bound to integer operands. */
6105 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6106 return 0;
6107 }
6108 else if (GET_CODE (arg) == CONST_DOUBLE)
6109 /* These hold vector constants. */;
6110 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6111 {
6112 /* If the argument is a different size from what's expected, we must
6113 have a value in the right mode class in order to convert it. */
6114 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6115 return 0;
6116
6117 /* If the operand is an rvalue, promote or demote it to match the
6118 operand's size. This might not need extra instructions when
6119 ARG is a register value. */
6120 if (operand->constraint[0] != '=')
6121 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6122 }
6123
6124 /* If the operand is an lvalue, bind the operand to a new register.
6125 The caller will copy this value into ARG after the main
6126 instruction. By always doing this, we produce slightly
6127 better code.
6128 /* But not for control registers. */
6129 if (operand->constraint[0] == '='
6130 && (! REG_P (arg)
6131 || ! (CONTROL_REGNO_P (REGNO (arg))
6132 || CCR_REGNO_P (REGNO (arg))
6133 || CR_REGNO_P (REGNO (arg)))
6134 ))
6135 return gen_reg_rtx (operand->mode);
6136
6137 /* Try simple mode punning. */
6138 arg = mep_convert_arg (operand->mode, arg);
6139 if (operand->predicate (arg, operand->mode))
6140 return arg;
6141
6142 /* See if forcing the argument into a register will make it match. */
6143 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6144 arg = force_reg (operand->mode, arg);
6145 else
6146 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6147 if (operand->predicate (arg, operand->mode))
6148 return arg;
6149
6150 return 0;
6151 }
6152
6153
6154 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6155 function FNNAME. OPERAND describes the operand to which ARGNUM
6156 is mapped. */
6157
6158 static void
6159 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6160 int argnum, tree fnname)
6161 {
6162 size_t i;
6163
6164 if (GET_CODE (arg) == CONST_INT)
6165 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6166 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6167 {
6168 const struct cgen_immediate_predicate *predicate;
6169 HOST_WIDE_INT argval;
6170
6171 predicate = &cgen_immediate_predicates[i];
6172 argval = INTVAL (arg);
6173 if (argval < predicate->lower || argval >= predicate->upper)
6174 error ("argument %d of %qE must be in the range %d...%d",
6175 argnum, fnname, predicate->lower, predicate->upper - 1);
6176 else
6177 error ("argument %d of %qE must be a multiple of %d",
6178 argnum, fnname, predicate->align);
6179 return;
6180 }
6181
6182 error ("incompatible type for argument %d of %qE", argnum, fnname);
6183 }
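
/* Editorial example (hypothetical predicate bounds): for an immediate
   operand accepting 0..15 in multiples of 4, the two diagnostics read

     argument 2 of 'mep_cmov' must be in the range 0...15
     argument 2 of 'mep_cmov' must be a multiple of 4

   depending on whether the constant is out of range or misaligned.  */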
6184
6185 static rtx
6186 mep_expand_builtin (tree exp, rtx target,
6187 rtx subtarget ATTRIBUTE_UNUSED,
6188 machine_mode mode ATTRIBUTE_UNUSED,
6189 int ignore ATTRIBUTE_UNUSED)
6190 {
6191 rtx pat, op[10], arg[10];
6192 unsigned int a;
6193 int opindex, unsigned_p[10];
6194 tree fndecl, args;
6195 unsigned int n_args;
6196 tree fnname;
6197 const struct cgen_insn *cgen_insn;
6198 const struct insn_data_d *idata;
6199 unsigned int first_arg = 0;
6200 unsigned int builtin_n_args;
6201
6202 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6203 fnname = DECL_NAME (fndecl);
6204
6205 /* Find out which instruction we should emit. Note that some coprocessor
6206 intrinsics may only be available in VLIW mode, or only in normal mode. */
6207 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6208 {
6209 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6210 return NULL_RTX;
6211 }
6212 idata = &insn_data[cgen_insn->icode];
6213
6214 builtin_n_args = cgen_insn->num_args;
6215
6216 if (cgen_insn->cret_p)
6217 {
6218 if (cgen_insn->cret_p > 1)
6219 builtin_n_args++;
6220 first_arg = 1;
6221 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type); /* return value unused */
6222 builtin_n_args--;
6223 }
6224
6225 /* Evaluate each argument. */
6226 n_args = call_expr_nargs (exp);
6227
6228 if (n_args < builtin_n_args)
6229 {
6230 error ("too few arguments to %qE", fnname);
6231 return NULL_RTX;
6232 }
6233 if (n_args > builtin_n_args)
6234 {
6235 error ("too many arguments to %qE", fnname);
6236 return NULL_RTX;
6237 }
6238
6239 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6240 {
6241 tree value;
6242
6243 args = CALL_EXPR_ARG (exp, a - first_arg);
6244
6245 value = args;
6246
6247 #if 0
6248 if (cgen_insn->regnums[a].reference_p)
6249 {
6250 if (TREE_CODE (value) != ADDR_EXPR)
6251 {
6252 debug_tree(value);
6253 error ("argument %d of %qE must be an address", a+1, fnname);
6254 return NULL_RTX;
6255 }
6256 value = TREE_OPERAND (value, 0);
6257 }
6258 #endif
6259
6260 /* If the argument has been promoted to int, get the unpromoted
6261 value. This is necessary when sub-int memory values are bound
6262 to reference parameters. */
6263 if (TREE_CODE (value) == NOP_EXPR
6264 && TREE_TYPE (value) == integer_type_node
6265 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6266 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6267 < TYPE_PRECISION (TREE_TYPE (value))))
6268 value = TREE_OPERAND (value, 0);
6269
6270 /* If the argument has been promoted to double, get the unpromoted
6271 SFmode value. This is necessary for FMAX support, for example. */
6272 if (TREE_CODE (value) == NOP_EXPR
6273 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6274 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6275 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6276 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6277 value = TREE_OPERAND (value, 0);
6278
6279 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6280 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6281 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6282 if (cgen_insn->regnums[a].reference_p)
6283 {
6284 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6285 machine_mode pointed_mode = TYPE_MODE (pointed_to);
6286
6287 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6288 }
6289 if (arg[a] == 0)
6290 {
6291 error ("argument %d of %qE must be in the range %d...%d",
6292 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6293 return NULL_RTX;
6294 }
6295 }
6296
6297 for (a = 0; a < first_arg; a++)
6298 {
6299 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6300 arg[a] = target;
6301 else
6302 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6303 }
6304
6305 /* Convert the arguments into a form suitable for the intrinsic.
6306 Report an error if this isn't possible. */
6307 for (opindex = 0; opindex < idata->n_operands; opindex++)
6308 {
6309 a = cgen_insn->op_mapping[opindex];
6310 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6311 arg[a], unsigned_p[a]);
6312 if (op[opindex] == 0)
6313 {
6314 mep_incompatible_arg (&idata->operand[opindex],
6315 arg[a], a + 1 - first_arg, fnname);
6316 return NULL_RTX;
6317 }
6318 }
6319
6320 /* Emit the instruction. */
6321 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6322 op[5], op[6], op[7], op[8], op[9]);
6323
6324 if (GET_CODE (pat) == SET
6325 && GET_CODE (SET_DEST (pat)) == PC
6326 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6327 emit_jump_insn (pat);
6328 else
6329 emit_insn (pat);
6330
6331 /* Copy lvalues back to their final locations. */
6332 for (opindex = 0; opindex < idata->n_operands; opindex++)
6333 if (idata->operand[opindex].constraint[0] == '=')
6334 {
6335 a = cgen_insn->op_mapping[opindex];
6336 if (a >= first_arg)
6337 {
6338 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6339 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6340 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6341 op[opindex]));
6342 else
6343 {
6344 /* First convert the operand to the right mode, then copy it
6345 into the destination. Doing the conversion as a separate
6346 step (rather than using convert_move) means that we can
6347 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6348 refer to the same register. */
6349 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6350 op[opindex], unsigned_p[a]);
6351 if (!rtx_equal_p (arg[a], op[opindex]))
6352 emit_move_insn (arg[a], op[opindex]);
6353 }
6354 }
6355 }
6356
6357 if (first_arg > 0 && target && target != op[0])
6358 {
6359 emit_move_insn (target, op[0]);
6360 }
6361
6362 return target;
6363 }
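
/* Editorial sketch of the expansion above (names hypothetical): for an
   intrinsic with a coprocessor return value (cret_p == 1) and two
   arguments,

     r = mep_cfoo (a, b);

   gives arg[0] = TARGET or a fresh register, arg[1] = a, arg[2] = b;
   each op[opindex] is arg[op_mapping[opindex]] legitimized for its
   operand, the pattern is emitted through idata->genfun, and operands
   with '=' constraints are copied back to their final homes.  */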
6364
6365 static bool
6366 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
6367 {
6368 return false;
6369 }
6370 \f
6371 /* A subroutine of global_reg_mentioned_p; return true if X mentions
6372 a global register. */
6373
6374 static bool
6375 global_reg_mentioned_p_1 (const_rtx x)
6376 {
6377 int regno;
6378
6379 switch (GET_CODE (x))
6380 {
6381 case SUBREG:
6382 if (REG_P (SUBREG_REG (x)))
6383 {
6384 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6385 && global_regs[subreg_regno (x)])
6386 return true;
6387 return false;
6388 }
6389 break;
6390
6391 case REG:
6392 regno = REGNO (x);
6393 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6394 return true;
6395 return false;
6396
6397 case CALL:
6398 /* A non-constant call might use a global register. */
6399 return true;
6400
6401 default:
6402 break;
6403 }
6404
6405 return false;
6406 }
6407
6408 /* Return true if X mentions a global register. */
6409
6410 static bool
6411 global_reg_mentioned_p (rtx x)
6412 {
6413 if (INSN_P (x))
6414 {
6415 if (CALL_P (x))
6416 {
6417 if (! RTL_CONST_OR_PURE_CALL_P (x))
6418 return true;
6419 x = CALL_INSN_FUNCTION_USAGE (x);
6420 if (x == 0)
6421 return false;
6422 }
6423 else
6424 x = PATTERN (x);
6425 }
6426
6427 subrtx_iterator::array_type array;
6428 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6429 if (global_reg_mentioned_p_1 (*iter))
6430 return true;
6431 return false;
6432 }
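
/* Editorial note: global_regs[] is set by file-scope declarations such
   as

     register int g asm ("$11");   // hypothetical register choice

   so the walk above flags any insn that mentions such a register, and
   conservatively any call that is not const or pure.  */
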
6433 /* Scheduling hooks for VLIW mode.
6434
6435 Conceptually this is very simple: we have a two-pack architecture
6436 that takes one core insn and one coprocessor insn to make up either
6437 a 32- or 64-bit instruction word (depending on the option bit set in
6438 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6439 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6440 and one 48-bit cop insn or two 32-bit core/cop insns.
6441
6442 In practice, instruction selection will be a bear. Consider in
6443 VL64 mode the following insns
6444
6445 add $1, 1
6446 cmov $cr0, $0
6447
6448 these cannot pack, since the add is a 16-bit core insn and cmov
6449 is a 32-bit cop insn. However,
6450
6451 add3 $1, $1, 1
6452 cmov $cr0, $0
6453
6454 packs just fine. For good VLIW code generation in VL64 mode, we
6455 will have to have 32-bit alternatives for many of the common core
6456 insns. Not implemented. */
6457
6458 static int
6459 mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
6460 {
6461 int cost_specified;
6462
6463 if (REG_NOTE_KIND (link) != 0)
6464 {
6465 /* See whether INSN and DEP_INSN are intrinsics that set the same
6466 hard register. If so, it is more important to free up DEP_INSN
6467 than it is to free up INSN.
6468
6469 Note that intrinsics like mep_mulr are handled differently from
6470 the equivalent mep.md patterns. In mep.md, if we don't care
6471 about the value of $lo and $hi, the pattern will just clobber
6472 the registers, not set them. Since clobbers don't count as
6473 output dependencies, it is often possible to reorder two mulrs,
6474 even after reload.
6475
6476 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6477 so any pair of mep_mulr()s will be inter-dependent. We should
6478 therefore give the first mep_mulr() a higher priority. */
6479 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6480 && global_reg_mentioned_p (PATTERN (insn))
6481 && global_reg_mentioned_p (PATTERN (dep_insn)))
6482 return 1;
6483
6484 /* If the dependence is an anti or output dependence, assume it
6485 has no cost. */
6486 return 0;
6487 }
6488
6489 /* If we can't recognize the insns, we can't really do anything. */
6490 if (recog_memoized (dep_insn) < 0)
6491 return cost;
6492
6493 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6494 attribute instead. */
6495 if (!TARGET_H1)
6496 {
6497 cost_specified = get_attr_latency (dep_insn);
6498 if (cost_specified != 0)
6499 return cost_specified;
6500 }
6501
6502 return cost;
6503 }
6504
6505 /* ??? We don't properly compute the length of a load/store insn,
6506 taking into account the addressing mode. */
6507
6508 static int
6509 mep_issue_rate (void)
6510 {
6511 return TARGET_IVC2 ? 3 : 2;
6512 }
6513
6514 /* Return true if function DECL was declared with the vliw attribute. */
6515
6516 bool
6517 mep_vliw_function_p (tree decl)
6518 {
6519 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6520 }
6521
6522 static rtx_insn *
6523 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6524 int length)
6525 {
6526 int i;
6527
6528 for (i = nready - 1; i >= 0; --i)
6529 {
6530 rtx_insn *insn = ready[i];
6531 if (recog_memoized (insn) >= 0
6532 && get_attr_slot (insn) == slot
6533 && get_attr_length (insn) == length)
6534 return insn;
6535 }
6536
6537 return NULL;
6538 }
6539
6540 static void
6541 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6542 {
6543 int i;
6544
6545 for (i = 0; i < nready; ++i)
6546 if (ready[i] == insn)
6547 {
6548 for (; i < nready - 1; ++i)
6549 ready[i] = ready[i + 1];
6550 ready[i] = insn;
6551 return;
6552 }
6553
6554 gcc_unreachable ();
6555 }
6556
6557 static void
6558 mep_print_sched_insn (FILE *dump, rtx_insn *insn)
6559 {
6560 const char *slots = "none";
6561 const char *name = NULL;
6562 int code;
6563 char buf[30];
6564
6565 if (GET_CODE (PATTERN (insn)) == SET
6566 || GET_CODE (PATTERN (insn)) == PARALLEL)
6567 {
6568 switch (get_attr_slots (insn))
6569 {
6570 case SLOTS_CORE: slots = "core"; break;
6571 case SLOTS_C3: slots = "c3"; break;
6572 case SLOTS_P0: slots = "p0"; break;
6573 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6574 case SLOTS_P0_P1: slots = "p0,p1"; break;
6575 case SLOTS_P0S: slots = "p0s"; break;
6576 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6577 case SLOTS_P1: slots = "p1"; break;
6578 default:
6579 sprintf (buf, "%d", get_attr_slots (insn));
6580 slots = buf;
6581 break;
6582 }
6583 }
6584 if (GET_CODE (PATTERN (insn)) == USE)
6585 slots = "use";
6586
6587 code = INSN_CODE (insn);
6588 if (code >= 0)
6589 name = get_insn_name (code);
6590 if (!name)
6591 name = "{unknown}";
6592
6593 fprintf (dump,
6594 "insn %4d %4d %8s %s\n",
6595 code,
6596 INSN_UID (insn),
6597 name,
6598 slots);
6599 }
6600
6601 static int
6602 mep_sched_reorder (FILE *dump, int sched_verbose, rtx_insn **ready,
6603 int *pnready, int clock)
6605 {
6606 int nready = *pnready;
6607 rtx_insn *core_insn, *cop_insn;
6608 int i;
6609
6610 if (dump && sched_verbose > 1)
6611 {
6612 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6613 for (i = 0; i < nready; i++)
6614 mep_print_sched_insn (dump, ready[i]);
6615 fprintf (dump, "\n");
6616 }
6617
6618 if (!mep_vliw_function_p (cfun->decl))
6619 return 1;
6620 if (nready < 2)
6621 return 1;
6622
6623 /* IVC2 uses a DFA to determine what's ready and what's not. */
6624 if (TARGET_IVC2)
6625 return nready;
6626
6627 /* We can issue either a core or coprocessor instruction.
6628 Look for a matched pair of insns to reorder. If we don't
6629 find any, don't second-guess the scheduler's priorities. */
6630
6631 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6632 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6633 TARGET_OPT_VL64 ? 6 : 2)))
6634 ;
6635 else if (TARGET_OPT_VL64
6636 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6637 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6638 ;
6639 else
6640 /* We didn't find a pair. Issue the single insn at the head
6641 of the ready list. */
6642 return 1;
6643
6644 /* Reorder the two insns first. */
6645 mep_move_ready_insn (ready, nready, core_insn);
6646 mep_move_ready_insn (ready, nready - 1, cop_insn);
6647 return 2;
6648 }
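
/* Editorial example (insn names illustrative, not real reservations):
   in VL32 mode,

     ready (head last): { swcp16, add16, cmov16 }
     after reorder:     { swcp16, cmov16, add16 }   // returns 2

   where add16 is a 2-byte SLOT_CORE insn and cmov16 a 2-byte SLOT_COP
   insn; the pair then issues in a single cycle.  */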
6649
6650 /* Return true if X contains a register that is set by insn PREV. */
6651
6652 static bool
6653 mep_store_find_set (const_rtx x, const rtx_insn *prev)
6654 {
6655 subrtx_iterator::array_type array;
6656 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6657 if (REG_P (*iter) && reg_set_p (*iter, prev))
6658 return true;
6659 return false;
6660 }
6661
6662 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6663 not the containing insn. */
6664
6665 static bool
6666 mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
6667 {
6668 /* Cope with intrinsics like swcpa. */
6669 if (GET_CODE (pat) == PARALLEL)
6670 {
6671 int i;
6672
6673 for (i = 0; i < XVECLEN (pat, 0); i++)
6674 if (mep_store_data_bypass_p (prev,
6675 as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
6676 return true;
6677
6678 return false;
6679 }
6680
6681 /* Check for some sort of store. */
6682 if (GET_CODE (pat) != SET
6683 || GET_CODE (SET_DEST (pat)) != MEM)
6684 return false;
6685
6686 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6687 The first operand to the unspec is the store data and the other operands
6688 are used to calculate the address. */
6689 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6690 {
6691 rtx src;
6692 int i;
6693
6694 src = SET_SRC (pat);
6695 for (i = 1; i < XVECLEN (src, 0); i++)
6696 if (mep_store_find_set (XVECEXP (src, 0, i), prev))
6697 return false;
6698
6699 return true;
6700 }
6701
6702 /* Otherwise just check that PREV doesn't modify any register mentioned
6703 in the memory destination. */
6704 return !mep_store_find_set (SET_DEST (pat), prev);
6705 }
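
/* Editorial sketch of the UNSPEC store shape tested above (unspec
   number hypothetical):

     (set (mem:SI (scratch:SI))
          (unspec:SI [(reg:SI 49)    ; operand 0: store data
                      (reg:SI 3)]    ; operands 1..n: address inputs
                     UNSPEC_SWCPA))

   Only the address operands are checked against PREV's sets, so the
   store data itself may be produced by PREV and still bypass.  */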
6706
6707 /* Return true if INSN is a store instruction and if the store address
6708 has no true dependence on PREV. */
6709
6710 bool
6711 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6712 {
6713 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6714 }
6715
6716 /* Return true if, apart from HI/LO, there are no true dependencies
6717 between multiplication instructions PREV and INSN. */
6718
6719 bool
6720 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6721 {
6722 rtx pat;
6723
6724 pat = PATTERN (insn);
6725 if (GET_CODE (pat) == PARALLEL)
6726 pat = XVECEXP (pat, 0, 0);
6727 if (GET_CODE (pat) != SET)
6728 return false;
6729 subrtx_iterator::array_type array;
6730 FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6731 {
6732 const_rtx x = *iter;
6733 if (REG_P (x)
6734 && REGNO (x) != LO_REGNO
6735 && REGNO (x) != HI_REGNO
6736 && reg_set_p (x, prev))
6737 return false;
6738 }
6739 return true;
6740 }
6741
6742 /* Return true if INSN is an ldc instruction that issues to the
6743 MeP-h1 integer pipeline. This is true for instructions that
6744 read from PSW, LP, SAR, HI and LO. */
6745
6746 bool
6747 mep_ipipe_ldc_p (rtx_insn *insn)
6748 {
6749 rtx pat, src;
6750
6751 pat = PATTERN (insn);
6752
6753 /* Cope with intrinsics that set both a hard register and its shadow.
6754 The set of the hard register comes first. */
6755 if (GET_CODE (pat) == PARALLEL)
6756 pat = XVECEXP (pat, 0, 0);
6757
6758 if (GET_CODE (pat) == SET)
6759 {
6760 src = SET_SRC (pat);
6761
6762 /* Cope with intrinsics. The first operand to the unspec is
6763 the source register. */
6764 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6765 src = XVECEXP (src, 0, 0);
6766
6767 if (REG_P (src))
6768 switch (REGNO (src))
6769 {
6770 case PSW_REGNO:
6771 case LP_REGNO:
6772 case SAR_REGNO:
6773 case HI_REGNO:
6774 case LO_REGNO:
6775 return true;
6776 }
6777 }
6778 return false;
6779 }
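
/* Editorial examples of patterns accepted above (register numbers
   stand for the named macros):

     (set (reg:SI 2) (reg:SI PSW_REGNO))                 ; plain ldc
     (set (reg:SI 2) (unspec:SI [(reg:SI LO_REGNO)] N))  ; intrinsic form

   Reads from other control registers return false.  */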
6780
6781 /* Create a VLIW bundle from core instruction CORE and coprocessor
6782 instruction COP. COP always satisfies INSN_P, but CORE can be
6783 either a new pattern or an existing instruction.
6784
6785 Emit the bundle in place of COP and return it. */
6786
6787 static rtx_insn *
6788 mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
6789 {
6790 rtx seq;
6791 rtx_insn *core_insn;
6792 rtx_insn *insn;
6793
6794 /* If CORE is an existing instruction, remove it, otherwise put
6795 the new pattern in an INSN harness. */
6796 if (INSN_P (core_insn_or_pat))
6797 {
6798 core_insn = as_a <rtx_insn *> (core_insn_or_pat);
6799 remove_insn (core_insn);
6800 }
6801 else
6802 core_insn = make_insn_raw (core_insn_or_pat);
6803
6804 /* Generate the bundle sequence and replace COP with it. */
6805 seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
6806 insn = emit_insn_after (seq, cop);
6807 remove_insn (cop);
6808
6809 /* Set up the links of the insns inside the SEQUENCE. */
6810 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
6811 SET_NEXT_INSN (core_insn) = cop;
6812 SET_PREV_INSN (cop) = core_insn;
6813 SET_NEXT_INSN (cop) = NEXT_INSN (insn);
6814
6815 /* Set the VLIW flag for the coprocessor instruction. */
6816 PUT_MODE (core_insn, VOIDmode);
6817 PUT_MODE (cop, BImode);
6818
6819 /* Derive a location for the bundle. Individual instructions cannot
6820 have their own location because there can be no assembler labels
6821 between CORE_INSN and COP. */
6822 INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
6823 INSN_LOCATION (core_insn) = 0;
6824 INSN_LOCATION (cop) = 0;
6825
6826 return insn;
6827 }
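
/* Editorial sketch of the result (simplified): the bundle is a single
   insn whose pattern is

     (sequence [ core_insn          ; VOIDmode
                 cop ])             ; BImode marks the VLIW half

   and the inner insns are re-chained so that walking NEXT_INSN still
   visits both.  */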
6828
6829 /* A helper routine for mep_insn_dependent_p, called through note_stores. */
6830
6831 static void
6832 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6833 {
6834 rtx * pinsn = (rtx *) data;
6835
6836 if (*pinsn && reg_mentioned_p (x, *pinsn))
6837 *pinsn = NULL_RTX;
6838 }
6839
6840 /* Return true if anything in insn X is (anti-, output- or true-)
6841 dependent on anything in insn Y. */
6842
6843 static int
6844 mep_insn_dependent_p (rtx x, rtx y)
6845 {
6846 rtx tmp;
6847
6848 gcc_assert (INSN_P (x));
6849 gcc_assert (INSN_P (y));
6850
6851 tmp = PATTERN (y);
6852 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6853 if (tmp == NULL_RTX)
6854 return 1;
6855
6856 tmp = PATTERN (x);
6857 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6858 if (tmp == NULL_RTX)
6859 return 1;
6860
6861 return 0;
6862 }
6863
6864 static int
6865 core_insn_p (rtx_insn *insn)
6866 {
6867 if (GET_CODE (PATTERN (insn)) == USE)
6868 return 0;
6869 if (get_attr_slot (insn) == SLOT_CORE)
6870 return 1;
6871 return 0;
6872 }
6873
6874 /* Mark coprocessor instructions that can be bundled together with
6875 the immediately preceding core instruction. This is later used
6876 to emit the "+" that tells the assembler to create a VLIW insn.
6877
6878 For unbundled insns, the assembler will automatically add coprocessor
6879 nops, and 16-bit core nops. Due to an apparent oversight in the
6880 spec, the assembler will _not_ automatically add 32-bit core nops,
6881 so we have to emit those here.
6882
6883 Called from mep_insn_reorg. */
6884
6885 static void
6886 mep_bundle_insns (rtx_insn *insns)
6887 {
6888 rtx_insn *insn, *last = NULL, *first = NULL;
6889 int saw_scheduling = 0;
6890
6891 /* Only do bundling if we're in vliw mode. */
6892 if (!mep_vliw_function_p (cfun->decl))
6893 return;
6894
6895 /* The first insn in a bundle is TImode; the remainder are
6896 VOIDmode. After this function, the first has VOIDmode and the
6897 rest have BImode. */
6898
6899 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6900
6901 /* First, move any NOTEs that are within a bundle, to the beginning
6902 of the bundle. */
6903 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6904 {
6905 if (NOTE_P (insn) && first)
6906 /* Don't clear FIRST. */;
6907
6908 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6909 first = insn;
6910
6911 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6912 {
6913 rtx_insn *note, *prev;
6914
6915 /* INSN is part of a bundle; FIRST is the first insn in that
6916 bundle. Move all intervening notes out of the bundle.
6917 In addition, since the debug pass may insert a label
6918 whenever the current line changes, set the location info
6919 for INSN to match FIRST. */
6920
6921 INSN_LOCATION (insn) = INSN_LOCATION (first);
6922
6923 note = PREV_INSN (insn);
6924 while (note && note != first)
6925 {
6926 prev = PREV_INSN (note);
6927
6928 if (NOTE_P (note))
6929 {
6930 /* Remove NOTE from here... */
6931 SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6932 SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6933 /* ...and put it in here. */
6934 SET_NEXT_INSN (note) = first;
6935 SET_PREV_INSN (note) = PREV_INSN (first);
6936 SET_NEXT_INSN (PREV_INSN (note)) = note;
6937 SET_PREV_INSN (NEXT_INSN (note)) = note;
6938 }
6939
6940 note = prev;
6941 }
6942 }
6943
6944 else if (!NONJUMP_INSN_P (insn))
6945 first = 0;
6946 }
6947
6948 /* Now fix up the bundles. */
6949 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6950 {
6951 if (NOTE_P (insn))
6952 continue;
6953
6954 if (!NONJUMP_INSN_P (insn))
6955 {
6956 last = 0;
6957 continue;
6958 }
6959
6960 /* If we're not optimizing enough, there won't be scheduling
6961 info. We detect that here. */
6962 if (GET_MODE (insn) == TImode)
6963 saw_scheduling = 1;
6964 if (!saw_scheduling)
6965 continue;
6966
6967 if (TARGET_IVC2)
6968 {
6969 rtx_insn *core_insn = NULL;
6970
6971 /* IVC2 slots are scheduled by DFA, so we just accept
6972 whatever the scheduler gives us. However, we must make
6973 sure the core insn (if any) is the first in the bundle.
6974 The IVC2 assembler can insert whatever NOPs are needed,
6975 and allows a COP insn to be first. */
6976
6977 if (NONJUMP_INSN_P (insn)
6978 && GET_CODE (PATTERN (insn)) != USE
6979 && GET_MODE (insn) == TImode)
6980 {
6981 for (last = insn;
6982 NEXT_INSN (last)
6983 && GET_MODE (NEXT_INSN (last)) == VOIDmode
6984 && NONJUMP_INSN_P (NEXT_INSN (last));
6985 last = NEXT_INSN (last))
6986 {
6987 if (core_insn_p (last))
6988 core_insn = last;
6989 }
6990 if (core_insn_p (last))
6991 core_insn = last;
6992
6993 if (core_insn && core_insn != insn)
6994 {
6995 /* Swap core insn to first in the bundle. */
6996
6997 /* Remove core insn. */
6998 if (PREV_INSN (core_insn))
6999 SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7000 if (NEXT_INSN (core_insn))
7001 SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7002
7003 /* Re-insert core insn. */
7004 SET_PREV_INSN (core_insn) = PREV_INSN (insn);
7005 SET_NEXT_INSN (core_insn) = insn;
7006
7007 if (PREV_INSN (core_insn))
7008 SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7009 SET_PREV_INSN (insn) = core_insn;
7010
7011 PUT_MODE (core_insn, TImode);
7012 PUT_MODE (insn, VOIDmode);
7013 }
7014 }
7015
7016 /* The first insn has TImode, the rest have VOIDmode. */
7017 if (GET_MODE (insn) == TImode)
7018 PUT_MODE (insn, VOIDmode);
7019 else
7020 PUT_MODE (insn, BImode);
7021 continue;
7022 }
7023
7024 PUT_MODE (insn, VOIDmode);
7025 if (recog_memoized (insn) >= 0
7026 && get_attr_slot (insn) == SLOT_COP)
7027 {
7028 if (JUMP_P (insn)
7029 || ! last
7030 || recog_memoized (last) < 0
7031 || get_attr_slot (last) != SLOT_CORE
7032 || (get_attr_length (insn)
7033 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7034 || mep_insn_dependent_p (insn, last))
7035 {
7036 switch (get_attr_length (insn))
7037 {
7038 case 8:
7039 break;
7040 case 6:
7041 insn = mep_make_bundle (gen_nop (), insn);
7042 break;
7043 case 4:
7044 if (TARGET_OPT_VL64)
7045 insn = mep_make_bundle (gen_nop32 (), insn);
7046 break;
7047 case 2:
7048 if (TARGET_OPT_VL64)
7049 error ("2 byte cop instructions are"
7050 " not allowed in 64-bit VLIW mode");
7051 else
7052 insn = mep_make_bundle (gen_nop (), insn);
7053 break;
7054 default:
7055 error ("unexpected %d byte cop instruction",
7056 get_attr_length (insn));
7057 break;
7058 }
7059 }
7060 else
7061 insn = mep_make_bundle (last, insn);
7062 }
7063
7064 last = insn;
7065 }
7066 }
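
/* Editorial illustration: at the assembly level a bundled pair from
   the code above comes out as the core insn followed by the cop insn
   marked with "+", e.g.

     add3 $1, $1, 1
     + cmov $cr0, $0

   (exact spelling per the assembler); the "+" asks the assembler to
   pack both halves into one VLIW word.  */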
7067
7068
7069 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7070 Return true on success. This function can fail if the intrinsic
7071 is unavailable or if the operands don't satisfy their predicates. */
7072
7073 bool
7074 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7075 {
7076 const struct cgen_insn *cgen_insn;
7077 const struct insn_data_d *idata;
7078 rtx newop[10];
7079 int i;
7080
7081 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7082 return false;
7083
7084 idata = &insn_data[cgen_insn->icode];
7085 for (i = 0; i < idata->n_operands; i++)
7086 {
7087 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7088 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7089 return false;
7090 }
7091
7092 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7093 newop[3], newop[4], newop[5],
7094 newop[6], newop[7], newop[8]));
7095
7096 return true;
7097 }
7098
7099
7100 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7101 OPERANDS[0]. Report an error if the instruction could not
7102 be synthesized. OPERANDS[1] is a register_operand. For sign
7103 and zero extensions, it may be smaller than SImode. */
7104
7105 bool
7106 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7107 rtx *operands ATTRIBUTE_UNUSED)
7108 {
7109 return false;
7110 }
7111
7112
7113 /* Likewise, but apply a binary operation to OPERANDS[1] and
7114 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7115 can be a general_operand.
7116
7117 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7118 third operand. REG and REG3 take register operands only. */
7119
7120 bool
7121 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7122 int ATTRIBUTE_UNUSED immediate3,
7123 int ATTRIBUTE_UNUSED reg,
7124 int ATTRIBUTE_UNUSED reg3,
7125 rtx *operands ATTRIBUTE_UNUSED)
7126 {
7127 return false;
7128 }
7129
7130 static bool
7131 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7132 int opno ATTRIBUTE_UNUSED, int *total,
7133 bool ATTRIBUTE_UNUSED speed_p)
7134 {
7135 switch (code)
7136 {
7137 case CONST_INT:
7138 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7139 *total = 0;
7140 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7141 *total = 1;
7142 else
7143 *total = 3;
7144 return true;
7145
7146 case SYMBOL_REF:
7147 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7148 return true;
7149
7150 case MULT:
7151 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7152 ? COSTS_N_INSNS (3)
7153 : COSTS_N_INSNS (2));
7154 return true;
7155 }
7156 return false;
7157 }
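
/* Editorial examples of the constant costs above:

     (const_int 100)      -> *total = 0   // fits the signed-byte range
     (const_int 40000)    -> *total = 1   // 16-bit range
     (const_int 1000000)  -> *total = 3   // needs a full constant load

   and a multiply by a constant is costed one insn higher than a
   multiply by a register.  */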
7158
7159 static int
7160 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7161 machine_mode mode ATTRIBUTE_UNUSED,
7162 addr_space_t as ATTRIBUTE_UNUSED,
7163 bool ATTRIBUTE_UNUSED speed_p)
7164 {
7165 return 1;
7166 }
7167
7168 static void
7169 mep_asm_init_sections (void)
7170 {
7171 based_section
7172 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7173 "\t.section .based,\"aw\"");
7174
7175 tinybss_section
7176 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7177 "\t.section .sbss,\"aw\"");
7178
7179 sdata_section
7180 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7181 "\t.section .sdata,\"aw\",@progbits");
7182
7183 far_section
7184 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7185 "\t.section .far,\"aw\"");
7186
7187 farbss_section
7188 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7189 "\t.section .farbss,\"aw\"");
7190
7191 frodata_section
7192 = get_unnamed_section (0, output_section_asm_op,
7193 "\t.section .frodata,\"a\"");
7194
7195 srodata_section
7196 = get_unnamed_section (0, output_section_asm_op,
7197 "\t.section .srodata,\"a\"");
7198
7199 vtext_section
7200 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7201 "\t.section .vtext,\"axv\"\n\t.vliw");
7202
7203 vftext_section
7204 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7205 "\t.section .vftext,\"axv\"\n\t.vliw");
7206
7207 ftext_section
7208 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7209 "\t.section .ftext,\"ax\"\n\t.core");
7210
7211 }
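
/* Editorial example: these sections back this port's section
   attributes, e.g.

     int x __attribute__ ((based));   // placed in .based
     int y __attribute__ ((far));     // placed in .far

   per the attribute handling earlier in this file.  */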
7212 \f
7213 /* Initialize the GCC target structure. */
7214
7215 #undef TARGET_ASM_FUNCTION_PROLOGUE
7216 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7217 #undef TARGET_ATTRIBUTE_TABLE
7218 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7219 #undef TARGET_COMP_TYPE_ATTRIBUTES
7220 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7221 #undef TARGET_INSERT_ATTRIBUTES
7222 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7223 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7224 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7225 #undef TARGET_CAN_INLINE_P
7226 #define TARGET_CAN_INLINE_P mep_can_inline_p
7227 #undef TARGET_SECTION_TYPE_FLAGS
7228 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7229 #undef TARGET_ASM_NAMED_SECTION
7230 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7231 #undef TARGET_INIT_BUILTINS
7232 #define TARGET_INIT_BUILTINS mep_init_builtins
7233 #undef TARGET_EXPAND_BUILTIN
7234 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7235 #undef TARGET_SCHED_ADJUST_COST
7236 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7237 #undef TARGET_SCHED_ISSUE_RATE
7238 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7239 #undef TARGET_SCHED_REORDER
7240 #define TARGET_SCHED_REORDER mep_sched_reorder
7241 #undef TARGET_STRIP_NAME_ENCODING
7242 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7243 #undef TARGET_ASM_SELECT_SECTION
7244 #define TARGET_ASM_SELECT_SECTION mep_select_section
7245 #undef TARGET_ASM_UNIQUE_SECTION
7246 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7247 #undef TARGET_ENCODE_SECTION_INFO
7248 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7249 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7250 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7251 #undef TARGET_RTX_COSTS
7252 #define TARGET_RTX_COSTS mep_rtx_cost
7253 #undef TARGET_ADDRESS_COST
7254 #define TARGET_ADDRESS_COST mep_address_cost
7255 #undef TARGET_MACHINE_DEPENDENT_REORG
7256 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7257 #undef TARGET_SETUP_INCOMING_VARARGS
7258 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7259 #undef TARGET_PASS_BY_REFERENCE
7260 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7261 #undef TARGET_FUNCTION_ARG
7262 #define TARGET_FUNCTION_ARG mep_function_arg
7263 #undef TARGET_FUNCTION_ARG_ADVANCE
7264 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7265 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7266 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7267 #undef TARGET_OPTION_OVERRIDE
7268 #define TARGET_OPTION_OVERRIDE mep_option_override
7269 #undef TARGET_ALLOCATE_INITIAL_VALUE
7270 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7271 #undef TARGET_ASM_INIT_SECTIONS
7272 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7273 #undef TARGET_RETURN_IN_MEMORY
7274 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7275 #undef TARGET_NARROW_VOLATILE_BITFIELD
7276 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7277 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7278 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7279 #undef TARGET_BUILD_BUILTIN_VA_LIST
7280 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7281 #undef TARGET_EXPAND_BUILTIN_VA_START
7282 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7283 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7284 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7285 #undef TARGET_CAN_ELIMINATE
7286 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7287 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7288 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7289 #undef TARGET_TRAMPOLINE_INIT
7290 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7291 #undef TARGET_LEGITIMATE_CONSTANT_P
7292 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7293 #undef TARGET_CAN_USE_DOLOOP_P
7294 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7295
7296 struct gcc_target targetm = TARGET_INITIALIZER;
7297
7298 #include "gt-mep.h"