]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/mep/mep.c
genattrtab.c (write_header): Include hash-set.h...
[thirdparty/gcc.git] / gcc / config / mep / mep.c
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "varasm.h"
38 #include "calls.h"
39 #include "stringpool.h"
40 #include "stor-layout.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "insn-config.h"
44 #include "conditions.h"
45 #include "insn-flags.h"
46 #include "output.h"
47 #include "insn-attr.h"
48 #include "flags.h"
49 #include "recog.h"
50 #include "obstack.h"
51 #include "tree.h"
52 #include "expr.h"
53 #include "except.h"
54 #include "input.h"
55 #include "function.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "reload.h"
59 #include "tm_p.h"
60 #include "ggc.h"
61 #include "diagnostic-core.h"
62 #include "target.h"
63 #include "target-def.h"
64 #include "langhooks.h"
65 #include "dominance.h"
66 #include "cfg.h"
67 #include "cfgrtl.h"
68 #include "cfganal.h"
69 #include "lcm.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "predict.h"
73 #include "basic-block.h"
74 #include "df.h"
75 #include "hash-table.h"
76 #include "tree-ssa-alias.h"
77 #include "internal-fn.h"
78 #include "gimple-fold.h"
79 #include "tree-eh.h"
80 #include "gimple-expr.h"
81 #include "is-a.h"
82 #include "gimple.h"
83 #include "gimplify.h"
84 #include "opts.h"
85 #include "dumpfile.h"
86 #include "builtins.h"
87 #include "rtl-iter.h"
88
89 /* Structure of this file:
90
91 + Command Line Option Support
92 + Pattern support - constraints, predicates, expanders
93 + Reload Support
94 + Costs
95 + Functions to save and restore machine-specific function data.
96 + Frame/Epilog/Prolog Related
97 + Operand Printing
98 + Function args in registers
99 + Handle pipeline hazards
100 + Handle attributes
101 + Trampolines
102 + Machine-dependent Reorg
103 + Builtins. */
104
105 /* Symbol encodings:
106
107 Symbols are encoded as @ <char> . <name> where <char> is one of these:
108
109 b - based
110 t - tiny
111 n - near
112 f - far
113 i - io, near
114 I - io, far
115 c - cb (control bus) */
116
/* Per-function machine-dependent state, created by
   mep_init_machine_status and reachable as cfun->machine.  */
struct GTY(()) machine_function
{
  /* Cached frame-pointer decision for this function.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  /* NOTE(review): the name suggests this is actually an EH stack
     adjustment rather than a return address -- confirm against users.  */
  rtx eh_stack_adjust;

  /* Size of the register-save area and the slot assigned to each
     hard register within it; reg_saved[] marks which are saved.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
155
/* True if X is a hard REG whose number ANY_CONTROL_REGNO_P accepts.  */
#define MEP_CONTROL_REG(x) \
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
158
/* Output sections for data/code in the various MeP addressing regions
   (based, tiny, far, ...); presumably initialized lazily by
   mep_asm_init_sections later in this file.  */
static GTY(()) section * based_section;
static GTY(()) section * tinybss_section;
static GTY(()) section * far_section;
static GTY(()) section * farbss_section;
static GTY(()) section * frodata_section;
static GTY(()) section * srodata_section;

/* Sections for vliw-mode and far text.  */
static GTY(()) section * vtext_section;
static GTY(()) section * vftext_section;
static GTY(()) section * ftext_section;
170 static void mep_set_leaf_registers (int);
171 static bool symbol_p (rtx);
172 static bool symbolref_p (rtx);
173 static void encode_pattern_1 (rtx);
174 static void encode_pattern (rtx);
175 static bool const_in_range (rtx, int, int);
176 static void mep_rewrite_mult (rtx_insn *, rtx);
177 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
178 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
179 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
180 static bool move_needs_splitting (rtx, rtx, machine_mode);
181 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
182 static bool mep_nongeneral_reg (rtx);
183 static bool mep_general_copro_reg (rtx);
184 static bool mep_nonregister (rtx);
185 static struct machine_function* mep_init_machine_status (void);
186 static rtx mep_tp_rtx (void);
187 static rtx mep_gp_rtx (void);
188 static bool mep_interrupt_p (void);
189 static bool mep_disinterrupt_p (void);
190 static bool mep_reg_set_p (rtx, rtx);
191 static bool mep_reg_set_in_function (int);
192 static bool mep_interrupt_saved_reg (int);
193 static bool mep_call_saves_register (int);
194 static rtx_insn *F (rtx_insn *);
195 static void add_constant (int, int, int, int);
196 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
197 static void mep_reload_pointer (int, const char *);
198 static void mep_start_function (FILE *, HOST_WIDE_INT);
199 static bool mep_function_ok_for_sibcall (tree, tree);
200 static int unique_bit_in (HOST_WIDE_INT);
201 static int bit_size_for_clip (HOST_WIDE_INT);
202 static int bytesize (const_tree, machine_mode);
203 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
204 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
205 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
206 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
207 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
208 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
209 static bool mep_function_attribute_inlinable_p (const_tree);
210 static bool mep_can_inline_p (tree, tree);
211 static bool mep_lookup_pragma_disinterrupt (const char *);
212 static int mep_multiple_address_regions (tree, bool);
213 static int mep_attrlist_to_encoding (tree, tree);
214 static void mep_insert_attributes (tree, tree *);
215 static void mep_encode_section_info (tree, rtx, int);
216 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
217 static void mep_unique_section (tree, int);
218 static unsigned int mep_section_type_flags (tree, const char *, int);
219 static void mep_asm_named_section (const char *, unsigned int, tree);
220 static bool mep_mentioned_p (rtx, rtx, int);
221 static void mep_reorg_regmove (rtx_insn *);
222 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
223 bool, bool);
224 static void mep_reorg_repeat (rtx_insn *);
225 static bool mep_invertable_branch_p (rtx_insn *);
226 static void mep_invert_branch (rtx_insn *, rtx_insn *);
227 static void mep_reorg_erepeat (rtx_insn *);
228 static void mep_jmp_return_reorg (rtx_insn *);
229 static void mep_reorg_addcombine (rtx_insn *);
230 static void mep_reorg (void);
231 static void mep_init_intrinsics (void);
232 static void mep_init_builtins (void);
233 static void mep_intrinsic_unavailable (int);
234 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
235 static bool mep_get_move_insn (int, const struct cgen_insn **);
236 static rtx mep_convert_arg (machine_mode, rtx);
237 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
238 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
239 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
240 static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
241 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
242 static int mep_issue_rate (void);
243 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
244 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
245 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
246 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
247 static void mep_bundle_insns (rtx_insn *);
248 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
249 static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
250 static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
251 tree, int *, int);
252 static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
253 const_tree, bool);
254 static rtx mep_function_arg (cumulative_args_t, machine_mode,
255 const_tree, bool);
256 static void mep_function_arg_advance (cumulative_args_t, machine_mode,
257 const_tree, bool);
258 static bool mep_vector_mode_supported_p (machine_mode);
259 static rtx mep_allocate_initial_value (rtx);
260 static void mep_asm_init_sections (void);
261 static int mep_comp_type_attributes (const_tree, const_tree);
262 static bool mep_narrow_volatile_bitfield (void);
263 static rtx mep_expand_builtin_saveregs (void);
264 static tree mep_build_builtin_va_list (void);
265 static void mep_expand_va_start (tree, rtx);
266 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
267 static bool mep_can_eliminate (const int, const int);
268 static void mep_conditional_register_usage (void);
269 static void mep_trampoline_init (rtx, tree, rtx);
270 \f
271 #define WANT_GCC_DEFINITIONS
272 #include "mep-intrin.h"
273 #undef WANT_GCC_DEFINITIONS
274
275 \f
276 /* Command Line Option Support. */
277
/* Per-register leaf-function flags; see mep_set_leaf_registers.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
302
303 \f
304 static void
305 mep_set_leaf_registers (int enable)
306 {
307 int i;
308
309 if (mep_leaf_registers[0] != enable)
310 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
311 mep_leaf_registers[i] = enable;
312 }
313
314 static void
315 mep_conditional_register_usage (void)
316 {
317 int i;
318
319 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
320 {
321 fixed_regs[HI_REGNO] = 1;
322 fixed_regs[LO_REGNO] = 1;
323 call_used_regs[HI_REGNO] = 1;
324 call_used_regs[LO_REGNO] = 1;
325 }
326
327 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
328 global_regs[i] = 1;
329 }
330
/* Implement TARGET_OPTION_OVERRIDE.  Validate option combinations and
   derive the remaining target settings once all options are parsed.  */
static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  /* Handle options whose processing was deferred during parsing.
     Only -mivc2 is handled this way.  */
  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* -mivc2: expose the IVC2 coprocessor registers 48..79.
	       All become allocatable and call-clobbered, except that
	       registers 54 and 55 (j = 6, 7) stay call-saved.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Give the IVC2 control registers their architectural names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* PIC is not supported on this target.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  /* -ms, -mm, -ml and -mtiny= are mutually exclusive ways of choosing
     the tiny-section cutoff.  */
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  /* -mc= only accepts the three named constant sections.  */
  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Translate the size options into a tiny-section cutoff.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  /* Keep 64-bit coprocessor values whole; do not split them into
     word-sized pieces.  */
  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
424
425 /* Pattern Support - constraints, predicates, expanders. */
426
427 /* MEP has very few instructions that can refer to the span of
428 addresses used by symbols, so it's common to check for them. */
429
430 static bool
431 symbol_p (rtx x)
432 {
433 int c = GET_CODE (x);
434
435 return (c == CONST_INT
436 || c == CONST
437 || c == SYMBOL_REF);
438 }
439
440 static bool
441 symbolref_p (rtx x)
442 {
443 int c;
444
445 if (GET_CODE (x) != MEM)
446 return false;
447
448 c = GET_CODE (XEXP (x, 0));
449 return (c == CONST_INT
450 || c == CONST
451 || c == SYMBOL_REF);
452 }
453
454 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
455
/* GEN_REG (R, STRICT) is true if R can serve as a base register:
   a real general-purpose register, or -- in non-strict mode only --
   the argument pointer or any pseudo.  */
#define GEN_REG(R, STRICT) \
  (GR_REGNO_P (R) \
   || (!STRICT \
       && ((R) == ARG_POINTER_REGNUM \
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Scratch state for encode_pattern: the encoded string, the write
   cursor, and the rtx recorded for each encoded position.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* True if the most recently encoded pattern equals string X.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
465
/* Append a character code for X (and, recursively, its operands) to
   the global `pattern' buffer, recording each rtx in patternr[].
   On overflow the last character becomes '?'.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; once full, mark truncation
     by overwriting the previous character.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  /* Remember which rtx produced this pattern position so callers can
     fetch operands back out via patternr[].  */
  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM is 'm' followed by the encoding of its
	 address, which shares the CONST case's recursion.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Unspecs encode as 'u' plus the unspec number as a digit,
	 followed by their operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
532
533 static void
534 encode_pattern (rtx x)
535 {
536 patternp = pattern;
537 encode_pattern_1 (x);
538 *patternp = 0;
539 }
540
/* Return the section tag character encoded in symbol X's name (see
   the "Symbol encodings" table at the top of this file), or 0 if X
   carries no tag.  X may be a MEM, CONST, UNSPEC or PLUS wrapping
   the SYMBOL_REF.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers until the underlying SYMBOL_REF is reached.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  /* Fold the io tags onto plain near/far: 'I' is io-far and
	     'i' is io-near (the original comments here were swapped).  */
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
582
/* Implement REGNO_REG_CLASS: return the smallest register class
   containing hard register REGNO.  */
int
mep_regno_reg_class (int regno)
{
  /* Single-register classes first.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0: 			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are reachable with $tp-relative
     addressing, hence their own class.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* USERi is the answer only if no smaller subclass of it
		 also contains REGNO.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should remain; they are unallocatable.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
632
633 static bool
634 const_in_range (rtx x, int minv, int maxv)
635 {
636 return (GET_CODE (x) == CONST_INT
637 && INTVAL (x) >= minv
638 && INTVAL (x) <= maxv);
639 }
640
641 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
642 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
643 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
644 at the end of the insn stream. */
645
646 rtx
647 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
648 {
649 if (rtx_equal_p (dest, src1))
650 return src2;
651 else if (rtx_equal_p (dest, src2))
652 return src1;
653 else
654 {
655 if (insn == 0)
656 emit_insn (gen_movsi (copy_rtx (dest), src1));
657 else
658 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
659 return src2;
660 }
661 }
662
663 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
664 Change the last element of PATTERN from (clobber (scratch:SI))
665 to (clobber (reg:SI HI_REGNO)). */
666
static void
mep_rewrite_mult (rtx_insn *insn, rtx pattern)
{
  rtx hi_clobber;

  /* The last PARALLEL element is (clobber (scratch:SI)); make it
     name $hi explicitly.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force the rewritten insn to be re-recognized.  */
  INSN_CODE (insn) = -1;
}
677
678 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
679 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
680 store the result in DEST if nonnull. */
681
static void
mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* The GPR result is still wanted: use the mulr-style pattern,
       which also writes the product to DEST.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* Only $lo is needed.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
695
696 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
697 SRC3 into $lo, then use either madd or maddr. The move into $lo will
698 be deleted by a peephole2 if SRC3 is already in $lo. */
699
static void
mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  /* Seed $lo with the addend; a later peephole2 deletes this move
     when SRC3 is already in $lo.  */
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* maddr form: DEST also receives the result.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* madd form: result only in $lo.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
715
716 /* Return true if $lo has the same value as integer register GPR when
717 instruction INSN is reached. If necessary, rewrite the instruction
718 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
719 rtx for (reg:SI LO_REGNO).
720
721 This function is intended to be used by the peephole2 pass. Since
722 that pass goes from the end of a basic block to the beginning, and
723 propagates liveness information on the way, there is no need to
724 update register notes here.
725
726 If GPR_DEAD_P is true on entry, and this function returns true,
727 then the caller will replace _every_ use of GPR in and after INSN
728 with LO. This means that if the instruction that sets $lo is a
729 mulr- or maddr-type instruction, we can rewrite it to use mul or
730 madd instead. In combination with the copy progagation pass,
731 this allows us to replace sequences like:
732
733 mov GPR,R1
734 mulr GPR,R2
735
736 with:
737
738 mul R1,R2
739
740 if GPR is no longer used. */
741
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the head of the basic block, looking
     for the instruction that established GPR's value.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR holds a product; rewrite the multiply so that
		   the product also lands in $lo, dropping the GPR
		   destination entirely when GPR is dead.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Same idea for multiply-accumulate.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already write $lo; $lo equals GPR exactly when
	       GPR is operand 1 of the pattern.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       breaks the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* An intervening use of GPR means its register result
	       cannot be dropped by the rewrite above.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
795
796 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
797
798 bool
799 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
800 {
801 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
802 extract_insn (insn);
803 return result;
804 }
805
806 /* Return true if SET can be turned into a post-modify load or store
807 that adds OFFSET to GPR. In other words, return true if SET can be
808 changed into:
809
810 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
811
812 It's OK to change SET to an equivalent operation in order to
813 make it match. */
814
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     dereference of GPR itself.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on SET is known convertible and is rewritten in place.
     Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well; copy first so shared rtl is not clobbered.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
876
877 /* Return the effect of frame-related instruction INSN. */
878
879 static rtx
880 mep_frame_expr (rtx_insn *insn)
881 {
882 rtx note, expr;
883
884 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
885 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
886 RTX_FRAME_RELATED_P (expr) = 1;
887 return expr;
888 }
889
890 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
891 new pattern in INSN1; INSN2 will be deleted by the caller. */
892
static void
mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
{
  rtx expr;

  /* Merge the frame-effect information so the unwinder still sees
     both insns' effects after the combination.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined insn.  */
  INSN_CODE (insn1) = -1;
}
913
914 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
915 the basic block to see if any previous load or store instruction can
916 be persuaded to do SET_INSN as a side-effect. Return true if so. */
917
static bool
mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
{
  rtx_insn *insn;

  /* Scan backwards from SET_INSN to the start of the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Found a suitable load/store: fold SET_INSN into it
		 as a post-modify side effect.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop at anything that changes or uses REG, or that the
	     addition must not be moved across.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
944
945 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
946
947 bool
948 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
949 {
950 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
951 extract_insn (insn);
952 return result;
953 }
954
955 bool
956 mep_allow_clip (rtx ux, rtx lx, int s)
957 {
958 HOST_WIDE_INT u = INTVAL (ux);
959 HOST_WIDE_INT l = INTVAL (lx);
960 int i;
961
962 if (!TARGET_OPT_CLIP)
963 return false;
964
965 if (s)
966 {
967 for (i = 0; i < 30; i ++)
968 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
969 && (l == - ((HOST_WIDE_INT) 1 << i)))
970 return true;
971 }
972 else
973 {
974 if (l != 0)
975 return false;
976
977 for (i = 0; i < 30; i ++)
978 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
979 return true;
980 }
981 return false;
982 }
983
984 bool
985 mep_bit_position_p (rtx x, bool looking_for)
986 {
987 if (GET_CODE (x) != CONST_INT)
988 return false;
989 switch ((int) INTVAL(x) & 0xff)
990 {
991 case 0x01: case 0x02: case 0x04: case 0x08:
992 case 0x10: case 0x20: case 0x40: case 0x80:
993 return looking_for;
994 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
995 case 0xef: case 0xdf: case 0xbf: case 0x7f:
996 return !looking_for;
997 }
998 return false;
999 }
1000
/* Return true if copying SRC to DEST cannot be done in one mov insn
   and must be split: far symbols, constant offsets outside the
   reachable range, or a destination register above $7.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Compute the section tag before SRC is stripped below.  */
  int s = mep_section_tag (src);

  /* Strip CONST/MEM wrappers to expose the symbolic core of SRC;
     a non-symbolic source never needs splitting.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1029
/* Return true if the mov described by OPERANDS (dest, src) must be
   split into multiple insns.  SYMBOLIC is nonzero when the source may
   be a symbolic address.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants satisfying the I, J or O constraints load in one insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants are single-insn only before/outside reload, or when
     the destination is one of the low eight registers -- presumably a
     short-form encoding restriction; confirm against mep.md.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1055
1056 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1057 it to one specific value. So the insn chosen depends on whether
1058 the source and destination modes match. */
1059
1060 bool
1061 mep_vliw_mode_match (rtx tgt)
1062 {
1063 bool src_vliw = mep_vliw_function_p (cfun->decl);
1064 bool tgt_vliw = INTVAL (tgt);
1065
1066 return src_vliw == tgt_vliw;
1067 }
1068
1069 /* Like the above, but also test for near/far mismatches. */
1070
1071 bool
1072 mep_vliw_jmp_match (rtx tgt)
1073 {
1074 bool src_vliw = mep_vliw_function_p (cfun->decl);
1075 bool tgt_vliw = INTVAL (tgt);
1076
1077 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1078 return false;
1079
1080 return src_vliw == tgt_vliw;
1081 }
1082
/* Return true if insn X's `slot' attribute is SLOT_MULTI.  */
bool
mep_multi_slot (rtx_insn *x)
{
  return get_attr_slot (x) == SLOT_MULTI;
}
1088
1089 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1090
1091 static bool
1092 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1093 {
1094 /* We can't convert symbol values to gp- or tp-rel values after
1095 reload, as reload might have used $gp or $tp for other
1096 purposes. */
1097 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1098 {
1099 char e = mep_section_tag (x);
1100 return (e != 't' && e != 'b');
1101 }
1102 return 1;
1103 }
1104
1105 /* Be careful not to use macros that need to be compiled one way for
1106 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1107
1108 bool
1109 mep_legitimate_address (machine_mode mode, rtx x, int strict)
1110 {
1111 int the_tag;
1112
1113 #define DEBUG_LEGIT 0
1114 #if DEBUG_LEGIT
1115 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1116 debug_rtx (x);
1117 #endif
1118
1119 if (GET_CODE (x) == LO_SUM
1120 && GET_CODE (XEXP (x, 0)) == REG
1121 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1122 && CONSTANT_P (XEXP (x, 1)))
1123 {
1124 if (GET_MODE_SIZE (mode) > 4)
1125 {
1126 /* We will end up splitting this, and lo_sums are not
1127 offsettable for us. */
1128 #if DEBUG_LEGIT
1129 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1130 #endif
1131 return false;
1132 }
1133 #if DEBUG_LEGIT
1134 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1135 #endif
1136 return true;
1137 }
1138
1139 if (GET_CODE (x) == REG
1140 && GEN_REG (REGNO (x), strict))
1141 {
1142 #if DEBUG_LEGIT
1143 fprintf (stderr, " - yup, [reg]\n");
1144 #endif
1145 return true;
1146 }
1147
1148 if (GET_CODE (x) == PLUS
1149 && GET_CODE (XEXP (x, 0)) == REG
1150 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1151 && const_in_range (XEXP (x, 1), -32768, 32767))
1152 {
1153 #if DEBUG_LEGIT
1154 fprintf (stderr, " - yup, [reg+const]\n");
1155 #endif
1156 return true;
1157 }
1158
1159 if (GET_CODE (x) == PLUS
1160 && GET_CODE (XEXP (x, 0)) == REG
1161 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1162 && GET_CODE (XEXP (x, 1)) == CONST
1163 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1164 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1165 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1166 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1167 {
1168 #if DEBUG_LEGIT
1169 fprintf (stderr, " - yup, [reg+unspec]\n");
1170 #endif
1171 return true;
1172 }
1173
1174 the_tag = mep_section_tag (x);
1175
1176 if (the_tag == 'f')
1177 {
1178 #if DEBUG_LEGIT
1179 fprintf (stderr, " - nope, [far]\n");
1180 #endif
1181 return false;
1182 }
1183
1184 if (mode == VOIDmode
1185 && GET_CODE (x) == SYMBOL_REF)
1186 {
1187 #if DEBUG_LEGIT
1188 fprintf (stderr, " - yup, call [symbol]\n");
1189 #endif
1190 return true;
1191 }
1192
1193 if ((mode == SImode || mode == SFmode)
1194 && CONSTANT_P (x)
1195 && mep_legitimate_constant_p (mode, x)
1196 && the_tag != 't' && the_tag != 'b')
1197 {
1198 if (GET_CODE (x) != CONST_INT
1199 || (INTVAL (x) <= 0xfffff
1200 && INTVAL (x) >= 0
1201 && (INTVAL (x) % 4) == 0))
1202 {
1203 #if DEBUG_LEGIT
1204 fprintf (stderr, " - yup, [const]\n");
1205 #endif
1206 return true;
1207 }
1208 }
1209
1210 #if DEBUG_LEGIT
1211 fprintf (stderr, " - nope.\n");
1212 #endif
1213 return false;
1214 }
1215
1216 int
1217 mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
1218 int type_i,
1219 int ind_levels ATTRIBUTE_UNUSED)
1220 {
1221 enum reload_type type = (enum reload_type) type_i;
1222
1223 if (GET_CODE (*x) == PLUS
1224 && GET_CODE (XEXP (*x, 0)) == MEM
1225 && GET_CODE (XEXP (*x, 1)) == REG)
1226 {
1227 /* GCC will by default copy the MEM into a REG, which results in
1228 an invalid address. For us, the best thing to do is move the
1229 whole expression to a REG. */
1230 push_reload (*x, NULL_RTX, x, NULL,
1231 GENERAL_REGS, mode, VOIDmode,
1232 0, 0, opnum, type);
1233 return 1;
1234 }
1235
1236 if (GET_CODE (*x) == PLUS
1237 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1238 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1239 {
1240 char e = mep_section_tag (XEXP (*x, 0));
1241
1242 if (e != 't' && e != 'b')
1243 {
1244 /* GCC thinks that (sym+const) is a valid address. Well,
1245 sometimes it is, this time it isn't. The best thing to
1246 do is reload the symbol to a register, since reg+int
1247 tends to work, and we can't just add the symbol and
1248 constant anyway. */
1249 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1250 GENERAL_REGS, mode, VOIDmode,
1251 0, 0, opnum, type);
1252 return 1;
1253 }
1254 }
1255 return 0;
1256 }
1257
1258 int
1259 mep_core_address_length (rtx_insn *insn, int opn)
1260 {
1261 rtx set = single_set (insn);
1262 rtx mem = XEXP (set, opn);
1263 rtx other = XEXP (set, 1-opn);
1264 rtx addr = XEXP (mem, 0);
1265
1266 if (register_operand (addr, Pmode))
1267 return 2;
1268 if (GET_CODE (addr) == PLUS)
1269 {
1270 rtx addend = XEXP (addr, 1);
1271
1272 gcc_assert (REG_P (XEXP (addr, 0)));
1273
1274 switch (REGNO (XEXP (addr, 0)))
1275 {
1276 case STACK_POINTER_REGNUM:
1277 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1278 && mep_imm7a4_operand (addend, VOIDmode))
1279 return 2;
1280 break;
1281
1282 case 13: /* TP */
1283 gcc_assert (REG_P (other));
1284
1285 if (REGNO (other) >= 8)
1286 break;
1287
1288 if (GET_CODE (addend) == CONST
1289 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1290 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1291 return 2;
1292
1293 if (GET_CODE (addend) == CONST_INT
1294 && INTVAL (addend) >= 0
1295 && INTVAL (addend) <= 127
1296 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1297 return 2;
1298 break;
1299 }
1300 }
1301
1302 return 4;
1303 }
1304
1305 int
1306 mep_cop_address_length (rtx_insn *insn, int opn)
1307 {
1308 rtx set = single_set (insn);
1309 rtx mem = XEXP (set, opn);
1310 rtx addr = XEXP (mem, 0);
1311
1312 if (GET_CODE (mem) != MEM)
1313 return 2;
1314 if (register_operand (addr, Pmode))
1315 return 2;
1316 if (GET_CODE (addr) == POST_INC)
1317 return 2;
1318
1319 return 4;
1320 }
1321
1322 #define DEBUG_EXPAND_MOV 0
1323 bool
1324 mep_expand_mov (rtx *operands, machine_mode mode)
1325 {
1326 int i, t;
1327 int tag[2];
1328 rtx tpsym, tpoffs;
1329 int post_reload = 0;
1330
1331 tag[0] = mep_section_tag (operands[0]);
1332 tag[1] = mep_section_tag (operands[1]);
1333
1334 if (!reload_in_progress
1335 && !reload_completed
1336 && GET_CODE (operands[0]) != REG
1337 && GET_CODE (operands[0]) != SUBREG
1338 && GET_CODE (operands[1]) != REG
1339 && GET_CODE (operands[1]) != SUBREG)
1340 operands[1] = copy_to_mode_reg (mode, operands[1]);
1341
1342 #if DEBUG_EXPAND_MOV
1343 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1344 reload_in_progress || reload_completed);
1345 debug_rtx (operands[0]);
1346 debug_rtx (operands[1]);
1347 #endif
1348
1349 if (mode == DImode || mode == DFmode)
1350 return false;
1351
1352 if (reload_in_progress || reload_completed)
1353 {
1354 rtx r;
1355
1356 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1357 cfun->machine->reload_changes_tp = true;
1358
1359 if (tag[0] == 't' || tag[1] == 't')
1360 {
1361 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1362 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1363 post_reload = 1;
1364 }
1365 if (tag[0] == 'b' || tag[1] == 'b')
1366 {
1367 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1368 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1369 post_reload = 1;
1370 }
1371 if (cfun->machine->reload_changes_tp == true)
1372 post_reload = 1;
1373 }
1374
1375 if (!post_reload)
1376 {
1377 rtx n;
1378 if (symbol_p (operands[1]))
1379 {
1380 t = mep_section_tag (operands[1]);
1381 if (t == 'b' || t == 't')
1382 {
1383
1384 if (GET_CODE (operands[1]) == SYMBOL_REF)
1385 {
1386 tpsym = operands[1];
1387 n = gen_rtx_UNSPEC (mode,
1388 gen_rtvec (1, operands[1]),
1389 t == 'b' ? UNS_TPREL : UNS_GPREL);
1390 n = gen_rtx_CONST (mode, n);
1391 }
1392 else if (GET_CODE (operands[1]) == CONST
1393 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1394 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1395 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1396 {
1397 tpsym = XEXP (XEXP (operands[1], 0), 0);
1398 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1399 n = gen_rtx_UNSPEC (mode,
1400 gen_rtvec (1, tpsym),
1401 t == 'b' ? UNS_TPREL : UNS_GPREL);
1402 n = gen_rtx_PLUS (mode, n, tpoffs);
1403 n = gen_rtx_CONST (mode, n);
1404 }
1405 else if (GET_CODE (operands[1]) == CONST
1406 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1407 return false;
1408 else
1409 {
1410 error ("unusual TP-relative address");
1411 return false;
1412 }
1413
1414 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1415 : mep_gp_rtx ()), n);
1416 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1417 #if DEBUG_EXPAND_MOV
1418 fprintf(stderr, "mep_expand_mov emitting ");
1419 debug_rtx(n);
1420 #endif
1421 return true;
1422 }
1423 }
1424
1425 for (i=0; i < 2; i++)
1426 {
1427 t = mep_section_tag (operands[i]);
1428 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1429 {
1430 rtx sym, n, r;
1431 int u;
1432
1433 sym = XEXP (operands[i], 0);
1434 if (GET_CODE (sym) == CONST
1435 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1436 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1437
1438 if (t == 'b')
1439 {
1440 r = mep_tp_rtx ();
1441 u = UNS_TPREL;
1442 }
1443 else
1444 {
1445 r = mep_gp_rtx ();
1446 u = UNS_GPREL;
1447 }
1448
1449 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1450 n = gen_rtx_CONST (Pmode, n);
1451 n = gen_rtx_PLUS (Pmode, r, n);
1452 operands[i] = replace_equiv_address (operands[i], n);
1453 }
1454 }
1455 }
1456
1457 if ((GET_CODE (operands[1]) != REG
1458 && MEP_CONTROL_REG (operands[0]))
1459 || (GET_CODE (operands[0]) != REG
1460 && MEP_CONTROL_REG (operands[1])))
1461 {
1462 rtx temp;
1463 #if DEBUG_EXPAND_MOV
1464 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1465 #endif
1466 temp = gen_reg_rtx (mode);
1467 emit_move_insn (temp, operands[1]);
1468 operands[1] = temp;
1469 }
1470
1471 if (symbolref_p (operands[0])
1472 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1473 || (GET_MODE_SIZE (mode) != 4)))
1474 {
1475 rtx temp;
1476
1477 gcc_assert (!reload_in_progress && !reload_completed);
1478
1479 temp = force_reg (Pmode, XEXP (operands[0], 0));
1480 operands[0] = replace_equiv_address (operands[0], temp);
1481 emit_move_insn (operands[0], operands[1]);
1482 return true;
1483 }
1484
1485 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1486 tag[1] = 0;
1487
1488 if (symbol_p (operands[1])
1489 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1490 {
1491 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1492 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1493 return true;
1494 }
1495
1496 if (symbolref_p (operands[1])
1497 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1498 {
1499 rtx temp;
1500
1501 if (reload_in_progress || reload_completed)
1502 temp = operands[0];
1503 else
1504 temp = gen_reg_rtx (Pmode);
1505
1506 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1507 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1508 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1509 return true;
1510 }
1511
1512 return false;
1513 }
1514
1515 /* Cases where the pattern can't be made to use at all. */
1516
1517 bool
1518 mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
1519 {
1520 int i;
1521
1522 #define DEBUG_MOV_OK 0
1523 #if DEBUG_MOV_OK
1524 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1525 mep_section_tag (operands[1]));
1526 debug_rtx (operands[0]);
1527 debug_rtx (operands[1]);
1528 #endif
1529
1530 /* We want the movh patterns to get these. */
1531 if (GET_CODE (operands[1]) == HIGH)
1532 return false;
1533
1534 /* We can't store a register to a far variable without using a
1535 scratch register to hold the address. Using far variables should
1536 be split by mep_emit_mov anyway. */
1537 if (mep_section_tag (operands[0]) == 'f'
1538 || mep_section_tag (operands[1]) == 'f')
1539 {
1540 #if DEBUG_MOV_OK
1541 fprintf (stderr, " - no, f\n");
1542 #endif
1543 return false;
1544 }
1545 i = mep_section_tag (operands[1]);
1546 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1547 /* These are supposed to be generated with adds of the appropriate
1548 register. During and after reload, however, we allow them to
1549 be accessed as normal symbols because adding a dependency on
1550 the base register now might cause problems. */
1551 {
1552 #if DEBUG_MOV_OK
1553 fprintf (stderr, " - no, bt\n");
1554 #endif
1555 return false;
1556 }
1557
1558 /* The only moves we can allow involve at least one general
1559 register, so require it. */
1560 for (i = 0; i < 2; i ++)
1561 {
1562 /* Allow subregs too, before reload. */
1563 rtx x = operands[i];
1564
1565 if (GET_CODE (x) == SUBREG)
1566 x = XEXP (x, 0);
1567 if (GET_CODE (x) == REG
1568 && ! MEP_CONTROL_REG (x))
1569 {
1570 #if DEBUG_MOV_OK
1571 fprintf (stderr, " - ok\n");
1572 #endif
1573 return true;
1574 }
1575 }
1576 #if DEBUG_MOV_OK
1577 fprintf (stderr, " - no, no gen reg\n");
1578 #endif
1579 return false;
1580 }
1581
1582 #define DEBUG_SPLIT_WIDE_MOVE 0
1583 void
1584 mep_split_wide_move (rtx *operands, machine_mode mode)
1585 {
1586 int i;
1587
1588 #if DEBUG_SPLIT_WIDE_MOVE
1589 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1590 debug_rtx (operands[0]);
1591 debug_rtx (operands[1]);
1592 #endif
1593
1594 for (i = 0; i <= 1; i++)
1595 {
1596 rtx op = operands[i], hi, lo;
1597
1598 switch (GET_CODE (op))
1599 {
1600 case REG:
1601 {
1602 unsigned int regno = REGNO (op);
1603
1604 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1605 {
1606 rtx i32;
1607
1608 lo = gen_rtx_REG (SImode, regno);
1609 i32 = GEN_INT (32);
1610 hi = gen_rtx_ZERO_EXTRACT (SImode,
1611 gen_rtx_REG (DImode, regno),
1612 i32, i32);
1613 }
1614 else
1615 {
1616 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1617 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1618 }
1619 }
1620 break;
1621
1622 case CONST_INT:
1623 case CONST_DOUBLE:
1624 case MEM:
1625 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1626 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1627 break;
1628
1629 default:
1630 gcc_unreachable ();
1631 }
1632
1633 /* The high part of CR <- GPR moves must be done after the low part. */
1634 operands [i + 4] = lo;
1635 operands [i + 2] = hi;
1636 }
1637
1638 if (reg_mentioned_p (operands[2], operands[5])
1639 || GET_CODE (operands[2]) == ZERO_EXTRACT
1640 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1641 {
1642 rtx tmp;
1643
1644 /* Overlapping register pairs -- make sure we don't
1645 early-clobber ourselves. */
1646 tmp = operands[2];
1647 operands[2] = operands[4];
1648 operands[4] = tmp;
1649 tmp = operands[3];
1650 operands[3] = operands[5];
1651 operands[5] = tmp;
1652 }
1653
1654 #if DEBUG_SPLIT_WIDE_MOVE
1655 fprintf(stderr, "\033[34m");
1656 debug_rtx (operands[2]);
1657 debug_rtx (operands[3]);
1658 debug_rtx (operands[4]);
1659 debug_rtx (operands[5]);
1660 fprintf(stderr, "\033[0m");
1661 #endif
1662 }
1663
1664 /* Emit a setcc instruction in its entirity. */
1665
1666 static bool
1667 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1668 {
1669 rtx tmp;
1670
1671 switch (code)
1672 {
1673 case GT:
1674 case GTU:
1675 tmp = op1, op1 = op2, op2 = tmp;
1676 code = swap_condition (code);
1677 /* FALLTHRU */
1678
1679 case LT:
1680 case LTU:
1681 op1 = force_reg (SImode, op1);
1682 emit_insn (gen_rtx_SET (VOIDmode, dest,
1683 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1684 return true;
1685
1686 case EQ:
1687 if (op2 != const0_rtx)
1688 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1689 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1690 return true;
1691
1692 case NE:
1693 /* Branchful sequence:
1694 mov dest, 0 16-bit
1695 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1696 mov dest, 1 16-bit
1697
1698 Branchless sequence:
1699 add3 tmp, op1, -op2 32-bit (or mov + sub)
1700 sltu3 tmp, tmp, 1 16-bit
1701 xor3 dest, tmp, 1 32-bit
1702 */
1703 if (optimize_size && op2 != const0_rtx)
1704 return false;
1705
1706 if (op2 != const0_rtx)
1707 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1708
1709 op2 = gen_reg_rtx (SImode);
1710 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1711
1712 emit_insn (gen_rtx_SET (VOIDmode, dest,
1713 gen_rtx_XOR (SImode, op2, const1_rtx)));
1714 return true;
1715
1716 case LE:
1717 if (GET_CODE (op2) != CONST_INT
1718 || INTVAL (op2) == 0x7ffffff)
1719 return false;
1720 op2 = GEN_INT (INTVAL (op2) + 1);
1721 return mep_expand_setcc_1 (LT, dest, op1, op2);
1722
1723 case LEU:
1724 if (GET_CODE (op2) != CONST_INT
1725 || INTVAL (op2) == -1)
1726 return false;
1727 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1728 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1729
1730 case GE:
1731 if (GET_CODE (op2) != CONST_INT
1732 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1733 return false;
1734 op2 = GEN_INT (INTVAL (op2) - 1);
1735 return mep_expand_setcc_1 (GT, dest, op1, op2);
1736
1737 case GEU:
1738 if (GET_CODE (op2) != CONST_INT
1739 || op2 == const0_rtx)
1740 return false;
1741 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1742 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1743
1744 default:
1745 gcc_unreachable ();
1746 }
1747 }
1748
1749 bool
1750 mep_expand_setcc (rtx *operands)
1751 {
1752 rtx dest = operands[0];
1753 enum rtx_code code = GET_CODE (operands[1]);
1754 rtx op0 = operands[2];
1755 rtx op1 = operands[3];
1756
1757 return mep_expand_setcc_1 (code, dest, op0, op1);
1758 }
1759
1760 rtx
1761 mep_expand_cbranch (rtx *operands)
1762 {
1763 enum rtx_code code = GET_CODE (operands[0]);
1764 rtx op0 = operands[1];
1765 rtx op1 = operands[2];
1766 rtx tmp;
1767
1768 restart:
1769 switch (code)
1770 {
1771 case LT:
1772 if (mep_imm4_operand (op1, SImode))
1773 break;
1774
1775 tmp = gen_reg_rtx (SImode);
1776 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1777 code = NE;
1778 op0 = tmp;
1779 op1 = const0_rtx;
1780 break;
1781
1782 case GE:
1783 if (mep_imm4_operand (op1, SImode))
1784 break;
1785
1786 tmp = gen_reg_rtx (SImode);
1787 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1788
1789 code = EQ;
1790 op0 = tmp;
1791 op1 = const0_rtx;
1792 break;
1793
1794 case EQ:
1795 case NE:
1796 if (! mep_reg_or_imm4_operand (op1, SImode))
1797 op1 = force_reg (SImode, op1);
1798 break;
1799
1800 case LE:
1801 case GT:
1802 if (GET_CODE (op1) == CONST_INT
1803 && INTVAL (op1) != 0x7fffffff)
1804 {
1805 op1 = GEN_INT (INTVAL (op1) + 1);
1806 code = (code == LE ? LT : GE);
1807 goto restart;
1808 }
1809
1810 tmp = gen_reg_rtx (SImode);
1811 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1812
1813 code = (code == LE ? EQ : NE);
1814 op0 = tmp;
1815 op1 = const0_rtx;
1816 break;
1817
1818 case LTU:
1819 if (op1 == const1_rtx)
1820 {
1821 code = EQ;
1822 op1 = const0_rtx;
1823 break;
1824 }
1825
1826 tmp = gen_reg_rtx (SImode);
1827 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1828 code = NE;
1829 op0 = tmp;
1830 op1 = const0_rtx;
1831 break;
1832
1833 case LEU:
1834 tmp = gen_reg_rtx (SImode);
1835 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1836 code = NE;
1837 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1838 code = EQ;
1839 else
1840 gcc_unreachable ();
1841 op0 = tmp;
1842 op1 = const0_rtx;
1843 break;
1844
1845 case GTU:
1846 tmp = gen_reg_rtx (SImode);
1847 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1848 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1849 code = NE;
1850 op0 = tmp;
1851 op1 = const0_rtx;
1852 break;
1853
1854 case GEU:
1855 tmp = gen_reg_rtx (SImode);
1856 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1857 code = NE;
1858 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1859 code = EQ;
1860 else
1861 gcc_unreachable ();
1862 op0 = tmp;
1863 op1 = const0_rtx;
1864 break;
1865
1866 default:
1867 gcc_unreachable ();
1868 }
1869
1870 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1871 }
1872
1873 const char *
1874 mep_emit_cbranch (rtx *operands, int ne)
1875 {
1876 if (GET_CODE (operands[1]) == REG)
1877 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1878 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1879 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1880 else
1881 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1882 }
1883
1884 void
1885 mep_expand_call (rtx *operands, int returns_value)
1886 {
1887 rtx addr = operands[returns_value];
1888 rtx tp = mep_tp_rtx ();
1889 rtx gp = mep_gp_rtx ();
1890
1891 gcc_assert (GET_CODE (addr) == MEM);
1892
1893 addr = XEXP (addr, 0);
1894
1895 if (! mep_call_address_operand (addr, VOIDmode))
1896 addr = force_reg (SImode, addr);
1897
1898 if (! operands[returns_value+2])
1899 operands[returns_value+2] = const0_rtx;
1900
1901 if (returns_value)
1902 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1903 operands[3], tp, gp));
1904 else
1905 emit_call_insn (gen_call_internal (addr, operands[1],
1906 operands[2], tp, gp));
1907 }
1908 \f
1909 /* Aliasing Support. */
1910
1911 /* If X is a machine specific address (i.e. a symbol or label being
1912 referenced as a displacement from the GOT implemented using an
1913 UNSPEC), then return the base term. Otherwise return X. */
1914
1915 rtx
1916 mep_find_base_term (rtx x)
1917 {
1918 rtx base, term;
1919 int unspec;
1920
1921 if (GET_CODE (x) != PLUS)
1922 return x;
1923 base = XEXP (x, 0);
1924 term = XEXP (x, 1);
1925
1926 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1927 && base == mep_tp_rtx ())
1928 unspec = UNS_TPREL;
1929 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1930 && base == mep_gp_rtx ())
1931 unspec = UNS_GPREL;
1932 else
1933 return x;
1934
1935 if (GET_CODE (term) != CONST)
1936 return x;
1937 term = XEXP (term, 0);
1938
1939 if (GET_CODE (term) != UNSPEC
1940 || XINT (term, 1) != unspec)
1941 return x;
1942
1943 return XVECEXP (term, 0, 0);
1944 }
1945 \f
1946 /* Reload Support. */
1947
1948 /* Return true if the registers in CLASS cannot represent the change from
1949 modes FROM to TO. */
1950
1951 bool
1952 mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1953 enum reg_class regclass)
1954 {
1955 if (from == to)
1956 return false;
1957
1958 /* 64-bit COP regs must remain 64-bit COP regs. */
1959 if (TARGET_64BIT_CR_REGS
1960 && (regclass == CR_REGS
1961 || regclass == LOADABLE_CR_REGS)
1962 && (GET_MODE_SIZE (to) < 8
1963 || GET_MODE_SIZE (from) < 8))
1964 return true;
1965
1966 return false;
1967 }
1968
1969 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1970
1971 static bool
1972 mep_general_reg (rtx x)
1973 {
1974 while (GET_CODE (x) == SUBREG)
1975 x = XEXP (x, 0);
1976 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1977 }
1978
1979 static bool
1980 mep_nongeneral_reg (rtx x)
1981 {
1982 while (GET_CODE (x) == SUBREG)
1983 x = XEXP (x, 0);
1984 return (GET_CODE (x) == REG
1985 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1986 }
1987
1988 static bool
1989 mep_general_copro_reg (rtx x)
1990 {
1991 while (GET_CODE (x) == SUBREG)
1992 x = XEXP (x, 0);
1993 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1994 }
1995
1996 static bool
1997 mep_nonregister (rtx x)
1998 {
1999 while (GET_CODE (x) == SUBREG)
2000 x = XEXP (x, 0);
2001 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2002 }
2003
2004 #define DEBUG_RELOAD 0
2005
2006 /* Return the secondary reload class needed for moving value X to or
2007 from a register in coprocessor register class CLASS. */
2008
2009 static enum reg_class
2010 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2011 {
2012 if (mep_general_reg (x))
2013 /* We can do the move directly if mep_have_core_copro_moves_p,
2014 otherwise we need to go through memory. Either way, no secondary
2015 register is needed. */
2016 return NO_REGS;
2017
2018 if (mep_general_copro_reg (x))
2019 {
2020 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2021 if (mep_have_copro_copro_moves_p)
2022 return NO_REGS;
2023
2024 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2025 if (mep_have_core_copro_moves_p)
2026 return GENERAL_REGS;
2027
2028 /* Otherwise we need to do it through memory. No secondary
2029 register is needed. */
2030 return NO_REGS;
2031 }
2032
2033 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2034 && constraint_satisfied_p (x, CONSTRAINT_U))
2035 /* X is a memory value that we can access directly. */
2036 return NO_REGS;
2037
2038 /* We have to move X into a GPR first and then copy it to
2039 the coprocessor register. The move from the GPR to the
2040 coprocessor might be done directly or through memory,
2041 depending on mep_have_core_copro_moves_p. */
2042 return GENERAL_REGS;
2043 }
2044
2045 /* Copying X to register in RCLASS. */
2046
2047 enum reg_class
2048 mep_secondary_input_reload_class (enum reg_class rclass,
2049 machine_mode mode ATTRIBUTE_UNUSED,
2050 rtx x)
2051 {
2052 int rv = NO_REGS;
2053
2054 #if DEBUG_RELOAD
2055 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2056 debug_rtx (x);
2057 #endif
2058
2059 if (reg_class_subset_p (rclass, CR_REGS))
2060 rv = mep_secondary_copro_reload_class (rclass, x);
2061 else if (MEP_NONGENERAL_CLASS (rclass)
2062 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2063 rv = GENERAL_REGS;
2064
2065 #if DEBUG_RELOAD
2066 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2067 #endif
2068 return (enum reg_class) rv;
2069 }
2070
2071 /* Copying register in RCLASS to X. */
2072
2073 enum reg_class
2074 mep_secondary_output_reload_class (enum reg_class rclass,
2075 machine_mode mode ATTRIBUTE_UNUSED,
2076 rtx x)
2077 {
2078 int rv = NO_REGS;
2079
2080 #if DEBUG_RELOAD
2081 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2082 debug_rtx (x);
2083 #endif
2084
2085 if (reg_class_subset_p (rclass, CR_REGS))
2086 rv = mep_secondary_copro_reload_class (rclass, x);
2087 else if (MEP_NONGENERAL_CLASS (rclass)
2088 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2089 rv = GENERAL_REGS;
2090
2091 #if DEBUG_RELOAD
2092 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2093 #endif
2094
2095 return (enum reg_class) rv;
2096 }
2097
2098 /* Implement SECONDARY_MEMORY_NEEDED. */
2099
2100 bool
2101 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2102 machine_mode mode ATTRIBUTE_UNUSED)
2103 {
2104 if (!mep_have_core_copro_moves_p)
2105 {
2106 if (reg_classes_intersect_p (rclass1, CR_REGS)
2107 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2108 return true;
2109 if (reg_classes_intersect_p (rclass2, CR_REGS)
2110 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2111 return true;
2112 if (!mep_have_copro_copro_moves_p
2113 && reg_classes_intersect_p (rclass1, CR_REGS)
2114 && reg_classes_intersect_p (rclass2, CR_REGS))
2115 return true;
2116 }
2117 return false;
2118 }
2119
2120 void
2121 mep_expand_reload (rtx *operands, machine_mode mode)
2122 {
2123 /* There are three cases for each direction:
2124 register, farsym
2125 control, farsym
2126 control, nearsym */
2127
2128 int s0 = mep_section_tag (operands[0]) == 'f';
2129 int s1 = mep_section_tag (operands[1]) == 'f';
2130 int c0 = mep_nongeneral_reg (operands[0]);
2131 int c1 = mep_nongeneral_reg (operands[1]);
2132 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2133
2134 #if DEBUG_RELOAD
2135 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2136 debug_rtx (operands[0]);
2137 debug_rtx (operands[1]);
2138 #endif
2139
2140 switch (which)
2141 {
2142 case 00: /* Don't know why this gets here. */
2143 case 02: /* general = far */
2144 emit_move_insn (operands[0], operands[1]);
2145 return;
2146
2147 case 10: /* cr = mem */
2148 case 11: /* cr = cr */
2149 case 01: /* mem = cr */
2150 case 12: /* cr = far */
2151 emit_move_insn (operands[2], operands[1]);
2152 emit_move_insn (operands[0], operands[2]);
2153 return;
2154
2155 case 20: /* far = general */
2156 emit_move_insn (operands[2], XEXP (operands[1], 0));
2157 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2158 return;
2159
2160 case 21: /* far = cr */
2161 case 22: /* far = far */
2162 default:
2163 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2164 which, mode_name[mode]);
2165 debug_rtx (operands[0]);
2166 debug_rtx (operands[1]);
2167 gcc_unreachable ();
2168 }
2169 }
2170
2171 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2172 can be moved directly into registers 0 to 7, but not into the rest.
2173 If so, and if the required class includes registers 0 to 7, restrict
2174 it to those registers. */
2175
2176 enum reg_class
2177 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2178 {
2179 switch (GET_CODE (x))
2180 {
2181 case CONST_INT:
2182 if (INTVAL (x) >= 0x10000
2183 && INTVAL (x) < 0x01000000
2184 && (INTVAL (x) & 0xffff) != 0
2185 && reg_class_subset_p (TPREL_REGS, rclass))
2186 rclass = TPREL_REGS;
2187 break;
2188
2189 case CONST:
2190 case SYMBOL_REF:
2191 case LABEL_REF:
2192 if (mep_section_tag (x) != 'f'
2193 && reg_class_subset_p (TPREL_REGS, rclass))
2194 rclass = TPREL_REGS;
2195 break;
2196
2197 default:
2198 break;
2199 }
2200 return rclass;
2201 }
2202 \f
2203 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2204 moves, 4 for direct double-register moves, and 1000 for anything
2205 that requires a temporary register or temporary stack slot. */
2206
2207 int
2208 mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
2209 {
2210 if (mep_have_copro_copro_moves_p
2211 && reg_class_subset_p (from, CR_REGS)
2212 && reg_class_subset_p (to, CR_REGS))
2213 {
2214 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2215 return 4;
2216 return 2;
2217 }
2218 if (reg_class_subset_p (from, CR_REGS)
2219 && reg_class_subset_p (to, CR_REGS))
2220 {
2221 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2222 return 8;
2223 return 4;
2224 }
2225 if (reg_class_subset_p (from, CR_REGS)
2226 || reg_class_subset_p (to, CR_REGS))
2227 {
2228 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2229 return 4;
2230 return 2;
2231 }
2232 if (mep_secondary_memory_needed (from, to, mode))
2233 return 1000;
2234 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2235 return 1000;
2236
2237 if (GET_MODE_SIZE (mode) > 4)
2238 return 4;
2239
2240 return 2;
2241 }
2242
2243 \f
2244 /* Functions to save and restore machine-specific function data. */
2245
2246 static struct machine_function *
2247 mep_init_machine_status (void)
2248 {
2249 return ggc_cleared_alloc<machine_function> ();
2250 }
2251
2252 static rtx
2253 mep_allocate_initial_value (rtx reg)
2254 {
2255 int rss;
2256
2257 if (GET_CODE (reg) != REG)
2258 return NULL_RTX;
2259
2260 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2261 return NULL_RTX;
2262
2263 /* In interrupt functions, the "initial" values of $gp and $tp are
2264 provided by the prologue. They are not necessarily the same as
2265 the values that the caller was using. */
2266 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2267 if (mep_interrupt_p ())
2268 return NULL_RTX;
2269
2270 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2271 {
2272 cfun->machine->reg_save_size += 4;
2273 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2274 }
2275
2276 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2277 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
2278 }
2279
2280 rtx
2281 mep_return_addr_rtx (int count)
2282 {
2283 if (count != 0)
2284 return const0_rtx;
2285
2286 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2287 }
2288
2289 static rtx
2290 mep_tp_rtx (void)
2291 {
2292 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2293 }
2294
2295 static rtx
2296 mep_gp_rtx (void)
2297 {
2298 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2299 }
2300
2301 static bool
2302 mep_interrupt_p (void)
2303 {
2304 if (cfun->machine->interrupt_handler == 0)
2305 {
2306 int interrupt_handler
2307 = (lookup_attribute ("interrupt",
2308 DECL_ATTRIBUTES (current_function_decl))
2309 != NULL_TREE);
2310 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2311 }
2312 return cfun->machine->interrupt_handler == 2;
2313 }
2314
2315 static bool
2316 mep_disinterrupt_p (void)
2317 {
2318 if (cfun->machine->disable_interrupts == 0)
2319 {
2320 int disable_interrupts
2321 = (lookup_attribute ("disinterrupt",
2322 DECL_ATTRIBUTES (current_function_decl))
2323 != NULL_TREE);
2324 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2325 }
2326 return cfun->machine->disable_interrupts == 2;
2327 }
2328
2329 \f
2330 /* Frame/Epilog/Prolog Related. */
2331
2332 static bool
2333 mep_reg_set_p (rtx reg, rtx insn)
2334 {
2335 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2336 if (INSN_P (insn))
2337 {
2338 if (FIND_REG_INC_NOTE (insn, reg))
2339 return true;
2340 insn = PATTERN (insn);
2341 }
2342
2343 if (GET_CODE (insn) == SET
2344 && GET_CODE (XEXP (insn, 0)) == REG
2345 && GET_CODE (XEXP (insn, 1)) == REG
2346 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2347 return false;
2348
2349 return set_of (reg, insn) != NULL_RTX;
2350 }
2351
2352
/* Save classification of a register for the current function, cached
   in cfun->machine->reg_saved[] by mep_call_saves_register.  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2357
2358 static bool
2359 mep_reg_set_in_function (int regno)
2360 {
2361 rtx reg;
2362 rtx_insn *insn;
2363
2364 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2365 return true;
2366
2367 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2368 return true;
2369
2370 push_topmost_sequence ();
2371 insn = get_insns ();
2372 pop_topmost_sequence ();
2373
2374 if (!insn)
2375 return false;
2376
2377 reg = gen_rtx_REG (SImode, regno);
2378
2379 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2380 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2381 return true;
2382 return false;
2383 }
2384
2385 static bool
2386 mep_asm_without_operands_p (void)
2387 {
2388 if (cfun->machine->asms_without_operands == 0)
2389 {
2390 rtx_insn *insn;
2391
2392 push_topmost_sequence ();
2393 insn = get_insns ();
2394 pop_topmost_sequence ();
2395
2396 cfun->machine->asms_without_operands = 1;
2397 while (insn)
2398 {
2399 if (INSN_P (insn)
2400 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2401 {
2402 cfun->machine->asms_without_operands = 2;
2403 break;
2404 }
2405 insn = NEXT_INSN (insn);
2406 }
2407
2408 }
2409 return cfun->machine->asms_without_operands == 2;
2410 }
2411
2412 /* Interrupt functions save/restore every call-preserved register, and
2413 any call-used register it uses (or all if it calls any function,
2414 since they may get clobbered there too). Here we check to see
2415 which call-used registers need saving. */
2416
/* True for the extra IVC2 coprocessor control registers that interrupt
   handlers must also preserve.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2421
/* Return true if register R must be saved/restored by an interrupt
   handler's prologue and epilogue.  Always false outside interrupt
   functions.  Note the order of the tests matters: earlier clauses
   deliberately shadow later ones.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue itself uses this register (and its pair when
     64-bit coprocessor registers must be staged through core regs).  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An operand-less asm may clobber anything: save all non-fixed
     registers, plus the repeat/link registers and the IVC2 set.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler a call-used register that is never live needs no
     save.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2455
/* Return true if the current function must save/restore register R.
   While the frame layout is still unlocked the answer is recomputed on
   every call and cached in cfun->machine->reg_saved[]; once
   frame_locked is set (see mep_assign_save_slots) the cached value is
   authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A slot was already assigned, e.g. via ALLOCATE_INITIAL_VALUE.  */
      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      /* Profiling implicitly clobbers $lp; see also mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      /* Live call-saved registers, and a live $lp.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2481
2482 /* Return true if epilogue uses register REGNO. */
2483
2484 bool
2485 mep_epilogue_uses (int regno)
2486 {
2487 /* Since $lp is a call-saved register, the generic code will normally
2488 mark it used in the epilogue if it needs to be saved and restored.
2489 However, when profiling is enabled, the profiling code will implicitly
2490 clobber $11. This case has to be handled specially both here and in
2491 mep_call_saves_register. */
2492 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2493 return true;
2494 /* Interrupt functions save/restore pretty much everything. */
2495 return (reload_completed && mep_interrupt_saved_reg (regno));
2496 }
2497
2498 static int
2499 mep_reg_size (int regno)
2500 {
2501 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2502 return 8;
2503 return 4;
2504 }
2505
2506 /* Worker function for TARGET_CAN_ELIMINATE. */
2507
2508 bool
2509 mep_can_eliminate (const int from, const int to)
2510 {
2511 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2512 ? ! frame_pointer_needed
2513 : true);
2514 }
2515
/* Return the offset between the FROM and TO elimination registers.
   Layout (high addresses first): saved regs + regsave filler, then
   locals + outgoing args + frame filler.  As a side effect this
   recomputes the reg_saved[] cache (if the frame is not yet locked)
   and the two 8-byte-alignment filler sizes.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register save area to an 8-byte boundary.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the whole frame to an 8-byte boundary as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2560
/* Mark insn X as frame-related and return it; convenience wrapper for
   prologue emission.  */
static rtx_insn *
F (rtx_insn *x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2567
2568 /* Since the prologue/epilogue code is generated after optimization,
2569 we can't rely on gcc to split constants for us. So, this code
2570 captures all the ways to add a constant to a register in one logic
2571 chunk, including optimizing away insns we just don't need. This
2572 makes the prolog/epilog code easier to follow. */
/* Emit insns computing DEST = SRC + VALUE (all SImode hard registers),
   choosing the cheapest form for VALUE.  If MARK_FRAME, flag the
   result as frame-related for the unwinder.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  /* Complete no-op: nothing to emit.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* VALUE fits a 16-bit signed immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build VALUE in the temp: high half first, then OR in the low half.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* Summarize the three-insn sequence for the unwinder as a single
	 add of VALUE.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2632
2633 /* Move SRC to DEST. Mark the move as being potentially dead if
2634 MAYBE_DEAD_P. */
2635
/* Emit DEST = SRC and return the insn.  MAYBE_DEAD_P is currently
   unused; the REG_MAYBE_DEAD annotation below is disabled.  */
static rtx_insn *
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx_insn *insn = emit_move_insn (dest, src);
#if 0
  /* Historical REG_MAYBE_DEAD marking, kept for reference.  */
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2646
2647 /* Used for interrupt functions, which can't assume that $tp and $gp
2648 contain the correct pointers. */
2649
2650 static void
2651 mep_reload_pointer (int regno, const char *symbol)
2652 {
2653 rtx reg, sym;
2654
2655 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2656 return;
2657
2658 reg = gen_rtx_REG (SImode, regno);
2659 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2660 emit_insn (gen_movsi_topsym_s (reg, sym));
2661 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2662 }
2663
2664 /* Assign save slots for any register not already saved. DImode
2665 registers go at the end of the reg save area; the rest go at the
2666 beginning. This is for alignment purposes. Returns true if a frame
2667 is really needed. */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* Saving only an unmodified $tp/$gp/$lp does not by itself
	   force a stack frame.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	/* Keep any slot already assigned (e.g. by
	   mep_allocate_initial_value).  */
	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte registers pack from the start of the area.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte registers fill from the end, for alignment.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From here on mep_call_saves_register answers from its cache.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2701
/* Emit the RTL prologue: adjust $sp, store every register that
   mep_call_saves_register approves into its save slot, set up $fp if
   needed, and for interrupt handlers reload $gp/$tp from their base
   symbols.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* Fold the local-frame adjustment into the first $sp decrement when
     the combined offset still fits a small (< 128) displacement.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp only need a store if modified, or in an
	   interrupt handler.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly storable registers: one frame-related store.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit register that must be staged through two core
	       temporaries, stored as two SImode halves.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Tell the unwinder the whole pair is one DImode store.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* 32-bit register staged through one core temporary.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Apply whatever part of the total adjustment is still outstanding.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2830
2831 static void
2832 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2833 {
2834 int local = hwi_local;
2835 int frame_size = local + crtl->outgoing_args_size;
2836 int reg_save_size;
2837 int ffill;
2838 int i, sp, skip;
2839 int sp_offset;
2840 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2841
2842 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2843 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2844 sp_offset = reg_save_size + frame_size;
2845
2846 ffill = cfun->machine->frame_filler;
2847
2848 if (cfun->machine->mep_frame_pointer_needed)
2849 reg_names[FP_REGNO] = "$fp";
2850 else
2851 reg_names[FP_REGNO] = "$8";
2852
2853 if (sp_offset == 0)
2854 return;
2855
2856 if (debug_info_level == DINFO_LEVEL_NONE)
2857 {
2858 fprintf (file, "\t# frame: %d", sp_offset);
2859 if (reg_save_size)
2860 fprintf (file, " %d regs", reg_save_size);
2861 if (local)
2862 fprintf (file, " %d locals", local);
2863 if (crtl->outgoing_args_size)
2864 fprintf (file, " %d args", crtl->outgoing_args_size);
2865 fprintf (file, "\n");
2866 return;
2867 }
2868
2869 fprintf (file, "\t#\n");
2870 fprintf (file, "\t# Initial Frame Information:\n");
2871 if (sp_offset || !frame_pointer_needed)
2872 fprintf (file, "\t# Entry ---------- 0\n");
2873
2874 /* Sort registers by save slots, so they're printed in the order
2875 they appear in memory, not the order they're saved in. */
2876 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2877 slot_map[si] = si;
2878 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2879 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2880 if (cfun->machine->reg_save_slot[slot_map[si]]
2881 > cfun->machine->reg_save_slot[slot_map[sj]])
2882 {
2883 int t = slot_map[si];
2884 slot_map[si] = slot_map[sj];
2885 slot_map[sj] = t;
2886 }
2887
2888 sp = 0;
2889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2890 {
2891 int rsize;
2892 int r = slot_map[i];
2893 int rss = cfun->machine->reg_save_slot[r];
2894
2895 if (!mep_call_saves_register (r))
2896 continue;
2897
2898 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2899 && (!mep_reg_set_in_function (r)
2900 && !mep_interrupt_p ()))
2901 continue;
2902
2903 rsize = mep_reg_size(r);
2904 skip = rss - (sp+rsize);
2905 if (skip)
2906 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2907 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2908 rsize, reg_names[r], sp_offset - rss);
2909 sp = rss;
2910 }
2911
2912 skip = reg_save_size - sp;
2913 if (skip)
2914 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2915
2916 if (frame_pointer_needed)
2917 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2918 if (local)
2919 fprintf (file, "\t# %3d bytes for local vars\n", local);
2920 if (ffill)
2921 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2922 if (crtl->outgoing_args_size)
2923 fprintf (file, "\t# %3d bytes for outgoing args\n",
2924 crtl->outgoing_args_size);
2925 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2926 fprintf (file, "\t#\n");
2927 }
2928
2929
/* Nonzero while expanding an eh_return epilogue: $lp must not be
   restored (the unwinder supplies the handler address instead).  */
static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue: the final return jump is
   suppressed.  */
static int mep_sibcall_epilogue = 0;
2932
/* Emit the RTL epilogue: rebase/readjust $sp, restore saved registers,
   and emit the appropriate return (normal, interrupt reti, or EH
   return), honoring the mep_prevent_lp_restore and
   mep_sibcall_epilogue flags.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed. */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Pop the local frame first so the restores below use small
	     displacements.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* $tp/$gp/$lp were only stored if modified, or in an
	   interrupt handler — mirror the prologue's filter.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* Registers 10/11 are left for the EH unwinder to set.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Stage the restore through the core temp register.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* Apply the unwinder's extra stack adjustment for eh_return.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3054
3055 void
3056 mep_expand_eh_return (rtx *operands)
3057 {
3058 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3059 {
3060 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3061 emit_move_insn (ra, operands[0]);
3062 operands[0] = ra;
3063 }
3064
3065 emit_insn (gen_eh_epilogue (operands[0]));
3066 }
3067
/* Expand the body of the eh_epilogue pattern: record register 0 as the
   EH stack adjustment, then emit a normal epilogue with the $lp
   restore suppressed (the handler address is jumped to instead).  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3076
/* Emit an epilogue suitable for a sibling call: registers are restored
   and $sp is readjusted, but no return jump is emitted (see the
   mep_sibcall_epilogue early exit in mep_expand_epilogue).  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3084
3085 static bool
3086 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3087 {
3088 if (decl == NULL)
3089 return false;
3090
3091 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3092 return false;
3093
3094 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3095 if (mep_interrupt_p () || mep_disinterrupt_p ())
3096 return false;
3097
3098 return true;
3099 }
3100
/* Return the register holding the EH stack adjustment: register 10
   (which, together with 11, mep_expand_epilogue leaves unrestored on
   EH paths so the unwinder can set it).  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3106
/* Return the register the EH handler address is delivered in: $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3112
/* Emit the profiling call sequence.  $0 and $lp are saved on a
   temporary 8-byte stack area around the call to __mep_mcount and
   restored afterwards.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3128
/* Emit the end-of-block profiling call.  Same save/call/restore shape
   as mep_function_profiler, but calling __bb_trace_ret.  Returns an
   empty template string for the emitting insn pattern.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3144
3145 #undef SAVE
3146 #undef RESTORE
3147 \f
3148 /* Operand Printing. */
3149
3150 void
3151 mep_print_operand_address (FILE *stream, rtx address)
3152 {
3153 if (GET_CODE (address) == MEM)
3154 address = XEXP (address, 0);
3155 else
3156 /* cf: gcc.dg/asm-4.c. */
3157 gcc_assert (GET_CODE (address) == REG);
3158
3159 mep_print_operand (stream, address, 0);
3160 }
3161
/* Operand-printing table used by mep_print_operand.  CODE is the
   operand letter an entry applies to (0 = none).  PATTERN is matched
   against the string built by encode_pattern; in FORMAT, a digit N
   refers to patternr[N] (the rtx captured at position N of the
   pattern) and '\\' escapes a literal character.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3207
3208 static int
3209 unique_bit_in (HOST_WIDE_INT i)
3210 {
3211 switch (i & 0xff)
3212 {
3213 case 0x01: case 0xfe: return 0;
3214 case 0x02: case 0xfd: return 1;
3215 case 0x04: case 0xfb: return 2;
3216 case 0x08: case 0xf7: return 3;
3217 case 0x10: case 0x7f: return 4;
3218 case 0x20: case 0xbf: return 5;
3219 case 0x40: case 0xdf: return 6;
3220 case 0x80: case 0xef: return 7;
3221 default:
3222 gcc_unreachable ();
3223 }
3224 }
3225
3226 static int
3227 bit_size_for_clip (HOST_WIDE_INT i)
3228 {
3229 int rv;
3230
3231 for (rv = 0; rv < 31; rv ++)
3232 if (((HOST_WIDE_INT) 1 << rv) > i)
3233 return rv + 1;
3234 gcc_unreachable ();
3235 }
3236
/* Print an operand to an assembler instruction.  */
3238
/* Implement operand printing for assembly output.  CODE is the operand
   letter (0 for none).  '<', 'L' and 'M' are handled specially; all
   other codes are driven by the conversions[] table after
   encode_pattern classifies X.  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Bit-op suffix chosen from the rtx code of X.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Classify X into the file-scope `pattern'/patternr buffers, then
     find the matching conversions[] entry and expand its format.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped literal character.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* A digit names the rtx captured at that pattern slot.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Hex when only high bits are set, else decimal.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal character — but suppress a '+' in front of a
		 negative constant, which prints its own sign.  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3393
/* Implement FINAL_PRESCAN_INSN.  Emits a '+' before insns whose slot
   attribute says they are bundled but whose mode marks them as jumps
   we cannot trust — see the comment below.  */
void
mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3406
3407 /* Function args in registers. */
3408
3409 static void
3410 mep_setup_incoming_varargs (cumulative_args_t cum,
3411 machine_mode mode ATTRIBUTE_UNUSED,
3412 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3413 int second_time ATTRIBUTE_UNUSED)
3414 {
3415 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3416
3417 if (nsave > 0)
3418 cfun->machine->arg_regs_to_save = nsave;
3419 *pretend_size = nsave * 4;
3420 }
3421
3422 static int
3423 bytesize (const_tree type, machine_mode mode)
3424 {
3425 if (mode == BLKmode)
3426 return int_size_in_bytes (type);
3427 return GET_MODE_SIZE (mode);
3428 }
3429
/* Worker for TARGET_EXPAND_BUILTIN_SAVEREGS.  Dump the unconsumed
   argument registers into a stack buffer and return the buffer's
   address.  For IVC2, the buffer additionally receives one 8-byte
   coprocessor register per saved core register, stored after the
   (8-byte rounded) core-register area.  */

static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* 4*ns bytes of core registers rounded up to a multiple of 8,
	 plus 8 bytes per coprocessor register; 64-bit aligned.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Save core registers $(5-ns) .. $4 at the start of the buffer.  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      /* Copy the coprocessor argument registers just past the
	 core-register save area.  */
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3467
3468 static tree
3469 mep_build_builtin_va_list (void)
3470 {
3471 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3472 tree record;
3473
3474
3475 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3476
3477 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3478 get_identifier ("__va_next_gp"), ptr_type_node);
3479 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3480 get_identifier ("__va_next_gp_limit"),
3481 ptr_type_node);
3482 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3483 ptr_type_node);
3484 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3485 ptr_type_node);
3486
3487 DECL_FIELD_CONTEXT (f_next_gp) = record;
3488 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3489 DECL_FIELD_CONTEXT (f_next_cop) = record;
3490 DECL_FIELD_CONTEXT (f_next_stack) = record;
3491
3492 TYPE_FIELDS (record) = f_next_gp;
3493 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3494 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3495 DECL_CHAIN (f_next_cop) = f_next_stack;
3496
3497 layout_type (record);
3498
3499 return record;
3500 }
3501
/* Worker for TARGET_EXPAND_BUILTIN_VA_START: initialize the four
   fields of the va_list record built by mep_build_builtin_va_list.  */

static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* The field decls, in the order mep_build_builtin_va_list chained
     them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  /* COMPONENT_REFs for each field of VALIST.  */
  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* 8 * ((ns+1)/2) rounds the gp area up to 8 bytes, matching the
     layout made by mep_expand_builtin_saveregs.  */
  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3551
/* Worker for TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple that fetches
   the next vararg of TYPE: from the register save area while saved
   registers remain (IVC2 vector types from the coprocessor slots,
   everything else from the core slots), then from the stack.  Values
   that were passed by reference (larger than 8 bytes for IVC2
   vectors, otherwise larger than 4, or of unknown size) are fetched
   as a pointer and dereferenced.  */

static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* The caller passed a pointer instead; fetch that pointer.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Round up to a whole number of stack slots.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
       label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node);

  /* Register save area exhausted?  Jump to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance in lockstep, whichever one was consumed.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3659
3660 void
3661 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3662 rtx libname ATTRIBUTE_UNUSED,
3663 tree fndecl ATTRIBUTE_UNUSED)
3664 {
3665 pcum->nregs = 0;
3666
3667 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3668 pcum->vliw = 1;
3669 else
3670 pcum->vliw = 0;
3671 }
3672
/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack.  Arguments
   larger than 4 bytes are passed indirectly.  Return value in $0,
   unless bigger than 4 bytes, then the caller passes a pointer as the
   first arg.  For varargs, we copy $1..$4 to the stack.  */
3677
3678 static rtx
3679 mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3680 const_tree type ATTRIBUTE_UNUSED,
3681 bool named ATTRIBUTE_UNUSED)
3682 {
3683 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3684
3685 /* VOIDmode is a signal for the backend to pass data to the call
3686 expander via the second operand to the call pattern. We use
3687 this to determine whether to use "jsr" or "jsrv". */
3688 if (mode == VOIDmode)
3689 return GEN_INT (cum->vliw);
3690
3691 /* If we havn't run out of argument registers, return the next. */
3692 if (cum->nregs < 4)
3693 {
3694 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3695 return gen_rtx_REG (mode, cum->nregs + 49);
3696 else
3697 return gen_rtx_REG (mode, cum->nregs + 1);
3698 }
3699
3700 /* Otherwise the argument goes on the stack. */
3701 return NULL_RTX;
3702 }
3703
3704 static bool
3705 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3706 machine_mode mode,
3707 const_tree type,
3708 bool named ATTRIBUTE_UNUSED)
3709 {
3710 int size = bytesize (type, mode);
3711
3712 /* This is non-obvious, but yes, large values passed after we've run
3713 out of registers are *still* passed by reference - we put the
3714 address of the parameter on the stack, as well as putting the
3715 parameter itself elsewhere on the stack. */
3716
3717 if (size <= 0 || size > 8)
3718 return true;
3719 if (size <= 4)
3720 return false;
3721 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3722 && type != NULL_TREE && VECTOR_TYPE_P (type))
3723 return false;
3724 return true;
3725 }
3726
3727 static void
3728 mep_function_arg_advance (cumulative_args_t pcum,
3729 machine_mode mode ATTRIBUTE_UNUSED,
3730 const_tree type ATTRIBUTE_UNUSED,
3731 bool named ATTRIBUTE_UNUSED)
3732 {
3733 get_cumulative_args (pcum)->nregs += 1;
3734 }
3735
3736 bool
3737 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3738 {
3739 int size = bytesize (type, BLKmode);
3740 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3741 return size > 0 && size <= 8 ? 0 : 1;
3742 return size > 0 && size <= 4 ? 0 : 1;
3743 }
3744
/* Worker for TARGET_NARROW_VOLATILE_BITFIELD: always access volatile
   bit-fields in the narrowest mode.  (The old unreachable
   "return false;" after the return has been removed.)  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3751
3752 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3753
3754 rtx
3755 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3756 {
3757 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3758 return gen_rtx_REG (TYPE_MODE (type), 48);
3759 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3760 }
3761
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value
   except that there is no vector-type special case: the result is
   always in RETURN_VALUE_REGNUM.  */

rtx
mep_libcall_value (machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3769
3770 /* Handle pipeline hazards. */
3771
/* Classification of the opcodes tracked for hazard avoidance, and
   their printable names (indexed by op_num).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Opcode class of the previously-emitted instruction; 0 (op_none)
   initially.  */
static int prev_opcode = 0;
3776
3777 /* This isn't as optimal as it could be, because we don't know what
3778 control register the STC opcode is storing in. We only need to add
3779 the nop if it's the relevant register, but we add it for irrelevant
3780 registers also. */
3781
3782 void
3783 mep_asm_output_opcode (FILE *file, const char *ptr)
3784 {
3785 int this_opcode = op_none;
3786 const char *hazard = 0;
3787
3788 switch (*ptr)
3789 {
3790 case 'f':
3791 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3792 this_opcode = op_fsft;
3793 break;
3794 case 'r':
3795 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3796 this_opcode = op_ret;
3797 break;
3798 case 's':
3799 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3800 this_opcode = op_stc;
3801 break;
3802 }
3803
3804 if (prev_opcode == op_stc && this_opcode == op_fsft)
3805 hazard = "nop";
3806 if (prev_opcode == op_stc && this_opcode == op_ret)
3807 hazard = "nop";
3808
3809 if (hazard)
3810 fprintf(file, "%s\t# %s-%s hazard\n\t",
3811 hazard, opnames[prev_opcode], opnames[this_opcode]);
3812
3813 prev_opcode = this_opcode;
3814 }
3815
3816 /* Handle attributes. */
3817
3818 static tree
3819 mep_validate_based_tiny (tree *node, tree name, tree args,
3820 int flags ATTRIBUTE_UNUSED, bool *no_add)
3821 {
3822 if (TREE_CODE (*node) != VAR_DECL
3823 && TREE_CODE (*node) != POINTER_TYPE
3824 && TREE_CODE (*node) != TYPE_DECL)
3825 {
3826 warning (0, "%qE attribute only applies to variables", name);
3827 *no_add = true;
3828 }
3829 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3830 {
3831 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3832 {
3833 warning (0, "address region attributes not allowed with auto storage class");
3834 *no_add = true;
3835 }
3836 /* Ignore storage attribute of pointed to variable: char __far * x; */
3837 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3838 {
3839 warning (0, "address region attributes on pointed-to types ignored");
3840 *no_add = true;
3841 }
3842 }
3843
3844 return NULL_TREE;
3845 }
3846
3847 static int
3848 mep_multiple_address_regions (tree list, bool check_section_attr)
3849 {
3850 tree a;
3851 int count_sections = 0;
3852 int section_attr_count = 0;
3853
3854 for (a = list; a; a = TREE_CHAIN (a))
3855 {
3856 if (is_attribute_p ("based", TREE_PURPOSE (a))
3857 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3858 || is_attribute_p ("near", TREE_PURPOSE (a))
3859 || is_attribute_p ("far", TREE_PURPOSE (a))
3860 || is_attribute_p ("io", TREE_PURPOSE (a)))
3861 count_sections ++;
3862 if (check_section_attr)
3863 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3864 }
3865
3866 if (check_section_attr)
3867 return section_attr_count;
3868 else
3869 return count_sections;
3870 }
3871
/* Fetch the attribute list for DECL: for types, the type attributes;
   for decls, the decl attributes, falling back to the attributes of
   the decl's type.  The expansion is fully parenthesized so the macro
   is safe inside larger expressions.  */
#define MEP_ATTRIBUTES(decl)			\
  (TYPE_P (decl) ? TYPE_ATTRIBUTES (decl)	\
   : DECL_ATTRIBUTES (decl)			\
     ? (DECL_ATTRIBUTES (decl))			\
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3877
3878 static tree
3879 mep_validate_near_far (tree *node, tree name, tree args,
3880 int flags ATTRIBUTE_UNUSED, bool *no_add)
3881 {
3882 if (TREE_CODE (*node) != VAR_DECL
3883 && TREE_CODE (*node) != FUNCTION_DECL
3884 && TREE_CODE (*node) != METHOD_TYPE
3885 && TREE_CODE (*node) != POINTER_TYPE
3886 && TREE_CODE (*node) != TYPE_DECL)
3887 {
3888 warning (0, "%qE attribute only applies to variables and functions",
3889 name);
3890 *no_add = true;
3891 }
3892 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3893 {
3894 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3895 {
3896 warning (0, "address region attributes not allowed with auto storage class");
3897 *no_add = true;
3898 }
3899 /* Ignore storage attribute of pointed to variable: char __far * x; */
3900 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3901 {
3902 warning (0, "address region attributes on pointed-to types ignored");
3903 *no_add = true;
3904 }
3905 }
3906 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3907 {
3908 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3909 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3910 DECL_ATTRIBUTES (*node) = NULL_TREE;
3911 }
3912 return NULL_TREE;
3913 }
3914
3915 static tree
3916 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3917 int flags ATTRIBUTE_UNUSED, bool *no_add)
3918 {
3919 if (TREE_CODE (*node) != FUNCTION_DECL
3920 && TREE_CODE (*node) != METHOD_TYPE)
3921 {
3922 warning (0, "%qE attribute only applies to functions", name);
3923 *no_add = true;
3924 }
3925 return NULL_TREE;
3926 }
3927
3928 static tree
3929 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3930 int flags ATTRIBUTE_UNUSED, bool *no_add)
3931 {
3932 tree function_type;
3933
3934 if (TREE_CODE (*node) != FUNCTION_DECL)
3935 {
3936 warning (0, "%qE attribute only applies to functions", name);
3937 *no_add = true;
3938 return NULL_TREE;
3939 }
3940
3941 if (DECL_DECLARED_INLINE_P (*node))
3942 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3943 DECL_UNINLINABLE (*node) = 1;
3944
3945 function_type = TREE_TYPE (*node);
3946
3947 if (TREE_TYPE (function_type) != void_type_node)
3948 error ("interrupt function must have return type of void");
3949
3950 if (prototype_p (function_type)
3951 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3952 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3953 error ("interrupt function must have no arguments");
3954
3955 return NULL_TREE;
3956 }
3957
3958 static tree
3959 mep_validate_io_cb (tree *node, tree name, tree args,
3960 int flags ATTRIBUTE_UNUSED, bool *no_add)
3961 {
3962 if (TREE_CODE (*node) != VAR_DECL)
3963 {
3964 warning (0, "%qE attribute only applies to variables", name);
3965 *no_add = true;
3966 }
3967
3968 if (args != NULL_TREE)
3969 {
3970 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3971 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3972 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3973 {
3974 warning (0, "%qE attribute allows only an integer constant argument",
3975 name);
3976 *no_add = true;
3977 }
3978 }
3979
3980 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3981 TREE_THIS_VOLATILE (*node) = 1;
3982
3983 return NULL_TREE;
3984 }
3985
3986 static tree
3987 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3988 int flags ATTRIBUTE_UNUSED, bool *no_add)
3989 {
3990 if (TREE_CODE (*node) != FUNCTION_TYPE
3991 && TREE_CODE (*node) != FUNCTION_DECL
3992 && TREE_CODE (*node) != METHOD_TYPE
3993 && TREE_CODE (*node) != FIELD_DECL
3994 && TREE_CODE (*node) != TYPE_DECL)
3995 {
3996 static int gave_pointer_note = 0;
3997 static int gave_array_note = 0;
3998 static const char * given_type = NULL;
3999
4000 given_type = get_tree_code_name (TREE_CODE (*node));
4001 if (TREE_CODE (*node) == POINTER_TYPE)
4002 given_type = "pointers";
4003 if (TREE_CODE (*node) == ARRAY_TYPE)
4004 given_type = "arrays";
4005
4006 if (given_type)
4007 warning (0, "%qE attribute only applies to functions, not %s",
4008 name, given_type);
4009 else
4010 warning (0, "%qE attribute only applies to functions",
4011 name);
4012 *no_add = true;
4013
4014 if (TREE_CODE (*node) == POINTER_TYPE
4015 && !gave_pointer_note)
4016 {
4017 inform (input_location,
4018 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4019 " typedef int (__vliw *vfuncptr) ();");
4020 gave_pointer_note = 1;
4021 }
4022
4023 if (TREE_CODE (*node) == ARRAY_TYPE
4024 && !gave_array_note)
4025 {
4026 inform (input_location,
4027 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4028 " typedef int (__vliw *vfuncptr[]) ();");
4029 gave_array_note = 1;
4030 }
4031 }
4032 if (!TARGET_VLIW)
4033 error ("VLIW functions are not allowed without a VLIW configuration");
4034 return NULL_TREE;
4035 }
4036
/* Machine attribute table.  Each entry gives the attribute name, its
   min/max argument counts, where it may appear (decl/type/function),
   its validation handler, and whether it affects type identity (none
   of these do).  "io" and "cb" accept one optional address argument;
   "vliw" attaches to types rather than decls.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,           0, 0, false, false, false, NULL, false }
};
4053
4054 static bool
4055 mep_function_attribute_inlinable_p (const_tree callee)
4056 {
4057 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4058 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4059 return (lookup_attribute ("disinterrupt", attrs) == 0
4060 && lookup_attribute ("interrupt", attrs) == 0);
4061 }
4062
4063 static bool
4064 mep_can_inline_p (tree caller, tree callee)
4065 {
4066 if (TREE_CODE (callee) == ADDR_EXPR)
4067 callee = TREE_OPERAND (callee, 0);
4068
4069 if (!mep_vliw_function_p (caller)
4070 && mep_vliw_function_p (callee))
4071 {
4072 return false;
4073 }
4074 return true;
4075 }
4076
/* Flag bits recorded per function name by the #pragma handlers.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* Per-function-name record: which pragma flags were noted, and which
   of them have actually been consumed by a lookup.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
};
4085
4086 struct pragma_traits : default_hashmap_traits
4087 {
4088 static hashval_t hash (const char *s) { return htab_hash_string (s); }
4089 static bool
4090 equal_keys (const char *a, const char *b)
4091 {
4092 return strcmp (a, b) == 0;
4093 }
4094 };
4095
/* Hash table mapping function names to the pragma flags noted for
   them; lazily created by mep_note_pragma_flag and GC-rooted.  */
static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
  pragma_htab;
4099
4100 static void
4101 mep_note_pragma_flag (const char *funcname, int flag)
4102 {
4103 if (!pragma_htab)
4104 pragma_htab
4105 = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
4106
4107 bool existed;
4108 const char *name = ggc_strdup (funcname);
4109 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4110 if (!existed)
4111 {
4112 slot->flag = 0;
4113 slot->used = 0;
4114 }
4115 slot->flag |= flag;
4116 }
4117
4118 static bool
4119 mep_lookup_pragma_flag (const char *funcname, int flag)
4120 {
4121 if (!pragma_htab)
4122 return false;
4123
4124 if (funcname[0] == '@' && funcname[2] == '.')
4125 funcname += 3;
4126
4127 pragma_entry *slot = pragma_htab->get (funcname);
4128 if (slot && (slot->flag & flag))
4129 {
4130 slot->used |= flag;
4131 return true;
4132 }
4133 return false;
4134 }
4135
/* Return true if FUNCNAME was named in a "#pragma call".  */

bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record that FUNCNAME was named in a "#pragma call".  */

void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* Return true if FUNCNAME was named in a "#pragma disinterrupt".  */

bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record that FUNCNAME was named in a "#pragma disinterrupt".  */

void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4159
4160 bool
4161 note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4162 void *)
4163 {
4164 if ((e.flag & FUNC_DISINTERRUPT)
4165 && !(e.used & FUNC_DISINTERRUPT))
4166 warning (0, "\"#pragma disinterrupt %s\" not used", s);
4167 return 1;
4168 }
4169
4170 void
4171 mep_file_cleanups (void)
4172 {
4173 if (pragma_htab)
4174 pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
4175 }
4176
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4181
/* Bridge for the pragma handlers: snapshot the register info.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Bridge for the pragma handlers: recompute derived register info.  */

void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Bridge for the pragma handlers: reinitialize the register info.  */

void
mep_init_regs (void)
{
  init_regs ();
}
4199
4200
4201
4202 static int
4203 mep_attrlist_to_encoding (tree list, tree decl)
4204 {
4205 if (mep_multiple_address_regions (list, false) > 1)
4206 {
4207 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4208 TREE_PURPOSE (TREE_CHAIN (list)),
4209 DECL_NAME (decl),
4210 DECL_SOURCE_LINE (decl));
4211 TREE_CHAIN (list) = NULL_TREE;
4212 }
4213
4214 while (list)
4215 {
4216 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4217 return 'b';
4218 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4219 return 't';
4220 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4221 return 'n';
4222 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4223 return 'f';
4224 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4225 {
4226 if (TREE_VALUE (list)
4227 && TREE_VALUE (TREE_VALUE (list))
4228 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4229 {
4230 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4231 if (location >= 0
4232 && location <= 0x1000000)
4233 return 'i';
4234 }
4235 return 'I';
4236 }
4237 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4238 return 'c';
4239 list = TREE_CHAIN (list);
4240 }
4241 if (TARGET_TF
4242 && TREE_CODE (decl) == FUNCTION_DECL
4243 && DECL_SECTION_NAME (decl) == 0)
4244 return 'f';
4245 return 0;
4246 }
4247
4248 static int
4249 mep_comp_type_attributes (const_tree t1, const_tree t2)
4250 {
4251 int vliw1, vliw2;
4252
4253 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4254 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4255
4256 if (vliw1 != vliw2)
4257 return 0;
4258
4259 return 1;
4260 }
4261
/* Worker for TARGET_INSERT_ATTRIBUTES.  Attach "disinterrupt" to
   functions named in a "#pragma disinterrupt", and give global
   variables without an explicit address-region attribute a default
   region (based/tiny/far) chosen by size — or, with TARGET_RAND_TPGP,
   pseudo-randomly from a hash of the name.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only variables with static storage get a default region.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x; */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Try the incoming attributes first, then the decl's own, then the
     decl's type's.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* Pick a section pseudo-randomly from a hash of the name.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* mep_const_section, if set, overrides the size-based choice for
     read-only data ("near" means: leave the default section).  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4412
/* Worker for TARGET_ENCODE_SECTION_INFO.  On the first encoding pass,
   if DECL carries an address-region attribute, rename its symbol to
   "@<x>.<name>" where <x> is the encoding character, and warn when
   the object is larger than that region allows.  */

static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Extract the symbol name; RTL is either a SYMBOL_REF or a MEM
     wrapping one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@x." (3 chars) + old name + NUL.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Capacity of each region; maxsize 0 means no size check.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4488
/* Worker for TARGET_STRIP_NAME_ENCODING.  Strip any '*' prefixes and
   any "@x." section-encoding prefixes (added by
   mep_encode_section_info) from SYM and return the bare name.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      /* Check sym[1] first so a string ending in "@" cannot cause a
	 read past the terminating NUL.  */
      else if (sym[0] == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4502
/* Implement TARGET_ASM_SELECT_SECTION.  Choose the output section for
   DECL based on the "@X." encoding that was prepended to its
   assembler name, falling back to the generic text/rodata/data
   choice when no encoding is present.  */

static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL may live in a read-only section: a variable
     qualifies only if it is TREE_READONLY, has no side effects, and
     has an initializer that is constant (or error_mark_node).  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Pick the one-character encoding out of an "@X." prefix;
	 'f' marks far-text functions.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* NOTE(review): the unique-section branch does not return here;
	 control falls through to the readonly_data_section return at
	 the bottom.  Presumably the per-decl section name set by
	 mep_unique_section takes precedence — confirm.  */
      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions get their own text sections so the
	     assembler can switch into VLIW mode for them.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    /* Tiny data: read-only, initialized, and bss variants.  */
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* "io" variables overlay I/O registers, so an initializer
	       is meaningless; diagnose and fall back to .data.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4593
/* Implement TARGET_ASM_UNIQUE_SECTION.  Give DECL a section name of
   the form <prefix><symbol>, where the prefix reflects the MeP
   section class derived from the "@X." name encoding and COMDAT
   decls use the .gnu.linkonce variants.  */

static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed as prefixes[class][comdat-p].  The class indices are
     hard-coded below, so keep the two in sync.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly "@X."-encoded) name from the RTL over the
     plain assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* Base class: core or VLIW text for functions, rodata for
     read-only data, data for everything else.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the class from the "@X." encoding, then strip the
     encoding off the symbol name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4665
4666 /* Given a decl, a section name, and whether the decl initializer
4667 has relocs, choose attributes for the section. */
4668
4669 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4670
4671 static unsigned int
4672 mep_section_type_flags (tree decl, const char *name, int reloc)
4673 {
4674 unsigned int flags = default_section_type_flags (decl, name, reloc);
4675
4676 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4677 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4678 flags |= SECTION_MEP_VLIW;
4679
4680 return flags;
4681 }
4682
4683 /* Switch to an arbitrary section NAME with attributes as specified
4684 by FLAGS. ALIGN specifies any known alignment requirements for
4685 the section; 0 if the default should be used.
4686
4687 Differs from the standard ELF version only in support of VLIW mode. */
4688
4689 static void
4690 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4691 {
4692 char flagchars[8], *f = flagchars;
4693 const char *type;
4694
4695 if (!(flags & SECTION_DEBUG))
4696 *f++ = 'a';
4697 if (flags & SECTION_WRITE)
4698 *f++ = 'w';
4699 if (flags & SECTION_CODE)
4700 *f++ = 'x';
4701 if (flags & SECTION_SMALL)
4702 *f++ = 's';
4703 if (flags & SECTION_MEP_VLIW)
4704 *f++ = 'v';
4705 *f = '\0';
4706
4707 if (flags & SECTION_BSS)
4708 type = "nobits";
4709 else
4710 type = "progbits";
4711
4712 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4713 name, flagchars, type);
4714
4715 if (flags & SECTION_CODE)
4716 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4717 asm_out_file);
4718 }
4719
/* Output a common-style definition of NAME (SIZE bytes, ALIGN bits of
   alignment; GLOBAL nonzero for .comm rather than .local+.comm
   symbols).  The MeP "@X." encodings are handled specially: io/cb
   variables become absolute symbols, and based/tiny/far variables are
   laid out explicitly in their own bss-like sections.  */

void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb variable: emit it as an absolute symbol whose value is
	 the address supplied by the attribute.  With no address,
	 emit nothing at all.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      /* SEC doubles as a "recognized encoding" flag below; its string
	 value is not printed.  */
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (bits) into a power-of-two byte alignment.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* No encoding: emit an ordinary .comm (preceded by .local for
     non-global symbols).  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4797
4798 /* Trampolines. */
4799
4800 static void
4801 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4802 {
4803 rtx addr = XEXP (m_tramp, 0);
4804 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4805
4806 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4807 LCT_NORMAL, VOIDmode, 3,
4808 addr, Pmode,
4809 fnaddr, Pmode,
4810 static_chain, Pmode);
4811 }
4812
4813 /* Experimental Reorg. */
4814
/* Return true if REG (a hard register, or NULL to search for any MEM)
   is referenced anywhere inside IN.  When MODES_TOO is nonzero, a
   register only matches if its machine mode matches as well.  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* In register-search mode, look inside the address; in
	 MEM-search mode, any MEM is a match.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can never contain a REG or MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursive walk over the remaining operand formats.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4880
4881 #define EXPERIMENTAL_REGMOVE_REORG 1
4882
4883 #if EXPERIMENTAL_REGMOVE_REORG
4884
/* Return 1 if hard registers R1 and R2 belong to the same MeP class
   for move-combining purposes (both general registers or both
   coprocessor registers), 0 otherwise.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
    return 1;

  return (CR_REGNO_P (r1) && CR_REGNO_P (r2)) ? 1 : 0;
}
4894
4895 static void
4896 mep_reorg_regmove (rtx_insn *insns)
4897 {
4898 rtx_insn *insn, *next, *follow;
4899 rtx pat, *where;
4900 int count = 0, done = 0, replace, before = 0;
4901
4902 if (dump_file)
4903 for (insn = insns; insn; insn = NEXT_INSN (insn))
4904 if (NONJUMP_INSN_P (insn))
4905 before++;
4906
4907 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4908 set that uses the r2 and r2 dies there. We replace r2 with r1
4909 and see if it's still a valid insn. If so, delete the first set.
4910 Copied from reorg.c. */
4911
4912 while (!done)
4913 {
4914 done = 1;
4915 for (insn = insns; insn; insn = next)
4916 {
4917 next = next_nonnote_nondebug_insn (insn);
4918 if (! NONJUMP_INSN_P (insn))
4919 continue;
4920 pat = PATTERN (insn);
4921
4922 replace = 0;
4923
4924 if (GET_CODE (pat) == SET
4925 && GET_CODE (SET_SRC (pat)) == REG
4926 && GET_CODE (SET_DEST (pat)) == REG
4927 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4928 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4929 {
4930 follow = next_nonnote_nondebug_insn (insn);
4931 if (dump_file)
4932 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4933
4934 while (follow && NONJUMP_INSN_P (follow)
4935 && GET_CODE (PATTERN (follow)) == SET
4936 && !dead_or_set_p (follow, SET_SRC (pat))
4937 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4938 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4939 {
4940 if (dump_file)
4941 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4942 follow = next_nonnote_insn (follow);
4943 }
4944
4945 if (dump_file)
4946 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4947 if (follow && NONJUMP_INSN_P (follow)
4948 && GET_CODE (PATTERN (follow)) == SET
4949 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4950 {
4951 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4952 {
4953 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4954 {
4955 replace = 1;
4956 where = & SET_SRC (PATTERN (follow));
4957 }
4958 }
4959 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4960 {
4961 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4962 {
4963 replace = 1;
4964 where = & PATTERN (follow);
4965 }
4966 }
4967 }
4968 }
4969
4970 /* If so, follow is the corresponding insn */
4971 if (replace)
4972 {
4973 if (dump_file)
4974 {
4975 rtx_insn *x;
4976
4977 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4978 for (x = insn; x ;x = NEXT_INSN (x))
4979 {
4980 print_rtl_single (dump_file, x);
4981 if (x == follow)
4982 break;
4983 fprintf (dump_file, "\n");
4984 }
4985 }
4986
4987 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4988 follow, where))
4989 {
4990 count ++;
4991 delete_insn (insn);
4992 if (dump_file)
4993 {
4994 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4995 print_rtl_single (dump_file, follow);
4996 }
4997 done = 0;
4998 }
4999 }
5000 }
5001 }
5002
5003 if (dump_file)
5004 {
5005 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5006 fprintf (dump_file, "=====\n");
5007 }
5008 }
5009 #endif
5010
5011
5012 /* Figure out where to put LABEL, which is the label for a repeat loop.
5013 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5014 the loop ends just before LAST_INSN. If SHARED, insns other than the
5015 "repeat" might use LABEL to jump to the loop's continuation point.
5016
5017 Return the last instruction in the adjusted loop. */
5018
static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  When the label
     is shared we cannot pull insns into the epilogue, so NEXT stays at
     LAST_INSN; otherwise scan backwards for up to two insns that are
     safe to place in the repeat epilogue.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a VLIW bundle, consider its second insn.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  The repeat epilogue holds exactly two insns, so
     pad with nops when fewer real insns were pulled inside it.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5109
5110
5111 void
5112 mep_emit_doloop (rtx *operands, int is_end)
5113 {
5114 rtx tag;
5115
5116 if (cfun->machine->doloop_tags == 0
5117 || cfun->machine->doloop_tag_from_end == is_end)
5118 {
5119 cfun->machine->doloop_tags++;
5120 cfun->machine->doloop_tag_from_end = is_end;
5121 }
5122
5123 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5124 if (is_end)
5125 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
5126 else
5127 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5128 }
5129
5130
5131 /* Code for converting doloop_begins and doloop_ends into valid
5132 MeP instructions. A doloop_begin is just a placeholder:
5133
5134 $count = unspec ($count)
5135
5136 where $count is initially the number of iterations - 1.
5137 doloop_end has the form:
5138
5139 if ($count-- == 0) goto label
5140
5141 The counter variable is private to the doloop insns, nothing else
5142 relies on its value.
5143
5144 There are three cases, in decreasing order of preference:
5145
5146 1. A loop has exactly one doloop_begin and one doloop_end.
5147 The doloop_end branches to the first instruction after
5148 the doloop_begin.
5149
5150 In this case we can replace the doloop_begin with a repeat
5151 instruction and remove the doloop_end. I.e.:
5152
5153 $count1 = unspec ($count1)
5154 label:
5155 ...
5156 insn1
5157 insn2
5158 if ($count2-- == 0) goto label
5159
5160 becomes:
5161
5162 repeat $count1,repeat_label
5163 label:
5164 ...
5165 repeat_label:
5166 insn1
5167 insn2
5168 # end repeat
5169
5170 2. As for (1), except there are several doloop_ends. One of them
5171 (call it X) falls through to a label L. All the others fall
5172 through to branches to L.
5173
5174 In this case, we remove X and replace the other doloop_ends
5175 with branches to the repeat label. For example:
5176
5177 $count1 = unspec ($count1)
5178 start:
5179 ...
5180 if ($count2-- == 0) goto label
5181 end:
5182 ...
5183 if ($count3-- == 0) goto label
5184 goto end
5185
5186 becomes:
5187
5188 repeat $count1,repeat_label
5189 start:
5190 ...
5191 repeat_label:
5192 nop
5193 nop
5194 # end repeat
5195 end:
5196 ...
5197 goto repeat_label
5198
5199 3. The fallback case. Replace doloop_begins with:
5200
5201 $count = $count + 1
5202
5203 Replace doloop_ends with the equivalent of:
5204
5205 $count = $count - 1
5206 if ($count == 0) goto label
5207
5208 Note that this might need a scratch register if $count
5209 is stored in memory. */
5210
/* A structure describing one doloop_begin.  Allocated on the stack by
   mep_reorg_repeat and chained onto a struct mep_doloop.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag (in no particular order).  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5222
/* A structure describing a doloop_end.  Allocated on the stack by
   mep_reorg_repeat; the list head is ordered to be the preferred
   candidate for case (2) above.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5247
5248
/* One do-while loop, identified by its tag; see mep_reorg_repeat.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5258
5259
5260 /* Return true if LOOP can be converted into repeat/repeat_end form
5261 (that is, if it matches cases (1) or (2) above). */
5262
5263 static bool
5264 mep_repeat_loop_p (struct mep_doloop *loop)
5265 {
5266 struct mep_doloop_end *end;
5267 rtx fallthrough;
5268
5269 /* There must be exactly one doloop_begin and at least one doloop_end. */
5270 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5271 return false;
5272
5273 /* The first doloop_end (X) must branch back to the insn after
5274 the doloop_begin. */
5275 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5276 return false;
5277
5278 /* All the other doloop_ends must branch to the same place as X.
5279 When the branch isn't taken, they must jump to the instruction
5280 after X. */
5281 fallthrough = loop->end->fallthrough;
5282 for (end = loop->end->next; end != 0; end = end->next)
5283 if (end->label != loop->end->label
5284 || !simplejump_p (end->fallthrough)
5285 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5286 return false;
5287
5288 return true;
5289 }
5290
5291
5292 /* The main repeat reorg function. See comment above for details. */
5293
static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, one per tag.  All the
     per-loop records below are alloca'd too, so everything is freed
     automatically when this function returns.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.
	       NOTE(review): regnos 0-15 appear to be the MeP general
	       registers here — confirm against mep.h.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5440
5441
/* Return true if the condition of branch INSN can be inverted
   (EQ<->NE, LT<->GE) with the result still recognized by the
   backend.  The insn is restored to its original condition before
   returning.  */

static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Temporarily flip the comparison code in place.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Try to recognize the flipped insn, then undo the mutation and
     invalidate the cached insn code again.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5479
/* Invert the condition of branch INSN (EQ<->NE, LT<->GE) and redirect
   it to a fresh label emitted just after AFTER.  The old target label
   is deleted when this was its only use.  Callers should have checked
   mep_invertable_branch_p first.  */

static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Point whichever arm of the IF_THEN_ELSE held the old label at
     the new one, dropping the old label if it has no other users.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5525
/* Convert suitable backward conditional branches into hardware
   "erepeat" loops: for each invertable branch, scan backwards to its
   target label and, if the intervening insns are safe, plant an
   erepeat at the loop top and an erepeat_end at the bottom.  */

static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch towards its target label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* An unconditional loop branch is simply deleted.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* A conditional branch becomes the loop-exit test.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5637
5638 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5639 always do this on its own. */
5640
static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return? */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, dropping
		   the target label if we were its last user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5675
5676
/* Fold pairs of adjacent "reg = reg + const" (addsi3) insns that
   target the same register into a single add, splicing the second
   insn out of the chain by hand.  INSNS is the head of the chain.  */

static void
mep_reorg_addcombine (rtx_insn *insns)
{
  rtx_insn *i, *n;

  for (i = insns; i; i = NEXT_INSN (i))
    /* Look for I: $rX = $rX + CONST.  */
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	/* ... immediately followed by N of the same shape.  */
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* Same destination register, and the combined constant
	       still fits a 16-bit signed range.  NOTE(review): the
	       upper bound excludes 32767 itself (strict <) while the
	       lower bound admits -32767 only -- asymmetric; confirm
	       whether that is deliberate.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I, then unlink N from the
		   doubly-linked insn chain directly.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		SET_NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  SET_PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5712
5713 /* If this insn adjusts the stack, return the adjustment, else return
5714 zero. */
5715 static int
5716 add_sp_insn_p (rtx_insn *insn)
5717 {
5718 rtx pat;
5719
5720 if (! single_set (insn))
5721 return 0;
5722 pat = PATTERN (insn);
5723 if (GET_CODE (SET_DEST (pat)) != REG)
5724 return 0;
5725 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5726 return 0;
5727 if (GET_CODE (SET_SRC (pat)) != PLUS)
5728 return 0;
5729 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5730 return 0;
5731 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5732 return 0;
5733 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5734 return 0;
5735 return INTVAL (XEXP (SET_SRC (pat), 1));
5736 }
5737
/* Check for trivial functions that set up an unneeded stack frame:
   a leading $sp -= N and a matching trailing $sp += N with no other
   use of $sp (and no calls) in between.  When found, delete both
   stack adjustments.  INSNS is the head of the insn chain.  */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body.  The loop deliberately stops at the last real
     insn (NEXT is null there), so the final return is not examined.  */
  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Allow exactly one candidate epilogue adjustment, and it
	     must exactly undo the prologue adjustment.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      /* Any other mention of $sp, or any call (which needs a valid
	 stack), means the frame is actually used -- give up.  */
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5792
/* MeP machine-dependent reorg pass: runs the backend's late RTL
   cleanups in a fixed order (presumably registered as
   TARGET_MACHINE_DEPENDENT_REORG -- the hook table is not visible
   here).  */

static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat generation is suppressed when profiling, and in interrupt
     handlers unless the RPB register is known to be saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5824
5825 \f
5826
5827 /*----------------------------------------------------------------------*/
5828 /* Builtins */
5829 /*----------------------------------------------------------------------*/
5830
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* One entry of the coprocessor configuration table: a configuration
   name (as matched against mep_config_string) and the ISA bitmask it
   selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* All known configurations, terminated by a null entry.  The real
   entries come from COPROC_SELECTION_TABLE when that is defined.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5858
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration:
     the first table entry by default, overridden by a -mconfig=
     string match when one is given.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Iterating forwards while
     chaining to the previous entry guarantees the documented
     invariant mep_intrinsic_chain[X] < X.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, forcing core<->copro moves on.  Looks like a leftover
     workaround -- confirm before removing either statement.  */
  mep_have_core_copro_moves_p = 1;
}
5900
/* Type nodes used when declaring the intrinsic functions; all are
   initialized by mep_init_builtins (called once only).  */

static tree cp_data_bus_int_type_node;  /* long long or long, per TARGET_64BIT_CR_REGS.  */
static tree opaque_vector_type_node;    /* opaque 8 x QI vector type.  */
static tree v8qi_type_node;             /* 8 x QI.  */
static tree v4hi_type_node;             /* 4 x HI.  */
static tree v2si_type_node;             /* 2 x SI.  */
static tree v8uqi_type_node;            /* 8 x unsigned QI.  */
static tree v4uhi_type_node;            /* 4 x unsigned HI.  */
static tree v2usi_type_node;            /* 2 x unsigned SI.  */
5911
5912 static tree
5913 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5914 {
5915 switch (cr)
5916 {
5917 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5918 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5919 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5920 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5921 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5922 case cgen_regnum_operand_type_CHAR: return char_type_node;
5923 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5924 case cgen_regnum_operand_type_SI: return intSI_type_node;
5925 case cgen_regnum_operand_type_DI: return intDI_type_node;
5926 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5927 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5928 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5929 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5930 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5931 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5932 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5933 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5934 default:
5935 return void_type_node;
5936 }
5937 }
5938
/* Declare all available intrinsic functions and the coprocessor type
   nodes they use.  Called once only.  */

static void
mep_init_builtins (void)
{
  size_t i;

  /* The "data bus int" width tracks the coprocessor register width.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the types visible to user code under their "cp_*" names.  */
  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicates: declare each intrinsic only once.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	/* NOTE(review): the builtin is declared with no argument
	   types; arguments are checked at expansion time in
	   mep_expand_builtin -- confirm this is intentional.  */
	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6005
6006 /* Report the unavailablity of the given intrinsic. */
6007
6008 #if 1
6009 static void
6010 mep_intrinsic_unavailable (int intrinsic)
6011 {
6012 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6013
6014 if (already_reported_p[intrinsic])
6015 return;
6016
6017 if (mep_intrinsic_insn[intrinsic] < 0)
6018 error ("coprocessor intrinsic %qs is not available in this configuration",
6019 cgen_intrinsics[intrinsic]);
6020 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6021 error ("%qs is not available in VLIW functions",
6022 cgen_intrinsics[intrinsic]);
6023 else
6024 error ("%qs is not available in non-VLIW functions",
6025 cgen_intrinsics[intrinsic]);
6026
6027 already_reported_p[intrinsic] = 1;
6028 }
6029 #endif
6030
6031
6032 /* See if any implementation of INTRINSIC is available to the
6033 current function. If so, store the most general implementation
6034 in *INSN_PTR and return true. Return false otherwise. */
6035
6036 static bool
6037 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6038 {
6039 int i;
6040
6041 i = mep_intrinsic_insn[intrinsic];
6042 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6043 i = mep_intrinsic_chain[i];
6044
6045 if (i >= 0)
6046 {
6047 *insn_ptr = &cgen_insns[i];
6048 return true;
6049 }
6050 return false;
6051 }
6052
6053
6054 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6055 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6056 try using a work-alike instead. In this case, the returned insn
6057 may have three operands rather than two. */
6058
6059 static bool
6060 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6061 {
6062 size_t i;
6063
6064 if (intrinsic == mep_cmov)
6065 {
6066 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6067 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6068 return true;
6069 return false;
6070 }
6071 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6072 }
6073
6074
6075 /* If ARG is a register operand that is the same size as MODE, convert it
6076 to MODE using a subreg. Otherwise return ARG as-is. */
6077
6078 static rtx
6079 mep_convert_arg (machine_mode mode, rtx arg)
6080 {
6081 if (GET_MODE (arg) != mode
6082 && register_operand (arg, VOIDmode)
6083 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6084 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6085 return arg;
6086 }
6087
6088
6089 /* Apply regnum conversions to ARG using the description given by REGNUM.
6090 Return the new argument on success and null on failure. */
6091
6092 static rtx
6093 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6094 {
6095 if (regnum->count == 0)
6096 return arg;
6097
6098 if (GET_CODE (arg) != CONST_INT
6099 || INTVAL (arg) < 0
6100 || INTVAL (arg) >= regnum->count)
6101 return 0;
6102
6103 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6104 }
6105
6106
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Return the legitimized rtx, or null if ARG cannot be made to
   match OPERAND.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* Nothing worked: the argument is incompatible with the operand.  */
  return 0;
}
6164
6165
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.

   For constant arguments bound to a known immediate predicate we can
   give a precise range/alignment diagnostic; otherwise fall back to
   a generic incompatible-type error.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  /* Distinguish "out of range" from "in range but misaligned":
	     the upper bound is exclusive.  */
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6196
/* Expand a call EXP to a MeP coprocessor intrinsic into RTL.
   Returns TARGET (possibly holding the result), or NULL_RTX after
   reporting an error.

   NOTE(review): TARGET is marked ATTRIBUTE_UNUSED but is in fact
   used below -- the attribute is harmless but misleading.  */

static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn writes a result through its first regnum slot,
     reserve arg[0] for the result and drop it from the user-visible
     argument count.  NOTE(review): for cret_p > 1 the ++ and the
     following -- cancel out, leaving the count unchanged -- confirm
     that is the intended semantics.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* Map register-group indices onto hard registers; null means
	 the index was out of range.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the result slot(s): reuse TARGET when its mode matches,
     otherwise use a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  /* If the result did not land in TARGET directly, copy it there.  */
  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6376
/* MeP never supports vector modes natively; SIMD-style operations are
   only reachable through the coprocessor intrinsics declared above.  */

static bool
mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6382 \f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  X is one sub-rtx of the insn pattern being
   scanned.  */

static bool
global_reg_mentioned_p_1 (const_rtx x)
{
  int regno;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* Only hard-register subregs can name a global register;
	     the lookup uses the subreg-adjusted register number.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return true;
	  return false;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return true;
      return false;

    case CALL:
      /* A non-constant call might use a global register.  */
      return true;

    default:
      break;
    }

  return false;
}
6419
/* Returns nonzero if X mentions a global register.  X may be either
   an insn (in which case its pattern -- or, for calls, its function
   usage list -- is scanned) or a bare rtx expression.  */

static bool
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
	{
	  /* Non-const, non-pure calls may implicitly use any global
	     register; otherwise scan the recorded usage list.  */
	  if (! RTL_CONST_OR_PURE_CALL_P (x))
	    return true;
	  x = CALL_INSN_FUNCTION_USAGE (x);
	  if (x == 0)
	    return false;
	}
      else
	x = PATTERN (x);
    }

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    if (global_reg_mentioned_p_1 (*iter))
      return true;
  return false;
}
6445 /* Scheduling hooks for VLIW mode.
6446
6447 Conceptually this is very simple: we have a two-pack architecture
6448 that takes one core insn and one coprocessor insn to make up either
6449 a 32- or 64-bit instruction word (depending on the option bit set in
6450 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6451 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6452 and one 48-bit cop insn or two 32-bit core/cop insns.
6453
6454 In practice, instruction selection will be a bear. Consider in
6455 VL64 mode the following insns
6456
6457 add $1, 1
6458 cmov $cr0, $0
6459
6460 these cannot pack, since the add is a 16-bit core insn and cmov
6461 is a 32-bit cop insn. However,
6462
6463 add3 $1, $1, 1
6464 cmov $cr0, $0
6465
6466 packs just fine. For good VLIW code generation in VL64 mode, we
6467 will have to have 32-bit alternatives for many of the common core
6468 insns. Not implemented. */
6469
/* Scheduler dependence-cost hook (presumably registered as
   TARGET_SCHED_ADJUST_COST -- hook table not visible here).  Returns
   the cost of the dependence LINK between INSN and DEP_INSN, given
   the default COST.  */

static int
mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  int cost_specified;

  /* REG_NOTE_KIND != 0 means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6516
6517 /* ??? We don't properly compute the length of a load/store insn,
6518 taking into account the addressing mode. */
6519
6520 static int
6521 mep_issue_rate (void)
6522 {
6523 return TARGET_IVC2 ? 3 : 2;
6524 }
6525
6526 /* Return true if function DECL was declared with the vliw attribute. */
6527
6528 bool
6529 mep_vliw_function_p (tree decl)
6530 {
6531 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6532 }
6533
6534 static rtx_insn *
6535 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6536 int length)
6537 {
6538 int i;
6539
6540 for (i = nready - 1; i >= 0; --i)
6541 {
6542 rtx_insn *insn = ready[i];
6543 if (recog_memoized (insn) >= 0
6544 && get_attr_slot (insn) == slot
6545 && get_attr_length (insn) == length)
6546 return insn;
6547 }
6548
6549 return NULL;
6550 }
6551
6552 static void
6553 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6554 {
6555 int i;
6556
6557 for (i = 0; i < nready; ++i)
6558 if (ready[i] == insn)
6559 {
6560 for (; i < nready - 1; ++i)
6561 ready[i] = ready[i + 1];
6562 ready[i] = insn;
6563 return;
6564 }
6565
6566 gcc_unreachable ();
6567 }
6568
/* Dump a one-line description of INSN (its code, UID, pattern name
   and slot assignment) to DUMP, for scheduler debugging output.  */

static void
mep_print_sched_insn (FILE *dump, rtx_insn *insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  /* Only SETs and PARALLELs have a meaningful slots attribute.  */
  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unrecognized value: print it numerically.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6612
/* Scheduler reorder hook: in VLIW functions, try to bring a packable
   core/coprocessor insn pair to the head of the ready list so they
   can issue together.  Returns the number of insns to issue this
   cycle.

   NOTE(review): DUMP and SCHED_VERBOSE are marked ATTRIBUTE_UNUSED
   but are read below -- the attributes are stale.  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx_insn *core_insn, *cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Pairing only applies to VLIW functions with at least two
     candidates ready.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6661
6662 /* Return true if X contains a register that is set by insn PREV. */
6663
6664 static bool
6665 mep_store_find_set (const_rtx x, const rtx_insn *prev)
6666 {
6667 subrtx_iterator::array_type array;
6668 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6669 if (REG_P (x) && reg_set_p (x, prev))
6670 return true;
6671 return false;
6672 }
6673
/* Like mep_store_bypass_p, but takes a pattern as the second argument,
   not the containing insn.  Returns true if PAT is a store whose
   address has no true dependence on PREV.  */

static bool
mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* NOTE(review): the PARALLEL elements here are patterns, not
	 insns, so the as_a <rtx_insn *> cast looks dubious -- confirm
	 against the rtx class hierarchy before relying on it.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev,
				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      /* Operand 0 is the data being stored, so skip it: only the
	 address operands (1..N) matter for the bypass.  */
      src = SET_SRC (pat);
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (mep_store_find_set (XVECEXP (src, 0, i), prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !mep_store_find_set (SET_DEST (pat), prev);
}
6718
6719 /* Return true if INSN is a store instruction and if the store address
6720 has no true dependence on PREV. */
6721
6722 bool
6723 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6724 {
6725 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6726 }
6727
6728 /* Return true if, apart from HI/LO, there are no true dependencies
6729 between multiplication instructions PREV and INSN. */
6730
6731 bool
6732 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6733 {
6734 rtx pat;
6735
6736 pat = PATTERN (insn);
6737 if (GET_CODE (pat) == PARALLEL)
6738 pat = XVECEXP (pat, 0, 0);
6739 if (GET_CODE (pat) != SET)
6740 return false;
6741 subrtx_iterator::array_type array;
6742 FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6743 {
6744 const_rtx x = *iter;
6745 if (REG_P (x)
6746 && REGNO (x) != LO_REGNO
6747 && REGNO (x) != HI_REGNO
6748 && reg_set_p (x, prev))
6749 return false;
6750 }
6751 return true;
6752 }
6753
6754 /* Return true if INSN is an ldc instruction that issues to the
6755 MeP-h1 integer pipeline. This is true for instructions that
6756 read from PSW, LP, SAR, HI and LO. */
6757
6758 bool
6759 mep_ipipe_ldc_p (rtx_insn *insn)
6760 {
6761 rtx pat, src;
6762
6763 pat = PATTERN (insn);
6764
6765 /* Cope with instrinsics that set both a hard register and its shadow.
6766 The set of the hard register comes first. */
6767 if (GET_CODE (pat) == PARALLEL)
6768 pat = XVECEXP (pat, 0, 0);
6769
6770 if (GET_CODE (pat) == SET)
6771 {
6772 src = SET_SRC (pat);
6773
6774 /* Cope with intrinsics. The first operand to the unspec is
6775 the source register. */
6776 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6777 src = XVECEXP (src, 0, 0);
6778
6779 if (REG_P (src))
6780 switch (REGNO (src))
6781 {
6782 case PSW_REGNO:
6783 case LP_REGNO:
6784 case SAR_REGNO:
6785 case HI_REGNO:
6786 case LO_REGNO:
6787 return true;
6788 }
6789 }
6790 return false;
6791 }
6792
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The two inner
     insns are chained to each other and spliced between the SEQUENCE's
     own neighbors, mirroring how final expects bundle members to be
     linked.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  The insn mode
     is reused as the bundling flag here: BImode marks a bundled COP.  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  Prefer the core insn's location when
     it has one, otherwise fall back to COP's.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6840
6841 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6842
6843 static void
6844 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6845 {
6846 rtx * pinsn = (rtx *) data;
6847
6848 if (*pinsn && reg_mentioned_p (x, *pinsn))
6849 *pinsn = NULL_RTX;
6850 }
6851
6852 /* Return true if anything in insn X is (anti,output,true) dependent on
6853 anything in insn Y. */
6854
6855 static int
6856 mep_insn_dependent_p (rtx x, rtx y)
6857 {
6858 rtx tmp;
6859
6860 gcc_assert (INSN_P (x));
6861 gcc_assert (INSN_P (y));
6862
6863 tmp = PATTERN (y);
6864 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6865 if (tmp == NULL_RTX)
6866 return 1;
6867
6868 tmp = PATTERN (x);
6869 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6870 if (tmp == NULL_RTX)
6871 return 1;
6872
6873 return 0;
6874 }
6875
6876 static int
6877 core_insn_p (rtx_insn *insn)
6878 {
6879 if (GET_CODE (PATTERN (insn)) == USE)
6880 return 0;
6881 if (get_attr_slot (insn) == SLOT_CORE)
6882 return 1;
6883 return 0;
6884 }
6885
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks the start of a scheduler-formed bundle.  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, hoisting each NOTE
	     found in between to just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Jumps, labels etc. terminate any bundle in progress.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the rest of this bundle (VOIDmode followers)
		 looking for a core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop exits before testing the final member.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  /* The new bundle head takes over the TImode mark.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn that cannot pair with LAST (no core partner,
	     lengths don't fill the VLIW word, or a data dependence)
	     must be padded with an explicit core nop.  */
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  /* Fills the whole 64-bit word; nothing to add.  */
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    /* LAST is a suitable core partner; bundle them.  */
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7079
7080
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* NOTE(review): assumes no intrinsic pattern has more than 10
     operands — confirm against the generated cgen tables.  */
  rtx newop[10];
  int i;

  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  /* Convert each operand to the mode the pattern wants and reject
     the whole expansion if any predicate fails.  */
  for (i = 0; i < idata->n_operands; i++)
    {
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* The generator takes a fixed maximum number of arguments; unused
     trailing slots are simply ignored by shorter patterns.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
7110
7111
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   This is a stub: no unary intrinsic expansions are implemented,
   so synthesis always fails and the caller falls back.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7123
7124
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   This is a stub: no binary intrinsic expansions are implemented,
   so synthesis always fails and the caller falls back.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7141
7142 static bool
7143 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7144 int opno ATTRIBUTE_UNUSED, int *total,
7145 bool ATTRIBUTE_UNUSED speed_t)
7146 {
7147 switch (code)
7148 {
7149 case CONST_INT:
7150 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7151 *total = 0;
7152 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7153 *total = 1;
7154 else
7155 *total = 3;
7156 return true;
7157
7158 case SYMBOL_REF:
7159 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7160 return true;
7161
7162 case MULT:
7163 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7164 ? COSTS_N_INSNS (3)
7165 : COSTS_N_INSNS (2));
7166 return true;
7167 }
7168 return false;
7169 }
7170
/* Implement TARGET_ADDRESS_COST.  All legitimate addresses are
   costed equally on MeP, so return a flat cost of 1.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7179
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific
   output sections: "based"/"tiny"/"small" data near the tp/gp
   pointers, "far" data beyond 24-bit reach, and the VLIW and core
   text variants.  */

static void
mep_asm_init_sections (void)
{
  /* Writable data addressed relative to the $tp base register.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Zero-initialized small data (BSS counterpart of .sdata).  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  /* Initialized small data, reachable with short displacements.  */
  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* Data placed out of range of the normal addressing modes.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  /* Zero-initialized counterpart of .far.  */
  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only data variants of the far/small sections.  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW-mode code; the trailing .vliw directive switches the
     assembler into VLIW encoding.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  /* Far VLIW-mode code.  */
  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Far core-mode code; .core switches back to core encoding.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7224 \f
/* Initialize the GCC target structure.  */

/* Assembly output and attribute handling.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p
/* Section placement hooks (based/tiny/far sections, VLIW text).  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section
/* Intrinsic builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin
/* Scheduling (VLIW bundling support).  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info
/* Calls and costs.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
/* Argument passing and varargs.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
/* va_list handling.  */
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
/* Miscellaneous.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P		can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated from this file.  */
#include "gt-mep.h"