]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/mep/mep.c
6ce6c530077a911ca76c5251d92870dd3e96ae82
[thirdparty/gcc.git] / gcc / config / mep / mep.c
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "varasm.h"
28 #include "calls.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "obstack.h"
41 #include "tree.h"
42 #include "expr.h"
43 #include "except.h"
44 #include "function.h"
45 #include "optabs.h"
46 #include "reload.h"
47 #include "tm_p.h"
48 #include "ggc.h"
49 #include "diagnostic-core.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "langhooks.h"
53 #include "df.h"
54 #include "gimple.h"
55 #include "gimplify.h"
56 #include "opts.h"
57 #include "dumpfile.h"
58
59 /* Structure of this file:
60
61 + Command Line Option Support
62 + Pattern support - constraints, predicates, expanders
63 + Reload Support
64 + Costs
65 + Functions to save and restore machine-specific function data.
66 + Frame/Epilog/Prolog Related
67 + Operand Printing
68 + Function args in registers
69 + Handle pipeline hazards
70 + Handle attributes
71 + Trampolines
72 + Machine-dependent Reorg
73 + Builtins. */
74
75 /* Symbol encodings:
76
77 Symbols are encoded as @ <char> . <name> where <char> is one of these:
78
79 b - based
80 t - tiny
81 n - near
82 f - far
83 i - io, near
84 I - io, far
85 c - cb (control bus) */
86
87 struct GTY(()) machine_function
88 {
89 int mep_frame_pointer_needed;
90
91 /* For varargs. */
92 int arg_regs_to_save;
93 int regsave_filler;
94 int frame_filler;
95 int frame_locked;
96
97 /* Records __builtin_return address. */
98 rtx eh_stack_adjust;
99
100 int reg_save_size;
101 int reg_save_slot[FIRST_PSEUDO_REGISTER];
102 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
103
104 /* 2 if the current function has an interrupt attribute, 1 if not, 0
105 if unknown. This is here because resource.c uses EPILOGUE_USES
106 which needs it. */
107 int interrupt_handler;
108
109 /* Likewise, for disinterrupt attribute. */
110 int disable_interrupts;
111
112 /* Number of doloop tags used so far. */
113 int doloop_tags;
114
115 /* True if the last tag was allocated to a doloop_end. */
116 bool doloop_tag_from_end;
117
118 /* True if reload changes $TP. */
119 bool reload_changes_tp;
120
121 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
122 We only set this if the function is an interrupt handler. */
123 int asms_without_operands;
124 };
125
126 #define MEP_CONTROL_REG(x) \
127 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
128
129 static GTY(()) section * based_section;
130 static GTY(()) section * tinybss_section;
131 static GTY(()) section * far_section;
132 static GTY(()) section * farbss_section;
133 static GTY(()) section * frodata_section;
134 static GTY(()) section * srodata_section;
135
136 static GTY(()) section * vtext_section;
137 static GTY(()) section * vftext_section;
138 static GTY(()) section * ftext_section;
139
140 static void mep_set_leaf_registers (int);
141 static bool symbol_p (rtx);
142 static bool symbolref_p (rtx);
143 static void encode_pattern_1 (rtx);
144 static void encode_pattern (rtx);
145 static bool const_in_range (rtx, int, int);
146 static void mep_rewrite_mult (rtx, rtx);
147 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
148 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
149 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
150 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
151 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
152 static bool mep_nongeneral_reg (rtx);
153 static bool mep_general_copro_reg (rtx);
154 static bool mep_nonregister (rtx);
155 static struct machine_function* mep_init_machine_status (void);
156 static rtx mep_tp_rtx (void);
157 static rtx mep_gp_rtx (void);
158 static bool mep_interrupt_p (void);
159 static bool mep_disinterrupt_p (void);
160 static bool mep_reg_set_p (rtx, rtx);
161 static bool mep_reg_set_in_function (int);
162 static bool mep_interrupt_saved_reg (int);
163 static bool mep_call_saves_register (int);
164 static rtx F (rtx);
165 static void add_constant (int, int, int, int);
166 static rtx maybe_dead_move (rtx, rtx, bool);
167 static void mep_reload_pointer (int, const char *);
168 static void mep_start_function (FILE *, HOST_WIDE_INT);
169 static bool mep_function_ok_for_sibcall (tree, tree);
170 static int unique_bit_in (HOST_WIDE_INT);
171 static int bit_size_for_clip (HOST_WIDE_INT);
172 static int bytesize (const_tree, enum machine_mode);
173 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
174 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
175 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
176 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
177 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
178 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
179 static bool mep_function_attribute_inlinable_p (const_tree);
180 static bool mep_can_inline_p (tree, tree);
181 static bool mep_lookup_pragma_disinterrupt (const char *);
182 static int mep_multiple_address_regions (tree, bool);
183 static int mep_attrlist_to_encoding (tree, tree);
184 static void mep_insert_attributes (tree, tree *);
185 static void mep_encode_section_info (tree, rtx, int);
186 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
187 static void mep_unique_section (tree, int);
188 static unsigned int mep_section_type_flags (tree, const char *, int);
189 static void mep_asm_named_section (const char *, unsigned int, tree);
190 static bool mep_mentioned_p (rtx, rtx, int);
191 static void mep_reorg_regmove (rtx);
192 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
193 static void mep_reorg_repeat (rtx);
194 static bool mep_invertable_branch_p (rtx);
195 static void mep_invert_branch (rtx, rtx);
196 static void mep_reorg_erepeat (rtx);
197 static void mep_jmp_return_reorg (rtx);
198 static void mep_reorg_addcombine (rtx);
199 static void mep_reorg (void);
200 static void mep_init_intrinsics (void);
201 static void mep_init_builtins (void);
202 static void mep_intrinsic_unavailable (int);
203 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
204 static bool mep_get_move_insn (int, const struct cgen_insn **);
205 static rtx mep_convert_arg (enum machine_mode, rtx);
206 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
207 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
208 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
209 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
210 static int mep_adjust_cost (rtx, rtx, rtx, int);
211 static int mep_issue_rate (void);
212 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
213 static void mep_move_ready_insn (rtx *, int, rtx);
214 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
215 static rtx mep_make_bundle (rtx, rtx);
216 static void mep_bundle_insns (rtx);
217 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
218 static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
219 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
220 tree, int *, int);
221 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
222 const_tree, bool);
223 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
224 const_tree, bool);
225 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
226 const_tree, bool);
227 static bool mep_vector_mode_supported_p (enum machine_mode);
228 static rtx mep_allocate_initial_value (rtx);
229 static void mep_asm_init_sections (void);
230 static int mep_comp_type_attributes (const_tree, const_tree);
231 static bool mep_narrow_volatile_bitfield (void);
232 static rtx mep_expand_builtin_saveregs (void);
233 static tree mep_build_builtin_va_list (void);
234 static void mep_expand_va_start (tree, rtx);
235 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
236 static bool mep_can_eliminate (const int, const int);
237 static void mep_conditional_register_usage (void);
238 static void mep_trampoline_init (rtx, tree, rtx);
239 \f
240 #define WANT_GCC_DEFINITIONS
241 #include "mep-intrin.h"
242 #undef WANT_GCC_DEFINITIONS
243
244 \f
245 /* Command Line Option Support. */
246
247 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
248
249 /* True if we can use cmov instructions to move values back and forth
250 between core and coprocessor registers. */
251 bool mep_have_core_copro_moves_p;
252
253 /* True if we can use cmov instructions (or a work-alike) to move
254 values between coprocessor registers. */
255 bool mep_have_copro_copro_moves_p;
256
257 /* A table of all coprocessor instructions that can act like
258 a coprocessor-to-coprocessor cmov. */
259 static const int mep_cmov_insns[] = {
260 mep_cmov,
261 mep_cpmov,
262 mep_fmovs,
263 mep_caddi3,
264 mep_csubi3,
265 mep_candi3,
266 mep_cori3,
267 mep_cxori3,
268 mep_cand3,
269 mep_cor3
270 };
271
272 \f
273 static void
274 mep_set_leaf_registers (int enable)
275 {
276 int i;
277
278 if (mep_leaf_registers[0] != enable)
279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
280 mep_leaf_registers[i] = enable;
281 }
282
283 static void
284 mep_conditional_register_usage (void)
285 {
286 int i;
287
288 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
289 {
290 fixed_regs[HI_REGNO] = 1;
291 fixed_regs[LO_REGNO] = 1;
292 call_used_regs[HI_REGNO] = 1;
293 call_used_regs[LO_REGNO] = 1;
294 }
295
296 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
297 global_regs[i] = 1;
298 }
299
/* Implement TARGET_OPTION_OVERRIDE.  Process deferred command-line
   options, diagnose incompatible option combinations, and establish
   derived defaults before compilation proper starts.  */

static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  /* Replay options whose handling was deferred; currently only
     -mivc2 is deferred.  */
  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* Unfix the 32 IVC2 coprocessor registers (hard regs
	       48..79) and make them call-used, except regs 54-55
	       which stay call-saved.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Give the IVC2 control registers their architectural
	       names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* Diagnose unsupported and mutually-exclusive options.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Derive the tiny-data cutoff from the memory-model options unless
     the user gave -mtiny= explicitly.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  /* With 64-bit coprocessor registers, splitting wide types would
     lose the ability to keep DImode values in them.  */
  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
393
394 /* Pattern Support - constraints, predicates, expanders. */
395
396 /* MEP has very few instructions that can refer to the span of
397 addresses used by symbols, so it's common to check for them. */
398
399 static bool
400 symbol_p (rtx x)
401 {
402 int c = GET_CODE (x);
403
404 return (c == CONST_INT
405 || c == CONST
406 || c == SYMBOL_REF);
407 }
408
409 static bool
410 symbolref_p (rtx x)
411 {
412 int c;
413
414 if (GET_CODE (x) != MEM)
415 return false;
416
417 c = GET_CODE (XEXP (x, 0));
418 return (c == CONST_INT
419 || c == CONST
420 || c == SYMBOL_REF);
421 }
422
423 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
424
425 #define GEN_REG(R, STRICT) \
426 (GR_REGNO_P (R) \
427 || (!STRICT \
428 && ((R) == ARG_POINTER_REGNUM \
429 || (R) >= FIRST_PSEUDO_REGISTER)))
430
431 static char pattern[12], *patternp;
432 static GTY(()) rtx patternr[12];
433 #define RTX_IS(x) (strcmp (pattern, x) == 0)
434
/* Append a one-character-per-code encoding of X to the static
   "pattern" buffer, recursing into operands, and record each rtx
   visited in PATTERNR at the matching index so that callers matching
   with RTX_IS can retrieve the sub-rtxes.  Codes: r=REG, m=MEM,
   +=PLUS, L=LO_SUM, H=HIGH, s=SYMBOL_REF, l=LABEL_REF, i=integer
   constant, uN=UNSPEC number N, U=USE, ?=anything else.  */

static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; on overflow replace the last
     encoded character with '?' and stop recursing.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM also encodes its address operand.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Encode the UNSPEC number as a digit, then each element.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
501
/* Encode X into the static "pattern" buffer (NUL-terminated) and
   fill PATTERNR with the corresponding rtxes; test the result with
   the RTX_IS macro.  */

static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
509
/* Return the section tag character encoded in the symbol underlying
   X (see the "Symbol encodings" table above), looking through MEMs,
   CONSTs, UNSPECs, and reg+const PLUS expressions.  Return 0 if no
   tag is present.  The io variants 'i'/'I' are folded into the plain
   near/far tags 'n'/'f'.  */

int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers until we reach the core of the address.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only look through symbol+constant offsets.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* io, far */
	  return 'n'; /* io, near */
	}
      return name[1];
    }
  return 0;
}
551
/* Implement REGNO_REG_CLASS: return the smallest register class
   containing hard register REGNO.  */

int
mep_regno_reg_class (int regno)
{
  /* Single-register classes first.  */
  switch (regno)
    {
    case SP_REGNO: return SP_REGS;
    case TP_REGNO: return TP_REGS;
    case GP_REGNO: return GP_REGS;
    case 0: return R0_REGS;
    case HI_REGNO: return HI_REGS;
    case LO_REGNO: return LO_REGS;
    case ARG_POINTER_REGNUM: return GENERAL_REGS;
    }

  /* The first eight core registers are addressable by the short
     $tp-relative forms.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* Return USERi only when none of its subclasses also
		 contains REGNO; otherwise a smaller class exists.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should be left; they are never
     allocated.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
601
602 static bool
603 const_in_range (rtx x, int minv, int maxv)
604 {
605 return (GET_CODE (x) == CONST_INT
606 && INTVAL (x) >= minv
607 && INTVAL (x) <= maxv);
608 }
609
610 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
611 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
612 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
613 at the end of the insn stream. */
614
615 rtx
616 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
617 {
618 if (rtx_equal_p (dest, src1))
619 return src2;
620 else if (rtx_equal_p (dest, src2))
621 return src1;
622 else
623 {
624 if (insn == 0)
625 emit_insn (gen_movsi (copy_rtx (dest), src1));
626 else
627 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
628 return src2;
629 }
630 }
631
632 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
633 Change the last element of PATTERN from (clobber (scratch:SI))
634 to (clobber (reg:SI HI_REGNO)). */
635
636 static void
637 mep_rewrite_mult (rtx insn, rtx pattern)
638 {
639 rtx hi_clobber;
640
641 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
642 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
643 PATTERN (insn) = pattern;
644 INSN_CODE (insn) = -1;
645 }
646
647 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
648 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
649 store the result in DEST if nonnull. */
650
651 static void
652 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
653 {
654 rtx lo, pattern;
655
656 lo = gen_rtx_REG (SImode, LO_REGNO);
657 if (dest)
658 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
659 mep_mulr_source (insn, dest, src1, src2));
660 else
661 pattern = gen_mulsi3_lo (lo, src1, src2);
662 mep_rewrite_mult (insn, pattern);
663 }
664
665 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
666 SRC3 into $lo, then use either madd or maddr. The move into $lo will
667 be deleted by a peephole2 if SRC3 is already in $lo. */
668
669 static void
670 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
671 {
672 rtx lo, pattern;
673
674 lo = gen_rtx_REG (SImode, LO_REGNO);
675 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
676 if (dest)
677 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
678 mep_mulr_source (insn, dest, src1, src2),
679 copy_rtx (lo));
680 else
681 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
682 mep_rewrite_mult (insn, pattern);
683 }
684
685 /* Return true if $lo has the same value as integer register GPR when
686 instruction INSN is reached. If necessary, rewrite the instruction
687 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
688 rtx for (reg:SI LO_REGNO).
689
690 This function is intended to be used by the peephole2 pass. Since
691 that pass goes from the end of a basic block to the beginning, and
692 propagates liveness information on the way, there is no need to
693 update register notes here.
694
695 If GPR_DEAD_P is true on entry, and this function returns true,
696 then the caller will replace _every_ use of GPR in and after INSN
697 with LO. This means that if the instruction that sets $lo is a
698 mulr- or maddr-type instruction, we can rewrite it to use mul or
699 madd instead. In combination with the copy progagation pass,
700 this allows us to replace sequences like:
701
702 mov GPR,R1
703 mulr GPR,R2
704
705 with:
706
707 mul R1,R2
708
709 if GPR is no longer used. */
710
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the head of the basic block, looking
     for the instruction that computed the current value of $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR was last set by this multiply: rewrite it to
		   put the product in $lo as well (or only in $lo if
		   GPR is dead).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Same idea for multiply-add.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store into $lo; $lo equals GPR exactly
	       when GPR is the insn's register destination.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other redefinition of $lo or GPR, or a volatile
	       insn, invalidates the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between here and the caller's INSN means
	       GPR's value is still needed.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
764
765 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
766
767 bool
768 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
769 {
770 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
771 extract_insn (insn);
772 return result;
773 }
774
775 /* Return true if SET can be turned into a post-modify load or store
776 that adds OFFSET to GPR. In other words, return true if SET can be
777 changed into:
778
779 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
780
781 It's OK to change SET to an equivalent operation in order to
782 make it match. */
783
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign extension of the loaded value.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a MEM whose
     address is exactly GPR.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned: it must be a multiple of
     the access size.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  Copy first so we do not mutate a shared
     rtx in place.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
845
846 /* Return the effect of frame-related instruction INSN. */
847
848 static rtx
849 mep_frame_expr (rtx insn)
850 {
851 rtx note, expr;
852
853 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
854 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
855 RTX_FRAME_RELATED_P (expr) = 1;
856 return expr;
857 }
858
859 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
860 new pattern in INSN1; INSN2 will be deleted by the caller. */
861
862 static void
863 mep_make_parallel (rtx insn1, rtx insn2)
864 {
865 rtx expr;
866
867 if (RTX_FRAME_RELATED_P (insn2))
868 {
869 expr = mep_frame_expr (insn2);
870 if (RTX_FRAME_RELATED_P (insn1))
871 expr = gen_rtx_SEQUENCE (VOIDmode,
872 gen_rtvec (2, mep_frame_expr (insn1), expr));
873 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
874 RTX_FRAME_RELATED_P (insn1) = 1;
875 }
876
877 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
878 gen_rtvec (2, PATTERN (insn1),
879 PATTERN (insn2)));
880 INSN_CODE (insn1) = -1;
881 }
882
883 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
884 the basic block to see if any previous load or store instruction can
885 be persuaded to do SET_INSN as a side-effect. Return true if so. */
886
887 static bool
888 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
889 {
890 rtx insn;
891
892 insn = set_insn;
893 do
894 {
895 insn = PREV_INSN (insn);
896 if (INSN_P (insn))
897 {
898 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
899 {
900 mep_make_parallel (insn, set_insn);
901 return true;
902 }
903
904 if (reg_set_p (reg, insn)
905 || reg_referenced_p (reg, PATTERN (insn))
906 || volatile_insn_p (PATTERN (insn)))
907 return false;
908 }
909 }
910 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
911 return false;
912 }
913
914 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
915
916 bool
917 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
918 {
919 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
920 extract_insn (insn);
921 return result;
922 }
923
924 bool
925 mep_allow_clip (rtx ux, rtx lx, int s)
926 {
927 HOST_WIDE_INT u = INTVAL (ux);
928 HOST_WIDE_INT l = INTVAL (lx);
929 int i;
930
931 if (!TARGET_OPT_CLIP)
932 return false;
933
934 if (s)
935 {
936 for (i = 0; i < 30; i ++)
937 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
938 && (l == - ((HOST_WIDE_INT) 1 << i)))
939 return true;
940 }
941 else
942 {
943 if (l != 0)
944 return false;
945
946 for (i = 0; i < 30; i ++)
947 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
948 return true;
949 }
950 return false;
951 }
952
953 bool
954 mep_bit_position_p (rtx x, bool looking_for)
955 {
956 if (GET_CODE (x) != CONST_INT)
957 return false;
958 switch ((int) INTVAL(x) & 0xff)
959 {
960 case 0x01: case 0x02: case 0x04: case 0x08:
961 case 0x10: case 0x20: case 0x40: case 0x80:
962 return looking_for;
963 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
964 case 0xef: case 0xdf: case 0xbf: case 0x7f:
965 return !looking_for;
966 }
967 return false;
968 }
969
970 static bool
971 move_needs_splitting (rtx dest, rtx src,
972 enum machine_mode mode ATTRIBUTE_UNUSED)
973 {
974 int s = mep_section_tag (src);
975
976 while (1)
977 {
978 if (GET_CODE (src) == CONST
979 || GET_CODE (src) == MEM)
980 src = XEXP (src, 0);
981 else if (GET_CODE (src) == SYMBOL_REF
982 || GET_CODE (src) == LABEL_REF
983 || GET_CODE (src) == PLUS)
984 break;
985 else
986 return false;
987 }
988 if (s == 'f'
989 || (GET_CODE (src) == PLUS
990 && GET_CODE (XEXP (src, 1)) == CONST_INT
991 && (INTVAL (XEXP (src, 1)) < -65536
992 || INTVAL (XEXP (src, 1)) > 0xffffff))
993 || (GET_CODE (dest) == REG
994 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
995 return true;
996 return false;
997 }
998
999 bool
1000 mep_split_mov (rtx *operands, int symbolic)
1001 {
1002 if (symbolic)
1003 {
1004 if (move_needs_splitting (operands[0], operands[1], SImode))
1005 return true;
1006 return false;
1007 }
1008
1009 if (GET_CODE (operands[1]) != CONST_INT)
1010 return false;
1011
1012 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1013 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1014 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1015 return false;
1016
1017 if (((!reload_completed && !reload_in_progress)
1018 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1019 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1020 return false;
1021
1022 return true;
1023 }
1024
1025 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1026 it to one specific value. So the insn chosen depends on whether
1027 the source and destination modes match. */
1028
1029 bool
1030 mep_vliw_mode_match (rtx tgt)
1031 {
1032 bool src_vliw = mep_vliw_function_p (cfun->decl);
1033 bool tgt_vliw = INTVAL (tgt);
1034
1035 return src_vliw == tgt_vliw;
1036 }
1037
1038 /* Like the above, but also test for near/far mismatches. */
1039
1040 bool
1041 mep_vliw_jmp_match (rtx tgt)
1042 {
1043 bool src_vliw = mep_vliw_function_p (cfun->decl);
1044 bool tgt_vliw = INTVAL (tgt);
1045
1046 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1047 return false;
1048
1049 return src_vliw == tgt_vliw;
1050 }
1051
1052 bool
1053 mep_multi_slot (rtx x)
1054 {
1055 return get_attr_slot (x) == SLOT_MULTI;
1056 }
1057
1058 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1059
1060 static bool
1061 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1062 {
1063 /* We can't convert symbol values to gp- or tp-rel values after
1064 reload, as reload might have used $gp or $tp for other
1065 purposes. */
1066 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1067 {
1068 char e = mep_section_tag (x);
1069 return (e != 't' && e != 'b');
1070 }
1071 return 1;
1072 }
1073
1074 /* Be careful not to use macros that need to be compiled one way for
1075 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1076
/* Worker for TARGET_LEGITIMATE_ADDRESS_P: return true if X is a
   valid address for MODE.  STRICT is nonzero after reload, when only
   hard registers may serve as base registers.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- a LO_SUM of a base register and a constant.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* [reg] -- a plain base register.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* [reg+const] -- base plus a 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* [reg+unspec] -- base plus a (possibly offset) UNSPEC wrapped in
     a CONST, as produced for gp/tp-relative references.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* VOIDmode is used for call addresses; a bare symbol is fine
     there.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Absolute constant addresses for word-sized accesses, excluding
     tiny/based symbols; integer addresses must be word-aligned and
     within the low 20-bit region.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1184
/* Fix up address *X for reload.  Returns nonzero (having pushed the
   required reloads) if the address was handled here, zero to let
   generic reload code deal with it.  TYPE_I is the reload_type passed
   through as an int.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-rel ('b') and gp-rel ('t') symbols are left for the
	 base-register forms; everything else is reloaded here.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1226
/* Return the encoded length in bytes (2 or 4) of the core memory
   access in INSN, whose MEM is operand OPN of its single_set.  Short
   (2-byte) encodings exist for register-indirect accesses and for
   certain SP- and TP-relative forms.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The non-memory side of the set.  */
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* SP-relative short form: word access with an imm7a4
	     (7-bit, 4-aligned -- see predicate) displacement.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative encodings only reach registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* %tprel-annotated addend.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* Small non-negative displacement, naturally aligned for
	     the access size.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  /* Everything else needs the long (4-byte) encoding.  */
  return 4;
}
1273
1274 int
1275 mep_cop_address_length (rtx insn, int opn)
1276 {
1277 rtx set = single_set (insn);
1278 rtx mem = XEXP (set, opn);
1279 rtx addr = XEXP (mem, 0);
1280
1281 if (GET_CODE (mem) != MEM)
1282 return 2;
1283 if (register_operand (addr, Pmode))
1284 return 2;
1285 if (GET_CODE (addr) == POST_INC)
1286 return 2;
1287
1288 return 4;
1289 }
1290
#define DEBUG_EXPAND_MOV 0

/* Expander for the mov patterns.  Rewrites tp-rel ('b') and gp-rel
   ('t') symbol references into $tp/$gp base-register form, forces
   operands into registers where the hardware requires it, and builds
   two-part address sequences for far ('f') symbols.  Returns true if
   the move was fully emitted here, false if the caller should emit
   the (possibly adjusted) move itself.  */

bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;	/* Nonzero: may no longer rely on $gp/$tp.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, a move where neither side is a (sub)register must
     go through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Wide moves are handled by their own patterns.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* If the incoming $gp/$tp values are no longer live in their
	 hard registers, we cannot use the base-relative forms; fall
	 back to full symbolic addressing (post_reload).  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* A bare tp/gp-rel symbol source: rewrite it as
	 base-register + (const (unspec ...)) and emit the set.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus sym const_int)): keep the integer
		     offset outside the UNSPEC.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already in UNSPEC form; nothing to do here.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Rewrite any tp/gp-rel MEM operands to base-register + UNSPEC
	 addresses; the move itself is then emitted by the caller.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers only move to/from general registers; force any
     other source through a temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* A store to a far symbol, or a non-word-sized symbolic MEM store:
     the address must be materialized in a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload, tp/gp-rel sources were already handled above, so
     drop the tag to keep them out of the symbolic cases below.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a symbol's address with a topsym/botsym (two-half) pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load through a symbolic MEM: build the address with the
     topsym/botsym pair, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During/after reload we may not create pseudos, so reuse the
	 destination as the scratch.  */
      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1483
/* Cases where the pattern can't be made to use at all.  */

/* Return true if the move described by OPERANDS may be matched by the
   mov patterns; false if it must be rejected (forcing it through
   mep_expand_mov / other patterns instead).  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1550
#define DEBUG_SPLIT_WIDE_MOVE 0

/* Split a double-word move (MODE is DImode/DFmode) into two
   single-word moves.  On entry operands[0]/operands[1] are the
   destination and source; on exit operands[2]/operands[3] are the
   high halves and operands[4]/operands[5] the low halves, ordered so
   the low-part move may be emitted first, with the pairs swapped when
   needed to avoid clobbering an input before it is read.  */

void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low half is the
		   register itself in SImode, the high half a 32-bit
		   extract at bit 32 of the DImode register.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; endianness decides which regno
		   holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1632
1633 /* Emit a setcc instruction in its entirity. */
1634
1635 static bool
1636 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1637 {
1638 rtx tmp;
1639
1640 switch (code)
1641 {
1642 case GT:
1643 case GTU:
1644 tmp = op1, op1 = op2, op2 = tmp;
1645 code = swap_condition (code);
1646 /* FALLTHRU */
1647
1648 case LT:
1649 case LTU:
1650 op1 = force_reg (SImode, op1);
1651 emit_insn (gen_rtx_SET (VOIDmode, dest,
1652 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1653 return true;
1654
1655 case EQ:
1656 if (op2 != const0_rtx)
1657 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1658 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1659 return true;
1660
1661 case NE:
1662 /* Branchful sequence:
1663 mov dest, 0 16-bit
1664 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1665 mov dest, 1 16-bit
1666
1667 Branchless sequence:
1668 add3 tmp, op1, -op2 32-bit (or mov + sub)
1669 sltu3 tmp, tmp, 1 16-bit
1670 xor3 dest, tmp, 1 32-bit
1671 */
1672 if (optimize_size && op2 != const0_rtx)
1673 return false;
1674
1675 if (op2 != const0_rtx)
1676 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1677
1678 op2 = gen_reg_rtx (SImode);
1679 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1680
1681 emit_insn (gen_rtx_SET (VOIDmode, dest,
1682 gen_rtx_XOR (SImode, op2, const1_rtx)));
1683 return true;
1684
1685 case LE:
1686 if (GET_CODE (op2) != CONST_INT
1687 || INTVAL (op2) == 0x7ffffff)
1688 return false;
1689 op2 = GEN_INT (INTVAL (op2) + 1);
1690 return mep_expand_setcc_1 (LT, dest, op1, op2);
1691
1692 case LEU:
1693 if (GET_CODE (op2) != CONST_INT
1694 || INTVAL (op2) == -1)
1695 return false;
1696 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1697 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1698
1699 case GE:
1700 if (GET_CODE (op2) != CONST_INT
1701 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1702 return false;
1703 op2 = GEN_INT (INTVAL (op2) - 1);
1704 return mep_expand_setcc_1 (GT, dest, op1, op2);
1705
1706 case GEU:
1707 if (GET_CODE (op2) != CONST_INT
1708 || op2 == const0_rtx)
1709 return false;
1710 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1711 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1712
1713 default:
1714 gcc_unreachable ();
1715 }
1716 }
1717
1718 bool
1719 mep_expand_setcc (rtx *operands)
1720 {
1721 rtx dest = operands[0];
1722 enum rtx_code code = GET_CODE (operands[1]);
1723 rtx op0 = operands[2];
1724 rtx op1 = operands[3];
1725
1726 return mep_expand_setcc_1 (code, dest, op0, op1);
1727 }
1728
/* Rewrite the comparison (operands[0] operands[1] operands[2]) for a
   conditional branch into a form the beq/bne patterns can take,
   emitting setcc instructions for the conditions the hardware cannot
   branch on directly.  Returns the rewritten comparison rtx.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Small immediates can be branched on directly.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Otherwise compute (op0 < op1) and branch on the result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* (a >= b) == !(a < b): compute the LT and branch on zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* beq/bne take a register or a 4-bit immediate.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Constant bound: shift by one and retry as LT/GE, unless the
	 increment would overflow INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise compute (op1 < op0) and branch on it:
	 (a <= b) == !(b < a), (a > b) == (b < a).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* (a <u 1) == (a == 0).  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try (a <=u b) directly, else (b <u a) and branch inverted.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* (a >u b) directly, or as (b <u a).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try (a >=u b), else (a <u b) and branch inverted.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1841
1842 const char *
1843 mep_emit_cbranch (rtx *operands, int ne)
1844 {
1845 if (GET_CODE (operands[1]) == REG)
1846 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1847 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1848 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1849 else
1850 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1851 }
1852
/* Expand a call pattern.  RETURNS_VALUE is 1 for call_value (operand
   0 is the result, shifting the address and argument operands up one
   slot) and 0 for a plain call.  The call insns carry $tp and $gp so
   their incoming values stay live across the call.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  /* Addresses we cannot call directly go through a register.  */
  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1877 \f
1878 /* Aliasing Support. */
1879
/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Recognize only $tp- and $gp-based addresses, and only while the
     incoming base-register value is still tracked.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  /* The UNSPEC kind must match the base register.  */
  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol inside the UNSPEC is the base term.  */
  return XVECEXP (term, 0, 0);
}
1914 \f
1915 /* Reload Support. */
1916
1917 /* Return true if the registers in CLASS cannot represent the change from
1918 modes FROM to TO. */
1919
1920 bool
1921 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1922 enum reg_class regclass)
1923 {
1924 if (from == to)
1925 return false;
1926
1927 /* 64-bit COP regs must remain 64-bit COP regs. */
1928 if (TARGET_64BIT_CR_REGS
1929 && (regclass == CR_REGS
1930 || regclass == LOADABLE_CR_REGS)
1931 && (GET_MODE_SIZE (to) < 8
1932 || GET_MODE_SIZE (from) < 8))
1933 return true;
1934
1935 return false;
1936 }
1937
1938 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1939
1940 static bool
1941 mep_general_reg (rtx x)
1942 {
1943 while (GET_CODE (x) == SUBREG)
1944 x = XEXP (x, 0);
1945 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1946 }
1947
1948 static bool
1949 mep_nongeneral_reg (rtx x)
1950 {
1951 while (GET_CODE (x) == SUBREG)
1952 x = XEXP (x, 0);
1953 return (GET_CODE (x) == REG
1954 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1955 }
1956
1957 static bool
1958 mep_general_copro_reg (rtx x)
1959 {
1960 while (GET_CODE (x) == SUBREG)
1961 x = XEXP (x, 0);
1962 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1963 }
1964
1965 static bool
1966 mep_nonregister (rtx x)
1967 {
1968 while (GET_CODE (x) == SUBREG)
1969 x = XEXP (x, 0);
1970 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1971 }
1972
1973 #define DEBUG_RELOAD 0
1974
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  /* Constraint 'U' here is presumably memory directly addressable by
     coprocessor loads/stores -- confirm against the constraint
     definition if this is changed.  */
  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
2013
2014 /* Copying X to register in RCLASS. */
2015
2016 enum reg_class
2017 mep_secondary_input_reload_class (enum reg_class rclass,
2018 enum machine_mode mode ATTRIBUTE_UNUSED,
2019 rtx x)
2020 {
2021 int rv = NO_REGS;
2022
2023 #if DEBUG_RELOAD
2024 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2025 debug_rtx (x);
2026 #endif
2027
2028 if (reg_class_subset_p (rclass, CR_REGS))
2029 rv = mep_secondary_copro_reload_class (rclass, x);
2030 else if (MEP_NONGENERAL_CLASS (rclass)
2031 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2032 rv = GENERAL_REGS;
2033
2034 #if DEBUG_RELOAD
2035 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2036 #endif
2037 return (enum reg_class) rv;
2038 }
2039
2040 /* Copying register in RCLASS to X. */
2041
2042 enum reg_class
2043 mep_secondary_output_reload_class (enum reg_class rclass,
2044 enum machine_mode mode ATTRIBUTE_UNUSED,
2045 rtx x)
2046 {
2047 int rv = NO_REGS;
2048
2049 #if DEBUG_RELOAD
2050 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2051 debug_rtx (x);
2052 #endif
2053
2054 if (reg_class_subset_p (rclass, CR_REGS))
2055 rv = mep_secondary_copro_reload_class (rclass, x);
2056 else if (MEP_NONGENERAL_CLASS (rclass)
2057 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2058 rv = GENERAL_REGS;
2059
2060 #if DEBUG_RELOAD
2061 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2062 #endif
2063
2064 return (enum reg_class) rv;
2065 }
2066
2067 /* Implement SECONDARY_MEMORY_NEEDED. */
2068
2069 bool
2070 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2071 enum machine_mode mode ATTRIBUTE_UNUSED)
2072 {
2073 if (!mep_have_core_copro_moves_p)
2074 {
2075 if (reg_classes_intersect_p (rclass1, CR_REGS)
2076 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2077 return true;
2078 if (reg_classes_intersect_p (rclass2, CR_REGS)
2079 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2080 return true;
2081 if (!mep_have_copro_copro_moves_p
2082 && reg_classes_intersect_p (rclass1, CR_REGS)
2083 && reg_classes_intersect_p (rclass2, CR_REGS))
2084 return true;
2085 }
2086 return false;
2087 }
2088
/* Expand a secondary reload move with scratch register operands[2].
   WHICH encodes the operand kinds as two decimal digits -- tens digit
   for the destination, ones digit for the source; 2 = far symbol,
   1 = control (non-general) register, 0 = anything else.  (Note the
   case labels 00/01/02 are octal literals, but their values coincide
   with the intended decimal codes.)  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Materialize the far address in the scratch, then store
	 through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2139
/* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
   can be moved directly into registers 0 to 7, but not into the rest.
   If so, and if the required class includes registers 0 to 7, restrict
   it to those registers.  */

enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Integers in [0x10000, 0x1000000) whose low 16 bits are
	 nonzero cannot be built with a single mov/movh into an
	 arbitrary register; prefer TPREL_REGS (r0-r7) for them.  */
      if (INTVAL (x) >= 0x10000
	  && INTVAL (x) < 0x01000000
	  && (INTVAL (x) & 0xffff) != 0
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Non-far symbolic constants likewise prefer r0-r7.  */
      if (mep_section_tag (x) != 'f'
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
2171 \f
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Coprocessor-to-coprocessor, with direct copro moves available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Coprocessor-to-coprocessor without direct moves: twice as costly.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* One side is a coprocessor register.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything needing memory or two non-general classes is
     prohibitively expensive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2211
2212 \f
2213 /* Functions to save and restore machine-specific function data. */
2214
/* Implement the init_machine_status hook: allocate a zeroed,
   garbage-collected machine_function for the current function.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2220
/* Give the incoming ("initial") value of hard register REG a save
   slot in the frame, addressed relative to the arg pointer.  Returns
   the slot MEM, or NULL_RTX when no slot applies (pseudo, or $gp/$tp
   in an interrupt handler).  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a 4-byte slot the first time REG is asked for.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2248
2249 rtx
2250 mep_return_addr_rtx (int count)
2251 {
2252 if (count != 0)
2253 return const0_rtx;
2254
2255 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2256 }
2257
/* Return the pseudo holding the function's incoming $tp value.  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2263
/* Return the pseudo holding the function's incoming $gp value.  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2269
2270 static bool
2271 mep_interrupt_p (void)
2272 {
2273 if (cfun->machine->interrupt_handler == 0)
2274 {
2275 int interrupt_handler
2276 = (lookup_attribute ("interrupt",
2277 DECL_ATTRIBUTES (current_function_decl))
2278 != NULL_TREE);
2279 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2280 }
2281 return cfun->machine->interrupt_handler == 2;
2282 }
2283
2284 static bool
2285 mep_disinterrupt_p (void)
2286 {
2287 if (cfun->machine->disable_interrupts == 0)
2288 {
2289 int disable_interrupts
2290 = (lookup_attribute ("disinterrupt",
2291 DECL_ATTRIBUTES (current_function_decl))
2292 != NULL_TREE);
2293 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2294 }
2295 return cfun->machine->disable_interrupts == 2;
2296 }
2297
2298 \f
2299 /* Frame/Epilog/Prolog Related. */
2300
/* Return true if INSN sets REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied onto itself does not count as a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2320
2321
2322 #define MEP_SAVES_UNKNOWN 0
2323 #define MEP_SAVES_YES 1
2324 #define MEP_SAVES_MAYBE 2
2325 #define MEP_SAVES_NO 3
2326
/* Return true if hard register REGNO is (or must be assumed to be)
   set somewhere in the current function.  */

static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* In an interrupt handler, any live register counts as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code clobbers the link pointer.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every insn in the function for a set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2352
2353 static bool
2354 mep_asm_without_operands_p (void)
2355 {
2356 if (cfun->machine->asms_without_operands == 0)
2357 {
2358 rtx insn;
2359
2360 push_topmost_sequence ();
2361 insn = get_insns ();
2362 pop_topmost_sequence ();
2363
2364 cfun->machine->asms_without_operands = 1;
2365 while (insn)
2366 {
2367 if (INSN_P (insn)
2368 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2369 {
2370 cfun->machine->asms_without_operands = 2;
2371 break;
2372 }
2373 insn = NEXT_INSN (insn);
2374 }
2375
2376 }
2377 return cfun->machine->asms_without_operands == 2;
2378 }
2379
2380 /* Interrupt functions save/restore every call-preserved register, and
2381 any call-used register it uses (or all if it calls any function,
2382 since they may get clobbered there too). Here we check to see
2383 which call-used registers need saving. */
2384
/* On IVC2, these coprocessor control registers must also be preserved
   by interrupt handlers.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2389
/* Return true if register R must be saved by the current interrupt
   handler's prologue.  Returns false for non-interrupt functions.
   The order of the tests matters: the leaf/never-live early-out must
   come after all the "always save" cases above it.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue themselves clobber the save-control temp
     register(s), so they always need a slot.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A bare asm may touch anything; save every register it could use.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler a call-used register that is never live need
     not be saved.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2423
/* Return true if register R needs a stack save slot in the current
   function.  Until the frame layout is locked the decision is
   recomputed and cached in cfun->machine->reg_saved[] (MEP_SAVES_*);
   once frame_locked is set the cached answer is authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A slot already assigned forces a save.  */
      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      /* Profiling code implicitly clobbers $lp (see mep_epilogue_uses).  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      /* The usual rule: callee-saved (or $lp) and actually live.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2449
2450 /* Return true if epilogue uses register REGNO. */
2451
2452 bool
2453 mep_epilogue_uses (int regno)
2454 {
2455 /* Since $lp is a call-saved register, the generic code will normally
2456 mark it used in the epilogue if it needs to be saved and restored.
2457 However, when profiling is enabled, the profiling code will implicitly
2458 clobber $11. This case has to be handled specially both here and in
2459 mep_call_saves_register. */
2460 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2461 return true;
2462 /* Interrupt functions save/restore pretty much everything. */
2463 return (reload_completed && mep_interrupt_saved_reg (regno));
2464 }
2465
2466 static int
2467 mep_reg_size (int regno)
2468 {
2469 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2470 return 8;
2471 return 4;
2472 }
2473
2474 /* Worker function for TARGET_CAN_ELIMINATE. */
2475
2476 bool
2477 mep_can_eliminate (const int from, const int to)
2478 {
2479 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2480 ? ! frame_pointer_needed
2481 : true);
2482 }
2483
/* Return the distance between elimination registers FROM and TO.
   Frame layout, from high to low addresses: register save area
   (reg_save_size + regsave_filler bytes), then locals plus outgoing
   args (frame_size + frame_filler bytes).  As a side effect this
   computes and caches both filler sizes, which pad each region to an
   8-byte boundary.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the frame is locked, re-derive the save decisions from
     scratch; mep_call_saves_register re-fills this cache.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register save area out to a multiple of 8 bytes.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* And pad the whole frame to a multiple of 8 bytes too.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2528
/* Mark X as frame-related (for DWARF CFI generation) and return it.
   Shorthand used throughout the prologue expander.  */
static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2535
2536 /* Since the prologue/epilogue code is generated after optimization,
2537 we can't rely on gcc to split constants for us. So, this code
2538 captures all the ways to add a constant to a register in one logic
2539 chunk, including optimizing away insns we just don't need. This
2540 makes the prolog/epilog code easier to follow. */
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.

   Emits DEST = SRC + VALUE (register numbers).  If MARK_FRAME is
   nonzero the emitted insns are flagged frame-related for CFI.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* DEST = SRC + 0 with DEST == SRC: nothing to do.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy when the constant is zero.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constants that fit in a signed 16-bit immediate: one add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build the constant in the temp: load the high half, OR in the
     low half if nonzero, then add the temp to SRC.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* The unwinder can't follow the multi-insn sequence, so attach
	 a note describing the net effect: DEST = DEST + VALUE.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2600
2601 /* Move SRC to DEST. Mark the move as being potentially dead if
2602 MAYBE_DEAD_P. */
2603
/* Move SRC to DEST.  Mark the move as being potentially dead if
   MAYBE_DEAD_P.  (The REG_MAYBE_DEAD note mechanism is disabled below;
   currently this is just emit_move_insn and MAYBE_DEAD_P is unused.)  */

static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2614
2615 /* Used for interrupt functions, which can't assume that $tp and $gp
2616 contain the correct pointers. */
2617
2618 static void
2619 mep_reload_pointer (int regno, const char *symbol)
2620 {
2621 rtx reg, sym;
2622
2623 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2624 return;
2625
2626 reg = gen_rtx_REG (SImode, regno);
2627 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2628 emit_insn (gen_movsi_topsym_s (reg, sym));
2629 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2630 }
2631
2632 /* Assign save slots for any register not already saved. DImode
2633 registers go at the end of the reg save area; the rest go at the
2634 beginning. This is for alignment purposes. Returns true if a frame
2635 is really needed. */
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Returns true if a frame
   is really needed.  REG_SAVE_SIZE is the total size of the save area
   (from mep_elimination_offset).  Locks the frame as a side effect.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* $tp/$gp/$lp only force a frame if actually modified; any
	   other saved register always does.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	/* Keep a slot that was already assigned (e.g. by reload).  */
	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte slots are handed out from the start of the area.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte slots are handed out from the end, downwards.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2669
/* Expand the function prologue: adjust $sp, store every register that
   mep_call_saves_register says needs saving, set up the frame pointer
   if needed, and for interrupt handlers reload $gp/$tp.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* If the whole frame fits in a short (sub-128-byte) offset, do the
     entire $sp adjustment up front so the saves use small offsets;
     otherwise adjust by the save area now and the rest at the end.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	enum machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need not be stored unless modified here or in
	   an interrupt handler.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly storable registers: one frame-related store.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control register: bounce each 32-bit half through
	       the temp registers, storing the halves separately.  */
	    rtx insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Describe the save as a single DImode store for the
	       unwinder.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Other control registers: copy through the temp, then
	       store the temp.  */
	    rtx insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Perform whatever part of the $sp adjustment is still outstanding.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      /* The interrupted code may have left $gp/$tp pointing anywhere.  */
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2798
/* Emit a human-readable frame-layout commentary at the top of the
   assembly output for the current function, and pick the textual name
   used for register 8 ($fp vs $8).  HWI_LOCAL is the size of local
   variables.  Output only; generates no code.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Call register 8 "$fp" only when it is actually used as one.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, emit just a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  /* Simple O(n^2) selection sort on slot offsets.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Same skip condition as the prologue store loop.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2896
2897
/* Nonzero while expanding an EH-return epilogue: $lp must not be
   restored, since the EH machinery supplies the return address.  */
static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue: skip the return insn.  */
static int mep_sibcall_epilogue = 0;
2900
/* Expand the function epilogue: rebase $sp, restore saved registers
   (coprocessor/control registers before the temps used to restore
   them), pop the frame, and emit the appropriate return.  Behavior is
   modulated by mep_prevent_lp_restore (EH return) and
   mep_sibcall_epilogue (tail call).  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed. */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Pop the locals first so restores use short offsets.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Mirror of the prologue's skip conditions.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return value/adjustment; don't clobber
	   them in a normal epilogue either (they were only saved for
	   eh_return's benefit).  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly loadable: one load from the save slot.  */
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Bounce through the temp register.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  /* Pop whatever part of the frame remains.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* eh_return asked for an extra stack adjustment (in a register).  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3022
3023 void
3024 mep_expand_eh_return (rtx *operands)
3025 {
3026 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3027 {
3028 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3029 emit_move_insn (ra, operands[0]);
3030 operands[0] = ra;
3031 }
3032
3033 emit_insn (gen_eh_epilogue (operands[0]));
3034 }
3035
/* Expand the body of the eh_epilogue pattern: emit a normal epilogue
   but leave $lp alone (the unwinder supplies the return address) and
   add the stack adjustment from register $0.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3044
/* Expand the epilogue preceding a sibling call: restore registers and
   pop the frame, but emit no return insn (the sibcall jump follows).  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3052
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  Only direct calls to
   known, non-'f'-section functions qualify, and never from interrupt
   or disinterrupt functions.  */
static bool
mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  /* Indirect calls (no decl) can't be tail-called.  */
  if (decl == NULL)
    return false;

  /* 'f'-tagged (far) sections need a different call sequence.  */
  if (mep_section_tag (DECL_RTL (decl)) == 'f')
    return false;

  /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
  if (mep_interrupt_p () || mep_disinterrupt_p ())
    return false;

  return true;
}
3068
/* Implement EH_RETURN_STACKADJ_RTX: register $10 carries the stack
   adjustment for eh_return (see mep_call_saves_register, which forces
   $10/$11 to have save slots when eh_return is used).  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3074
/* Return the rtx holding the EH handler address on return: $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3080
/* Emit the profiling call sequence.  $0 and $lp are preserved around
   the call to __mep_mcount via two stack slots.  (The assembler text
   must stay byte-exact.)  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3096
/* Output the basic-block-profiling exit sequence: save $0/$lp, call
   __bb_trace_ret, restore.  Returns "" so it can be used directly as
   an insn output template.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3112
3113 #undef SAVE
3114 #undef RESTORE
3115 \f
3116 /* Operand Printing. */
3117
3118 void
3119 mep_print_operand_address (FILE *stream, rtx address)
3120 {
3121 if (GET_CODE (address) == MEM)
3122 address = XEXP (address, 0);
3123 else
3124 /* cf: gcc.dg/asm-4.c. */
3125 gcc_assert (GET_CODE (address) == REG);
3126
3127 mep_print_operand (stream, address, 0);
3128 }
3129
/* Dispatch table for mep_print_operand.  CODE is the output-modifier
   letter (0 for none); PATTERN is the encoded shape of the operand rtx
   (as produced by encode_pattern); FORMAT says how to print it: a
   digit indexes into patternr[], '\\' escapes the next character, and
   any other character is emitted literally.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3175
3176 static int
3177 unique_bit_in (HOST_WIDE_INT i)
3178 {
3179 switch (i & 0xff)
3180 {
3181 case 0x01: case 0xfe: return 0;
3182 case 0x02: case 0xfd: return 1;
3183 case 0x04: case 0xfb: return 2;
3184 case 0x08: case 0xf7: return 3;
3185 case 0x10: case 0x7f: return 4;
3186 case 0x20: case 0xbf: return 5;
3187 case 0x40: case 0xdf: return 6;
3188 case 0x80: case 0xef: return 7;
3189 default:
3190 gcc_unreachable ();
3191 }
3192 }
3193
3194 static int
3195 bit_size_for_clip (HOST_WIDE_INT i)
3196 {
3197 int rv;
3198
3199 for (rv = 0; rv < 31; rv ++)
3200 if (((HOST_WIDE_INT) 1 << rv) > i)
3201 return rv + 1;
3202 gcc_unreachable ();
3203 }
3204
3205 /* Print an operand to a assembler instruction. */
3206
/* Print an operand to a assembler instruction.  X is the operand rtx
   and CODE the '%' modifier letter (0 for plain '%0').  '<', 'L' and
   'M' are handled specially; everything else is table-driven through
   conversions[] above, keyed on the encoded pattern of X.  */

void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Bit-operation suffix chosen from the logical opcode.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven path: encode X into the global pattern/patternr,
     find the first matching row, then interpret its format string.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped character: emit the next char literally.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* A digit selects a sub-rtx captured by encode_pattern.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Large round values read better in hex.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal character; but suppress a '+' that would
		 precede a negative constant (the '-' suffices).  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3361
/* Implement FINAL_PRESCAN_INSN: emit the '+' VLIW-bundle prefix for
   non-core-slot insns before their template is output.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3374
3375 /* Function args in registers. */
3376
/* Implement TARGET_SETUP_INCOMING_VARARGS.  NSAVE is the number of
   argument registers past the last named argument (out of the four
   used for parameter passing — presumably $1..$4; confirm against
   mep_expand_builtin_saveregs); they must be dumped to the pretend
   args area so va_arg can find them.  */
static void
mep_setup_incoming_varargs (cumulative_args_t cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  *pretend_size = nsave * 4;
}
3389
3390 static int
3391 bytesize (const_tree type, enum machine_mode mode)
3392 {
3393 if (mode == BLKmode)
3394 return int_size_in_bytes (type);
3395 return GET_MODE_SIZE (mode);
3396 }
3397
/* Implement TARGET_EXPAND_BUILTIN_SAVEREGS: dump the unnamed argument
   registers into a stack buffer and return its address.  On IVC2 the
   buffer additionally holds the corresponding 8-byte coprocessor
   registers after the core-register words.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core words (rounded up to 8-byte pairs) plus one 8-byte
	 coprocessor slot per register; 64-bit aligned.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Store the trailing NS core argument registers at the start.  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      /* Then the matching coprocessor registers, 8 bytes apiece.  */
      for (i=0; i<ns; i++)
	{
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3435
/* Nonzero if tree type T is a vector type.  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3437
/* Build the va_list record type: four pointer fields tracking the
   next general-purpose register save slot, the end of the GPR save
   area, the next coprocessor register save slot, and the next stack
   argument, respectively.  */
static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order; consumers walk this chain
     with DECL_CHAIN in the same order.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3471
/* Implement va_start: point the four va_list fields at the register
   save area produced by expand_builtin_saveregs and at the first
   stack argument (NEXTARG).  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Field decls, in the order mep_build_builtin_va_list chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Advance past the GPR area rounded to 8 bytes, matching the
     buffer layout in mep_expand_builtin_saveregs.  */
  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3521
/* Implement va_arg gimplification.  Values wider than a register pair
   (or of unknown size) were passed by reference, so fetch a pointer
   and dereference it.  IVC2 vector values come from the coprocessor
   save area; everything else comes from the GPR save area until it is
   exhausted, then from the stack.  Returns the dereferenced value.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* Passed by reference: the slot holds a 4-byte pointer.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Round the slot size up to a whole word.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
       label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Register area exhausted?  Jump to the stack path.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance in lock-step, whichever one was used.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* By-reference values need a second dereference: first to fetch
     the pointer from the slot, then to fetch the value.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3629
3630 void
3631 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3632 rtx libname ATTRIBUTE_UNUSED,
3633 tree fndecl ATTRIBUTE_UNUSED)
3634 {
3635 pcum->nregs = 0;
3636
3637 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3638 pcum->vliw = 1;
3639 else
3640 pcum->vliw = 0;
3641 }
3642
/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
   larger than 4 bytes are passed indirectly. Return value in 0,
   unless bigger than 4 bytes, then the caller passes a pointer as the
   first arg. For varargs, we copy $1..$4 to the stack. */

/* Implement TARGET_FUNCTION_ARG: return the register holding the next
   argument, or NULL_RTX once all four argument registers are used.  */
static rtx
mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* VOIDmode is a signal for the backend to pass data to the call
     expander via the second operand to the call pattern.  We use
     this to determine whether to use "jsr" or "jsrv".  */
  if (mode == VOIDmode)
    return GEN_INT (cum->vliw);

  /* If we haven't run out of argument registers, return the next.  */
  if (cum->nregs < 4)
    {
      /* IVC2 vector arguments go in hard registers starting at 49
	 (presumably the coprocessor bank — confirm against the
	 register layout); everything else in $1..$4.  */
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
	return gen_rtx_REG (mode, cum->nregs + 49);
      else
	return gen_rtx_REG (mode, cum->nregs + 1);
    }

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
3673
3674 static bool
3675 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3676 enum machine_mode mode,
3677 const_tree type,
3678 bool named ATTRIBUTE_UNUSED)
3679 {
3680 int size = bytesize (type, mode);
3681
3682 /* This is non-obvious, but yes, large values passed after we've run
3683 out of registers are *still* passed by reference - we put the
3684 address of the parameter on the stack, as well as putting the
3685 parameter itself elsewhere on the stack. */
3686
3687 if (size <= 0 || size > 8)
3688 return true;
3689 if (size <= 4)
3690 return false;
3691 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3692 && type != NULL_TREE && VECTOR_TYPE_P (type))
3693 return false;
3694 return true;
3695 }
3696
3697 static void
3698 mep_function_arg_advance (cumulative_args_t pcum,
3699 enum machine_mode mode ATTRIBUTE_UNUSED,
3700 const_tree type ATTRIBUTE_UNUSED,
3701 bool named ATTRIBUTE_UNUSED)
3702 {
3703 get_cumulative_args (pcum)->nregs += 1;
3704 }
3705
3706 bool
3707 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3708 {
3709 int size = bytesize (type, BLKmode);
3710 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3711 return size > 0 && size <= 8 ? 0 : 1;
3712 return size > 0 && size <= 4 ? 0 : 1;
3713 }
3714
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always access volatile
   bit-fields in the narrowest possible mode.  (The body previously
   contained an unreachable "return false;" after the return, now
   removed.)  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3721
/* Implement FUNCTION_VALUE. All values are returned in $0. */

rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  /* Exception: IVC2 vector values come back in hard register 48
     (presumably the first coprocessor register — confirm against
     the register layout).  */
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3731
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return vectors, so no IVC2 special case is needed.  */

rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3739
/* Handle pipeline hazards. */

/* Opcode classes that participate in hazard pairs, and their
   printable names (indexed by op_num, for diagnostics).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Opcode class of the most recently emitted instruction.  */
static int prev_opcode = 0;
3746
3747 /* This isn't as optimal as it could be, because we don't know what
3748 control register the STC opcode is storing in. We only need to add
3749 the nop if it's the relevant register, but we add it for irrelevant
3750 registers also. */
3751
3752 void
3753 mep_asm_output_opcode (FILE *file, const char *ptr)
3754 {
3755 int this_opcode = op_none;
3756 const char *hazard = 0;
3757
3758 switch (*ptr)
3759 {
3760 case 'f':
3761 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3762 this_opcode = op_fsft;
3763 break;
3764 case 'r':
3765 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3766 this_opcode = op_ret;
3767 break;
3768 case 's':
3769 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3770 this_opcode = op_stc;
3771 break;
3772 }
3773
3774 if (prev_opcode == op_stc && this_opcode == op_fsft)
3775 hazard = "nop";
3776 if (prev_opcode == op_stc && this_opcode == op_ret)
3777 hazard = "nop";
3778
3779 if (hazard)
3780 fprintf(file, "%s\t# %s-%s hazard\n\t",
3781 hazard, opnames[prev_opcode], opnames[this_opcode]);
3782
3783 prev_opcode = this_opcode;
3784 }
3785
/* Handle attributes. */

/* Attribute handler for "based" and "tiny": accepted on variables,
   pointer types, and typedefs; rejected (with *NO_ADD set) on
   automatic variables and on pointed-to types.  */
static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Only variables with static storage can live in a region.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3816
/* Count the address-region attributes (based/tiny/near/far/io) in
   attribute list LIST, used to detect conflicting region requests.
   When CHECK_SECTION_ATTR is true, count "section" attributes
   instead.  */
static int
mep_multiple_address_regions (tree list, bool check_section_attr)
{
  tree a;
  int count_sections = 0;
  int section_attr_count = 0;

  for (a = list; a; a = TREE_CHAIN (a))
    {
      if (is_attribute_p ("based", TREE_PURPOSE (a))
	  || is_attribute_p ("tiny", TREE_PURPOSE (a))
	  || is_attribute_p ("near", TREE_PURPOSE (a))
	  || is_attribute_p ("far", TREE_PURPOSE (a))
	  || is_attribute_p ("io", TREE_PURPOSE (a)))
	count_sections ++;
      if (check_section_attr)
	section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
    }

  if (check_section_attr)
    return section_attr_count;
  else
    return count_sections;
}
3841
/* Attribute list relevant to DECL: the type's attributes for a type,
   otherwise the decl's own attributes when present, otherwise the
   attributes of the decl's type.  The whole expansion is a conditional
   expression, so it is parenthesized to prevent rebinding when the
   macro is used inside a larger expression.  */
#define MEP_ATTRIBUTES(decl) \
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
                  : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3847
/* Attribute handler for "near" and "far": accepted on variables,
   functions, and typedefs; rejects autos, pointed-to types, and
   conflicting region attributes already present on the decl.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  /* Any pre-existing region attribute (> 0, since the new one has not
     been added yet — presumably; confirm attribute-handler timing)
     conflicts with this one; drop them all.  */
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3884
3885 static tree
3886 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3887 int flags ATTRIBUTE_UNUSED, bool *no_add)
3888 {
3889 if (TREE_CODE (*node) != FUNCTION_DECL
3890 && TREE_CODE (*node) != METHOD_TYPE)
3891 {
3892 warning (0, "%qE attribute only applies to functions", name);
3893 *no_add = true;
3894 }
3895 return NULL_TREE;
3896 }
3897
/* Attribute handler for "interrupt": must be a function, never
   inlined, returning void and taking no arguments.  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Interrupt prologue/epilogue cannot be inlined into a caller.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototype must be exactly (void): one entry, void, no more.  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
3927
/* Attribute handler for "io" and "cb": variables only, with an
   optional integer-constant address argument.  Accepted io/cb
   variables are made volatile unless -mio-no-volatile is given.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper to expose the constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
3955
/* Attribute handler for "vliw": functions (and function-ish decls)
   only.  Gives one-time hints for the common mistakes of attaching
   the attribute to a function pointer or an array of them, and
   requires a VLIW-capable configuration.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = get_tree_code_name (TREE_CODE (*node));
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      /* Each syntax hint is printed at most once per compilation.  */
      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location,
	          "to describe a pointer to a VLIW function, use syntax like this:\n%s",
	          "   typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location,
	          "to describe an array of VLIW function pointers, use syntax like this:\n%s",
	          "   typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4006
4007 static const struct attribute_spec mep_attribute_table[11] =
4008 {
4009 /* name min max decl type func handler
4010 affects_type_identity */
4011 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4012 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4013 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4014 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4015 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4016 false },
4017 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4018 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4019 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4020 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4021 { NULL, 0, 0, false, false, false, NULL, false }
4022 };
4023
/* A function may be inlined only if it carries neither the
   "disinterrupt" nor the "interrupt" attribute, whether on its type
   or (when the type has no attributes) on the decl itself.  */
static bool
mep_function_attribute_inlinable_p (const_tree callee)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
  if (!attrs) attrs = DECL_ATTRIBUTES (callee);
  return (lookup_attribute ("disinterrupt", attrs) == 0
	  && lookup_attribute ("interrupt", attrs) == 0);
}
4032
4033 static bool
4034 mep_can_inline_p (tree caller, tree callee)
4035 {
4036 if (TREE_CODE (callee) == ADDR_EXPR)
4037 callee = TREE_OPERAND (callee, 0);
4038
4039 if (!mep_vliw_function_p (caller)
4040 && mep_vliw_function_p (callee))
4041 {
4042 return false;
4043 }
4044 return true;
4045 }
4046
/* Flag bits recorded per function name by the #pragma handlers.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* One record per function named in a pragma: which pragmas mentioned
   it (FLAG) and which of those were actually consumed (USED), so that
   unused "#pragma disinterrupt" directives can be diagnosed at the
   end of the file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table mapping function names to their pragma_entry records.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4060
4061 static int
4062 pragma_entry_eq (const void *p1, const void *p2)
4063 {
4064 const pragma_entry *old = (const pragma_entry *) p1;
4065 const char *new_name = (const char *) p2;
4066
4067 return strcmp (old->funcname, new_name) == 0;
4068 }
4069
/* htab hash callback: hash a pragma_entry by its function name,
   matching the key hashing used at the lookup sites.  */
static hashval_t
pragma_entry_hash (const void *p)
{
  const pragma_entry *old = (const pragma_entry *) p;
  return htab_hash_string (old->funcname);
}
4076
/* Record that FUNCNAME was named in a pragma of kind FLAG
   (FUNC_CALL or FUNC_DISINTERRUPT), creating the hash table and the
   entry on first use.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      /* First pragma for this name: allocate a fresh entry with a
	 GC-stable copy of the name.  */
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4099
/* Return true if FUNCNAME was named in a pragma of kind FLAG, and
   mark that flag as used so the end-of-file diagnostic stays quiet.  */
static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  /* Strip a "@x." section-encoding prefix (see mep_encode_section_info)
     so lookups match the name as written in the pragma.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4121
/* Query whether FUNCNAME appeared in a "#pragma call".  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record FUNCNAME as named in a "#pragma call".  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* Query whether FUNCNAME appeared in a "#pragma disinterrupt".  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record FUNCNAME as named in a "#pragma disinterrupt".  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4145
/* htab_traverse callback: warn about each "#pragma disinterrupt" that
   was recorded but never matched to a function.  Always returns 1 so
   the traversal continues.  */
static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4156
/* End-of-file hook: report any unused "#pragma disinterrupt"
   directives collected during compilation.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
4163
/* These three functions provide a bridge between the pragmas that
4165 affect register classes, and the functions that maintain them. We
4166 can't call those functions directly as pragma handling is part of
4167 the front end and doesn't have direct access to them. */
4168
/* Snapshot the current register information.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Recompute register information after a pragma changed it.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Perform the initial register-information setup.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4186
4187
4188
/* Map the first address-region attribute in LIST to its one-letter
   section encoding: 'b'ased, 't'iny, 'n'ear, 'f'ar, 'i'o (valid
   address) / 'I'o (no or invalid address), 'c'b.  Returns 0 when no
   region applies, except that with -mtf unsectioned functions default
   to 'f'.  Duplicate region attributes are diagnosed and the extras
   dropped from LIST.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first attribute in the list.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when the io attribute carries an in-range
	     constant address; 'I' otherwise.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4234
4235 static int
4236 mep_comp_type_attributes (const_tree t1, const_tree t2)
4237 {
4238 int vliw1, vliw2;
4239
4240 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4241 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4242
4243 if (vliw1 != vliw2)
4244 return 0;
4245
4246 return 1;
4247 }
4248
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach a "disinterrupt"
   attribute to functions named in a matching pragma, and assign
   static variables to a default address region (based/tiny/far)
   according to their size, -mrand-tpgp, the -mbased/-mtiny cutoffs,
   and -mconst-section — unless the user already chose a region.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Region defaults apply only to variables with static storage.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff. */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x; */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an explicit region choice: first on the incoming/decl
     attributes, then on the type's attributes.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules. */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want. */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: scatter small variables pseudo-randomly (by a
	 simple name checksum) over the based/tiny/far regions.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Size-based defaults: based below the based cutoff, tiny below
	 the tiny cutoff, far for everything else when -ml.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section overrides the size-based choice for read-only
     variables ("near" means no region attribute at all).  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny. */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4399
/* Implement TARGET_ENCODE_SECTION_INFO.  Prefix the assembler name of
   a region-attributed variable or function with "@x." (x = the
   one-letter region code from mep_attrlist_to_encoding), and warn
   when the object is too large for the region's addressable size.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  /* Only encode on the first call; later calls would re-prefix.  */
  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* The symbol may be wrapped in a MEM for by-reference objects.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Build the "@x." prefixed symbol, preserving weakness and the
	 decl association.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Addressable size of each region, for the too-large warning.
	 Regions other than based/tiny/near are unbounded here.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4475
/* Strip our name decorations from SYM: any leading '*' markers and
   any number of "@x." region-encoding prefixes, in any order.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	sym += 1;
      else if (sym[0] == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4489
/* Implement TARGET_ASM_SELECT_SECTION.  Pick the output section for
   DECL from its "@x." name encoding (see mep_encode_section_info):
   functions go to the vliw/far/plain text sections, variables to the
   based/tiny/far data, read-only, or bss variants.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* A variable is read-only only if it is const with a constant
     (or absent) initializer and no side effects.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the region letter from an "@x." encoded name.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables are overlays on hardware registers and
	       must not carry an initializer.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4580
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form <prefix><symbol>, where the prefix encodes both the
   base section (text/rodata/data plus the MeP-specific variants) and
   whether the decl is one-only (link-once).  RELOC says whether the
   initializer contains relocations.  */

static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by [section][DECL_ONE_ONLY].  The numeric indices used in
     the comments below must stay in sync with this table.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly tag-encoded) assembler name from the RTL.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* First pick the base section from the decl kind.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Then refine it using the "@x." section tag, if present, and strip
     the tag from the symbol name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4652
4653 /* Given a decl, a section name, and whether the decl initializer
4654 has relocs, choose attributes for the section. */
4655
4656 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4657
4658 static unsigned int
4659 mep_section_type_flags (tree decl, const char *name, int reloc)
4660 {
4661 unsigned int flags = default_section_type_flags (decl, name, reloc);
4662
4663 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4664 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4665 flags |= SECTION_MEP_VLIW;
4666
4667 return flags;
4668 }
4669
4670 /* Switch to an arbitrary section NAME with attributes as specified
4671 by FLAGS. ALIGN specifies any known alignment requirements for
4672 the section; 0 if the default should be used.
4673
4674 Differs from the standard ELF version only in support of VLIW mode. */
4675
4676 static void
4677 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4678 {
4679 char flagchars[8], *f = flagchars;
4680 const char *type;
4681
4682 if (!(flags & SECTION_DEBUG))
4683 *f++ = 'a';
4684 if (flags & SECTION_WRITE)
4685 *f++ = 'w';
4686 if (flags & SECTION_CODE)
4687 *f++ = 'x';
4688 if (flags & SECTION_SMALL)
4689 *f++ = 's';
4690 if (flags & SECTION_MEP_VLIW)
4691 *f++ = 'v';
4692 *f = '\0';
4693
4694 if (flags & SECTION_BSS)
4695 type = "nobits";
4696 else
4697 type = "progbits";
4698
4699 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4700 name, flagchars, type);
4701
4702 if (flags & SECTION_CODE)
4703 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4704 asm_out_file);
4705 }
4706
/* Output a common (uninitialized) variable NAME of SIZE bytes and
   ALIGN bits for DECL to STREAM; GLOBAL is nonzero for a public
   symbol.  Variables tagged "io"/"cb" become plain assembler aliases
   for their fixed address; "based"/"tiny"/"far" data is emitted into
   the matching bss-style section; everything else falls back to
   .local/.comm.  */

void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb variable: emit "name = <address>" instead of storage.
	 The address comes from the attribute's argument; if there is
	 no argument there is nothing to emit.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (in bits) to a power-of-two byte alignment
	     for .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  /* Emit the symbol under its bare name, with explicit type,
	     size and zero-filled storage.  */
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: a .comm symbol, marked .local when not global.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4784
4785 /* Trampolines. */
4786
4787 static void
4788 mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4789 {
4790 rtx addr = XEXP (m_tramp, 0);
4791 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4792
4793 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4794 LCT_NORMAL, VOIDmode, 3,
4795 addr, Pmode,
4796 fnaddr, Pmode,
4797 static_chain, Pmode);
4798 }
4799
4800 /* Experimental Reorg. */
4801
/* Return true if REG (a hard register, or NULL to look for memory
   references instead) is mentioned anywhere inside IN.  If MODES_TOO
   is nonzero, a register only matches when its machine mode matches
   as well.  When searching for memory (REG == NULL), only the
   destination of a SET is examined, since its source is treated as
   read-only.  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference can only "mention" something when we are
     scanning for memory references.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When looking for a register, descend into the address;
	 when looking for memory, this is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaves that can never mention a register or memory.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursive walk over all sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4867
4868 #define EXPERIMENTAL_REGMOVE_REORG 1
4869
4870 #if EXPERIMENTAL_REGMOVE_REORG
4871
/* Return nonzero if hard registers R1 and R2 belong to the same
   substitutable class: both general registers, or both control
   registers.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4881
4882 static void
4883 mep_reorg_regmove (rtx insns)
4884 {
4885 rtx insn, next, pat, follow, *where;
4886 int count = 0, done = 0, replace, before = 0;
4887
4888 if (dump_file)
4889 for (insn = insns; insn; insn = NEXT_INSN (insn))
4890 if (NONJUMP_INSN_P (insn))
4891 before++;
4892
4893 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4894 set that uses the r2 and r2 dies there. We replace r2 with r1
4895 and see if it's still a valid insn. If so, delete the first set.
4896 Copied from reorg.c. */
4897
4898 while (!done)
4899 {
4900 done = 1;
4901 for (insn = insns; insn; insn = next)
4902 {
4903 next = next_nonnote_nondebug_insn (insn);
4904 if (! NONJUMP_INSN_P (insn))
4905 continue;
4906 pat = PATTERN (insn);
4907
4908 replace = 0;
4909
4910 if (GET_CODE (pat) == SET
4911 && GET_CODE (SET_SRC (pat)) == REG
4912 && GET_CODE (SET_DEST (pat)) == REG
4913 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4914 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4915 {
4916 follow = next_nonnote_nondebug_insn (insn);
4917 if (dump_file)
4918 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4919
4920 while (follow && NONJUMP_INSN_P (follow)
4921 && GET_CODE (PATTERN (follow)) == SET
4922 && !dead_or_set_p (follow, SET_SRC (pat))
4923 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4924 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4925 {
4926 if (dump_file)
4927 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4928 follow = next_nonnote_insn (follow);
4929 }
4930
4931 if (dump_file)
4932 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4933 if (follow && NONJUMP_INSN_P (follow)
4934 && GET_CODE (PATTERN (follow)) == SET
4935 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4936 {
4937 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4938 {
4939 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4940 {
4941 replace = 1;
4942 where = & SET_SRC (PATTERN (follow));
4943 }
4944 }
4945 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4946 {
4947 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4948 {
4949 replace = 1;
4950 where = & PATTERN (follow);
4951 }
4952 }
4953 }
4954 }
4955
4956 /* If so, follow is the corresponding insn */
4957 if (replace)
4958 {
4959 if (dump_file)
4960 {
4961 rtx x;
4962
4963 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4964 for (x = insn; x ;x = NEXT_INSN (x))
4965 {
4966 print_rtl_single (dump_file, x);
4967 if (x == follow)
4968 break;
4969 fprintf (dump_file, "\n");
4970 }
4971 }
4972
4973 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4974 follow, where))
4975 {
4976 count ++;
4977 delete_insn (insn);
4978 if (dump_file)
4979 {
4980 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4981 print_rtl_single (dump_file, follow);
4982 }
4983 done = 0;
4984 }
4985 }
4986 }
4987 }
4988
4989 if (dump_file)
4990 {
4991 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4992 fprintf (dump_file, "=====\n");
4993 }
4994 }
4995 #endif
4996
4997
4998 /* Figure out where to put LABEL, which is the label for a repeat loop.
4999 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5000 the loop ends just before LAST_INSN. If SHARED, insns other than the
5001 "repeat" might use LABEL to jump to the loop's continuation point.
5002
5003 Return the last instruction in the adjusted loop. */
5004
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  When the label
     is not shared, try to move it up to two eligible insns backwards,
     so that the final two slots of the loop body form the repeat
     epilogue.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move the label across a call, another label, or a
	   barrier.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a delay-slot SEQUENCE, look at the contained insn.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  The epilogue must contain exactly two opcodes;
     pad with nops for any slot we could not fill above.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5095
5096
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn for OPERANDS.  Each begin/end pair shares a loop
   tag (passed as the insn's third operand), which mep_reorg_repeat
   later uses to match begins with their ends.  A new tag is allocated
   when none exist yet, or when the previous tag was created by the
   same kind of insn — so an alternating begin/end pair shares one
   tag.  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5115
5116
5117 /* Code for converting doloop_begins and doloop_ends into valid
5118 MeP instructions. A doloop_begin is just a placeholder:
5119
5120 $count = unspec ($count)
5121
5122 where $count is initially the number of iterations - 1.
5123 doloop_end has the form:
5124
5125 if ($count-- == 0) goto label
5126
5127 The counter variable is private to the doloop insns, nothing else
5128 relies on its value.
5129
5130 There are three cases, in decreasing order of preference:
5131
5132 1. A loop has exactly one doloop_begin and one doloop_end.
5133 The doloop_end branches to the first instruction after
5134 the doloop_begin.
5135
5136 In this case we can replace the doloop_begin with a repeat
5137 instruction and remove the doloop_end. I.e.:
5138
5139 $count1 = unspec ($count1)
5140 label:
5141 ...
5142 insn1
5143 insn2
5144 if ($count2-- == 0) goto label
5145
5146 becomes:
5147
5148 repeat $count1,repeat_label
5149 label:
5150 ...
5151 repeat_label:
5152 insn1
5153 insn2
5154 # end repeat
5155
5156 2. As for (1), except there are several doloop_ends. One of them
5157 (call it X) falls through to a label L. All the others fall
5158 through to branches to L.
5159
5160 In this case, we remove X and replace the other doloop_ends
5161 with branches to the repeat label. For example:
5162
5163 $count1 = unspec ($count1)
5164 start:
5165 ...
5166 if ($count2-- == 0) goto label
5167 end:
5168 ...
5169 if ($count3-- == 0) goto label
5170 goto end
5171
5172 becomes:
5173
5174 repeat $count1,repeat_label
5175 start:
5176 ...
5177 repeat_label:
5178 nop
5179 nop
5180 # end repeat
5181 end:
5182 ...
5183 goto repeat_label
5184
5185 3. The fallback case. Replace doloop_begins with:
5186
5187 $count = $count + 1
5188
5189 Replace doloop_ends with the equivalent of:
5190
5191 $count = $count - 1
5192 if ($count == 0) goto label
5193
5194 Note that this might need a scratch register if $count
5195 is stored in memory. */
5196
/* A structure describing one doloop_begin placeholder insn.  These
   records are alloca'd and live only for the duration of
   mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5208
/* A structure describing a doloop_end placeholder insn.  Like
   mep_doloop_begin, these records are alloca'd within
   mep_reorg_repeat.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5233
5234
/* One do-while loop: all the doloop_begin and doloop_end insns that
   share a single loop tag.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5244
5245
5246 /* Return true if LOOP can be converted into repeat/repeat_end form
5247 (that is, if it matches cases (1) or (2) above). */
5248
5249 static bool
5250 mep_repeat_loop_p (struct mep_doloop *loop)
5251 {
5252 struct mep_doloop_end *end;
5253 rtx fallthrough;
5254
5255 /* There must be exactly one doloop_begin and at least one doloop_end. */
5256 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5257 return false;
5258
5259 /* The first doloop_end (X) must branch back to the insn after
5260 the doloop_begin. */
5261 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5262 return false;
5263
5264 /* All the other doloop_ends must branch to the same place as X.
5265 When the branch isn't taken, they must jump to the instruction
5266 after X. */
5267 fallthrough = loop->end->fallthrough;
5268 for (end = loop->end->next; end != 0; end = end->next)
5269 if (end->label != loop->end->label
5270 || !simplejump_p (end->fallthrough)
5271 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5272 return false;
5273
5274 return true;
5275 }
5276
5277
5278 /* The main repeat reorg function. See comment above for details. */
5279
static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  All the per-loop
     records below are alloca'd too, so nothing needs freeing.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     Operand 2 of each insn is the loop tag (see mep_emit_doloop).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions (undoing the iterations-minus-one bias).  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5425
5426
/* Return true if INSN is a conditional branch whose condition (EQ,
   NE, LT or GE) can be inverted into another recognizable insn.  The
   test works by temporarily flipping the condition code in place,
   asking recog whether the modified pattern matches, and then
   restoring the original code.  INSN is left unchanged apart from its
   INSN_CODE cache, which is invalidated.  */

static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Flip the condition to its logical inverse, in place.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Other comparison codes are not handled here.  */
      return false;
    }
  /* Force re-recognition of the modified pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Undo the in-place modification before returning.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5464
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it to a fresh label placed
   after AFTER.  The old target label is deleted when INSN was its
   only user.  */

static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the condition code in place, as in
     mep_invertable_branch_p.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  /* Emit the new target label after AFTER and point whichever arm of
     the IF_THEN_ELSE holds a label at it.  */
  label = gen_label_rtx ();
  emit_label_after (label, after);
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	/* Delete the old label if we were its sole user.  */
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5510
/* Look for invertable backward branches in INSNS and convert the code
   between the branch target and the branch into an erepeat loop
   (a repeat without an explicit iteration count).  The backward scan
   gives up at calls, barriers, unrecognized insns and labels with
   uses we cannot account for.  */

static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* COUNT tracks how many real insns make up the loop body; a
	   conditional jump itself counts as one.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Scan backwards from the branch towards its target label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		/* Reached the loop top.  */
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop: the erepeat replaces the
		       jump entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop: invert the branch so that it
		       exits the loop, and close with erepeat_end.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5620
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  The label
		   is deleted once it loses its last user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5658
5659
/* Combine two consecutive add-immediate insns that target the same
   register, (set r (plus r C1)) followed by (set r (plus r C2)), into
   a single add of C1+C2 — provided the sum stays within a signed
   16-bit range (the bound excludes 32767/-32768 themselves; presumably
   conservative for the immediate encoding — TODO confirm).  The second
   insn is removed by splicing it out of the NEXT/PREV chain.  */

static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold the second constant into the first insn and
		   unlink the second insn from the chain.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5695
5696 /* If this insn adjusts the stack, return the adjustment, else return
5697 zero. */
5698 static int
5699 add_sp_insn_p (rtx insn)
5700 {
5701 rtx pat;
5702
5703 if (! single_set (insn))
5704 return 0;
5705 pat = PATTERN (insn);
5706 if (GET_CODE (SET_DEST (pat)) != REG)
5707 return 0;
5708 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5709 return 0;
5710 if (GET_CODE (SET_SRC (pat)) != PLUS)
5711 return 0;
5712 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5713 return 0;
5714 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5715 return 0;
5716 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5717 return 0;
5718 return INTVAL (XEXP (SET_SRC (pat), 1));
5719 }
5720
/* Check for trivial functions that set up an unneeded stack
   frame.  If the only stack activity is an allocate/release pair of
   $sp adjustments with no intervening use of $sp and no calls, delete
   both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx next = next_real_insn (insns);
      /* Stop before the last real insn (presumably the return);
	 it is not scanned.  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* A second stack adjustment: there must be only one, and it
	     must exactly undo the prologue's adjustment.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      /* Any other mention of $sp means the frame is really used.  */
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      /* Calls need the frame (e.g. for the return address save).  */
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5775
/* The MeP machine-dependent reorg pass.  Runs the local RTL clean-ups
   in a fixed order: add-combining, optional register-move clean-up,
   jump-to-return conversion, VLIW bundling, repeat-loop generation,
   and finally removal of unneeded stack frames.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat conversion is only done when profiling is off and, for
     interrupt handlers, when the RPB register is saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5807
5808 \f
5809
5810 /*----------------------------------------------------------------------*/
5811 /* Builtins */
5812 /*----------------------------------------------------------------------*/
5813
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Pairs a configuration name (matched against mep_config_string in
   mep_init_intrinsics) with its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   entries come from COPROC_SELECTION_TABLE when the backend was built
   with one; otherwise only the null entry is present.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5841
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries for the same
     intrinsic override earlier ones; mep_intrinsic_chain links each
     entry back to the previous one for that intrinsic.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making that computation dead.  Confirm whether forcing
     core<->coprocessor moves on is intentional or a debugging
     leftover.  */
  mep_have_core_copro_moves_p = 1;
}
5883
/* Declare all available intrinsic functions.  Called once only.  */

/* Cached type nodes for builtin argument/return types.  They are
   built in mep_init_builtins and mapped from cgen operand types by
   mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5894
5895 static tree
5896 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5897 {
5898 switch (cr)
5899 {
5900 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5901 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5902 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5903 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5904 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5905 case cgen_regnum_operand_type_CHAR: return char_type_node;
5906 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5907 case cgen_regnum_operand_type_SI: return intSI_type_node;
5908 case cgen_regnum_operand_type_DI: return intDI_type_node;
5909 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5910 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5911 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5912 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5913 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5914 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5915 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5916 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5917 default:
5918 return void_type_node;
5919 }
5920 }
5921
/* Create the builtin vector/data-bus types and declare a builtin
   function for every available intrinsic.  Called once only.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* With 64-bit coprocessor registers, the data-bus type is twice as
     wide.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the types visible to user code under their __cop names.  */
  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicate entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	/* NOTE(review): the builtin is declared with an empty argument
	   list; argument counts and types appear to be checked at
	   expand time in mep_expand_builtin -- confirm.  */
	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
5988
/* Report the unavailability of the given intrinsic.  */
5990
5991 #if 1
5992 static void
5993 mep_intrinsic_unavailable (int intrinsic)
5994 {
5995 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
5996
5997 if (already_reported_p[intrinsic])
5998 return;
5999
6000 if (mep_intrinsic_insn[intrinsic] < 0)
6001 error ("coprocessor intrinsic %qs is not available in this configuration",
6002 cgen_intrinsics[intrinsic]);
6003 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6004 error ("%qs is not available in VLIW functions",
6005 cgen_intrinsics[intrinsic]);
6006 else
6007 error ("%qs is not available in non-VLIW functions",
6008 cgen_intrinsics[intrinsic]);
6009
6010 already_reported_p[intrinsic] = 1;
6011 }
6012 #endif
6013
6014
6015 /* See if any implementation of INTRINSIC is available to the
6016 current function. If so, store the most general implementation
6017 in *INSN_PTR and return true. Return false otherwise. */
6018
6019 static bool
6020 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6021 {
6022 int i;
6023
6024 i = mep_intrinsic_insn[intrinsic];
6025 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6026 i = mep_intrinsic_chain[i];
6027
6028 if (i >= 0)
6029 {
6030 *insn_ptr = &cgen_insns[i];
6031 return true;
6032 }
6033 return false;
6034 }
6035
6036
6037 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6038 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6039 try using a work-alike instead. In this case, the returned insn
6040 may have three operands rather than two. */
6041
6042 static bool
6043 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6044 {
6045 size_t i;
6046
6047 if (intrinsic == mep_cmov)
6048 {
6049 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6050 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6051 return true;
6052 return false;
6053 }
6054 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6055 }
6056
6057
6058 /* If ARG is a register operand that is the same size as MODE, convert it
6059 to MODE using a subreg. Otherwise return ARG as-is. */
6060
6061 static rtx
6062 mep_convert_arg (enum machine_mode mode, rtx arg)
6063 {
6064 if (GET_MODE (arg) != mode
6065 && register_operand (arg, VOIDmode)
6066 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6067 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6068 return arg;
6069 }
6070
6071
6072 /* Apply regnum conversions to ARG using the description given by REGNUM.
6073 Return the new argument on success and null on failure. */
6074
6075 static rtx
6076 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6077 {
6078 if (regnum->count == 0)
6079 return arg;
6080
6081 if (GET_CODE (arg) != CONST_INT
6082 || INTVAL (arg) < 0
6083 || INTVAL (arg) >= regnum->count)
6084 return 0;
6085
6086 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6087 }
6088
6089
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or null if ARG cannot be made to
   match OPERAND.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6147
6148
6149 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6150 function FNNAME. OPERAND describes the operand to which ARGNUM
6151 is mapped. */
6152
6153 static void
6154 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6155 int argnum, tree fnname)
6156 {
6157 size_t i;
6158
6159 if (GET_CODE (arg) == CONST_INT)
6160 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6161 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6162 {
6163 const struct cgen_immediate_predicate *predicate;
6164 HOST_WIDE_INT argval;
6165
6166 predicate = &cgen_immediate_predicates[i];
6167 argval = INTVAL (arg);
6168 if (argval < predicate->lower || argval >= predicate->upper)
6169 error ("argument %d of %qE must be in the range %d...%d",
6170 argnum, fnname, predicate->lower, predicate->upper - 1);
6171 else
6172 error ("argument %d of %qE must be a multiple of %d",
6173 argnum, fnname, predicate->align);
6174 return;
6175 }
6176
6177 error ("incompatible type for argument %d of %qE", argnum, fnname);
6178 }
6179
6180 static rtx
6181 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6182 rtx subtarget ATTRIBUTE_UNUSED,
6183 enum machine_mode mode ATTRIBUTE_UNUSED,
6184 int ignore ATTRIBUTE_UNUSED)
6185 {
6186 rtx pat, op[10], arg[10];
6187 unsigned int a;
6188 int opindex, unsigned_p[10];
6189 tree fndecl, args;
6190 unsigned int n_args;
6191 tree fnname;
6192 const struct cgen_insn *cgen_insn;
6193 const struct insn_data_d *idata;
6194 unsigned int first_arg = 0;
6195 unsigned int builtin_n_args;
6196
6197 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6198 fnname = DECL_NAME (fndecl);
6199
6200 /* Find out which instruction we should emit. Note that some coprocessor
6201 intrinsics may only be available in VLIW mode, or only in normal mode. */
6202 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6203 {
6204 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6205 return NULL_RTX;
6206 }
6207 idata = &insn_data[cgen_insn->icode];
6208
6209 builtin_n_args = cgen_insn->num_args;
6210
6211 if (cgen_insn->cret_p)
6212 {
6213 if (cgen_insn->cret_p > 1)
6214 builtin_n_args ++;
6215 first_arg = 1;
6216 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6217 builtin_n_args --;
6218 }
6219
6220 /* Evaluate each argument. */
6221 n_args = call_expr_nargs (exp);
6222
6223 if (n_args < builtin_n_args)
6224 {
6225 error ("too few arguments to %qE", fnname);
6226 return NULL_RTX;
6227 }
6228 if (n_args > builtin_n_args)
6229 {
6230 error ("too many arguments to %qE", fnname);
6231 return NULL_RTX;
6232 }
6233
6234 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6235 {
6236 tree value;
6237
6238 args = CALL_EXPR_ARG (exp, a - first_arg);
6239
6240 value = args;
6241
6242 #if 0
6243 if (cgen_insn->regnums[a].reference_p)
6244 {
6245 if (TREE_CODE (value) != ADDR_EXPR)
6246 {
6247 debug_tree(value);
6248 error ("argument %d of %qE must be an address", a+1, fnname);
6249 return NULL_RTX;
6250 }
6251 value = TREE_OPERAND (value, 0);
6252 }
6253 #endif
6254
6255 /* If the argument has been promoted to int, get the unpromoted
6256 value. This is necessary when sub-int memory values are bound
6257 to reference parameters. */
6258 if (TREE_CODE (value) == NOP_EXPR
6259 && TREE_TYPE (value) == integer_type_node
6260 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6261 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6262 < TYPE_PRECISION (TREE_TYPE (value))))
6263 value = TREE_OPERAND (value, 0);
6264
6265 /* If the argument has been promoted to double, get the unpromoted
6266 SFmode value. This is necessary for FMAX support, for example. */
6267 if (TREE_CODE (value) == NOP_EXPR
6268 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6269 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6270 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6271 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6272 value = TREE_OPERAND (value, 0);
6273
6274 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6275 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6276 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6277 if (cgen_insn->regnums[a].reference_p)
6278 {
6279 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6280 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6281
6282 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6283 }
6284 if (arg[a] == 0)
6285 {
6286 error ("argument %d of %qE must be in the range %d...%d",
6287 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6288 return NULL_RTX;
6289 }
6290 }
6291
6292 for (a = 0; a < first_arg; a++)
6293 {
6294 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6295 arg[a] = target;
6296 else
6297 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6298 }
6299
6300 /* Convert the arguments into a form suitable for the intrinsic.
6301 Report an error if this isn't possible. */
6302 for (opindex = 0; opindex < idata->n_operands; opindex++)
6303 {
6304 a = cgen_insn->op_mapping[opindex];
6305 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6306 arg[a], unsigned_p[a]);
6307 if (op[opindex] == 0)
6308 {
6309 mep_incompatible_arg (&idata->operand[opindex],
6310 arg[a], a + 1 - first_arg, fnname);
6311 return NULL_RTX;
6312 }
6313 }
6314
6315 /* Emit the instruction. */
6316 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6317 op[5], op[6], op[7], op[8], op[9]);
6318
6319 if (GET_CODE (pat) == SET
6320 && GET_CODE (SET_DEST (pat)) == PC
6321 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6322 emit_jump_insn (pat);
6323 else
6324 emit_insn (pat);
6325
6326 /* Copy lvalues back to their final locations. */
6327 for (opindex = 0; opindex < idata->n_operands; opindex++)
6328 if (idata->operand[opindex].constraint[0] == '=')
6329 {
6330 a = cgen_insn->op_mapping[opindex];
6331 if (a >= first_arg)
6332 {
6333 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6334 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6335 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6336 op[opindex]));
6337 else
6338 {
6339 /* First convert the operand to the right mode, then copy it
6340 into the destination. Doing the conversion as a separate
6341 step (rather than using convert_move) means that we can
6342 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6343 refer to the same register. */
6344 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6345 op[opindex], unsigned_p[a]);
6346 if (!rtx_equal_p (arg[a], op[opindex]))
6347 emit_move_insn (arg[a], op[opindex]);
6348 }
6349 }
6350 }
6351
6352 if (first_arg > 0 && target && target != op[0])
6353 {
6354 emit_move_insn (target, op[0]);
6355 }
6356
6357 return target;
6358 }
6359
6360 static bool
6361 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6362 {
6363 return false;
6364 }
6365 \f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Used as a for_each_rtx callback; DATA is
   unused.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* Only hard registers can be global.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* None of these can contain a register reference.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6415
6416 /* Returns nonzero if X mentions a global register. */
6417
6418 static int
6419 global_reg_mentioned_p (rtx x)
6420 {
6421 if (INSN_P (x))
6422 {
6423 if (CALL_P (x))
6424 {
6425 if (! RTL_CONST_OR_PURE_CALL_P (x))
6426 return 1;
6427 x = CALL_INSN_FUNCTION_USAGE (x);
6428 if (x == 0)
6429 return 0;
6430 }
6431 else
6432 x = PATTERN (x);
6433 }
6434
6435 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6436 }
6437 /* Scheduling hooks for VLIW mode.
6438
6439 Conceptually this is very simple: we have a two-pack architecture
6440 that takes one core insn and one coprocessor insn to make up either
6441 a 32- or 64-bit instruction word (depending on the option bit set in
6442 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6443 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6444 and one 48-bit cop insn or two 32-bit core/cop insns.
6445
6446 In practice, instruction selection will be a bear. Consider in
6447 VL64 mode the following insns
6448
6449 add $1, 1
6450 cmov $cr0, $0
6451
6452 these cannot pack, since the add is a 16-bit core insn and cmov
6453 is a 32-bit cop insn. However,
6454
6455 add3 $1, $1, 1
6456 cmov $cr0, $0
6457
6458 packs just fine. For good VLIW code generation in VL64 mode, we
6459 will have to have 32-bit alternatives for many of the common core
6460 insns. Not implemented. */
6461
/* Scheduler cost-adjustment hook: return the cost of the dependence
   LINK between DEP_INSN and INSN, where COST is the default.  */
static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.   We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6508
6509 /* ??? We don't properly compute the length of a load/store insn,
6510 taking into account the addressing mode. */
6511
6512 static int
6513 mep_issue_rate (void)
6514 {
6515 return TARGET_IVC2 ? 3 : 2;
6516 }
6517
6518 /* Return true if function DECL was declared with the vliw attribute. */
6519
6520 bool
6521 mep_vliw_function_p (tree decl)
6522 {
6523 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6524 }
6525
6526 static rtx
6527 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6528 {
6529 int i;
6530
6531 for (i = nready - 1; i >= 0; --i)
6532 {
6533 rtx insn = ready[i];
6534 if (recog_memoized (insn) >= 0
6535 && get_attr_slot (insn) == slot
6536 && get_attr_length (insn) == length)
6537 return insn;
6538 }
6539
6540 return NULL_RTX;
6541 }
6542
6543 static void
6544 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6545 {
6546 int i;
6547
6548 for (i = 0; i < nready; ++i)
6549 if (ready[i] == insn)
6550 {
6551 for (; i < nready - 1; ++i)
6552 ready[i] = ready[i + 1];
6553 ready[i] = insn;
6554 return;
6555 }
6556
6557 gcc_unreachable ();
6558 }
6559
6560 static void
6561 mep_print_sched_insn (FILE *dump, rtx insn)
6562 {
6563 const char *slots = "none";
6564 const char *name = NULL;
6565 int code;
6566 char buf[30];
6567
6568 if (GET_CODE (PATTERN (insn)) == SET
6569 || GET_CODE (PATTERN (insn)) == PARALLEL)
6570 {
6571 switch (get_attr_slots (insn))
6572 {
6573 case SLOTS_CORE: slots = "core"; break;
6574 case SLOTS_C3: slots = "c3"; break;
6575 case SLOTS_P0: slots = "p0"; break;
6576 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6577 case SLOTS_P0_P1: slots = "p0,p1"; break;
6578 case SLOTS_P0S: slots = "p0s"; break;
6579 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6580 case SLOTS_P1: slots = "p1"; break;
6581 default:
6582 sprintf(buf, "%d", get_attr_slots (insn));
6583 slots = buf;
6584 break;
6585 }
6586 }
6587 if (GET_CODE (PATTERN (insn)) == USE)
6588 slots = "use";
6589
6590 code = INSN_CODE (insn);
6591 if (code >= 0)
6592 name = get_insn_name (code);
6593 if (!name)
6594 name = "{unknown}";
6595
6596 fprintf (dump,
6597 "insn %4d %4d %8s %s\n",
6598 code,
6599 INSN_UID (insn),
6600 name,
6601 slots);
6602 }
6603
6604 static int
6605 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6606 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6607 int *pnready, int clock ATTRIBUTE_UNUSED)
6608 {
6609 int nready = *pnready;
6610 rtx core_insn, cop_insn;
6611 int i;
6612
6613 if (dump && sched_verbose > 1)
6614 {
6615 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6616 for (i=0; i<nready; i++)
6617 mep_print_sched_insn (dump, ready[i]);
6618 fprintf (dump, "\n");
6619 }
6620
6621 if (!mep_vliw_function_p (cfun->decl))
6622 return 1;
6623 if (nready < 2)
6624 return 1;
6625
6626 /* IVC2 uses a DFA to determine what's ready and what's not. */
6627 if (TARGET_IVC2)
6628 return nready;
6629
6630 /* We can issue either a core or coprocessor instruction.
6631 Look for a matched pair of insns to reorder. If we don't
6632 find any, don't second-guess the scheduler's priorities. */
6633
6634 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6635 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6636 TARGET_OPT_VL64 ? 6 : 2)))
6637 ;
6638 else if (TARGET_OPT_VL64
6639 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6640 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6641 ;
6642 else
6643 /* We didn't find a pair. Issue the single insn at the head
6644 of the ready list. */
6645 return 1;
6646
6647 /* Reorder the two insns first. */
6648 mep_move_ready_insn (ready, nready, core_insn);
6649 mep_move_ready_insn (ready, nready - 1, cop_insn);
6650 return 2;
6651 }
6652
6653 /* A for_each_rtx callback. Return true if *X is a register that is
6654 set by insn PREV. */
6655
6656 static int
6657 mep_store_find_set (rtx *x, void *prev)
6658 {
6659 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6660 }
6661
6662 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6663 not the containing insn. */
6664
6665 static bool
6666 mep_store_data_bypass_1 (rtx prev, rtx pat)
6667 {
6668 /* Cope with intrinsics like swcpa. */
6669 if (GET_CODE (pat) == PARALLEL)
6670 {
6671 int i;
6672
6673 for (i = 0; i < XVECLEN (pat, 0); i++)
6674 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6675 return true;
6676
6677 return false;
6678 }
6679
6680 /* Check for some sort of store. */
6681 if (GET_CODE (pat) != SET
6682 || GET_CODE (SET_DEST (pat)) != MEM)
6683 return false;
6684
6685 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6686 The first operand to the unspec is the store data and the other operands
6687 are used to calculate the address. */
6688 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6689 {
6690 rtx src;
6691 int i;
6692
6693 src = SET_SRC (pat);
6694 for (i = 1; i < XVECLEN (src, 0); i++)
6695 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6696 return false;
6697
6698 return true;
6699 }
6700
6701 /* Otherwise just check that PREV doesn't modify any register mentioned
6702 in the memory destination. */
6703 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6704 }
6705
6706 /* Return true if INSN is a store instruction and if the store address
6707 has no true dependence on PREV. */
6708
6709 bool
6710 mep_store_data_bypass_p (rtx prev, rtx insn)
6711 {
6712 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6713 }
6714
6715 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6716 is a register other than LO or HI and if PREV sets *X. */
6717
6718 static int
6719 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6720 {
6721 return (REG_P (*x)
6722 && REGNO (*x) != LO_REGNO
6723 && REGNO (*x) != HI_REGNO
6724 && reg_set_p (*x, (const_rtx) prev));
6725 }
6726
6727 /* Return true if, apart from HI/LO, there are no true dependencies
6728 between multiplication instructions PREV and INSN. */
6729
6730 bool
6731 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6732 {
6733 rtx pat;
6734
6735 pat = PATTERN (insn);
6736 if (GET_CODE (pat) == PARALLEL)
6737 pat = XVECEXP (pat, 0, 0);
6738 return (GET_CODE (pat) == SET
6739 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6740 }
6741
6742 /* Return true if INSN is an ldc instruction that issues to the
6743 MeP-h1 integer pipeline. This is true for instructions that
6744 read from PSW, LP, SAR, HI and LO. */
6745
6746 bool
6747 mep_ipipe_ldc_p (rtx insn)
6748 {
6749 rtx pat, src;
6750
6751 pat = PATTERN (insn);
6752
6753 /* Cope with instrinsics that set both a hard register and its shadow.
6754 The set of the hard register comes first. */
6755 if (GET_CODE (pat) == PARALLEL)
6756 pat = XVECEXP (pat, 0, 0);
6757
6758 if (GET_CODE (pat) == SET)
6759 {
6760 src = SET_SRC (pat);
6761
6762 /* Cope with intrinsics. The first operand to the unspec is
6763 the source register. */
6764 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6765 src = XVECEXP (src, 0, 0);
6766
6767 if (REG_P (src))
6768 switch (REGNO (src))
6769 {
6770 case PSW_REGNO:
6771 case LP_REGNO:
6772 case SAR_REGNO:
6773 case HI_REGNO:
6774 case LO_REGNO:
6775 return true;
6776 }
6777 }
6778 return false;
6779 }
6780
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  Note the exact
     order of these assignments matters: CORE's back-link must be taken
     from the freshly emitted SEQUENCE insn before the others are
     rethreaded around it.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  BImode on an
     insn is this port's marker for "second slot of a VLIW bundle".  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's location if it has one.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core) ? core : cop);
  INSN_LOCATION (core) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6823
6824 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6825
6826 static void
6827 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6828 {
6829 rtx * pinsn = (rtx *) data;
6830
6831 if (*pinsn && reg_mentioned_p (x, *pinsn))
6832 *pinsn = NULL_RTX;
6833 }
6834
6835 /* Return true if anything in insn X is (anti,output,true) dependent on
6836 anything in insn Y. */
6837
6838 static int
6839 mep_insn_dependent_p (rtx x, rtx y)
6840 {
6841 rtx tmp;
6842
6843 gcc_assert (INSN_P (x));
6844 gcc_assert (INSN_P (y));
6845
6846 tmp = PATTERN (y);
6847 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6848 if (tmp == NULL_RTX)
6849 return 1;
6850
6851 tmp = PATTERN (x);
6852 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6853 if (tmp == NULL_RTX)
6854 return 1;
6855
6856 return 0;
6857 }
6858
6859 static int
6860 core_insn_p (rtx insn)
6861 {
6862 if (GET_CODE (PATTERN (insn)) == USE)
6863 return 0;
6864 if (get_attr_slot (insn) == SLOT_CORE)
6865 return 1;
6866 return 0;
6867 }
6868
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle is TImode; the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of its current position and relinking it just before
	     FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  /* A label, jump or call ends any pairing opportunity.  */
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the rest of the bundle (VOIDmode non-jump insns)
		 looking for a core-slot instruction.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop above does not test the final insn; do so here.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* The moved core insn now heads the bundle.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn can be bundled with the preceding core insn
	     LAST only if LAST exists, is recognized as a core-slot
	     insn, the combined lengths exactly fill the VLIW word,
	     and the two insns are independent.  Otherwise pad the COP
	     insn with a core nop of the appropriate size.  */
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  /* Fills a 64-bit VLIW word by itself; no padding.  */
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7062
7063
7064 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7065 Return true on success. This function can fail if the intrinsic
7066 is unavailable or if the operands don't satisfy their predicates. */
7067
7068 bool
7069 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7070 {
7071 const struct cgen_insn *cgen_insn;
7072 const struct insn_data_d *idata;
7073 rtx newop[10];
7074 int i;
7075
7076 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7077 return false;
7078
7079 idata = &insn_data[cgen_insn->icode];
7080 for (i = 0; i < idata->n_operands; i++)
7081 {
7082 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7083 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7084 return false;
7085 }
7086
7087 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7088 newop[3], newop[4], newop[5],
7089 newop[6], newop[7], newop[8]));
7090
7091 return true;
7092 }
7093
7094
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: no unary intrinsic expansion is implemented, so
   callers always fall back to their non-intrinsic code paths.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7106
7107
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: no binary intrinsic expansion is implemented, so
   callers always fall back to their non-intrinsic code paths.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7124
7125 static bool
7126 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7127 int opno ATTRIBUTE_UNUSED, int *total,
7128 bool ATTRIBUTE_UNUSED speed_t)
7129 {
7130 switch (code)
7131 {
7132 case CONST_INT:
7133 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7134 *total = 0;
7135 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7136 *total = 1;
7137 else
7138 *total = 3;
7139 return true;
7140
7141 case SYMBOL_REF:
7142 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7143 return true;
7144
7145 case MULT:
7146 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7147 ? COSTS_N_INSNS (3)
7148 : COSTS_N_INSNS (2));
7149 return true;
7150 }
7151 return false;
7152 }
7153
/* Implement TARGET_ADDRESS_COST.  Every address form is given the
   same unit cost on this target.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7162
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections: .based/.sbss/.sdata for near data, .far/.farbss for far
   data, .frodata/.srodata for read-only data, and the VLIW/core code
   sections (.vtext, .vftext, .ftext).  */

static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* The "v" flag and trailing .vliw directive mark VLIW code sections.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7207 \f
/* Initialize the GCC target structure.  */

/* Assembly output, attributes and inlining.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p
/* Section handling.  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section
/* Builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin
/* Scheduling.  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder
/* More section and symbol handling.  */
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info
/* Calls and costs.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
/* Argument passing and varargs.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
/* Miscellaneous.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P		can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;
7292
7293 #include "gt-mep.h"