]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/mep/mep.c
* gimple.h: Remove all includes.
[thirdparty/gcc.git] / gcc / config / mep / mep.c
CommitLineData
46222c18 1/* Definitions for Toshiba Media Processor
711789cc 2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
46222c18 3 Contributed by Red Hat, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
9ed99284 27#include "varasm.h"
28#include "calls.h"
29#include "stringpool.h"
30#include "stor-layout.h"
46222c18 31#include "regs.h"
32#include "hard-reg-set.h"
46222c18 33#include "insn-config.h"
34#include "conditions.h"
35#include "insn-flags.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "recog.h"
40#include "obstack.h"
41#include "tree.h"
42#include "expr.h"
43#include "except.h"
44#include "function.h"
45#include "optabs.h"
46#include "reload.h"
47#include "tm_p.h"
48#include "ggc.h"
0b205f4c 49#include "diagnostic-core.h"
46222c18 50#include "target.h"
51#include "target-def.h"
52#include "langhooks.h"
53#include "df.h"
bc61cadb 54#include "pointer-set.h"
55#include "hash-table.h"
56#include "vec.h"
57#include "basic-block.h"
58#include "tree-ssa-alias.h"
59#include "internal-fn.h"
60#include "gimple-fold.h"
61#include "tree-eh.h"
62#include "gimple-expr.h"
63#include "is-a.h"
e795d6e1 64#include "gimple.h"
a8783bee 65#include "gimplify.h"
fba5dd52 66#include "opts.h"
b9ed1410 67#include "dumpfile.h"
46222c18 68
69/* Structure of this file:
70
71 + Command Line Option Support
72 + Pattern support - constraints, predicates, expanders
73 + Reload Support
74 + Costs
75 + Functions to save and restore machine-specific function data.
76 + Frame/Epilog/Prolog Related
77 + Operand Printing
78 + Function args in registers
79 + Handle pipeline hazards
80 + Handle attributes
81 + Trampolines
82 + Machine-dependent Reorg
83 + Builtins. */
84
85/* Symbol encodings:
86
87 Symbols are encoded as @ <char> . <name> where <char> is one of these:
88
89 b - based
90 t - tiny
91 n - near
92 f - far
93 i - io, near
94 I - io, far
95 c - cb (control bus) */
96
/* Per-function machine-dependent state, allocated by
   mep_init_machine_status and reached via cfun->machine.  */
struct GTY(()) machine_function
{
  /* Cached frame-pointer decision for this function.
     NOTE(review): presumably mirrors frame_pointer_needed at some fixed
     point -- confirm against the frame-layout code.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Total size of the register-save area, and the per-register slot
     assignments / saved flags, indexed by hard register number.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
135
/* True if X is a REG for one of the control registers.  */
#define MEP_CONTROL_REG(x) \
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
138
46222c18 139static GTY(()) section * based_section;
140static GTY(()) section * tinybss_section;
141static GTY(()) section * far_section;
142static GTY(()) section * farbss_section;
143static GTY(()) section * frodata_section;
144static GTY(()) section * srodata_section;
145
e3b9264d 146static GTY(()) section * vtext_section;
147static GTY(()) section * vftext_section;
148static GTY(()) section * ftext_section;
149
46222c18 150static void mep_set_leaf_registers (int);
151static bool symbol_p (rtx);
152static bool symbolref_p (rtx);
153static void encode_pattern_1 (rtx);
154static void encode_pattern (rtx);
155static bool const_in_range (rtx, int, int);
156static void mep_rewrite_mult (rtx, rtx);
157static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
158static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
159static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
160static bool move_needs_splitting (rtx, rtx, enum machine_mode);
161static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
162static bool mep_nongeneral_reg (rtx);
163static bool mep_general_copro_reg (rtx);
164static bool mep_nonregister (rtx);
165static struct machine_function* mep_init_machine_status (void);
166static rtx mep_tp_rtx (void);
167static rtx mep_gp_rtx (void);
168static bool mep_interrupt_p (void);
169static bool mep_disinterrupt_p (void);
170static bool mep_reg_set_p (rtx, rtx);
171static bool mep_reg_set_in_function (int);
172static bool mep_interrupt_saved_reg (int);
173static bool mep_call_saves_register (int);
174static rtx F (rtx);
175static void add_constant (int, int, int, int);
46222c18 176static rtx maybe_dead_move (rtx, rtx, bool);
177static void mep_reload_pointer (int, const char *);
178static void mep_start_function (FILE *, HOST_WIDE_INT);
179static bool mep_function_ok_for_sibcall (tree, tree);
180static int unique_bit_in (HOST_WIDE_INT);
181static int bit_size_for_clip (HOST_WIDE_INT);
182static int bytesize (const_tree, enum machine_mode);
183static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
184static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
185static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
186static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
187static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
188static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
189static bool mep_function_attribute_inlinable_p (const_tree);
7c88e513 190static bool mep_can_inline_p (tree, tree);
46222c18 191static bool mep_lookup_pragma_disinterrupt (const char *);
192static int mep_multiple_address_regions (tree, bool);
193static int mep_attrlist_to_encoding (tree, tree);
194static void mep_insert_attributes (tree, tree *);
195static void mep_encode_section_info (tree, rtx, int);
196static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
197static void mep_unique_section (tree, int);
198static unsigned int mep_section_type_flags (tree, const char *, int);
199static void mep_asm_named_section (const char *, unsigned int, tree);
200static bool mep_mentioned_p (rtx, rtx, int);
201static void mep_reorg_regmove (rtx);
202static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
203static void mep_reorg_repeat (rtx);
204static bool mep_invertable_branch_p (rtx);
205static void mep_invert_branch (rtx, rtx);
206static void mep_reorg_erepeat (rtx);
207static void mep_jmp_return_reorg (rtx);
208static void mep_reorg_addcombine (rtx);
209static void mep_reorg (void);
210static void mep_init_intrinsics (void);
211static void mep_init_builtins (void);
212static void mep_intrinsic_unavailable (int);
213static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
214static bool mep_get_move_insn (int, const struct cgen_insn **);
215static rtx mep_convert_arg (enum machine_mode, rtx);
216static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
217static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
218static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
219static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
220static int mep_adjust_cost (rtx, rtx, rtx, int);
221static int mep_issue_rate (void);
222static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
223static void mep_move_ready_insn (rtx *, int, rtx);
224static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
225static rtx mep_make_bundle (rtx, rtx);
226static void mep_bundle_insns (rtx);
20d892d1 227static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
d9c5e5f4 228static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
39cba157 229static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
46222c18 230 tree, int *, int);
39cba157 231static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
46222c18 232 const_tree, bool);
39cba157 233static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
4f6b272a 234 const_tree, bool);
39cba157 235static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
4f6b272a 236 const_tree, bool);
46222c18 237static bool mep_vector_mode_supported_p (enum machine_mode);
46222c18 238static rtx mep_allocate_initial_value (rtx);
239static void mep_asm_init_sections (void);
240static int mep_comp_type_attributes (const_tree, const_tree);
241static bool mep_narrow_volatile_bitfield (void);
242static rtx mep_expand_builtin_saveregs (void);
243static tree mep_build_builtin_va_list (void);
244static void mep_expand_va_start (tree, rtx);
260f365f 245static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
cd90919d 246static bool mep_can_eliminate (const int, const int);
b2d7ede1 247static void mep_conditional_register_usage (void);
8786c274 248static void mep_trampoline_init (rtx, tree, rtx);
46222c18 249\f
46222c18 250#define WANT_GCC_DEFINITIONS
251#include "mep-intrin.h"
252#undef WANT_GCC_DEFINITIONS
253
254\f
255/* Command Line Option Support. */
256
/* Per-register leaf-function flags; all entries are set to the same
   value by mep_set_leaf_registers.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
281
46222c18 282\f
283static void
284mep_set_leaf_registers (int enable)
285{
286 int i;
287
288 if (mep_leaf_registers[0] != enable)
289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
290 mep_leaf_registers[i] = enable;
291}
292
b2d7ede1 293static void
9dc6e4a0 294mep_conditional_register_usage (void)
46222c18 295{
296 int i;
297
298 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
299 {
300 fixed_regs[HI_REGNO] = 1;
301 fixed_regs[LO_REGNO] = 1;
302 call_used_regs[HI_REGNO] = 1;
303 call_used_regs[LO_REGNO] = 1;
304 }
305
306 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
307 global_regs[i] = 1;
308}
309
4c834714 310static void
311mep_option_override (void)
46222c18 312{
e9e3a45a 313 unsigned int i;
314 int j;
315 cl_deferred_option *opt;
f1f41a6c 316 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
317
318 if (v)
319 FOR_EACH_VEC_ELT (*v, i, opt)
320 {
321 switch (opt->opt_index)
322 {
323 case OPT_mivc2:
324 for (j = 0; j < 32; j++)
325 fixed_regs[j + 48] = 0;
326 for (j = 0; j < 32; j++)
327 call_used_regs[j + 48] = 1;
328 for (j = 6; j < 8; j++)
329 call_used_regs[j + 48] = 0;
e9e3a45a 330
331#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
f1f41a6c 332 RN (0, "$csar0");
333 RN (1, "$cc");
334 RN (4, "$cofr0");
335 RN (5, "$cofr1");
336 RN (6, "$cofa0");
337 RN (7, "$cofa1");
338 RN (15, "$csar1");
339
340 RN (16, "$acc0_0");
341 RN (17, "$acc0_1");
342 RN (18, "$acc0_2");
343 RN (19, "$acc0_3");
344 RN (20, "$acc0_4");
345 RN (21, "$acc0_5");
346 RN (22, "$acc0_6");
347 RN (23, "$acc0_7");
348
349 RN (24, "$acc1_0");
350 RN (25, "$acc1_1");
351 RN (26, "$acc1_2");
352 RN (27, "$acc1_3");
353 RN (28, "$acc1_4");
354 RN (29, "$acc1_5");
355 RN (30, "$acc1_6");
356 RN (31, "$acc1_7");
e9e3a45a 357#undef RN
f1f41a6c 358 break;
e9e3a45a 359
f1f41a6c 360 default:
361 gcc_unreachable ();
362 }
363 }
e9e3a45a 364
46222c18 365 if (flag_pic == 1)
366 warning (OPT_fpic, "-fpic is not supported");
367 if (flag_pic == 2)
368 warning (OPT_fPIC, "-fPIC is not supported");
369 if (TARGET_S && TARGET_M)
370 error ("only one of -ms and -mm may be given");
371 if (TARGET_S && TARGET_L)
372 error ("only one of -ms and -ml may be given");
373 if (TARGET_M && TARGET_L)
374 error ("only one of -mm and -ml may be given");
e9e3a45a 375 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
46222c18 376 error ("only one of -ms and -mtiny= may be given");
e9e3a45a 377 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
46222c18 378 error ("only one of -mm and -mtiny= may be given");
379 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
380 warning (0, "-mclip currently has no effect without -mminmax");
381
382 if (mep_const_section)
383 {
384 if (strcmp (mep_const_section, "tiny") != 0
385 && strcmp (mep_const_section, "near") != 0
386 && strcmp (mep_const_section, "far") != 0)
387 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
388 }
389
390 if (TARGET_S)
391 mep_tiny_cutoff = 65536;
392 if (TARGET_M)
393 mep_tiny_cutoff = 0;
e9e3a45a 394 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
46222c18 395 mep_tiny_cutoff = 0;
396
397 if (TARGET_64BIT_CR_REGS)
398 flag_split_wide_types = 0;
399
400 init_machine_status = mep_init_machine_status;
401 mep_init_intrinsics ();
402}
403
404/* Pattern Support - constraints, predicates, expanders. */
405
406/* MEP has very few instructions that can refer to the span of
407 addresses used by symbols, so it's common to check for them. */
408
409static bool
410symbol_p (rtx x)
411{
412 int c = GET_CODE (x);
413
414 return (c == CONST_INT
415 || c == CONST
416 || c == SYMBOL_REF);
417}
418
419static bool
420symbolref_p (rtx x)
421{
422 int c;
423
424 if (GET_CODE (x) != MEM)
425 return false;
426
427 c = GET_CODE (XEXP (x, 0));
428 return (c == CONST_INT
429 || c == CONST
430 || c == SYMBOL_REF);
431}
432
433/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
434
/* True if register number R can be used as a base register: any hard
   general register, or -- when not STRICT -- the argument pointer or
   any pseudo.  */
#define GEN_REG(R, STRICT) \
  (GR_REGNO_P (R) \
   || (!STRICT \
       && ((R) == ARG_POINTER_REGNUM \
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Buffers used by encode_pattern: one shape character per rtx visited
   (see encode_pattern_1), plus the corresponding rtx for each slot.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* Compare the encoded shape of the last encode_pattern call against X.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
444
/* Append a one-character encoding of X (and, recursively, its
   operands) to the global `pattern' buffer, recording each visited
   rtx in patternr[].  Truncates with '?' when the buffer is full.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; overwrite the last slot with
     '?' once the buffer is exhausted.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU: a MEM's address is encoded just like the interior
	 of a CONST.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* 'u' followed by the unspec number as a digit, then the
	 encodings of all operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
511
/* Encode the shape of X into the global `pattern' buffer and
   NUL-terminate it, ready for matching with RTX_IS.  */
static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
519
/* Return the section tag character ('b', 't', 'n', 'f', or 'c' -- see
   the symbol-encoding table at the top of this file) for the symbol
   underlying X, or 0 if X has no encoded symbol.  The io encodings
   'i'/'I' are folded to plain near/far.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers until we reach the underlying symbol (or give up).  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only symbol+constant offsets keep their tag.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
561
/* Implement REGNO_REG_CLASS: return the smallest register class
   containing hard register REGNO.  */
int
mep_regno_reg_class (int regno)
{
  /* Singleton classes first.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  if (GR_REGNO_P (regno))
    /* The first eight general registers are reachable with tp-relative
       addressing.  */
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* USERi is the smallest class only if no subclass of it
		 also contains REGNO.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should remain; they are not allocatable.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
611
46222c18 612static bool
613const_in_range (rtx x, int minv, int maxv)
614{
615 return (GET_CODE (x) == CONST_INT
616 && INTVAL (x) >= minv
617 && INTVAL (x) <= maxv);
618}
619
620/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
621 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
622 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
623 at the end of the insn stream. */
624
625rtx
626mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
627{
628 if (rtx_equal_p (dest, src1))
629 return src2;
630 else if (rtx_equal_p (dest, src2))
631 return src1;
632 else
633 {
634 if (insn == 0)
635 emit_insn (gen_movsi (copy_rtx (dest), src1));
636 else
637 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
638 return src2;
639 }
640}
641
642/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
643 Change the last element of PATTERN from (clobber (scratch:SI))
644 to (clobber (reg:SI HI_REGNO)). */
645
static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  /* By construction the scratch clobber is the last element of the
     multiplication PARALLEL; retarget it at $hi.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force INSN to be re-recognized with its new pattern.  */
  INSN_CODE (insn) = -1;
}
656
657/* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
658 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
659 store the result in DEST if nonnull. */
660
static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* mulr form: result goes to both $lo and DEST.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* mul form: result goes to $lo only.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
674
675/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
676 SRC3 into $lo, then use either madd or maddr. The move into $lo will
677 be deleted by a peephole2 if SRC3 is already in $lo. */
678
static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  /* Seed $lo with the addend; a peephole2 deletes this move when SRC3
     is already in $lo (see the function comment above).  */
  lo = gen_rtx_REG (SImode, LO_REGNO);
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* maddr form: result goes to both $lo and DEST.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* madd form: result goes to $lo only.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
694
695/* Return true if $lo has the same value as integer register GPR when
696 instruction INSN is reached. If necessary, rewrite the instruction
697 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
698 rtx for (reg:SI LO_REGNO).
699
700 This function is intended to be used by the peephole2 pass. Since
701 that pass goes from the end of a basic block to the beginning, and
702 propagates liveness information on the way, there is no need to
703 update register notes here.
704
705 If GPR_DEAD_P is true on entry, and this function returns true,
706 then the caller will replace _every_ use of GPR in and after INSN
707 with LO. This means that if the instruction that sets $lo is a
708 mulr- or maddr-type instruction, we can rewrite it to use mul or
709 madd instead. In combination with the copy progagation pass,
710 this allows us to replace sequences like:
711
712 mov GPR,R1
713 mulr GPR,R2
714
715 with:
716
717 mul R1,R2
718
719 if GPR is no longer used. */
720
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN looking for the instruction that set
     $lo, stopping at the start of the basic block.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* If GPR dies, drop the GPR destination entirely (mul
		   instead of mulr); otherwise keep both.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store into $lo; $lo matches GPR iff the
	       GPR destination (operand 1) is GPR itself.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       invalidates the reuse.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between here and the caller means GPR is
	       not actually dead for the rewrite above.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
774
775/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
776
777bool
778mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
779{
780 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
781 extract_insn (insn);
782 return result;
783}
784
785/* Return true if SET can be turned into a post-modify load or store
786 that adds OFFSET to GPR. In other words, return true if SET can be
787 changed into:
788
789 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
790
791 It's OK to change SET to an equivalent operation in order to
792 make it match. */
793
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through an extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain (mem GPR).  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on we commit to the conversion and rewrite SET in place.
     Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well; copy first so shared rtl is not modified.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
855
856/* Return the effect of frame-related instruction INSN. */
857
858static rtx
859mep_frame_expr (rtx insn)
860{
861 rtx note, expr;
862
863 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
864 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
865 RTX_FRAME_RELATED_P (expr) = 1;
866 return expr;
867}
868
869/* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
870 new pattern in INSN1; INSN2 will be deleted by the caller. */
871
static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* If INSN2 is frame-related, the merged insn must carry a note
     describing the combined frame effect.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	/* Both are frame-related: record both effects, in order.  */
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined pattern.  */
  INSN_CODE (insn1) = -1;
}
892
893/* SET_INSN is an instruction that adds OFFSET to REG. Go back through
894 the basic block to see if any previous load or store instruction can
895 be persuaded to do SET_INSN as a side-effect. Return true if so. */
896
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  /* Walk backwards from SET_INSN to the start of the basic block,
     looking for a load/store through REG that can absorb the
     addition.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop once REG is written or read, or at a volatile insn:
	     moving the addition across such an insn would change
	     behavior.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
923
924/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
925
926bool
927mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
928{
929 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
930 extract_insn (insn);
931 return result;
932}
933
934bool
935mep_allow_clip (rtx ux, rtx lx, int s)
936{
937 HOST_WIDE_INT u = INTVAL (ux);
938 HOST_WIDE_INT l = INTVAL (lx);
939 int i;
940
941 if (!TARGET_OPT_CLIP)
942 return false;
943
944 if (s)
945 {
946 for (i = 0; i < 30; i ++)
947 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
948 && (l == - ((HOST_WIDE_INT) 1 << i)))
949 return true;
950 }
951 else
952 {
953 if (l != 0)
954 return false;
955
956 for (i = 0; i < 30; i ++)
957 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
958 return true;
959 }
960 return false;
961}
962
963bool
964mep_bit_position_p (rtx x, bool looking_for)
965{
966 if (GET_CODE (x) != CONST_INT)
967 return false;
968 switch ((int) INTVAL(x) & 0xff)
969 {
970 case 0x01: case 0x02: case 0x04: case 0x08:
971 case 0x10: case 0x20: case 0x40: case 0x80:
972 return looking_for;
973 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
974 case 0xef: case 0xdf: case 0xbf: case 0x7f:
975 return !looking_for;
976 }
977 return false;
978}
979
/* Return true if a move of SRC into DEST cannot be done in a single
   instruction and must be split.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip MEM/CONST wrappers; only symbolic sources are candidates.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  /* Split when: the symbol is far; a symbol+offset has an offset
     outside the directly-encodable range; or the destination is a
     hard register above $7 (not reachable by the short forms).  */
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1008
/* Return true if the move described by OPERANDS (dest, src) must be
   split into multiple instructions.  SYMBOLIC is nonzero when the
   source may be symbolic; otherwise only constant sources are
   considered.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching the I, J or O constraints fit a single insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants are only single-insn when the destination is (or may
     still become) one of the low eight registers.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1034
1035/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1036 it to one specific value. So the insn chosen depends on whether
1037 the source and destination modes match. */
1038
1039bool
1040mep_vliw_mode_match (rtx tgt)
1041{
1042 bool src_vliw = mep_vliw_function_p (cfun->decl);
1043 bool tgt_vliw = INTVAL (tgt);
1044
1045 return src_vliw == tgt_vliw;
1046}
1047
2053b71f 1048/* Like the above, but also test for near/far mismatches. */
1049
1050bool
1051mep_vliw_jmp_match (rtx tgt)
1052{
1053 bool src_vliw = mep_vliw_function_p (cfun->decl);
1054 bool tgt_vliw = INTVAL (tgt);
1055
1056 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1057 return false;
1058
1059 return src_vliw == tgt_vliw;
1060}
1061
46222c18 1062bool
1063mep_multi_slot (rtx x)
1064{
1065 return get_attr_slot (x) == SLOT_MULTI;
1066}
1067
ca316360 1068/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
46222c18 1069
ca316360 1070static bool
1071mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
f4eeff0e 1072{
1073 /* We can't convert symbol values to gp- or tp-rel values after
1074 reload, as reload might have used $gp or $tp for other
1075 purposes. */
1076 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1077 {
1078 char e = mep_section_tag (x);
1079 return (e != 't' && e != 'b');
1080 }
1081 return 1;
1082}
1083
/* Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

/* Worker for TARGET_LEGITIMATE_ADDRESS_P: return true if X is a valid
   address for a MODE-sized memory access.  STRICT is nonzero when only
   hard registers (or allocated pseudos) may act as base registers.
   Accepted forms, in order: %lo(sym)[reg], [reg], [reg+imm16],
   [reg+unspec(+const)], call [symbol], and certain bare constants.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg]: LO_SUM of a base register and any constant.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base register plus a signed 16-bit displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base register plus a (possibly offset) UNSPEC relocation,
     e.g. a tp-rel or gp-rel reference.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far ('f') symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* VOIDmode is used for call addresses; a bare symbol is fine there.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized accesses may use an absolute constant address, as long
     as an integer address is non-negative, below 2^20 and word-aligned
     (the encodable range of the absolute addressing form).  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1194
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Try to fix up the invalid
   address *X for a MODE reload of operand OPNUM; TYPE_I is the
   reload_type passed as an int.  Returns 1 if a reload was pushed
   (and *X handled), 0 to let the generic code take over.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-rel ('t') and gp-rel ('b') symbols are handled elsewhere.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1236
/* Return the encoded length in bytes (2 or 4) of the address used by
   core load/store INSN.  OPN selects which side of the single_set is
   the memory operand (0 = destination, 1 = source).  Short (2-byte)
   forms exist for plain [reg], small SP-relative word accesses, and
   small TP-relative accesses from the low registers.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The register operand.  */
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* sw/lw $rn, imm7a4($sp): word accesses with a small
	     4-aligned displacement have a short form.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative forms only encode registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A %tprel relocation always fits the short form...  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* ...as does a naturally-aligned displacement in [0,127].  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1283
1284int
1285mep_cop_address_length (rtx insn, int opn)
1286{
1287 rtx set = single_set (insn);
1288 rtx mem = XEXP (set, opn);
1289 rtx addr = XEXP (mem, 0);
1290
1291 if (GET_CODE (mem) != MEM)
1292 return 2;
1293 if (register_operand (addr, Pmode))
1294 return 2;
1295 if (GET_CODE (addr) == POST_INC)
1296 return 2;
1297
1298 return 4;
1299}
1300
#define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between operands[0] and operands[1].  Returns
   true when the move has been fully emitted here (the caller's pattern
   should emit nothing), false when the caller should fall through to
   the generic move pattern.  Handles tp-rel/gp-rel symbol rewriting,
   control-register restrictions, and far-symbol addressing.  */

bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];			/* Section tags of dest and src.  */
  rtx tpsym, tpoffs;
  int post_reload = 0;		/* Nonzero: may no longer use $tp/$gp.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, force mem-to-mem moves through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Wide moves are handled by their own splitters.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* If reload no longer preserves the $gp/$tp initial values we
	 need for 't'/'b' symbols, fall back to absolute addressing.
	 NOTE(review): the 't' case checks GP and the 'b' case checks
	 TP, mirroring the UNS_GPREL/UNS_TPREL pairing used below.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Loading the address of a 'b'/'t' symbol: rewrite it as
	 $tp/$gp plus a %tprel/%gprel relocation.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus (symbol_ref) (const_int))).  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already rewritten; nothing to do here.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Direct loads/stores of 'b'/'t' memory: rewrite the address as
	 base-register + relocation and continue with the move.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers can only be moved to/from general registers, so
     force a non-register source through a temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Storing to a far symbol, or a non-word-sized symbolic store:
     compute the address in a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload, 't'/'b' sources were handled above, so drop the
     tag to avoid the top/bot expansion below.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a tagged symbol's address with a movh/add pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load from a tagged symbol's memory: build the address with a
     movh/add pair, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During reload we may not create pseudos; clobber the
	 destination instead, it is rewritten immediately after.  */
      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1493
/* Cases where the pattern can't be made to use at all.  */

/* Return true if a move between operands[0] and operands[1] is
   acceptable to the move patterns.  Rejects HIGH sources (handled by
   the movh patterns), far-symbol operands, pre-reload tp/gp-rel
   operands, and moves with no general register on either side.  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1560
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a wide (double-word) move into two single-word moves.  On
   entry operands[0]/operands[1] are the destination/source; on exit
   operands[2]/operands[3] hold the first (dest, src) pair to emit and
   operands[4]/operands[5] the second.  */

void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low word is the
		   SImode view, the high word a zero_extract of the
		   upper 32 bits of the DImode register.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; endianness picks which regno holds
		   the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1642
1643/* Emit a setcc instruction in its entirity. */
1644
1645static bool
1646mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1647{
1648 rtx tmp;
1649
1650 switch (code)
1651 {
1652 case GT:
1653 case GTU:
1654 tmp = op1, op1 = op2, op2 = tmp;
1655 code = swap_condition (code);
1656 /* FALLTHRU */
1657
1658 case LT:
1659 case LTU:
1660 op1 = force_reg (SImode, op1);
1661 emit_insn (gen_rtx_SET (VOIDmode, dest,
1662 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1663 return true;
1664
1665 case EQ:
1666 if (op2 != const0_rtx)
1667 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1668 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1669 return true;
1670
1671 case NE:
1672 /* Branchful sequence:
1673 mov dest, 0 16-bit
1674 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1675 mov dest, 1 16-bit
1676
1677 Branchless sequence:
1678 add3 tmp, op1, -op2 32-bit (or mov + sub)
1679 sltu3 tmp, tmp, 1 16-bit
1680 xor3 dest, tmp, 1 32-bit
1681 */
1682 if (optimize_size && op2 != const0_rtx)
1683 return false;
1684
1685 if (op2 != const0_rtx)
1686 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1687
1688 op2 = gen_reg_rtx (SImode);
1689 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1690
1691 emit_insn (gen_rtx_SET (VOIDmode, dest,
1692 gen_rtx_XOR (SImode, op2, const1_rtx)));
1693 return true;
1694
1695 case LE:
1696 if (GET_CODE (op2) != CONST_INT
1697 || INTVAL (op2) == 0x7ffffff)
1698 return false;
1699 op2 = GEN_INT (INTVAL (op2) + 1);
1700 return mep_expand_setcc_1 (LT, dest, op1, op2);
1701
1702 case LEU:
1703 if (GET_CODE (op2) != CONST_INT
1704 || INTVAL (op2) == -1)
1705 return false;
1706 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1707 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1708
1709 case GE:
1710 if (GET_CODE (op2) != CONST_INT
1711 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1712 return false;
1713 op2 = GEN_INT (INTVAL (op2) - 1);
1714 return mep_expand_setcc_1 (GT, dest, op1, op2);
1715
1716 case GEU:
1717 if (GET_CODE (op2) != CONST_INT
1718 || op2 == const0_rtx)
1719 return false;
1720 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1721 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1722
1723 default:
1724 gcc_unreachable ();
1725 }
1726}
1727
1728bool
1729mep_expand_setcc (rtx *operands)
1730{
1731 rtx dest = operands[0];
1732 enum rtx_code code = GET_CODE (operands[1]);
1733 rtx op0 = operands[2];
1734 rtx op1 = operands[3];
1735
1736 return mep_expand_setcc_1 (code, dest, op0, op1);
1737}
1738
/* Expand a conditional branch comparison.  operands[0] holds the
   comparison code, operands[1]/operands[2] the two sides.  Returns a
   comparison rtx in one of the machine's directly-branchable forms
   (EQ/NE against a register or small immediate, or signed LT/GE with
   a 4-bit immediate), synthesizing setcc temporaries as needed.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* blti handles a 4-bit immediate directly.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Otherwise compute the setcc and branch on it being nonzero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* GE is the negation of LT: branch on the LT setcc being zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* beq/bne take a register or 4-bit immediate operand.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Shift the constant by one to reduce to LT/GE (safe unless the
	 constant is INT_MAX).  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise use the swapped-operand setcc: op0 <= op1 iff
	 !(op1 < op0).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* x <u 1 is x == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly, else its swapped complement LTU.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* GTU and the swapped LTU are equivalent; one must succeed.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly, else branch on the complement LTU == 0.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1851
1852const char *
1853mep_emit_cbranch (rtx *operands, int ne)
1854{
1855 if (GET_CODE (operands[1]) == REG)
1856 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
d22d3228 1857 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
46222c18 1858 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1859 else
1860 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1861}
1862
/* Expand a call pattern.  RETURNS_VALUE is 1 for call_value (then
   operands[0] is the value destination and operands[1] the call
   address), 0 for a plain call (operands[0] is the address).  The
   emitted insn also uses $tp and $gp so their initial values are kept
   live across the call.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* The "number of registers to save" operand defaults to zero.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1887\f
1888/* Aliasing Support. */
1889
1890/* If X is a machine specific address (i.e. a symbol or label being
1891 referenced as a displacement from the GOT implemented using an
1892 UNSPEC), then return the base term. Otherwise return X. */
1893
1894rtx
1895mep_find_base_term (rtx x)
1896{
1897 rtx base, term;
1898 int unspec;
1899
1900 if (GET_CODE (x) != PLUS)
1901 return x;
1902 base = XEXP (x, 0);
1903 term = XEXP (x, 1);
1904
1905 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1906 && base == mep_tp_rtx ())
1907 unspec = UNS_TPREL;
1908 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1909 && base == mep_gp_rtx ())
1910 unspec = UNS_GPREL;
1911 else
1912 return x;
1913
1914 if (GET_CODE (term) != CONST)
1915 return x;
1916 term = XEXP (term, 0);
1917
1918 if (GET_CODE (term) != UNSPEC
1919 || XINT (term, 1) != unspec)
1920 return x;
1921
1922 return XVECEXP (term, 0, 0);
1923}
1924\f
1925/* Reload Support. */
1926
1927/* Return true if the registers in CLASS cannot represent the change from
1928 modes FROM to TO. */
1929
1930bool
1931mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1932 enum reg_class regclass)
1933{
1934 if (from == to)
1935 return false;
1936
1937 /* 64-bit COP regs must remain 64-bit COP regs. */
1938 if (TARGET_64BIT_CR_REGS
1939 && (regclass == CR_REGS
1940 || regclass == LOADABLE_CR_REGS)
1941 && (GET_MODE_SIZE (to) < 8
1942 || GET_MODE_SIZE (from) < 8))
1943 return true;
1944
1945 return false;
1946}
1947
1948#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1949
1950static bool
1951mep_general_reg (rtx x)
1952{
1953 while (GET_CODE (x) == SUBREG)
1954 x = XEXP (x, 0);
1955 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1956}
1957
1958static bool
1959mep_nongeneral_reg (rtx x)
1960{
1961 while (GET_CODE (x) == SUBREG)
1962 x = XEXP (x, 0);
1963 return (GET_CODE (x) == REG
1964 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1965}
1966
1967static bool
1968mep_general_copro_reg (rtx x)
1969{
1970 while (GET_CODE (x) == SUBREG)
1971 x = XEXP (x, 0);
1972 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1973}
1974
1975static bool
1976mep_nonregister (rtx x)
1977{
1978 while (GET_CODE (x) == SUBREG)
1979 x = XEXP (x, 0);
1980 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1981}
1982
#define DEBUG_RELOAD 0

/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
2023
/* Copying X to register in RCLASS.  */

/* Implement SECONDARY_INPUT_RELOAD_CLASS: return the extra register
   class, if any, needed to copy X into a register of RCLASS.  */

enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Special registers can only be loaded from general registers.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
2049
/* Copying register in RCLASS to X.  */

/* Implement SECONDARY_OUTPUT_RELOAD_CLASS: return the extra register
   class, if any, needed to copy a register of RCLASS out to X.  */

enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    /* Special registers can only be stored via general registers.  */
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
2076
2077/* Implement SECONDARY_MEMORY_NEEDED. */
2078
2079bool
2080mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2081 enum machine_mode mode ATTRIBUTE_UNUSED)
2082{
2083 if (!mep_have_core_copro_moves_p)
2084 {
2085 if (reg_classes_intersect_p (rclass1, CR_REGS)
2086 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2087 return true;
2088 if (reg_classes_intersect_p (rclass2, CR_REGS)
2089 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2090 return true;
2091 if (!mep_have_copro_copro_moves_p
2092 && reg_classes_intersect_p (rclass1, CR_REGS)
2093 && reg_classes_intersect_p (rclass2, CR_REGS))
2094 return true;
2095 }
2096 return false;
2097}
2098
/* Expand a secondary reload move of MODE from operands[1] to
   operands[0], with operands[2] as the scratch register.  WHICH below
   is a two-decimal-digit encoding: tens digit describes the
   destination, units digit the source (0 = general, 1 = control reg,
   2 = far symbol).  NOTE: case labels like 00/01/02 are octal
   literals, but for these digit values octal and decimal coincide.  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Materialize the far address in the scratch, then store.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2149
2150/* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2151 can be moved directly into registers 0 to 7, but not into the rest.
2152 If so, and if the required class includes registers 0 to 7, restrict
2153 it to those registers. */
2154
2155enum reg_class
2156mep_preferred_reload_class (rtx x, enum reg_class rclass)
2157{
2158 switch (GET_CODE (x))
2159 {
2160 case CONST_INT:
2161 if (INTVAL (x) >= 0x10000
2162 && INTVAL (x) < 0x01000000
2163 && (INTVAL (x) & 0xffff) != 0
2164 && reg_class_subset_p (TPREL_REGS, rclass))
2165 rclass = TPREL_REGS;
2166 break;
2167
2168 case CONST:
2169 case SYMBOL_REF:
2170 case LABEL_REF:
2171 if (mep_section_tag (x) != 'f'
2172 && reg_class_subset_p (TPREL_REGS, rclass))
2173 rclass = TPREL_REGS;
2174 break;
2175
2176 default:
2177 break;
2178 }
2179 return rclass;
2180}
2181\f
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* copro <-> copro with direct moves available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* copro <-> copro without direct moves: twice as expensive.  (Only
     reached when the preceding test's mep_have_copro_copro_moves_p
     was false.)  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* copro <-> core.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything needing memory, or special-to-special, is prohibitive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2221
2222\f
/* Functions to save and restore machine-specific function data.  */

/* Allocate a zero-initialized per-function machine_function record;
   installed as init_machine_status.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2230
/* Implement TARGET_ALLOCATE_INITIAL_VALUE: return a stack slot (an
   arg-pointer-relative MEM) in which the prologue saves the entry
   value of hard register REG, or NULL_RTX if no slot applies.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign the register a 4-byte save slot.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2258
2259rtx
2260mep_return_addr_rtx (int count)
2261{
2262 if (count != 0)
2263 return const0_rtx;
2264
2265 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2266}
2267
/* Return the pseudo holding the incoming (initial) value of $tp.  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2273
/* Return the pseudo holding the incoming (initial) value of $gp.  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2279
2280static bool
2281mep_interrupt_p (void)
2282{
2283 if (cfun->machine->interrupt_handler == 0)
2284 {
2285 int interrupt_handler
2286 = (lookup_attribute ("interrupt",
2287 DECL_ATTRIBUTES (current_function_decl))
2288 != NULL_TREE);
2289 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2290 }
2291 return cfun->machine->interrupt_handler == 2;
2292}
2293
2294static bool
2295mep_disinterrupt_p (void)
2296{
2297 if (cfun->machine->disable_interrupts == 0)
2298 {
2299 int disable_interrupts
2300 = (lookup_attribute ("disinterrupt",
2301 DECL_ATTRIBUTES (current_function_decl))
2302 != NULL_TREE);
2303 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2304 }
2305 return cfun->machine->disable_interrupts == 2;
2306}
2307
2308\f
/* Frame/Epilog/Prolog Related.  */

/* Return true if INSN sets REG.  Unlike reg_set_p, REG_INC notes
   count as sets, calls are ignored, and a register copied onto
   itself does not count.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A self-copy (reg = reg) is not considered a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2330
2331
/* Cached per-register save decisions, stored in
   cfun->machine->reg_saved[] by mep_call_saves_register.  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2336
/* Return true if hard register REGNO is written anywhere in the
   current function's insn stream (or must be treated as if it
   were).  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg, insn;

  /* Interrupt handlers must preserve any register they touch at all.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* The profiling call sequences clobber $lp implicitly; see
     mep_epilogue_uses.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan every real insn for a set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2362
2363static bool
2364mep_asm_without_operands_p (void)
2365{
2366 if (cfun->machine->asms_without_operands == 0)
2367 {
2368 rtx insn;
2369
2370 push_topmost_sequence ();
2371 insn = get_insns ();
2372 pop_topmost_sequence ();
2373
2374 cfun->machine->asms_without_operands = 1;
2375 while (insn)
2376 {
2377 if (INSN_P (insn)
2378 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2379 {
2380 cfun->machine->asms_without_operands = 2;
2381 break;
2382 }
2383 insn = NEXT_INSN (insn);
2384 }
2385
2386 }
2387 return cfun->machine->asms_without_operands == 2;
2388}
2389
2390/* Interrupt functions save/restore every call-preserved register, and
2391 any call-used register it uses (or all if it calls any function,
2392 since they may get clobbered there too). Here we check to see
2393 which call-used registers need saving. */
2394
/* True for the IVC2 coprocessor control registers that interrupt
   handlers must also preserve: $ccr1, $ccr8..$ccr11 and
   $ccr16..$ccr31.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2399
/* Return true if an interrupt handler must save register R (see the
   comment above: call-preserved registers it touches, plus any
   call-used register a callee might clobber).  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue use these as staging registers, so the
     interrupted code's values must be preserved.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An asm without operands may use any register at all.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp. */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so. */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* A leaf handler that never touches a call-used register can leave
     it alone.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these. */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2. */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2433
/* Return true if the current function must save/restore register R.
   While the frame layout is not yet locked the answer is recomputed
   and cached in cfun->machine->reg_saved[]; afterwards the cache is
   authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling code clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding. */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2459
2460/* Return true if epilogue uses register REGNO. */
2461
2462bool
2463mep_epilogue_uses (int regno)
2464{
2465 /* Since $lp is a call-saved register, the generic code will normally
2466 mark it used in the epilogue if it needs to be saved and restored.
2467 However, when profiling is enabled, the profiling code will implicitly
2468 clobber $11. This case has to be handled specially both here and in
2469 mep_call_saves_register. */
2470 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2471 return true;
2472 /* Interrupt functions save/restore pretty much everything. */
2473 return (reload_completed && mep_interrupt_saved_reg (regno));
2474}
2475
2476static int
2477mep_reg_size (int regno)
2478{
2479 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2480 return 8;
2481 return 4;
2482}
2483
cd90919d 2484/* Worker function for TARGET_CAN_ELIMINATE. */
2485
2486bool
2487mep_can_eliminate (const int from, const int to)
2488{
2489 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2490 ? ! frame_pointer_needed
2491 : true);
2492}
2493
/* Implement INITIAL_ELIMINATION_OFFSET: distance between registers
   FROM and TO once the frame is laid out.  As a side effect this
   computes the register-save size and the two filler amounts that
   keep both the save area and the whole frame 8-byte aligned, storing
   them in cfun->machine.  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the layout is locked, recompute the cached per-register
     save decisions from scratch.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue. */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area up to a multiple of 8 bytes.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like. */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the whole frame up to a multiple of 8 bytes as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2538
/* Mark X as frame-related (for DWARF CFI generation) and return it.
   Shorthand used throughout the prologue code.  */
static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2545
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* Nothing to do at all.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
                             gen_rtx_REG (SImode, src));
      if (mark_frame)
        RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constant fits in a 16-bit signed immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
                                    gen_rtx_REG (SImode, src),
                                    GEN_INT (value)));
      if (mark_frame)
        RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build the constant in the temp: high half, then OR in low half.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                         GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
                                    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
                                gen_rtx_REG (SImode, src),
                                gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* Describe the net effect (dest += value) for the unwinder,
         since the actual insns go via the temp register.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                    gen_rtx_SET (SImode,
                                 gen_rtx_REG (SImode, dest),
                                 gen_rtx_PLUS (SImode,
                                               gen_rtx_REG (SImode, dest),
                                               GEN_INT (value))));
    }
}
2610
/* Move SRC to DEST.  Mark the move as being potentially dead if
   MAYBE_DEAD_P.  */

static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  /* The REG_MAYBE_DEAD annotation is disabled; kept for reference.  */
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2624
2625/* Used for interrupt functions, which can't assume that $tp and $gp
2626 contain the correct pointers. */
2627
2628static void
2629mep_reload_pointer (int regno, const char *symbol)
2630{
2631 rtx reg, sym;
2632
d5bf7b64 2633 if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
46222c18 2634 return;
2635
2636 reg = gen_rtx_REG (SImode, regno);
2637 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2638 emit_insn (gen_movsi_topsym_s (reg, sym));
2639 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2640}
2641
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Returns true if a frame
   is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
        int regsize = mep_reg_size (i);

        /* $tp/$gp/$lp only force a real frame when their values are
           actually changed within the function.  */
        if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
            || mep_reg_set_in_function (i))
          really_need_stack_frame = true;

        if (cfun->machine->reg_save_slot[i])
          continue;

        if (regsize < 8)
          {
            /* 4-byte registers fill the area from the bottom up.  */
            cfun->machine->reg_save_size += regsize;
            cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
          }
        else
          {
            /* 8-byte registers fill it from the top down.  */
            cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
            di_ofs += 8;
          }
      }
  /* From this point on the layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2679
/* Emit RTL for the function prologue: allocate the frame, store the
   registers chosen by mep_call_saves_register, set up the frame
   pointer if needed, and reload $gp/$tp in interrupt handlers.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore. */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  /* NOTE(review): really_need_stack_frame is computed but not read
     again below — confirm whether the result is intentionally
     unused here.  */
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* If the whole frame is small, make a single $sp adjustment up
     front so the register saves can still use short offsets.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	enum machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need not be stored unless modified, except in
	   interrupt handlers.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead. */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit registers that cannot be stored directly are
	       staged through two scratch registers, 32 bits each.  */
	    rtx insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Describe the save as one DImode store for the unwinder,
	       since the actual insns go via scratch registers.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Other non-loadable registers are staged through one
	       scratch register.  */
	    rtx insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size. */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Allocate whatever part of the frame the first adjustment left.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  /* Interrupt handlers cannot trust the interrupted code's $gp/$tp.  */
  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2808
/* Emit a human-readable frame-layout commentary at the start of the
   function's assembly output, and pick the name used for the frame
   pointer register.  HWI_LOCAL is the size of local variables.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Call $8 "$fp" in the output only when it really is the frame
     pointer for this function.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, emit just a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in. */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Skip registers the prologue also skipped.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2906
2907
/* Non-zero while expanding an eh_return epilogue: skip restoring $lp
   so the unwinder-provided handler address is used instead.  */
static int mep_prevent_lp_restore = 0;
/* Non-zero while expanding a sibcall epilogue: restore registers but
   emit no return insn.  */
static int mep_sibcall_epilogue = 0;
2910
/* Emit RTL for the function epilogue: rebase $sp, reload the saved
   registers, deallocate the frame and emit the appropriate return
   (or nothing for sibcalls).  Behavior is modulated by the
   mep_prevent_lp_restore / mep_sibcall_epilogue flags above.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  /* NOTE(review): really_need_stack_frame is computed but never read
     afterwards — confirm it is intentionally unused.  */
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed. */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them. */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Skip registers the prologue also skipped.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 hold EH values; only restore them where needed.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway. */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Non-loadable registers go via the scratch reg.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp. */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* Apply the unwinder-supplied stack adjustment for eh_return.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  /* Sibcall epilogues fall through to the call; no return insn.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3032
3033void
3034mep_expand_eh_return (rtx *operands)
3035{
3036 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3037 {
3038 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3039 emit_move_insn (ra, operands[0]);
3040 operands[0] = ra;
3041 }
3042
3043 emit_insn (gen_eh_epilogue (operands[0]));
3044}
3045
/* Expand the body of the eh_epilogue pattern: emit a full epilogue
   but leave $lp untouched, since the unwinder has already placed the
   handler address there.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  /* Register $0 carries the stack adjustment for eh_return.  */
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3054
/* Emit an epilogue suitable for a sibling call: restore registers and
   deallocate the frame, but emit no return insn.  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3062
3063static bool
3064mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3065{
3066 if (decl == NULL)
3067 return false;
3068
3069 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3070 return false;
3071
3072 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3073 if (mep_interrupt_p () || mep_disinterrupt_p ())
3074 return false;
3075
3076 return true;
3077}
3078
/* Implement EH_RETURN_STACKADJ_RTX: register $10 carries the stack
   adjustment (see also the $10/$11 special-casing in
   mep_call_saves_register).  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3084
/* Return the register holding the EH handler address: $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3090
/* Emit the profiling call sequence to FILE: preserve $0 and $lp on
   the stack around a call to __mep_mcount, then restore them.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function. */
  fputs ("\t# mep function profiler\n"
         "\tadd\t$sp, -8\n"
         "\tsw\t$0, ($sp)\n"
         "\tldc\t$0, $lp\n"
         "\tsw\t$0, 4($sp)\n"
         "\tbsr\t__mep_mcount\n"
         "\tlw\t$0, 4($sp)\n"
         "\tstc\t$0, $lp\n"
         "\tlw\t$0, ($sp)\n"
         "\tadd\t$sp, 8\n\n", file);
}
3106
3107const char *
3108mep_emit_bb_trace_ret (void)
3109{
3110 fprintf (asm_out_file, "\t# end of block profiling\n");
3111 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3112 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3113 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3114 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3115 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3116 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3117 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3118 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3119 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3120 return "";
3121}
3122
3123#undef SAVE
3124#undef RESTORE
3125\f
3126/* Operand Printing. */
3127
3128void
3129mep_print_operand_address (FILE *stream, rtx address)
3130{
3131 if (GET_CODE (address) == MEM)
3132 address = XEXP (address, 0);
3133 else
3134 /* cf: gcc.dg/asm-4.c. */
3135 gcc_assert (GET_CODE (address) == REG);
3136
3137 mep_print_operand (stream, address, 0);
3138}
3139
/* Table driving mep_print_operand: each entry matches a %-letter and
   an operand shape produced by encode_pattern, and supplies an output
   template in which digits index patternr[] and '\\' escapes the next
   character.  */
static struct
{
  char code;		/* %-letter this row applies to (0 = plain).  */
  const char *pattern;	/* encode_pattern () string to match.  */
  const char *format;	/* output template.  */
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3185
/* Return the bit index encoded by the low byte of I, which is
   expected to be either a single-set-bit mask or an inverted mask
   with a single clear bit.  Aborts on any other value.  */
static int
unique_bit_in (HOST_WIDE_INT i)
{
  switch (i & 0xff)
    {
    case 0x01: case 0xfe: return 0;
    case 0x02: case 0xfd: return 1;
    case 0x04: case 0xfb: return 2;
    case 0x08: case 0xf7: return 3;
    /* NOTE(review): for bits 4-7 the inverted-mask cases are NOT the
       bitwise complements of their set-bit partners (e.g. 0x7f pairs
       with 0x10 here although ~0x10 is 0xef, which pairs with 0x80).
       Verify whether any caller can pass these inverted masks; if so
       these four pairings look transposed.  */
    case 0x10: case 0x7f: return 4;
    case 0x20: case 0xbf: return 5;
    case 0x40: case 0xdf: return 6;
    case 0x80: case 0xef: return 7;
    default:
      gcc_unreachable ();
    }
}
3203
3204static int
3205bit_size_for_clip (HOST_WIDE_INT i)
3206{
3207 int rv;
3208
3209 for (rv = 0; rv < 31; rv ++)
3210 if (((HOST_WIDE_INT) 1 << rv) > i)
3211 return rv + 1;
3212 gcc_unreachable ();
3213}
3214
/* Print an operand to an assembler instruction.  CODE is the optional
   %-letter; after the special cases ('<', 'L', 'M') the operand is
   encoded by encode_pattern and matched against conversions[].  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name. */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* %L prints the bit-manipulation mnemonic suffix matching the
	 logical operation in X.  On a bad code this reports the error
	 and falls through to the generic table matching below.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice. */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  encode_pattern (x);
  /* Find the first table row whose %-letter and encoded shape both
     match, then interpret its format template.  */
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Backslash escapes the next template character.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* A digit N prints patternr[N], the Nth sub-rtx recorded
		 by encode_pattern.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  /* Constants are rendered per the %-letter.  */
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal template character.  A '+' before a negative
		 constant is suppressed so "a+-4" prints as "a-4".  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3371
/* Implement FINAL_PRESCAN_INSN: emit a '+' bundling marker before any
   BImode insn whose slot attribute says it is not core-only.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions. */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3384
3385/* Function args in registers. */
3386
/* Implement TARGET_SETUP_INCOMING_VARARGS: record how many argument
   registers remain unnamed (presumably out of four — TODO confirm
   against the ABI) and report the pretend-args size.  */
static void
mep_setup_incoming_varargs (cumulative_args_t cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  /* One more register is consumed by the named argument being
     processed; the rest need saving for va_arg.  */
  int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  *pretend_size = nsave * 4;
}
3399
3400static int
3401bytesize (const_tree type, enum machine_mode mode)
3402{
3403 if (mode == BLKmode)
3404 return int_size_in_bytes (type);
3405 return GET_MODE_SIZE (mode);
3406}
3407
/* Expand __builtin_saveregs: allocate a stack buffer, dump the
   remaining argument registers into it, and return the buffer's
   address.  On IVC2 the buffer additionally holds one 8-byte
   coprocessor register image per saved argument register, placed
   after the (8-byte-rounded) GPR area.  */

static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* GPR area rounded up to a multiple of 8 bytes, plus 8 bytes
	 per coprocessor register; 64-bit aligned.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Save registers $(5-ns) .. $4 at the start of the buffer.  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      /* Save the matching coprocessor registers (numbered from 49)
	 after the GPR area.  */
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3445
/* True iff T is a vector type (used to detect IVC2 vector arguments).  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)

/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The MeP va_list is a record
   of four pointers: the next GP-register save slot, the end of the
   GP-register save area, the next coprocessor-register save slot, and
   the next stack argument.  */

static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order; consumers walk them with
     DECL_CHAIN in this same order.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3481
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  Initialize the four
   va_list fields: next_gp points at the register save area created by
   __builtin_saveregs, next_gp_limit just past its GP portion, next_cop
   at the (8-byte aligned) coprocessor portion, and next_stack at the
   first stack-passed argument.  */

static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Look up the fields in the order mep_build_builtin_va_list chained
     them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Skip the 8-byte-rounded GP area; matches the layout used by
     mep_expand_builtin_saveregs.  */
  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3531
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple that fetches the
   next variadic argument of TYPE from either the register save area
   (GP or, for IVC2 vectors, coprocessor) or the stack, advancing the
   relevant va_list pointers.  Values larger than a register pair are
   passed by reference, so we fetch a pointer and dereference it.  */

static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  /* Anything bigger than a register (pair), or of unknown size, was
     passed as a pointer.  */
  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      type = build_pointer_type (type);
      size = 4;
    }
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Registers exhausted?  Jump to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both save-area pointers advance together, whichever one was used.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* Stack case.  */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* Join point.  */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference arguments the slot holds a pointer; load it
     first, then dereference to get the value.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3639
3640void
3641mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3642 rtx libname ATTRIBUTE_UNUSED,
3643 tree fndecl ATTRIBUTE_UNUSED)
3644{
3645 pcum->nregs = 0;
3646
3647 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3648 pcum->vliw = 1;
3649 else
3650 pcum->vliw = 0;
3651}
3652
4f6b272a 3653/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3654 larger than 4 bytes are passed indirectly. Return value in 0,
3655 unless bigger than 4 bytes, then the caller passes a pointer as the
3656 first arg. For varargs, we copy $1..$4 to the stack. */
3657
3658static rtx
39cba157 3659mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
4f6b272a 3660 const_tree type ATTRIBUTE_UNUSED,
3661 bool named ATTRIBUTE_UNUSED)
46222c18 3662{
39cba157 3663 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3664
46222c18 3665 /* VOIDmode is a signal for the backend to pass data to the call
3666 expander via the second operand to the call pattern. We use
3667 this to determine whether to use "jsr" or "jsrv". */
3668 if (mode == VOIDmode)
4f6b272a 3669 return GEN_INT (cum->vliw);
46222c18 3670
3671 /* If we havn't run out of argument registers, return the next. */
4f6b272a 3672 if (cum->nregs < 4)
46222c18 3673 {
3674 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
4f6b272a 3675 return gen_rtx_REG (mode, cum->nregs + 49);
46222c18 3676 else
4f6b272a 3677 return gen_rtx_REG (mode, cum->nregs + 1);
46222c18 3678 }
3679
3680 /* Otherwise the argument goes on the stack. */
3681 return NULL_RTX;
3682}
3683
/* Implement TARGET_PASS_BY_REFERENCE.  Values larger than 8 bytes (or
   of unknown size) are always passed by reference; values of 5..8
   bytes are passed by reference except for IVC2 vector arguments that
   still fit in a coprocessor register.  */

static bool
mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
		       enum machine_mode mode,
		       const_tree type,
		       bool named ATTRIBUTE_UNUSED)
{
  int size = bytesize (type, mode);

  /* This is non-obvious, but yes, large values passed after we've run
     out of registers are *still* passed by reference - we put the
     address of the parameter on the stack, as well as putting the
     parameter itself elsewhere on the stack.  */

  if (size <= 0 || size > 8)
    return true;
  if (size <= 4)
    return false;
  if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
      && type != NULL_TREE && VECTOR_TYPE_P (type))
    return false;
  return true;
}
3706
4f6b272a 3707static void
39cba157 3708mep_function_arg_advance (cumulative_args_t pcum,
4f6b272a 3709 enum machine_mode mode ATTRIBUTE_UNUSED,
3710 const_tree type ATTRIBUTE_UNUSED,
3711 bool named ATTRIBUTE_UNUSED)
46222c18 3712{
39cba157 3713 get_cumulative_args (pcum)->nregs += 1;
46222c18 3714}
3715
3716bool
3717mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3718{
3719 int size = bytesize (type, BLKmode);
3720 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
142c2869 3721 return size > 0 && size <= 8 ? 0 : 1;
3722 return size > 0 && size <= 4 ? 0 : 1;
46222c18 3723}
3724
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always narrow volatile
   bit-field accesses on MeP.  (The original body had an unreachable
   "return false;" after the "return true;"; the dead statement is
   removed.)  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3731
/* Implement FUNCTION_VALUE.  All values are returned in $0, except
   IVC2 vector values, which come back in coprocessor register 48.  */

rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3741
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return vectors, so the result is always in $0.  */

rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3749
/* Handle pipeline hazards.  */

/* Opcode classes we track for hazard detection, and their printable
   names (indexed by op_num) for the diagnostic comment.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously emitted opcode; persists across calls so we
   can see stc->fsft and stc->ret sequences.  */
static int prev_opcode = 0;

/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */

/* Called for each opcode about to be written to FILE; PTR points at
   the opcode text.  Emits a "nop" before fsft/ret when the previous
   opcode was stc, to avoid the pipeline hazard.  */

void
mep_asm_output_opcode (FILE *file, const char *ptr)
{
  int this_opcode = op_none;
  const char *hazard = 0;

  /* Classify the opcode; the !ISGRAPH check ensures we match the whole
     mnemonic, not a prefix of a longer one.  */
  switch (*ptr)
    {
    case 'f':
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
	this_opcode = op_fsft;
      break;
    case 'r':
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_ret;
      break;
    case 's':
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_stc;
      break;
    }

  if (prev_opcode == op_stc && this_opcode == op_fsft)
    hazard = "nop";
  if (prev_opcode == op_stc && this_opcode == op_ret)
    hazard = "nop";

  if (hazard)
    fprintf(file, "%s\t# %s-%s hazard\n\t",
	    hazard, opnames[prev_opcode], opnames[this_opcode]);

  prev_opcode = this_opcode;
}
3795
3796/* Handle attributes. */
3797
/* Attribute handler for "based" and "tiny".  Reject the attribute on
   anything other than variables, pointer types, and type decls, and on
   auto variables or pointed-to types; sets *NO_ADD to suppress the
   attribute in those cases.  */

static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3826
3827static int
3828mep_multiple_address_regions (tree list, bool check_section_attr)
3829{
3830 tree a;
3831 int count_sections = 0;
3832 int section_attr_count = 0;
3833
3834 for (a = list; a; a = TREE_CHAIN (a))
3835 {
3836 if (is_attribute_p ("based", TREE_PURPOSE (a))
3837 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3838 || is_attribute_p ("near", TREE_PURPOSE (a))
3839 || is_attribute_p ("far", TREE_PURPOSE (a))
3840 || is_attribute_p ("io", TREE_PURPOSE (a)))
3841 count_sections ++;
3842 if (check_section_attr)
3843 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3844 }
3845
3846 if (check_section_attr)
3847 return section_attr_count;
3848 else
3849 return count_sections;
3850}
3851
/* Attribute list for DECL: type attributes for a type, else the
   decl's own attributes, falling back to its type's attributes.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3857
/* Attribute handler for "near" and "far".  Like
   mep_validate_based_tiny but also allowed on functions and method
   types, and rejects a second address-region attribute on the same
   declaration.  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* Duplicate region attribute: drop the previously recorded ones.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3894
3895static tree
3896mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3897 int flags ATTRIBUTE_UNUSED, bool *no_add)
3898{
3899 if (TREE_CODE (*node) != FUNCTION_DECL
3900 && TREE_CODE (*node) != METHOD_TYPE)
3901 {
3902 warning (0, "%qE attribute only applies to functions", name);
3903 *no_add = true;
3904 }
3905 return NULL_TREE;
3906}
3907
/* Attribute handler for "interrupt".  Only valid on function decls;
   interrupt handlers must be non-inline, return void, and take no
   arguments.  */

static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Interrupt handlers cannot be inlined: error if the user asked for
     it explicitly, and mark the decl uninlinable either way.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototyped handler must have exactly (void) as its parameter
     list.  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
3937
/* Attribute handler for "io" and "cb".  Only valid on variables; the
   optional argument must be an integer constant (the I/O address).
   Unless -mio-volatile is disabled, accepted variables are made
   volatile.  */

static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so a parenthesized constant
	 still validates.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
3965
/* Attribute handler for "vliw".  Only valid on function types/decls
   (and field/type decls); for common misuses on pointers and arrays,
   print a one-time hint showing the correct syntax.  Also errors if
   the target configuration has no VLIW support at all.  */

static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      /* Emit each syntax hint only once per compilation.  */
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = get_tree_code_name (TREE_CODE (*node));
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location,
	          "to describe a pointer to a VLIW function, use syntax like this:\n%s",
	          "   typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location,
	          "to describe an array of VLIW function pointers, use syntax like this:\n%s",
	          "   typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4016
/* Table of machine attributes (TARGET_ATTRIBUTE_TABLE).  Note the
   array is sized [11] with 10 initializers including the NULL
   sentinel; the final slot is zero-initialized.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,           0, 0, false, false, false, NULL, false }
};
4033
4034static bool
4035mep_function_attribute_inlinable_p (const_tree callee)
4036{
4037 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4038 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4039 return (lookup_attribute ("disinterrupt", attrs) == 0
4040 && lookup_attribute ("interrupt", attrs) == 0);
4041}
4042
a71ff2a7 4043static bool
7c88e513 4044mep_can_inline_p (tree caller, tree callee)
a71ff2a7 4045{
4046 if (TREE_CODE (callee) == ADDR_EXPR)
4047 callee = TREE_OPERAND (callee, 0);
4048
1756c1fe 4049 if (!mep_vliw_function_p (caller)
a71ff2a7 4050 && mep_vliw_function_p (callee))
4051 {
1756c1fe 4052 return false;
a71ff2a7 4053 }
1756c1fe 4054 return true;
a71ff2a7 4055}
4056
46222c18 4057#define FUNC_CALL 1
4058#define FUNC_DISINTERRUPT 2
4059
4060
4061struct GTY(()) pragma_entry {
4062 int used;
4063 int flag;
4064 const char *funcname;
4065};
4066typedef struct pragma_entry pragma_entry;
4067
4068/* Hash table of farcall-tagged sections. */
4069static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4070
4071static int
4072pragma_entry_eq (const void *p1, const void *p2)
4073{
4074 const pragma_entry *old = (const pragma_entry *) p1;
4075 const char *new_name = (const char *) p2;
4076
4077 return strcmp (old->funcname, new_name) == 0;
4078}
4079
/* Hash callback for pragma_htab: hash the stored entry's name.  */

static hashval_t
pragma_entry_hash (const void *p)
{
  const pragma_entry *old = (const pragma_entry *) p;
  return htab_hash_string (old->funcname);
}
4086
/* Record FLAG (a FUNC_* bit) for FUNCNAME, creating the hash table
   and the entry on first use.  */

static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      /* Copy the name into GC memory; the caller's string may not
	 outlive us.  */
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4109
/* Return true if FLAG was noted for FUNCNAME, and mark it used so the
   end-of-file pass doesn't warn about it.  An "@x." section-encoding
   prefix (see mep_encode_section_info) is stripped first.  */

static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4131
/* Public wrappers over the pragma hash table, one pair per pragma.  */

/* True if "#pragma call FUNCNAME" was seen.  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record "#pragma call FUNCNAME".  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* True if "#pragma disinterrupt FUNCNAME" was seen.  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record "#pragma disinterrupt FUNCNAME".  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4155
/* htab_traverse callback: warn about a "#pragma disinterrupt" that was
   never matched by a function.  Always returns 1 to keep traversing.  */

static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4166
/* End-of-compilation hook: diagnose unused #pragma disinterrupt
   entries.  */

void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
7d86c715 4173
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4178
/* Snapshot the current register information.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Recompute register information after a pragma changed it.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* (Re)initialize the register tables.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4196
46222c18 4197
4198
/* Map the attribute LIST of DECL to a one-character section encoding:
   'b' based, 't' tiny, 'n' near, 'f' far, 'i'/'I' io (with/without a
   valid constant address), 'c' cb, or 0 for none.  Warns about and
   drops duplicate address-region attributes.  */

static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first region attribute.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when the io attribute carries an in-range
	     constant address; otherwise 'I'.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, functions without an explicit section default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4244
4245static int
4246mep_comp_type_attributes (const_tree t1, const_tree t2)
4247{
4248 int vliw1, vliw2;
4249
4250 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4251 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4252
4253 if (vliw1 != vliw2)
4254 return 0;
4255
4256 return 1;
4257}
4258
/* Implement TARGET_INSERT_ATTRIBUTES.  Attach implicit attributes to
   DECL: a "disinterrupt" attribute for functions named in a matching
   #pragma, and a default address-region attribute (based/tiny/far) for
   static-storage variables, chosen by size cutoffs or, with
   -mrand-tpgp, pseudo-randomly from the name.  Explicit user
   attributes always win.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only static-storage variables get default region attributes.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: spread small variables across sections using a
	 checksum of the name as a pseudo-random choice.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Normal size-cutoff-driven defaults.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section overrides the default for read-only variables.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4409
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for a
   variable or function with an address-region attribute, rewrite its
   SYMBOL_REF name to carry an "@x." prefix (x = region encoding), and
   warn if the object is too large for its region's addressing range.
   mep_strip_name_encoding removes the prefix again for output.  */

static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* The symbol may be wrapped in a MEM for variables.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* Build the "@x.name" symbol and re-point the RTL at it,
	 preserving weakness and the decl link.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limit of each region's addressing mode.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4485
/* TARGET_STRIP_NAME_ENCODING: return SYM with any leading '*'
   assembler markers and "@<tag>." MeP region prefixes removed.
   Both kinds of prefix may repeat, so keep stripping until neither
   matches.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
4499
/* TARGET_ASM_SELECT_SECTION hook: choose the output section for DECL,
   honoring the "@<tag>." name encodings added by
   mep_encode_section_info.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL is eligible for a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the region tag from an "@<tag>."-encoded name.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions go into the v*text variants so the
	     assembler knows the instruction set.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* "io"/"cb" variables alias fixed addresses, so an
	       initialized one is an error; fall back to .data to
	       keep the assembler output sane after the error.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4590
/* TARGET_ASM_UNIQUE_SECTION hook: give DECL its own section name,
   built from a prefix matching its MeP region/readonly-ness plus the
   stripped symbol name.  The second column of PREFIXES is used for
   DECL_ONE_ONLY (linkonce) decls.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by the SEC numbers in the comments below;
     [sec][0] = normal, [sec][1] = linkonce.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the RTL name, which carries any "@<tag>." encoding.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the base choice using the region encoding, then drop the
     encoding from the name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far.  */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4662
4663/* Given a decl, a section name, and whether the decl initializer
4664 has relocs, choose attributes for the section. */
4665
4666#define SECTION_MEP_VLIW SECTION_MACH_DEP
4667
4668static unsigned int
4669mep_section_type_flags (tree decl, const char *name, int reloc)
4670{
4671 unsigned int flags = default_section_type_flags (decl, name, reloc);
4672
4673 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4674 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4675 flags |= SECTION_MEP_VLIW;
4676
4677 return flags;
4678}
4679
4680/* Switch to an arbitrary section NAME with attributes as specified
4681 by FLAGS. ALIGN specifies any known alignment requirements for
4682 the section; 0 if the default should be used.
4683
4684 Differs from the standard ELF version only in support of VLIW mode. */
4685
4686static void
4687mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4688{
4689 char flagchars[8], *f = flagchars;
4690 const char *type;
4691
4692 if (!(flags & SECTION_DEBUG))
4693 *f++ = 'a';
4694 if (flags & SECTION_WRITE)
4695 *f++ = 'w';
4696 if (flags & SECTION_CODE)
4697 *f++ = 'x';
4698 if (flags & SECTION_SMALL)
4699 *f++ = 's';
4700 if (flags & SECTION_MEP_VLIW)
4701 *f++ = 'v';
4702 *f = '\0';
4703
4704 if (flags & SECTION_BSS)
4705 type = "nobits";
4706 else
4707 type = "progbits";
4708
4709 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4710 name, flagchars, type);
4711
4712 if (flags & SECTION_CODE)
4713 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4714 asm_out_file);
4715}
4716
/* Output an aligned common (or common-like) definition for NAME/DECL
   of SIZE bytes and ALIGN bits to STREAM; GLOBAL selects .globl
   vs. .local visibility.  "io"/"cb" variables become plain symbol
   assignments, region-encoded variables get explicit .zero storage in
   their region's bss section, and everything else falls back to
   .comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* An "io" or "cb" variable: it is just a name for a fixed
	 address taken from the attribute, so emit an assignment
	 rather than reserving storage.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* No address given: nothing to emit.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      /* Region-encoded variable: place it in the matching bss-style
	 section and reserve zeroed storage explicitly.  */
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (in bits) to a power-of-two byte count.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4794
/* Trampolines.  */

/* TARGET_TRAMPOLINE_INIT hook.  Instead of emitting inline code to
   fill in the trampoline at M_TRAMP, call the runtime helper
   __mep_trampoline_helper with the trampoline address, the target
   function's address, and the static chain value.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4809
4810/* Experimental Reorg. */
4811
/* Return true if REG is referenced anywhere within IN; when REG is
   NULL, return true if IN contains any MEM or LABEL_REF instead.
   If MODES_TOO is nonzero, a REG only counts as a match when its
   machine mode matches REG's as well.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  /* A non-REG "register" can never be mentioned as a register.  */
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* Label references count only for the MEM-search mode.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When searching for a register, look inside the address;
	 when searching for mems, any MEM is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaves that can never contain a REG or MEM.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over the remaining rtx formats.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4877
4878#define EXPERIMENTAL_REGMOVE_REORG 1
4879
4880#if EXPERIMENTAL_REGMOVE_REORG
4881
4882static int
4883mep_compatible_reg_class (int r1, int r2)
4884{
4885 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4886 return 1;
4887 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4888 return 1;
4889 return 0;
4890}
4891
4892static void
4893mep_reorg_regmove (rtx insns)
4894{
4895 rtx insn, next, pat, follow, *where;
4896 int count = 0, done = 0, replace, before = 0;
4897
4898 if (dump_file)
4899 for (insn = insns; insn; insn = NEXT_INSN (insn))
aa90bb35 4900 if (NONJUMP_INSN_P (insn))
46222c18 4901 before++;
4902
4903 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4904 set that uses the r2 and r2 dies there. We replace r2 with r1
4905 and see if it's still a valid insn. If so, delete the first set.
4906 Copied from reorg.c. */
4907
4908 while (!done)
4909 {
4910 done = 1;
4911 for (insn = insns; insn; insn = next)
4912 {
6f3836d6 4913 next = next_nonnote_nondebug_insn (insn);
aa90bb35 4914 if (! NONJUMP_INSN_P (insn))
46222c18 4915 continue;
4916 pat = PATTERN (insn);
4917
4918 replace = 0;
4919
4920 if (GET_CODE (pat) == SET
4921 && GET_CODE (SET_SRC (pat)) == REG
4922 && GET_CODE (SET_DEST (pat)) == REG
4923 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4924 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4925 {
6f3836d6 4926 follow = next_nonnote_nondebug_insn (insn);
46222c18 4927 if (dump_file)
4928 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4929
aa90bb35 4930 while (follow && NONJUMP_INSN_P (follow)
46222c18 4931 && GET_CODE (PATTERN (follow)) == SET
4932 && !dead_or_set_p (follow, SET_SRC (pat))
4933 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4934 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4935 {
4936 if (dump_file)
4937 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4938 follow = next_nonnote_insn (follow);
4939 }
4940
4941 if (dump_file)
4942 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
aa90bb35 4943 if (follow && NONJUMP_INSN_P (follow)
46222c18 4944 && GET_CODE (PATTERN (follow)) == SET
4945 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4946 {
4947 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4948 {
4949 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4950 {
4951 replace = 1;
4952 where = & SET_SRC (PATTERN (follow));
4953 }
4954 }
4955 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4956 {
4957 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4958 {
4959 replace = 1;
4960 where = & PATTERN (follow);
4961 }
4962 }
4963 }
4964 }
4965
4966 /* If so, follow is the corresponding insn */
4967 if (replace)
4968 {
4969 if (dump_file)
4970 {
4971 rtx x;
4972
4973 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4974 for (x = insn; x ;x = NEXT_INSN (x))
4975 {
4976 print_rtl_single (dump_file, x);
4977 if (x == follow)
4978 break;
4979 fprintf (dump_file, "\n");
4980 }
4981 }
4982
4983 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4984 follow, where))
4985 {
4986 count ++;
6f3836d6 4987 delete_insn (insn);
46222c18 4988 if (dump_file)
4989 {
4990 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4991 print_rtl_single (dump_file, follow);
4992 }
4993 done = 0;
4994 }
4995 }
4996 }
4997 }
4998
4999 if (dump_file)
5000 {
5001 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5002 fprintf (dump_file, "=====\n");
5003 }
5004}
5005#endif
5006
5007
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   Return the last instruction in the adjusted loop.

   The hardware repeat mechanism requires the label to sit two opcodes
   before the loop end, so this routine tries to pull the last two
   eligible insns into the "repeat epilogue" and pads with nops when
   fewer than two are found.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Walk backwards collecting up to two insns that may safely sit
       after the label; stop at anything that must stay outside.  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a bundled pair, examine the real insn inside.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  /* Pad so that exactly two opcodes follow the label.  */
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5105
5106
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder for OPERANDS.  Each begin/end pair shares an integer
   tag so mep_reorg_repeat can match them up later; a new tag is
   allocated whenever we see two insns of the same kind in a row (or
   the very first insn), since begins and ends are expected to
   alternate within one loop.  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  /* Tags are numbered from zero.  */
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5125
5126
5127/* Code for converting doloop_begins and doloop_ends into valid
5128 MeP instructions. A doloop_begin is just a placeholder:
5129
5130 $count = unspec ($count)
5131
5132 where $count is initially the number of iterations - 1.
5133 doloop_end has the form:
5134
5135 if ($count-- == 0) goto label
5136
5137 The counter variable is private to the doloop insns, nothing else
5138 relies on its value.
5139
5140 There are three cases, in decreasing order of preference:
5141
5142 1. A loop has exactly one doloop_begin and one doloop_end.
5143 The doloop_end branches to the first instruction after
5144 the doloop_begin.
5145
5146 In this case we can replace the doloop_begin with a repeat
5147 instruction and remove the doloop_end. I.e.:
5148
5149 $count1 = unspec ($count1)
5150 label:
5151 ...
5152 insn1
5153 insn2
5154 if ($count2-- == 0) goto label
5155
5156 becomes:
5157
5158 repeat $count1,repeat_label
5159 label:
5160 ...
5161 repeat_label:
5162 insn1
5163 insn2
5164 # end repeat
5165
5166 2. As for (1), except there are several doloop_ends. One of them
5167 (call it X) falls through to a label L. All the others fall
5168 through to branches to L.
5169
5170 In this case, we remove X and replace the other doloop_ends
5171 with branches to the repeat label. For example:
5172
5173 $count1 = unspec ($count1)
5174 start:
5175 ...
5176 if ($count2-- == 0) goto label
5177 end:
5178 ...
5179 if ($count3-- == 0) goto label
5180 goto end
5181
5182 becomes:
5183
5184 repeat $count1,repeat_label
5185 start:
5186 ...
5187 repeat_label:
5188 nop
5189 nop
5190 # end repeat
5191 end:
5192 ...
5193 goto repeat_label
5194
5195 3. The fallback case. Replace doloop_begins with:
5196
5197 $count = $count + 1
5198
5199 Replace doloop_ends with the equivalent of:
5200
5201 $count = $count - 1
5202 if ($count == 0) goto label
5203
5204 Note that this might need a scratch register if $count
5205 is stored in memory. */
5206
/* A structure describing one doloop_begin.  One of these is built for
   each doloop_begin insn found by mep_reorg_repeat; they are chained
   per loop tag.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5218
/* A structure describing a doloop_end.  One of these is built for
   each doloop_end insn found by mep_reorg_repeat.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5243
5244
/* One do-while loop, identified by its tag: all of its begin and end
   placeholder insns, gathered by mep_reorg_repeat.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5254
5255
5256/* Return true if LOOP can be converted into repeat/repeat_end form
5257 (that is, if it matches cases (1) or (2) above). */
5258
5259static bool
5260mep_repeat_loop_p (struct mep_doloop *loop)
5261{
5262 struct mep_doloop_end *end;
5263 rtx fallthrough;
5264
5265 /* There must be exactly one doloop_begin and at least one doloop_end. */
5266 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5267 return false;
5268
5269 /* The first doloop_end (X) must branch back to the insn after
5270 the doloop_begin. */
5271 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5272 return false;
5273
5274 /* All the other doloop_ends must branch to the same place as X.
5275 When the branch isn't taken, they must jump to the instruction
5276 after X. */
5277 fallthrough = loop->end->fallthrough;
5278 for (end = loop->end->next; end != 0; end = end->next)
5279 if (end->label != loop->end->label
5280 || !simplejump_p (end->fallthrough)
5281 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5282 return false;
5283
5284 return true;
5285}
5286
5287
/* The main repeat reorg function.  See comment above for details.
   Scans INSNS for doloop_begin/doloop_end placeholders, groups them
   by tag, and rewrites each loop either as a hardware repeat (cases
   (1)/(2)) or as explicit decrement-and-branch code (case (3)).  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     All list nodes are alloca'd, so they live for the rest of this
     call only.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* operand[2] is the tag.  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5435
5436
/* Return true if INSN is a conditional branch whose condition
   (EQ/NE/LT/GE) can be inverted and still match an insn pattern.
   The test is destructive-but-restoring: the condition code is
   flipped in place, recog is asked whether the result matches, and
   then the original code is put back before returning.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Temporarily replace the condition with its inverse.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Other comparison codes are not handled.  */
      return false;
    }
  /* Force recog to re-examine the modified pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Restore the original condition and invalidate the cached code
     again, since it was computed against the inverted pattern.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5474
/* Invert the conditional branch INSN in place and retarget it to a
   fresh label emitted after AFTER.  The caller must have checked
   mep_invertable_branch_p first; the final recog assert confirms the
   inverted form is a valid insn.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code (the same four codes handled by
     mep_invertable_branch_p).  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label
     reference, dropping the old label if we were its only user.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Re-recognize the modified insn; it must still match.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5520
/* Look for backward branches that close a loop eligible for the
   "erepeat" (endless repeat) instruction: scan backwards from each
   invertable conditional jump to its own target label, making sure
   nothing unsafe (calls, barriers, labels with unknown users)
   intervenes.  When a loop is found, emit an erepeat after the loop
   top and rewrite the closing branch accordingly.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* COUNT tracks loop-body insns seen; a conditional jump itself
	   counts as one.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards towards the branch target.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		/* Reached the loop top.  */
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop: the jump itself is no longer
		       needed.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop: invert the branch so it exits
		       the loop, and mark the repeat end after it.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5630
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, removing
		   the target label if we were its last user.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5668
5669
/* Fold adjacent "rN += C1; rN += C2" pairs into a single add of
   C1 + C2, when the combined constant still fits in a signed 16-bit
   immediate.  The second insn is spliced out of the chain by hand.  */
static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* Both insns add a constant to the same register.
	       NOTE(review): the range test excludes 32767 itself
	       (`< 32767` rather than `<= 32767`) -- confirm whether
	       that is deliberate.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		/* Unlink insn N from the doubly-linked insn chain.  */
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5705
5706/* If this insn adjusts the stack, return the adjustment, else return
5707 zero. */
5708static int
5709add_sp_insn_p (rtx insn)
5710{
5711 rtx pat;
5712
5713 if (! single_set (insn))
5714 return 0;
5715 pat = PATTERN (insn);
5716 if (GET_CODE (SET_DEST (pat)) != REG)
5717 return 0;
5718 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5719 return 0;
5720 if (GET_CODE (SET_SRC (pat)) != PLUS)
5721 return 0;
5722 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5723 return 0;
5724 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5725 return 0;
5726 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5727 return 0;
5728 return INTVAL (XEXP (SET_SRC (pat), 1));
5729}
5730
/* Check for trivial functions that set up an unneeded stack
   frame.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan forward; bail out if anything other than the matching
     tear-down mentions $sp, if there is a call, or if more than one
     candidate tear-down insn is found.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  /* The tear-down must exactly undo the set-up.  */
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  /* The frame is provably unused; delete both adjustments.  */
  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5785
/* Machine-dependent reorg pass; runs the MeP-specific insn-chain
   cleanups after register allocation.  Presumably registered as
   TARGET_MACHINE_DEPENDENT_REORG -- the hook table is outside this
   chunk.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* Only attempt erepeat conversion when optimizing, when no
     profiling instrumentation is in use, and when either this is not
     an interrupt handler or the handler saves RPB.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5817
5818\f
5819
5820/*----------------------------------------------------------------------*/
5821/* Builtins */
5822/*----------------------------------------------------------------------*/
5823
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name to the ISA bitmask to select for it.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Configuration table, terminated by a null entry.  Populated from
   COPROC_SELECTION_TABLE when the build provides one.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5851
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries for the
     same intrinsic are chained through mep_intrinsic_chain.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making the cmov1/cmov2 availability check dead --
     confirm whether that is intentional.  */
  mep_have_core_copro_moves_p = 1;
}
5893
5894/* Declare all available intrinsic functions. Called once only. */
5895
/* Type nodes for the builtin interface, created once by
   mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5904
5905static tree
5906mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5907{
5908 switch (cr)
5909 {
5910 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5911 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5912 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5913 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5914 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5915 case cgen_regnum_operand_type_CHAR: return char_type_node;
5916 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5917 case cgen_regnum_operand_type_SI: return intSI_type_node;
5918 case cgen_regnum_operand_type_DI: return intDI_type_node;
5919 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5920 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5921 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5922 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5923 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5924 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5925 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5926 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5927 default:
5928 return void_type_node;
5929 }
5930}
5931
/* Create the vector/cop types and declare the builtin functions for
   every available intrinsic.  Called once only.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The width of the coprocessor data bus depends on the -m64bit-cr
     setting.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the types visible to user code under their cp_* names.  */
  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicate table entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
5998
5999/* Report the unavailablity of the given intrinsic. */
6000
6001#if 1
6002static void
6003mep_intrinsic_unavailable (int intrinsic)
6004{
6005 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6006
6007 if (already_reported_p[intrinsic])
6008 return;
6009
6010 if (mep_intrinsic_insn[intrinsic] < 0)
6011 error ("coprocessor intrinsic %qs is not available in this configuration",
6012 cgen_intrinsics[intrinsic]);
6013 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6014 error ("%qs is not available in VLIW functions",
6015 cgen_intrinsics[intrinsic]);
6016 else
6017 error ("%qs is not available in non-VLIW functions",
6018 cgen_intrinsics[intrinsic]);
6019
6020 already_reported_p[intrinsic] = 1;
6021}
6022#endif
6023
6024
6025/* See if any implementation of INTRINSIC is available to the
6026 current function. If so, store the most general implementation
6027 in *INSN_PTR and return true. Return false otherwise. */
6028
6029static bool
6030mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6031{
6032 int i;
6033
6034 i = mep_intrinsic_insn[intrinsic];
6035 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6036 i = mep_intrinsic_chain[i];
6037
6038 if (i >= 0)
6039 {
6040 *insn_ptr = &cgen_insns[i];
6041 return true;
6042 }
6043 return false;
6044}
6045
6046
6047/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6048 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6049 try using a work-alike instead. In this case, the returned insn
6050 may have three operands rather than two. */
6051
6052static bool
6053mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6054{
6055 size_t i;
6056
6057 if (intrinsic == mep_cmov)
6058 {
6059 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6060 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6061 return true;
6062 return false;
6063 }
6064 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6065}
6066
6067
6068/* If ARG is a register operand that is the same size as MODE, convert it
6069 to MODE using a subreg. Otherwise return ARG as-is. */
6070
6071static rtx
6072mep_convert_arg (enum machine_mode mode, rtx arg)
6073{
6074 if (GET_MODE (arg) != mode
6075 && register_operand (arg, VOIDmode)
6076 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6077 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6078 return arg;
6079}
6080
6081
6082/* Apply regnum conversions to ARG using the description given by REGNUM.
6083 Return the new argument on success and null on failure. */
6084
6085static rtx
6086mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6087{
6088 if (regnum->count == 0)
6089 return arg;
6090
6091 if (GET_CODE (arg) != CONST_INT
6092 || INTVAL (arg) < 0
6093 || INTVAL (arg) >= regnum->count)
6094 return 0;
6095
6096 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6097}
6098
6099
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or 0 if ARG cannot be made to fit.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* Nothing worked; the caller reports the failure.  */
  return 0;
}
6157
6158
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  For integer constants bound to a known immediate
   predicate, emit a range or alignment diagnostic; otherwise fall
   back to a generic type mismatch.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  /* The value is either out of range, or in range but
	     misaligned.  */
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6189
/* Expand a call to a MeP coprocessor builtin.  Maps the call's tree
   arguments onto the operands of the chosen cgen instruction, emits
   the instruction, and copies output operands back to their final
   locations.  Returns TARGET (possibly unused) or NULL_RTX on error.

   NOTE(review): TARGET is marked ATTRIBUTE_UNUSED but is read below
   when binding the return value -- the marker looks stale; confirm.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* Number of leading operands that hold return values rather than
     user-supplied arguments.  */
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* Adjust the argument count for a value returned through operand 0.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  /* Reference arguments are accessed through a MEM of the
	     pointed-to mode.  */
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the return-value operands, reusing TARGET when its mode
     matches.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  /* If the return value did not land in TARGET, copy it there.  */
  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6369
6370static bool
6371mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6372{
6373 return false;
6374}
6375\f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Intended as a for_each_rtx callback; DATA is
   unused.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      /* SUBREG of a non-REG: fall out of the switch and return 0.  */
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* These can never mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6425
6426/* Returns nonzero if X mentions a global register. */
6427
6428static int
6429global_reg_mentioned_p (rtx x)
6430{
6431 if (INSN_P (x))
6432 {
6433 if (CALL_P (x))
6434 {
6435 if (! RTL_CONST_OR_PURE_CALL_P (x))
6436 return 1;
6437 x = CALL_INSN_FUNCTION_USAGE (x);
6438 if (x == 0)
6439 return 0;
6440 }
6441 else
6442 x = PATTERN (x);
6443 }
6444
6445 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6446}
6447/* Scheduling hooks for VLIW mode.
6448
6449 Conceptually this is very simple: we have a two-pack architecture
6450 that takes one core insn and one coprocessor insn to make up either
6451 a 32- or 64-bit instruction word (depending on the option bit set in
6452 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6453 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6454 and one 48-bit cop insn or two 32-bit core/cop insns.
6455
6456 In practice, instruction selection will be a bear. Consider in
6457 VL64 mode the following insns
6458
6459 add $1, 1
6460 cmov $cr0, $0
6461
6462 these cannot pack, since the add is a 16-bit core insn and cmov
6463 is a 32-bit cop insn. However,
6464
6465 add3 $1, $1, 1
6466 cmov $cr0, $0
6467
6468 packs just fine. For good VLIW code generation in VL64 mode, we
6469 will have to have 32-bit alternatives for many of the common core
6470 insns. Not implemented. */
6471
/* Scheduler cost-adjustment hook.  LINK is the dependence between
   INSN and DEP_INSN; COST is the scheduler's default estimate.  */
static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6518
6519/* ??? We don't properly compute the length of a load/store insn,
6520 taking into account the addressing mode. */
6521
6522static int
6523mep_issue_rate (void)
6524{
6525 return TARGET_IVC2 ? 3 : 2;
6526}
6527
6528/* Return true if function DECL was declared with the vliw attribute. */
6529
6530bool
6531mep_vliw_function_p (tree decl)
6532{
6533 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6534}
6535
6536static rtx
6537mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6538{
6539 int i;
6540
6541 for (i = nready - 1; i >= 0; --i)
6542 {
6543 rtx insn = ready[i];
6544 if (recog_memoized (insn) >= 0
6545 && get_attr_slot (insn) == slot
6546 && get_attr_length (insn) == length)
6547 return insn;
6548 }
6549
6550 return NULL_RTX;
6551}
6552
6553static void
6554mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6555{
6556 int i;
6557
6558 for (i = 0; i < nready; ++i)
6559 if (ready[i] == insn)
6560 {
6561 for (; i < nready - 1; ++i)
6562 ready[i] = ready[i + 1];
6563 ready[i] = insn;
6564 return;
6565 }
6566
6567 gcc_unreachable ();
6568}
6569
/* Dump a one-line description of INSN (code, uid, pattern name and
   slot assignment) to DUMP, for scheduler debugging.  */
static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unknown slot attribute: print its raw numeric value.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6613
/* Scheduler reorder hook: in VLIW functions, try to promote a
   core/coprocessor insn pair to the head of the ready list so they
   can issue together.  Returns the number of insns that can issue
   this cycle.

   NOTE(review): DUMP, SCHED_VERBOSE and CLOCK are marked
   ATTRIBUTE_UNUSED but are read in the dump code below -- the
   markers look stale; confirm.  */
static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx core_insn, cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6662
6663/* A for_each_rtx callback. Return true if *X is a register that is
6664 set by insn PREV. */
6665
6666static int
6667mep_store_find_set (rtx *x, void *prev)
6668{
6669 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6670}
6671
/* Like mep_store_data_bypass_p, but takes a pattern as the second
   argument, not the containing insn.  Returns true when PAT is a
   store whose address has no true dependence on PREV.  */

static bool
mep_store_data_bypass_1 (rtx prev, rtx pat)
{
  /* Cope with intrinsics like swcpa: any store inside the PARALLEL
     qualifies.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      /* Operand 0 is the store data, so start at 1.  */
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
}
6715
6716/* Return true if INSN is a store instruction and if the store address
6717 has no true dependence on PREV. */
6718
6719bool
6720mep_store_data_bypass_p (rtx prev, rtx insn)
6721{
6722 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6723}
6724
6725/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6726 is a register other than LO or HI and if PREV sets *X. */
6727
6728static int
6729mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6730{
6731 return (REG_P (*x)
6732 && REGNO (*x) != LO_REGNO
6733 && REGNO (*x) != HI_REGNO
6734 && reg_set_p (*x, (const_rtx) prev));
6735}
6736
6737/* Return true if, apart from HI/LO, there are no true dependencies
6738 between multiplication instructions PREV and INSN. */
6739
6740bool
6741mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6742{
6743 rtx pat;
6744
6745 pat = PATTERN (insn);
6746 if (GET_CODE (pat) == PARALLEL)
6747 pat = XVECEXP (pat, 0, 0);
6748 return (GET_CODE (pat) == SET
6749 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6750}
6751
6752/* Return true if INSN is an ldc instruction that issues to the
6753 MeP-h1 integer pipeline. This is true for instructions that
6754 read from PSW, LP, SAR, HI and LO. */
6755
6756bool
6757mep_ipipe_ldc_p (rtx insn)
6758{
6759 rtx pat, src;
6760
6761 pat = PATTERN (insn);
6762
6763 /* Cope with instrinsics that set both a hard register and its shadow.
6764 The set of the hard register comes first. */
6765 if (GET_CODE (pat) == PARALLEL)
6766 pat = XVECEXP (pat, 0, 0);
6767
6768 if (GET_CODE (pat) == SET)
6769 {
6770 src = SET_SRC (pat);
6771
6772 /* Cope with intrinsics. The first operand to the unspec is
6773 the source register. */
6774 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6775 src = XVECEXP (src, 0, 0);
6776
6777 if (REG_P (src))
6778 switch (REGNO (src))
6779 {
6780 case PSW_REGNO:
6781 case LP_REGNO:
6782 case SAR_REGNO:
6783 case HI_REGNO:
6784 case LO_REGNO:
6785 return true;
6786 }
6787 }
6788 return false;
6789}
6790
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  CORE and COP
     become an internal chain hanging off the SEQUENCE insn: CORE
     inherits the SEQUENCE's predecessor and COP its successor.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  Mode is used
     here as a bundling marker (see mep_bundle_insns), not a machine
     mode.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's location when it has one.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core) ? core : cop);
  INSN_LOCATION (core) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6833
6834/* A helper routine for ms1_insn_dependent_p called through note_stores. */
6835
6836static void
6837mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6838{
6839 rtx * pinsn = (rtx *) data;
6840
6841 if (*pinsn && reg_mentioned_p (x, *pinsn))
6842 *pinsn = NULL_RTX;
6843}
6844
6845/* Return true if anything in insn X is (anti,output,true) dependent on
6846 anything in insn Y. */
6847
6848static int
6849mep_insn_dependent_p (rtx x, rtx y)
6850{
6851 rtx tmp;
6852
6853 gcc_assert (INSN_P (x));
6854 gcc_assert (INSN_P (y));
6855
6856 tmp = PATTERN (y);
6857 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6858 if (tmp == NULL_RTX)
6859 return 1;
6860
6861 tmp = PATTERN (x);
6862 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6863 if (tmp == NULL_RTX)
6864 return 1;
6865
6866 return 0;
6867}
6868
6869static int
6870core_insn_p (rtx insn)
6871{
6872 if (GET_CODE (PATTERN (insn)) == USE)
6873 return 0;
6874 if (get_attr_slot (insn) == SLOT_CORE)
6875 return 1;
6876 return 0;
6877}
6878
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks a bundle start (scheduler group boundary).  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of its position and re-linking it just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Anything else (label, jump, call) terminates the bundle.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the whole bundle (TImode head followed by
		 VOIDmode members) for the last core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* The moved core insn becomes the bundle head.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      /* Non-IVC2: try to pair each COP insn with the preceding core
	 insn, or pad it with a NOP of the right size.  */
      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* No usable core partner; pad with a core NOP whose
		 size complements the COP insn's length.  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7072
7073
7074/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7075 Return true on success. This function can fail if the intrinsic
7076 is unavailable or if the operands don't satisfy their predicates. */
7077
7078bool
7079mep_emit_intrinsic (int intrinsic, const rtx *operands)
7080{
7081 const struct cgen_insn *cgen_insn;
f2956fc5 7082 const struct insn_data_d *idata;
46222c18 7083 rtx newop[10];
7084 int i;
7085
7086 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7087 return false;
7088
7089 idata = &insn_data[cgen_insn->icode];
7090 for (i = 0; i < idata->n_operands; i++)
7091 {
7092 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7093 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7094 return false;
7095 }
7096
7097 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7098 newop[3], newop[4], newop[5],
7099 newop[6], newop[7], newop[8]));
7100
7101 return true;
7102}
7103
7104
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: always returns false, letting callers fall back
   to generic expansion.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7116
7117
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: always returns false, letting callers fall back
   to generic expansion.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7134
/* Implement TARGET_RTX_COSTS.  Return true (with *TOTAL set) for the
   codes we cost specially; return false to fall back to the generic
   cost model.  */

static bool
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int *total,
	      bool ATTRIBUTE_UNUSED speed_t)
{
  switch (code)
    {
    case CONST_INT:
      /* NOTE(review): the upper bound `< 127' excludes 127 while the
	 lower bound includes -128; this looks like an off-by-one
	 (`<= 127' would cover the full signed 8-bit range) — confirm
	 against the MeP immediate encodings before changing.  */
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
	*total = 0;
      /* Covers both signed and unsigned 16-bit immediates.  */
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
	*total = 1;
      else
	*total = 3;
      return true;

    case SYMBOL_REF:
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* NOTE(review): multiply by a constant is costed higher (3)
	 than multiply by a register (2), which is unusual —
	 presumably the constant must first be loaded; verify.  */
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
		? COSTS_N_INSNS (3)
		: COSTS_N_INSNS (2));
      return true;
    }
  return false;
}
7163
/* Implement TARGET_ADDRESS_COST.  All addressing modes are treated as
   equally cheap.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7172
46222c18 7173static void
7174mep_asm_init_sections (void)
7175{
7176 based_section
7177 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7178 "\t.section .based,\"aw\"");
7179
7180 tinybss_section
7181 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7182 "\t.section .sbss,\"aw\"");
7183
7184 sdata_section
7185 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7186 "\t.section .sdata,\"aw\",@progbits");
7187
7188 far_section
7189 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7190 "\t.section .far,\"aw\"");
7191
7192 farbss_section
7193 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7194 "\t.section .farbss,\"aw\"");
7195
7196 frodata_section
7197 = get_unnamed_section (0, output_section_asm_op,
7198 "\t.section .frodata,\"a\"");
7199
7200 srodata_section
7201 = get_unnamed_section (0, output_section_asm_op,
7202 "\t.section .srodata,\"a\"");
7203
e3b9264d 7204 vtext_section
987ce674 7205 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7206 "\t.section .vtext,\"axv\"\n\t.vliw");
e3b9264d 7207
7208 vftext_section
987ce674 7209 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
ce4c65f3 7210 "\t.section .vftext,\"axv\"\n\t.vliw");
e3b9264d 7211
7212 ftext_section
987ce674 7213 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
ce4c65f3 7214 "\t.section .ftext,\"ax\"\n\t.core");
e3b9264d 7215
46222c18 7216}
7d86c715 7217\f
7218/* Initialize the GCC target structure. */
7219
7220#undef TARGET_ASM_FUNCTION_PROLOGUE
7221#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7222#undef TARGET_ATTRIBUTE_TABLE
7223#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7224#undef TARGET_COMP_TYPE_ATTRIBUTES
7225#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7226#undef TARGET_INSERT_ATTRIBUTES
7227#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7228#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7229#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7230#undef TARGET_CAN_INLINE_P
7231#define TARGET_CAN_INLINE_P mep_can_inline_p
7232#undef TARGET_SECTION_TYPE_FLAGS
7233#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7234#undef TARGET_ASM_NAMED_SECTION
7235#define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7236#undef TARGET_INIT_BUILTINS
7237#define TARGET_INIT_BUILTINS mep_init_builtins
7238#undef TARGET_EXPAND_BUILTIN
7239#define TARGET_EXPAND_BUILTIN mep_expand_builtin
7240#undef TARGET_SCHED_ADJUST_COST
7241#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7242#undef TARGET_SCHED_ISSUE_RATE
7243#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7244#undef TARGET_SCHED_REORDER
7245#define TARGET_SCHED_REORDER mep_sched_reorder
7246#undef TARGET_STRIP_NAME_ENCODING
7247#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7248#undef TARGET_ASM_SELECT_SECTION
7249#define TARGET_ASM_SELECT_SECTION mep_select_section
7250#undef TARGET_ASM_UNIQUE_SECTION
7251#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7252#undef TARGET_ENCODE_SECTION_INFO
7253#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7254#undef TARGET_FUNCTION_OK_FOR_SIBCALL
7255#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7256#undef TARGET_RTX_COSTS
7257#define TARGET_RTX_COSTS mep_rtx_cost
7258#undef TARGET_ADDRESS_COST
7259#define TARGET_ADDRESS_COST mep_address_cost
7260#undef TARGET_MACHINE_DEPENDENT_REORG
7261#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7262#undef TARGET_SETUP_INCOMING_VARARGS
7263#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7264#undef TARGET_PASS_BY_REFERENCE
7265#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
4f6b272a 7266#undef TARGET_FUNCTION_ARG
7267#define TARGET_FUNCTION_ARG mep_function_arg
7268#undef TARGET_FUNCTION_ARG_ADVANCE
7269#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7d86c715 7270#undef TARGET_VECTOR_MODE_SUPPORTED_P
7271#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
4c834714 7272#undef TARGET_OPTION_OVERRIDE
7273#define TARGET_OPTION_OVERRIDE mep_option_override
7d86c715 7274#undef TARGET_ALLOCATE_INITIAL_VALUE
7275#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7276#undef TARGET_ASM_INIT_SECTIONS
7277#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7278#undef TARGET_RETURN_IN_MEMORY
7279#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7280#undef TARGET_NARROW_VOLATILE_BITFIELD
7281#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7282#undef TARGET_EXPAND_BUILTIN_SAVEREGS
7283#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7284#undef TARGET_BUILD_BUILTIN_VA_LIST
7285#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7286#undef TARGET_EXPAND_BUILTIN_VA_START
7287#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7288#undef TARGET_GIMPLIFY_VA_ARG_EXPR
7289#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7290#undef TARGET_CAN_ELIMINATE
7291#define TARGET_CAN_ELIMINATE mep_can_eliminate
b2d7ede1 7292#undef TARGET_CONDITIONAL_REGISTER_USAGE
7293#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7d86c715 7294#undef TARGET_TRAMPOLINE_INIT
7295#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
ca316360 7296#undef TARGET_LEGITIMATE_CONSTANT_P
7297#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
5f35dd0e 7298#undef TARGET_CAN_USE_DOLOOP_P
7299#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7d86c715 7300
/* Instantiate the target hook vector from the TARGET_* overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
46222c18 7302
7303#include "gt-mep.h"