]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/mep/mep.c
2015-06-04 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / config / mep / mep.c
CommitLineData
46222c18 1/* Definitions for Toshiba Media Processor
d353bf18 2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
46222c18 3 Contributed by Red Hat, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
b20a8bb4 26#include "hash-set.h"
b20a8bb4 27#include "vec.h"
b20a8bb4 28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
b20a8bb4 31#include "inchash.h"
46222c18 32#include "tree.h"
b20a8bb4 33#include "fold-const.h"
9ed99284 34#include "varasm.h"
35#include "calls.h"
36#include "stringpool.h"
37#include "stor-layout.h"
46222c18 38#include "regs.h"
39#include "hard-reg-set.h"
46222c18 40#include "insn-config.h"
41#include "conditions.h"
42#include "insn-flags.h"
43#include "output.h"
44#include "insn-attr.h"
45#include "flags.h"
46#include "recog.h"
47#include "obstack.h"
d53441c8 48#include "hashtab.h"
49#include "function.h"
50#include "statistics.h"
d53441c8 51#include "expmed.h"
52#include "dojump.h"
53#include "explow.h"
54#include "emit-rtl.h"
55#include "stmt.h"
46222c18 56#include "expr.h"
57#include "except.h"
34517c64 58#include "insn-codes.h"
46222c18 59#include "optabs.h"
60#include "reload.h"
61#include "tm_p.h"
62#include "ggc.h"
0b205f4c 63#include "diagnostic-core.h"
46222c18 64#include "target.h"
65#include "target-def.h"
66#include "langhooks.h"
94ea8568 67#include "dominance.h"
68#include "cfg.h"
69#include "cfgrtl.h"
70#include "cfganal.h"
71#include "lcm.h"
72#include "cfgbuild.h"
73#include "cfgcleanup.h"
74#include "predict.h"
75#include "basic-block.h"
46222c18 76#include "df.h"
bc61cadb 77#include "hash-table.h"
bc61cadb 78#include "tree-ssa-alias.h"
79#include "internal-fn.h"
80#include "gimple-fold.h"
81#include "tree-eh.h"
82#include "gimple-expr.h"
83#include "is-a.h"
e795d6e1 84#include "gimple.h"
a8783bee 85#include "gimplify.h"
fba5dd52 86#include "opts.h"
b9ed1410 87#include "dumpfile.h"
f7715905 88#include "builtins.h"
be83dbcd 89#include "rtl-iter.h"
46222c18 90
91/* Structure of this file:
92
93 + Command Line Option Support
94 + Pattern support - constraints, predicates, expanders
95 + Reload Support
96 + Costs
97 + Functions to save and restore machine-specific function data.
98 + Frame/Epilog/Prolog Related
99 + Operand Printing
100 + Function args in registers
101 + Handle pipeline hazards
102 + Handle attributes
103 + Trampolines
104 + Machine-dependent Reorg
105 + Builtins. */
106
107/* Symbol encodings:
108
109 Symbols are encoded as @ <char> . <name> where <char> is one of these:
110
111 b - based
112 t - tiny
113 n - near
114 f - far
115 i - io, near
116 I - io, far
117 c - cb (control bus) */
118
struct GTY(()) machine_function
{
  /* Nonzero if this function needs a frame pointer.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  /* NOTE(review): presumably nonzero once the frame layout may no
     longer change -- confirm against the prologue/frame code (not
     visible in this chunk).  */
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Per-hard-register save information: total size of the save area,
     the slot offset assigned to each register, and a flag marking the
     registers that are saved.  (Exact slot semantics are defined by
     the prologue/epilogue code elsewhere in this file.)  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it. */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute. */
  int disable_interrupts;

  /* Number of doloop tags used so far. */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end. */
  bool doloop_tag_from_end;

  /* True if reload changes $TP. */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler. */
  int asms_without_operands;
};
157
158#define MEP_CONTROL_REG(x) \
159 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
160
46222c18 161static GTY(()) section * based_section;
162static GTY(()) section * tinybss_section;
163static GTY(()) section * far_section;
164static GTY(()) section * farbss_section;
165static GTY(()) section * frodata_section;
166static GTY(()) section * srodata_section;
167
e3b9264d 168static GTY(()) section * vtext_section;
169static GTY(()) section * vftext_section;
170static GTY(()) section * ftext_section;
171
46222c18 172static void mep_set_leaf_registers (int);
173static bool symbol_p (rtx);
174static bool symbolref_p (rtx);
175static void encode_pattern_1 (rtx);
176static void encode_pattern (rtx);
177static bool const_in_range (rtx, int, int);
d8691ecc 178static void mep_rewrite_mult (rtx_insn *, rtx);
179static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
180static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
181static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
3754d046 182static bool move_needs_splitting (rtx, rtx, machine_mode);
46222c18 183static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
184static bool mep_nongeneral_reg (rtx);
185static bool mep_general_copro_reg (rtx);
186static bool mep_nonregister (rtx);
187static struct machine_function* mep_init_machine_status (void);
188static rtx mep_tp_rtx (void);
189static rtx mep_gp_rtx (void);
190static bool mep_interrupt_p (void);
191static bool mep_disinterrupt_p (void);
192static bool mep_reg_set_p (rtx, rtx);
193static bool mep_reg_set_in_function (int);
194static bool mep_interrupt_saved_reg (int);
195static bool mep_call_saves_register (int);
d8691ecc 196static rtx_insn *F (rtx_insn *);
46222c18 197static void add_constant (int, int, int, int);
d8691ecc 198static rtx_insn *maybe_dead_move (rtx, rtx, bool);
46222c18 199static void mep_reload_pointer (int, const char *);
200static void mep_start_function (FILE *, HOST_WIDE_INT);
201static bool mep_function_ok_for_sibcall (tree, tree);
202static int unique_bit_in (HOST_WIDE_INT);
203static int bit_size_for_clip (HOST_WIDE_INT);
3754d046 204static int bytesize (const_tree, machine_mode);
46222c18 205static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
206static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
207static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
208static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
209static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
210static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
211static bool mep_function_attribute_inlinable_p (const_tree);
7c88e513 212static bool mep_can_inline_p (tree, tree);
46222c18 213static bool mep_lookup_pragma_disinterrupt (const char *);
214static int mep_multiple_address_regions (tree, bool);
215static int mep_attrlist_to_encoding (tree, tree);
216static void mep_insert_attributes (tree, tree *);
217static void mep_encode_section_info (tree, rtx, int);
218static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
219static void mep_unique_section (tree, int);
220static unsigned int mep_section_type_flags (tree, const char *, int);
221static void mep_asm_named_section (const char *, unsigned int, tree);
222static bool mep_mentioned_p (rtx, rtx, int);
d8691ecc 223static void mep_reorg_regmove (rtx_insn *);
224static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
225 bool, bool);
226static void mep_reorg_repeat (rtx_insn *);
227static bool mep_invertable_branch_p (rtx_insn *);
228static void mep_invert_branch (rtx_insn *, rtx_insn *);
229static void mep_reorg_erepeat (rtx_insn *);
230static void mep_jmp_return_reorg (rtx_insn *);
231static void mep_reorg_addcombine (rtx_insn *);
46222c18 232static void mep_reorg (void);
233static void mep_init_intrinsics (void);
234static void mep_init_builtins (void);
235static void mep_intrinsic_unavailable (int);
236static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
237static bool mep_get_move_insn (int, const struct cgen_insn **);
3754d046 238static rtx mep_convert_arg (machine_mode, rtx);
46222c18 239static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
240static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
241static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
3754d046 242static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
18282db0 243static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
46222c18 244static int mep_issue_rate (void);
b24ef467 245static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
246static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
247static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
d8691ecc 248static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
249static void mep_bundle_insns (rtx_insn *);
20d892d1 250static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
3754d046 251static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
252static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
46222c18 253 tree, int *, int);
3754d046 254static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
46222c18 255 const_tree, bool);
3754d046 256static rtx mep_function_arg (cumulative_args_t, machine_mode,
4f6b272a 257 const_tree, bool);
3754d046 258static void mep_function_arg_advance (cumulative_args_t, machine_mode,
4f6b272a 259 const_tree, bool);
3754d046 260static bool mep_vector_mode_supported_p (machine_mode);
46222c18 261static rtx mep_allocate_initial_value (rtx);
262static void mep_asm_init_sections (void);
263static int mep_comp_type_attributes (const_tree, const_tree);
264static bool mep_narrow_volatile_bitfield (void);
265static rtx mep_expand_builtin_saveregs (void);
266static tree mep_build_builtin_va_list (void);
267static void mep_expand_va_start (tree, rtx);
260f365f 268static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
cd90919d 269static bool mep_can_eliminate (const int, const int);
b2d7ede1 270static void mep_conditional_register_usage (void);
8786c274 271static void mep_trampoline_init (rtx, tree, rtx);
46222c18 272\f
46222c18 273#define WANT_GCC_DEFINITIONS
274#include "mep-intrin.h"
275#undef WANT_GCC_DEFINITIONS
276
277\f
278/* Command Line Option Support. */
279
280char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
281
282/* True if we can use cmov instructions to move values back and forth
283 between core and coprocessor registers. */
284bool mep_have_core_copro_moves_p;
285
286/* True if we can use cmov instructions (or a work-alike) to move
287 values between coprocessor registers. */
288bool mep_have_copro_copro_moves_p;
289
290/* A table of all coprocessor instructions that can act like
291 a coprocessor-to-coprocessor cmov. */
292static const int mep_cmov_insns[] = {
293 mep_cmov,
294 mep_cpmov,
295 mep_fmovs,
296 mep_caddi3,
297 mep_csubi3,
298 mep_candi3,
299 mep_cori3,
300 mep_cxori3,
301 mep_cand3,
302 mep_cor3
303};
304
46222c18 305\f
306static void
307mep_set_leaf_registers (int enable)
308{
309 int i;
310
311 if (mep_leaf_registers[0] != enable)
312 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
313 mep_leaf_registers[i] = enable;
314}
315
b2d7ede1 316static void
9dc6e4a0 317mep_conditional_register_usage (void)
46222c18 318{
319 int i;
320
321 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
322 {
323 fixed_regs[HI_REGNO] = 1;
324 fixed_regs[LO_REGNO] = 1;
325 call_used_regs[HI_REGNO] = 1;
326 call_used_regs[LO_REGNO] = 1;
327 }
328
329 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
330 global_regs[i] = 1;
331}
332
4c834714 333static void
334mep_option_override (void)
46222c18 335{
e9e3a45a 336 unsigned int i;
337 int j;
338 cl_deferred_option *opt;
f1f41a6c 339 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
340
341 if (v)
342 FOR_EACH_VEC_ELT (*v, i, opt)
343 {
344 switch (opt->opt_index)
345 {
346 case OPT_mivc2:
347 for (j = 0; j < 32; j++)
348 fixed_regs[j + 48] = 0;
349 for (j = 0; j < 32; j++)
350 call_used_regs[j + 48] = 1;
351 for (j = 6; j < 8; j++)
352 call_used_regs[j + 48] = 0;
e9e3a45a 353
354#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
f1f41a6c 355 RN (0, "$csar0");
356 RN (1, "$cc");
357 RN (4, "$cofr0");
358 RN (5, "$cofr1");
359 RN (6, "$cofa0");
360 RN (7, "$cofa1");
361 RN (15, "$csar1");
362
363 RN (16, "$acc0_0");
364 RN (17, "$acc0_1");
365 RN (18, "$acc0_2");
366 RN (19, "$acc0_3");
367 RN (20, "$acc0_4");
368 RN (21, "$acc0_5");
369 RN (22, "$acc0_6");
370 RN (23, "$acc0_7");
371
372 RN (24, "$acc1_0");
373 RN (25, "$acc1_1");
374 RN (26, "$acc1_2");
375 RN (27, "$acc1_3");
376 RN (28, "$acc1_4");
377 RN (29, "$acc1_5");
378 RN (30, "$acc1_6");
379 RN (31, "$acc1_7");
e9e3a45a 380#undef RN
f1f41a6c 381 break;
e9e3a45a 382
f1f41a6c 383 default:
384 gcc_unreachable ();
385 }
386 }
e9e3a45a 387
46222c18 388 if (flag_pic == 1)
389 warning (OPT_fpic, "-fpic is not supported");
390 if (flag_pic == 2)
391 warning (OPT_fPIC, "-fPIC is not supported");
392 if (TARGET_S && TARGET_M)
393 error ("only one of -ms and -mm may be given");
394 if (TARGET_S && TARGET_L)
395 error ("only one of -ms and -ml may be given");
396 if (TARGET_M && TARGET_L)
397 error ("only one of -mm and -ml may be given");
e9e3a45a 398 if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
46222c18 399 error ("only one of -ms and -mtiny= may be given");
e9e3a45a 400 if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
46222c18 401 error ("only one of -mm and -mtiny= may be given");
402 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
403 warning (0, "-mclip currently has no effect without -mminmax");
404
405 if (mep_const_section)
406 {
407 if (strcmp (mep_const_section, "tiny") != 0
408 && strcmp (mep_const_section, "near") != 0
409 && strcmp (mep_const_section, "far") != 0)
410 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
411 }
412
413 if (TARGET_S)
414 mep_tiny_cutoff = 65536;
415 if (TARGET_M)
416 mep_tiny_cutoff = 0;
e9e3a45a 417 if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
46222c18 418 mep_tiny_cutoff = 0;
419
420 if (TARGET_64BIT_CR_REGS)
421 flag_split_wide_types = 0;
422
423 init_machine_status = mep_init_machine_status;
424 mep_init_intrinsics ();
425}
426
427/* Pattern Support - constraints, predicates, expanders. */
428
429/* MEP has very few instructions that can refer to the span of
430 addresses used by symbols, so it's common to check for them. */
431
432static bool
433symbol_p (rtx x)
434{
435 int c = GET_CODE (x);
436
437 return (c == CONST_INT
438 || c == CONST
439 || c == SYMBOL_REF);
440}
441
442static bool
443symbolref_p (rtx x)
444{
445 int c;
446
447 if (GET_CODE (x) != MEM)
448 return false;
449
450 c = GET_CODE (XEXP (x, 0));
451 return (c == CONST_INT
452 || c == CONST
453 || c == SYMBOL_REF);
454}
455
456/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
457
458#define GEN_REG(R, STRICT) \
459 (GR_REGNO_P (R) \
460 || (!STRICT \
461 && ((R) == ARG_POINTER_REGNUM \
462 || (R) >= FIRST_PSEUDO_REGISTER)))
463
464static char pattern[12], *patternp;
465static GTY(()) rtx patternr[12];
466#define RTX_IS(x) (strcmp (pattern, x) == 0)
467
/* Append one character code for X -- and, recursively, codes for its
   operands -- to the static `pattern' buffer.  patternr records the
   rtx seen at each buffer position so callers can retrieve operands
   after an RTX_IS match.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Keep room for the terminating NUL; once the buffer is nearly
     full, overwrite the last code with '?' and stop recursing.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU -- a MEM's address is encoded exactly like a CONST's
	 operand.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* 'u' followed by the unspec number, then the encoded operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
534
535static void
536encode_pattern (rtx x)
537{
538 patternp = pattern;
539 encode_pattern_1 (x);
540 *patternp = 0;
541}
542
/* Return the section tag encoded in X's symbol name (see the "Symbol
   encodings" table near the top of this file), or 0 if there is none.
   Looks through MEM, CONST and UNSPEC wrappers and reg+constant
   PLUSes to find the underlying SYMBOL_REF.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  /* Collapse the io encodings onto plain near/far: 'I' is
	     io-far, 'i' is io-near (per the encoding table).  The
	     previous comments here had the two labels swapped.  */
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
584
/* Return the smallest register class containing hard register REGNO.
   Special-purpose registers get their own singleton classes; for
   coprocessor registers, prefer the most specific user-defined
   subclass that contains REGNO.  */
int
mep_regno_reg_class (int regno)
{
  switch (regno)
    {
    case SP_REGNO: return SP_REGS;
    case TP_REGNO: return TP_REGS;
    case GP_REGNO: return GP_REGS;
    case 0: return R0_REGS;
    case HI_REGNO: return HI_REGS;
    case LO_REGNO: return LO_REGS;
    case ARG_POINTER_REGNUM: return GENERAL_REGS;
    }

  /* The first eight core registers are reachable with $tp-relative
     addressing and so get the smaller TPREL_REGS class.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  /* Accept class I only if no proper subclass of it also
	     contains REGNO.  */
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should be left; they belong to no
     allocatable class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
634
46222c18 635static bool
636const_in_range (rtx x, int minv, int maxv)
637{
638 return (GET_CODE (x) == CONST_INT
639 && INTVAL (x) >= minv
640 && INTVAL (x) <= maxv);
641}
642
643/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
644 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
645 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
646 at the end of the insn stream. */
647
648rtx
d8691ecc 649mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
46222c18 650{
651 if (rtx_equal_p (dest, src1))
652 return src2;
653 else if (rtx_equal_p (dest, src2))
654 return src1;
655 else
656 {
657 if (insn == 0)
658 emit_insn (gen_movsi (copy_rtx (dest), src1));
659 else
660 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
661 return src2;
662 }
663}
664
665/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
666 Change the last element of PATTERN from (clobber (scratch:SI))
667 to (clobber (reg:SI HI_REGNO)). */
668
static void
mep_rewrite_mult (rtx_insn *insn, rtx pattern)
{
  rtx hi_clobber;

  /* The clobber is the last element of the multiplication PARALLEL;
     redirect it from a scratch to the hard $hi register.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force the insn to be re-recognized against the new pattern.  */
  INSN_CODE (insn) = -1;
}
679
680/* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
681 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
682 store the result in DEST if nonnull. */
683
static void
mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* Keep the GPR result as well as $lo: mulr form.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* Result only needed in $lo: plain mul form.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
697
698/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
699 SRC3 into $lo, then use either madd or maddr. The move into $lo will
700 be deleted by a peephole2 if SRC3 is already in $lo. */
701
static void
mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  /* Seed $lo with the addend; a peephole2 deletes this move when
     SRC3 is already in $lo (see function comment above).  */
  lo = gen_rtx_REG (SImode, LO_REGNO);
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* Keep the GPR result as well as $lo: maddr form.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* Result only needed in $lo: plain madd form.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
717
718/* Return true if $lo has the same value as integer register GPR when
719 instruction INSN is reached. If necessary, rewrite the instruction
720 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
721 rtx for (reg:SI LO_REGNO).
722
723 This function is intended to be used by the peephole2 pass. Since
724 that pass goes from the end of a basic block to the beginning, and
725 propagates liveness information on the way, there is no need to
726 update register notes here.
727
728 If GPR_DEAD_P is true on entry, and this function returns true,
729 then the caller will replace _every_ use of GPR in and after INSN
730 with LO. This means that if the instruction that sets $lo is a
731 mulr- or maddr-type instruction, we can rewrite it to use mul or
732 madd instead. In combination with the copy progagation pass,
733 this allows us to replace sequences like:
734
735 mov GPR,R1
736 mulr GPR,R2
737
738 with:
739
740 mul R1,R2
741
742 if GPR is no longer used. */
743
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN to the start of the basic block, looking
     for the instruction that last set $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR holds the product; rewrite so the product also
		   lands in $lo (dropping the GPR copy if it's dead).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already set both $lo and a GPR; $lo matches GPR
	       exactly when operand 1 is GPR.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       invalidates the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR before INSN means we cannot delete the
	       GPR result even if it's dead afterwards.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
797
798/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
799
bool
mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
  /* mep_reuse_lo_p_1 may have run extract_insn on earlier insns;
     restore recog_data for INSN on behalf of the peephole2 pass.  */
  extract_insn (insn);
  return result;
}
807
808/* Return true if SET can be turned into a post-modify load or store
809 that adds OFFSET to GPR. In other words, return true if SET can be
810 changed into:
811
812 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
813
814 It's OK to change SET to an equivalent operation in order to
815 make it match. */
816
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign extension of the loaded value.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain (mem GPR)
     whose address is exactly the register being incremented.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned for the access size.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on we modify SET in place; all checks must be done
     above this point.  Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
878
879/* Return the effect of frame-related instruction INSN. */
880
881static rtx
d8691ecc 882mep_frame_expr (rtx_insn *insn)
46222c18 883{
884 rtx note, expr;
885
886 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
887 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
888 RTX_FRAME_RELATED_P (expr) = 1;
889 return expr;
890}
891
892/* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
893 new pattern in INSN1; INSN2 will be deleted by the caller. */
894
static void
mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
{
  rtx expr;

  /* If INSN2 affects the frame, the merged insn must carry its frame
     effect too; combine both effects in a SEQUENCE when INSN1 is also
     frame-related.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined pattern.  */
  INSN_CODE (insn1) = -1;
}
915
916/* SET_INSN is an instruction that adds OFFSET to REG. Go back through
917 the basic block to see if any previous load or store instruction can
918 be persuaded to do SET_INSN as a side-effect. Return true if so. */
919
static bool
mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
{
  rtx_insn *insn;

  /* Scan backwards from SET_INSN to the start of the basic block for
     a load or store through REG that can absorb the increment.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any other write to or use of REG, or a volatile insn,
	     blocks the transformation.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
946
947/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
948
bool
mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
{
  bool result = mep_use_post_modify_p_1 (insn, reg, offset);
  /* The search may have clobbered recog_data; re-extract INSN for
     the caller.  */
  extract_insn (insn);
  return result;
}
956
957bool
958mep_allow_clip (rtx ux, rtx lx, int s)
959{
960 HOST_WIDE_INT u = INTVAL (ux);
961 HOST_WIDE_INT l = INTVAL (lx);
962 int i;
963
964 if (!TARGET_OPT_CLIP)
965 return false;
966
967 if (s)
968 {
969 for (i = 0; i < 30; i ++)
970 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
971 && (l == - ((HOST_WIDE_INT) 1 << i)))
972 return true;
973 }
974 else
975 {
976 if (l != 0)
977 return false;
978
979 for (i = 0; i < 30; i ++)
980 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
981 return true;
982 }
983 return false;
984}
985
986bool
987mep_bit_position_p (rtx x, bool looking_for)
988{
989 if (GET_CODE (x) != CONST_INT)
990 return false;
991 switch ((int) INTVAL(x) & 0xff)
992 {
993 case 0x01: case 0x02: case 0x04: case 0x08:
994 case 0x10: case 0x20: case 0x40: case 0x80:
995 return looking_for;
996 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
997 case 0xef: case 0xdf: case 0xbf: case 0x7f:
998 return !looking_for;
999 }
1000 return false;
1001}
1002
/* Return true if moving SRC into DEST cannot be done with a single
   instruction: far-section symbols, symbol+offset sums whose offset
   is out of range, and destinations outside the low eight core
   registers all require a split.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip CONST and MEM wrappers to find the symbolic part of SRC;
     a non-symbolic source never needs splitting.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1031
/* Return true if the move described by OPERANDS must be split into
   multiple instructions.  SYMBOLIC is nonzero when operand 1 may be a
   symbolic value rather than a plain constant.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching the I, J or O constraints fit a single insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants are only single-insn when the destination is (or may
     still become) one of the low eight core registers.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1057
1058/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1059 it to one specific value. So the insn chosen depends on whether
1060 the source and destination modes match. */
1061
1062bool
1063mep_vliw_mode_match (rtx tgt)
1064{
1065 bool src_vliw = mep_vliw_function_p (cfun->decl);
1066 bool tgt_vliw = INTVAL (tgt);
1067
1068 return src_vliw == tgt_vliw;
1069}
1070
2053b71f 1071/* Like the above, but also test for near/far mismatches. */
1072
1073bool
1074mep_vliw_jmp_match (rtx tgt)
1075{
1076 bool src_vliw = mep_vliw_function_p (cfun->decl);
1077 bool tgt_vliw = INTVAL (tgt);
1078
1079 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1080 return false;
1081
1082 return src_vliw == tgt_vliw;
1083}
1084
46222c18 1085bool
d3ffa7b4 1086mep_multi_slot (rtx_insn *x)
46222c18 1087{
1088 return get_attr_slot (x) == SLOT_MULTI;
1089}
1090
ca316360 1091/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
46222c18 1092
ca316360 1093static bool
3754d046 1094mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
f4eeff0e 1095{
1096 /* We can't convert symbol values to gp- or tp-rel values after
1097 reload, as reload might have used $gp or $tp for other
1098 purposes. */
1099 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1100 {
1101 char e = mep_section_tag (x);
1102 return (e != 't' && e != 'b');
1103 }
1104 return 1;
1105}
1106
46222c18 1107/* Be careful not to use macros that need to be compiled one way for
 1108 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
 1109
/* Return true if X is a legitimate address for a MODE access.  STRICT
   selects strict register-number checking (after reload).  Each
   accepted form is announced by the DEBUG_LEGIT fprintfs below.  */
 1110bool
3754d046 1111mep_legitimate_address (machine_mode mode, rtx x, int strict)
46222c18 1112{
 1113 int the_tag;
 1114
 1115#define DEBUG_LEGIT 0
 1116#if DEBUG_LEGIT
 1117 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
 1118 debug_rtx (x);
 1119#endif
 1120
  /* (lo_sum reg constant) — %lo(sym)[reg], from a HIGH/LO_SUM split.  */
 1121 if (GET_CODE (x) == LO_SUM
 1122 && GET_CODE (XEXP (x, 0)) == REG
 1123 && GEN_REG (REGNO (XEXP (x, 0)), strict)
 1124 && CONSTANT_P (XEXP (x, 1)))
 1125 {
 1126 if (GET_MODE_SIZE (mode) > 4)
 1127 {
 1128 /* We will end up splitting this, and lo_sums are not
 1129 offsettable for us. */
 1130#if DEBUG_LEGIT
 1131 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
 1132#endif
 1133 return false;
 1134 }
 1135#if DEBUG_LEGIT
 1136 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
 1137#endif
 1138 return true;
 1139 }
 1140
  /* Plain register indirect.  */
 1141 if (GET_CODE (x) == REG
 1142 && GEN_REG (REGNO (x), strict))
 1143 {
 1144#if DEBUG_LEGIT
 1145 fprintf (stderr, " - yup, [reg]\n");
 1146#endif
 1147 return true;
 1148 }
 1149
  /* Register plus a signed 16-bit displacement.  */
 1150 if (GET_CODE (x) == PLUS
 1151 && GET_CODE (XEXP (x, 0)) == REG
 1152 && GEN_REG (REGNO (XEXP (x, 0)), strict)
 1153 && const_in_range (XEXP (x, 1), -32768, 32767))
 1154 {
 1155#if DEBUG_LEGIT
 1156 fprintf (stderr, " - yup, [reg+const]\n");
 1157#endif
 1158 return true;
 1159 }
 1160
  /* Register plus an UNSPEC (possibly with a constant offset) — the
     tp-/gp-relative forms built by mep_expand_mov.  */
 1161 if (GET_CODE (x) == PLUS
 1162 && GET_CODE (XEXP (x, 0)) == REG
 1163 && GEN_REG (REGNO (XEXP (x, 0)), strict)
 1164 && GET_CODE (XEXP (x, 1)) == CONST
 1165 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
 1166 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
 1167 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
 1168 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
 1169 {
 1170#if DEBUG_LEGIT
 1171 fprintf (stderr, " - yup, [reg+unspec]\n");
 1172#endif
 1173 return true;
 1174 }
 1175
 1176 the_tag = mep_section_tag (x);
 1177
  /* Far-section symbols are never directly addressable.  */
 1178 if (the_tag == 'f')
 1179 {
 1180#if DEBUG_LEGIT
 1181 fprintf (stderr, " - nope, [far]\n");
 1182#endif
 1183 return false;
 1184 }
 1185
  /* VOIDmode is used for call addresses; a bare symbol is fine there.  */
 1186 if (mode == VOIDmode
 1187 && GET_CODE (x) == SYMBOL_REF)
 1188 {
 1189#if DEBUG_LEGIT
 1190 fprintf (stderr, " - yup, call [symbol]\n");
 1191#endif
 1192 return true;
 1193 }
 1194
  /* Absolute addresses for word-sized accesses; integer absolute
     addresses must be word-aligned and fit in 20 bits.  */
 1195 if ((mode == SImode || mode == SFmode)
 1196 && CONSTANT_P (x)
 1197ca316360 1197 && mep_legitimate_constant_p (mode, x)
46222c18 1198 && the_tag != 't' && the_tag != 'b')
 1199 {
 1200 if (GET_CODE (x) != CONST_INT
 1201 || (INTVAL (x) <= 0xfffff
 1202 && INTVAL (x) >= 0
 1203 && (INTVAL (x) % 4) == 0))
 1204 {
 1205#if DEBUG_LEGIT
 1206 fprintf (stderr, " - yup, [const]\n");
 1207#endif
 1208 return true;
 1209 }
 1210 }
 1211
 1212#if DEBUG_LEGIT
 1213 fprintf (stderr, " - nope.\n");
 1214#endif
 1215 return false;
 1216}
1217
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Return 1 when a reload has
   been pushed that makes *X valid (reload then finishes the job),
   0 to let the generic reload machinery handle it.  */
 1218int
3754d046 1219mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
7290d950 1220 int type_i,
46222c18 1221 int ind_levels ATTRIBUTE_UNUSED)
 1222{
7290d950 1223 enum reload_type type = (enum reload_type) type_i;
 1224
46222c18 1225 if (GET_CODE (*x) == PLUS
 1226 && GET_CODE (XEXP (*x, 0)) == MEM
 1227 && GET_CODE (XEXP (*x, 1)) == REG)
 1228 {
 1229 /* GCC will by default copy the MEM into a REG, which results in
 1230 an invalid address. For us, the best thing to do is move the
 1231 whole expression to a REG. */
 1232 push_reload (*x, NULL_RTX, x, NULL,
 1233 GENERAL_REGS, mode, VOIDmode,
 1234 0, 0, opnum, type);
 1235 return 1;
 1236 }
 1237
 1238 if (GET_CODE (*x) == PLUS
 1239 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
 1240 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
 1241 {
 1242 char e = mep_section_tag (XEXP (*x, 0));
 1243
  /* tp-/gp-relative symbols ('t'/'b') are left alone.  */
 1244 if (e != 't' && e != 'b')
 1245 {
 1246 /* GCC thinks that (sym+const) is a valid address. Well,
 1247 sometimes it is, this time it isn't. The best thing to
 1248 do is reload the symbol to a register, since reg+int
 1249 tends to work, and we can't just add the symbol and
 1250 constant anyway. */
 1251 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
 1252 GENERAL_REGS, mode, VOIDmode,
 1253 0, 0, opnum, type);
 1254 return 1;
 1255 }
 1256 }
 1257 return 0;
 1258}
1259
/* Return the encoded length in bytes of the address used by core
   load/store INSN, whose memory operand is operand OPN of its single
   SET: 2 for the short forms, 4 otherwise.  */
 1260int
d8691ecc 1261mep_core_address_length (rtx_insn *insn, int opn)
46222c18 1262{
 1263 rtx set = single_set (insn);
 1264 rtx mem = XEXP (set, opn);
 1265 rtx other = XEXP (set, 1-opn);
 1266 rtx addr = XEXP (mem, 0);
 1267
 1268 if (register_operand (addr, Pmode))
 1269 return 2;
 1270 if (GET_CODE (addr) == PLUS)
 1271 {
 1272 rtx addend = XEXP (addr, 1);
 1273
 1274 gcc_assert (REG_P (XEXP (addr, 0)));
 1275
 1276 switch (REGNO (XEXP (addr, 0)))
 1277 {
  /* $sp-relative word access with a small aligned offset is short.  */
 1278 case STACK_POINTER_REGNUM:
 1279 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
 1280 && mep_imm7a4_operand (addend, VOIDmode))
 1281 return 2;
 1282 break;
 1283
 1284 case 13: /* TP */
 1285 gcc_assert (REG_P (other));
 1286
  /* The short tp-relative forms only reach registers 0-7.  */
 1287 if (REGNO (other) >= 8)
 1288 break;
 1289
 1290 if (GET_CODE (addend) == CONST
 1291 && GET_CODE (XEXP (addend, 0)) == UNSPEC
 1292 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
 1293 return 2;
 1294
  /* Small, naturally aligned literal offsets are also short.  */
 1295 if (GET_CODE (addend) == CONST_INT
 1296 && INTVAL (addend) >= 0
 1297 && INTVAL (addend) <= 127
 1298 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
 1299 return 2;
 1300 break;
 1301 }
 1302 }
 1303
 1304 return 4;
 1305}
1306
1307int
d8691ecc 1308mep_cop_address_length (rtx_insn *insn, int opn)
46222c18 1309{
1310 rtx set = single_set (insn);
1311 rtx mem = XEXP (set, opn);
1312 rtx addr = XEXP (mem, 0);
1313
1314 if (GET_CODE (mem) != MEM)
1315 return 2;
1316 if (register_operand (addr, Pmode))
1317 return 2;
1318 if (GET_CODE (addr) == POST_INC)
1319 return 2;
1320
1321 return 4;
1322}
1323
 1324#define DEBUG_EXPAND_MOV 0
/* Expander for the mov patterns.  Return true if the move was fully
   emitted here; false lets the pattern's own expansion proceed.
   Handles rewriting of tp-/gp-relative symbols (section tags 'b' and
   't') and far-section ('f') accesses.  */
 1325bool
3754d046 1326mep_expand_mov (rtx *operands, machine_mode mode)
46222c18 1327{
 1328 int i, t;
 1329 int tag[2];
 1330 rtx tpsym, tpoffs;
 1331 int post_reload = 0;
 1332
 1333 tag[0] = mep_section_tag (operands[0]);
 1334 tag[1] = mep_section_tag (operands[1]);
 1335
  /* Before reload, force at least one operand into a register.  */
 1336 if (!reload_in_progress
 1337 && !reload_completed
 1338 && GET_CODE (operands[0]) != REG
 1339 && GET_CODE (operands[0]) != SUBREG
 1340 && GET_CODE (operands[1]) != REG
 1341 && GET_CODE (operands[1]) != SUBREG)
 1342 operands[1] = copy_to_mode_reg (mode, operands[1]);
 1343
 1344#if DEBUG_EXPAND_MOV
 1345 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
 1346 reload_in_progress || reload_completed);
 1347 debug_rtx (operands[0]);
 1348 debug_rtx (operands[1]);
 1349#endif
 1350
 1351 if (mode == DImode || mode == DFmode)
 1352 return false;
 1353
  /* Once reload has started we may no longer be able to materialize
     $gp/$tp; note when the relative forms must be avoided.  */
 1354 if (reload_in_progress || reload_completed)
 1355 {
 1356 rtx r;
 1357
 1358 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
 1359 cfun->machine->reload_changes_tp = true;
 1360
 1361 if (tag[0] == 't' || tag[1] == 't')
 1362 {
 1363 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
 1364 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
 1365 post_reload = 1;
 1366 }
 1367 if (tag[0] == 'b' || tag[1] == 'b')
 1368 {
 1369 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
 1370 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
 1371 post_reload = 1;
 1372 }
 1373 if (cfun->machine->reload_changes_tp == true)
 1374 post_reload = 1;
 1375 }
 1376
  /* Rewrite 'b'/'t'-section symbols as $tp/$gp plus an UNSPEC offset.
     NB: tag 'b' maps to UNS_TPREL/$tp and 't' to UNS_GPREL/$gp.  */
 1377 if (!post_reload)
 1378 {
 1379 rtx n;
 1380 if (symbol_p (operands[1]))
 1381 {
 1382 t = mep_section_tag (operands[1]);
 1383 if (t == 'b' || t == 't')
 1384 {
 1385
 1386 if (GET_CODE (operands[1]) == SYMBOL_REF)
 1387 {
 1388 tpsym = operands[1];
 1389 n = gen_rtx_UNSPEC (mode,
 1390 gen_rtvec (1, operands[1]),
 1391 t == 'b' ? UNS_TPREL : UNS_GPREL);
 1392 n = gen_rtx_CONST (mode, n);
 1393 }
 1394 else if (GET_CODE (operands[1]) == CONST
 1395 && GET_CODE (XEXP (operands[1], 0)) == PLUS
 1396 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
 1397 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
 1398 {
 1399 tpsym = XEXP (XEXP (operands[1], 0), 0);
 1400 tpoffs = XEXP (XEXP (operands[1], 0), 1);
 1401 n = gen_rtx_UNSPEC (mode,
 1402 gen_rtvec (1, tpsym),
 1403 t == 'b' ? UNS_TPREL : UNS_GPREL);
 1404 n = gen_rtx_PLUS (mode, n, tpoffs);
 1405 n = gen_rtx_CONST (mode, n);
 1406 }
 1407 else if (GET_CODE (operands[1]) == CONST
 1408 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
 1409 return false;
 1410 else
 1411 {
 1412 error ("unusual TP-relative address");
 1413 return false;
 1414 }
 1415
 1416 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
 1417 : mep_gp_rtx ()), n);
d1f9b275 1418 n = emit_insn (gen_rtx_SET (operands[0], n));
46222c18 1419#if DEBUG_EXPAND_MOV
 1420 fprintf(stderr, "mep_expand_mov emitting ");
 1421 debug_rtx(n);
 1422#endif
 1423 return true;
 1424 }
 1425 }
 1426
  /* Likewise rewrite MEMs whose address is 'b'/'t'-relative.  */
 1427 for (i=0; i < 2; i++)
 1428 {
 1429 t = mep_section_tag (operands[i]);
 1430 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
 1431 {
 1432 rtx sym, n, r;
 1433 int u;
 1434
 1435 sym = XEXP (operands[i], 0);
 1436 if (GET_CODE (sym) == CONST
 1437 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
 1438 sym = XVECEXP (XEXP (sym, 0), 0, 0);
 1439
 1440 if (t == 'b')
 1441 {
 1442 r = mep_tp_rtx ();
 1443 u = UNS_TPREL;
 1444 }
 1445 else
 1446 {
 1447 r = mep_gp_rtx ();
 1448 u = UNS_GPREL;
 1449 }
 1450
 1451 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
 1452 n = gen_rtx_CONST (Pmode, n);
 1453 n = gen_rtx_PLUS (Pmode, r, n);
 1454 operands[i] = replace_equiv_address (operands[i], n);
 1455 }
 1456 }
 1457 }
 1458
  /* Control-register <-> memory moves need a GPR intermediate.  */
 1459 if ((GET_CODE (operands[1]) != REG
 1460 && MEP_CONTROL_REG (operands[0]))
 1461 || (GET_CODE (operands[0]) != REG
 1462 && MEP_CONTROL_REG (operands[1])))
 1463 {
 1464 rtx temp;
 1465#if DEBUG_EXPAND_MOV
 1466 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
 1467#endif
 1468 temp = gen_reg_rtx (mode);
 1469 emit_move_insn (temp, operands[1]);
 1470 operands[1] = temp;
 1471 }
 1472
  /* Stores to far or non-word symbols go through an address register.  */
 1473 if (symbolref_p (operands[0])
 1474 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
 1475 || (GET_MODE_SIZE (mode) != 4)))
 1476 {
 1477 rtx temp;
 1478
 1479 gcc_assert (!reload_in_progress && !reload_completed);
 1480
 1481 temp = force_reg (Pmode, XEXP (operands[0], 0));
 1482 operands[0] = replace_equiv_address (operands[0], temp);
 1483 emit_move_insn (operands[0], operands[1]);
 1484 return true;
 1485 }
 1486
 1487 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
 1488 tag[1] = 0;
 1488
  /* Remaining symbolic sources are built with a top/bottom pair.  */
 1490 if (symbol_p (operands[1])
 1491 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
 1492 {
 1493 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
 1494 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
 1495 return true;
 1496 }
 1497
 1498 if (symbolref_p (operands[1])
 1499 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
 1500 {
 1501 rtx temp;
 1502
 1503 if (reload_in_progress || reload_completed)
 1504 temp = operands[0];
 1505 else
 1506 temp = gen_reg_rtx (Pmode);
 1507
 1508 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
 1509 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
 1510 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
 1511 return true;
 1512 }
 1513
 1514 return false;
 1515}
1516
 1517/* Cases where the pattern can't be made to use at all. */
 1518
/* Return true if a mov pattern may match OPERANDS directly.  */
 1519bool
3754d046 1520mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
46222c18 1521{
 1522 int i;
 1523
 1524#define DEBUG_MOV_OK 0
 1525#if DEBUG_MOV_OK
 1526 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
 1527 mep_section_tag (operands[1]));
 1528 debug_rtx (operands[0]);
 1529 debug_rtx (operands[1]);
 1530#endif
 1531
 1532 /* We want the movh patterns to get these. */
 1533 if (GET_CODE (operands[1]) == HIGH)
 1534 return false;
 1535
 1536 /* We can't store a register to a far variable without using a
 1537 scratch register to hold the address. Using far variables should
 1538 be split by mep_emit_mov anyway. */
 1539 if (mep_section_tag (operands[0]) == 'f'
 1540 || mep_section_tag (operands[1]) == 'f')
 1541 {
 1542#if DEBUG_MOV_OK
 1543 fprintf (stderr, " - no, f\n");
 1544#endif
 1545 return false;
 1546 }
 1547 i = mep_section_tag (operands[1]);
 1548 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
 1549 /* These are supposed to be generated with adds of the appropriate
 1550 register. During and after reload, however, we allow them to
 1551 be accessed as normal symbols because adding a dependency on
 1552 the base register now might cause problems. */
 1553 {
 1554#if DEBUG_MOV_OK
 1555 fprintf (stderr, " - no, bt\n");
 1556#endif
 1557 return false;
 1558 }
 1559
 1560 /* The only moves we can allow involve at least one general
 1561 register, so require it. */
 1562 for (i = 0; i < 2; i ++)
 1563 {
 1564 /* Allow subregs too, before reload. */
 1565 rtx x = operands[i];
 1566
 1567 if (GET_CODE (x) == SUBREG)
 1568 x = XEXP (x, 0);
 1569 if (GET_CODE (x) == REG
 1570 && ! MEP_CONTROL_REG (x))
 1571 {
 1572#if DEBUG_MOV_OK
 1573 fprintf (stderr, " - ok\n");
 1574#endif
 1575 return true;
 1576 }
 1577 }
 1578#if DEBUG_MOV_OK
 1579 fprintf (stderr, " - no, no gen reg\n");
 1580#endif
 1581 return false;
 1582}
1583
 1584#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the double-word move OPERANDS[0] = OPERANDS[1] into two
   single-word moves: the high halves go in OPERANDS[2]/[3] and the
   low halves in OPERANDS[4]/[5], for use by the wide-move splitters.  */
 1585void
3754d046 1586mep_split_wide_move (rtx *operands, machine_mode mode)
46222c18 1587{
 1588 int i;
 1589
 1590#if DEBUG_SPLIT_WIDE_MOVE
 1591 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
 1592 debug_rtx (operands[0]);
 1593 debug_rtx (operands[1]);
 1594#endif
 1595
 1596 for (i = 0; i <= 1; i++)
 1597 {
 1598 rtx op = operands[i], hi, lo;
 1599
 1600 switch (GET_CODE (op))
 1601 {
 1602 case REG:
 1603 {
 1604 unsigned int regno = REGNO (op);
 1605
  /* A 64-bit coprocessor register is a single register; the high
     half is expressed as a zero_extract of its upper 32 bits.  */
 1606 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
 1607 {
 1608 rtx i32;
 1609
 1610 lo = gen_rtx_REG (SImode, regno);
 1611 i32 = GEN_INT (32);
 1612 hi = gen_rtx_ZERO_EXTRACT (SImode,
 1613 gen_rtx_REG (DImode, regno),
 1614 i32, i32);
 1615 }
 1616 else
 1617 {
  /* Otherwise a register pair; endianness picks which is high.  */
 1618 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
 1619 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
 1620 }
 1621 }
 1622 break;
 1623
 1624 case CONST_INT:
 1625 case CONST_DOUBLE:
 1626 case MEM:
 1627 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
 1628 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
 1629 break;
 1630
 1631 default:
 1632 gcc_unreachable ();
 1633 }
 1634
 1635 /* The high part of CR <- GPR moves must be done after the low part. */
 1636 operands [i + 4] = lo;
 1637 operands [i + 2] = hi;
 1638 }
 1639
 1640 if (reg_mentioned_p (operands[2], operands[5])
 1641 || GET_CODE (operands[2]) == ZERO_EXTRACT
 1642 || GET_CODE (operands[4]) == ZERO_EXTRACT)
 1643 {
 1644 rtx tmp;
 1645
 1646 /* Overlapping register pairs -- make sure we don't
 1647 early-clobber ourselves. */
 1648 tmp = operands[2];
 1649 operands[2] = operands[4];
 1650 operands[4] = tmp;
 1651 tmp = operands[3];
 1652 operands[3] = operands[5];
 1653 operands[5] = tmp;
 1654 }
 1655
 1656#if DEBUG_SPLIT_WIDE_MOVE
 1657 fprintf(stderr, "\033[34m");
 1658 debug_rtx (operands[2]);
 1659 debug_rtx (operands[3]);
 1660 debug_rtx (operands[4]);
 1661 debug_rtx (operands[5]);
 1662 fprintf(stderr, "\033[0m");
 1663#endif
 1664}
1665
1666/* Emit a setcc instruction in its entirity. */
1667
1668static bool
1669mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1670{
1671 rtx tmp;
1672
1673 switch (code)
1674 {
1675 case GT:
1676 case GTU:
1677 tmp = op1, op1 = op2, op2 = tmp;
1678 code = swap_condition (code);
1679 /* FALLTHRU */
1680
1681 case LT:
1682 case LTU:
1683 op1 = force_reg (SImode, op1);
d1f9b275 1684 emit_insn (gen_rtx_SET (dest, gen_rtx_fmt_ee (code, SImode, op1, op2)));
46222c18 1685 return true;
1686
1687 case EQ:
1688 if (op2 != const0_rtx)
1689 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1690 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1691 return true;
1692
1693 case NE:
1694 /* Branchful sequence:
1695 mov dest, 0 16-bit
1696 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1697 mov dest, 1 16-bit
1698
1699 Branchless sequence:
1700 add3 tmp, op1, -op2 32-bit (or mov + sub)
1701 sltu3 tmp, tmp, 1 16-bit
1702 xor3 dest, tmp, 1 32-bit
1703 */
1704 if (optimize_size && op2 != const0_rtx)
1705 return false;
1706
1707 if (op2 != const0_rtx)
1708 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1709
1710 op2 = gen_reg_rtx (SImode);
1711 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1712
d1f9b275 1713 emit_insn (gen_rtx_SET (dest, gen_rtx_XOR (SImode, op2, const1_rtx)));
46222c18 1714 return true;
1715
1716 case LE:
1717 if (GET_CODE (op2) != CONST_INT
1718 || INTVAL (op2) == 0x7ffffff)
1719 return false;
1720 op2 = GEN_INT (INTVAL (op2) + 1);
1721 return mep_expand_setcc_1 (LT, dest, op1, op2);
1722
1723 case LEU:
1724 if (GET_CODE (op2) != CONST_INT
1725 || INTVAL (op2) == -1)
1726 return false;
1727 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1728 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1729
1730 case GE:
1731 if (GET_CODE (op2) != CONST_INT
1732 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1733 return false;
1734 op2 = GEN_INT (INTVAL (op2) - 1);
1735 return mep_expand_setcc_1 (GT, dest, op1, op2);
1736
1737 case GEU:
1738 if (GET_CODE (op2) != CONST_INT
1739 || op2 == const0_rtx)
1740 return false;
1741 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1742 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1743
1744 default:
1745 gcc_unreachable ();
1746 }
1747}
1748
1749bool
1750mep_expand_setcc (rtx *operands)
1751{
1752 rtx dest = operands[0];
1753 enum rtx_code code = GET_CODE (operands[1]);
1754 rtx op0 = operands[2];
1755 rtx op1 = operands[3];
1756
1757 return mep_expand_setcc_1 (code, dest, op0, op1);
1758}
1759
/* Expand a conditional-branch comparison.  OPERANDS[0] holds the
   comparison code, OPERANDS[1]/[2] the operands.  Codes the hardware
   cannot branch on directly are reduced to EQ/NE against zero via
   setcc sequences; the resulting comparison rtx is returned.  */
 1760rtx
 1761mep_expand_cbranch (rtx *operands)
 1762{
 1763 enum rtx_code code = GET_CODE (operands[0]);
 1764 rtx op0 = operands[1];
 1765 rtx op1 = operands[2];
 1766 rtx tmp;
 1767
 1768 restart:
 1769 switch (code)
 1770 {
 1771 case LT:
 1772 if (mep_imm4_operand (op1, SImode))
 1773 break;
 1774
 1775 tmp = gen_reg_rtx (SImode);
 1776 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
 1777 code = NE;
 1778 op0 = tmp;
 1779 op1 = const0_rtx;
 1780 break;
 1781
 1782 case GE:
  /* GE is the negation of LT: compute LT and branch on EQ 0.  */
 1783 if (mep_imm4_operand (op1, SImode))
 1784 break;
 1785
 1786 tmp = gen_reg_rtx (SImode);
 1787 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
 1788
 1789 code = EQ;
 1790 op0 = tmp;
 1791 op1 = const0_rtx;
 1792 break;
 1793
 1794 case EQ:
 1795 case NE:
 1796 if (! mep_reg_or_imm4_operand (op1, SImode))
 1797 op1 = force_reg (SImode, op1);
 1798 break;
 1799
 1800 case LE:
 1801 case GT:
  /* Prefer adjusting the constant and retrying as LT/GE.  */
 1802 if (GET_CODE (op1) == CONST_INT
 1803 && INTVAL (op1) != 0x7fffffff)
 1804 {
 1805 op1 = GEN_INT (INTVAL (op1) + 1);
 1806 code = (code == LE ? LT : GE);
 1807 goto restart;
 1808 }
 1809
 1810 tmp = gen_reg_rtx (SImode);
 1811 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
 1812
 1813 code = (code == LE ? EQ : NE);
 1814 op0 = tmp;
 1815 op1 = const0_rtx;
 1816 break;
 1817
 1818 case LTU:
  /* (x <u 1) is simply (x == 0).  */
 1819 if (op1 == const1_rtx)
 1820 {
 1821 code = EQ;
 1822 op1 = const0_rtx;
 1823 break;
 1824 }
 1825
 1826 tmp = gen_reg_rtx (SImode);
 1827 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
 1828 code = NE;
 1829 op0 = tmp;
 1830 op1 = const0_rtx;
 1831 break;
 1832
 1833 case LEU:
  /* Try LEU directly, else its swapped negation LTU(op1, op0).  */
 1834 tmp = gen_reg_rtx (SImode);
 1835 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
 1836 code = NE;
 1837 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
 1838 code = EQ;
 1839 else
 1840 gcc_unreachable ();
 1841 op0 = tmp;
 1842 op1 = const0_rtx;
 1843 break;
 1844
 1845 case GTU:
 1846 tmp = gen_reg_rtx (SImode);
 1847 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
 1848 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
 1849 code = NE;
 1850 op0 = tmp;
 1851 op1 = const0_rtx;
 1852 break;
 1853
 1854 case GEU:
 1855 tmp = gen_reg_rtx (SImode);
 1856 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
 1857 code = NE;
 1858 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
 1859 code = EQ;
 1860 else
 1861 gcc_unreachable ();
 1862 op0 = tmp;
 1863 op1 = const0_rtx;
 1864 break;
 1865
 1866 default:
 1867 gcc_unreachable ();
 1868 }
 1869
 1870 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
 1871}
1872
1873const char *
1874mep_emit_cbranch (rtx *operands, int ne)
1875{
1876 if (GET_CODE (operands[1]) == REG)
1877 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
d22d3228 1878 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
46222c18 1879 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1880 else
1881 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1882}
1883
/* Expand a call or call_value pattern.  RETURNS_VALUE is nonzero for
   call_value, in which case OPERANDS[0] is the value destination and
   the call address is OPERANDS[1]; otherwise the address is
   OPERANDS[0].  The emitted insn also carries $tp and $gp.  */
 1884void
 1885mep_expand_call (rtx *operands, int returns_value)
 1886{
 1887 rtx addr = operands[returns_value];
 1888 rtx tp = mep_tp_rtx ();
 1889 rtx gp = mep_gp_rtx ();
 1890
 1891 gcc_assert (GET_CODE (addr) == MEM);
 1892
 1893 addr = XEXP (addr, 0);
 1894
 1895 if (! mep_call_address_operand (addr, VOIDmode))
 1896 addr = force_reg (SImode, addr);
 1897
  /* Default a missing trailing operand to zero.  */
 1898 if (! operands[returns_value+2])
 1899 operands[returns_value+2] = const0_rtx;
 1900
 1901 if (returns_value)
 1902 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
 1903 operands[3], tp, gp));
 1904 else
 1905 emit_call_insn (gen_call_internal (addr, operands[1],
 1906 operands[2], tp, gp));
 1907}
1908\f
1909/* Aliasing Support. */
1910
1911/* If X is a machine specific address (i.e. a symbol or label being
1912 referenced as a displacement from the GOT implemented using an
1913 UNSPEC), then return the base term. Otherwise return X. */
1914
1915rtx
1916mep_find_base_term (rtx x)
1917{
1918 rtx base, term;
1919 int unspec;
1920
1921 if (GET_CODE (x) != PLUS)
1922 return x;
1923 base = XEXP (x, 0);
1924 term = XEXP (x, 1);
1925
1926 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1927 && base == mep_tp_rtx ())
1928 unspec = UNS_TPREL;
1929 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1930 && base == mep_gp_rtx ())
1931 unspec = UNS_GPREL;
1932 else
1933 return x;
1934
1935 if (GET_CODE (term) != CONST)
1936 return x;
1937 term = XEXP (term, 0);
1938
1939 if (GET_CODE (term) != UNSPEC
1940 || XINT (term, 1) != unspec)
1941 return x;
1942
1943 return XVECEXP (term, 0, 0);
1944}
1945\f
1946/* Reload Support. */
1947
1948/* Return true if the registers in CLASS cannot represent the change from
1949 modes FROM to TO. */
1950
1951bool
3754d046 1952mep_cannot_change_mode_class (machine_mode from, machine_mode to,
46222c18 1953 enum reg_class regclass)
1954{
1955 if (from == to)
1956 return false;
1957
1958 /* 64-bit COP regs must remain 64-bit COP regs. */
1959 if (TARGET_64BIT_CR_REGS
1960 && (regclass == CR_REGS
1961 || regclass == LOADABLE_CR_REGS)
1962 && (GET_MODE_SIZE (to) < 8
1963 || GET_MODE_SIZE (from) < 8))
1964 return true;
1965
1966 return false;
1967}
1968
1969#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1970
1971static bool
1972mep_general_reg (rtx x)
1973{
1974 while (GET_CODE (x) == SUBREG)
1975 x = XEXP (x, 0);
1976 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1977}
1978
1979static bool
1980mep_nongeneral_reg (rtx x)
1981{
1982 while (GET_CODE (x) == SUBREG)
1983 x = XEXP (x, 0);
1984 return (GET_CODE (x) == REG
1985 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1986}
1987
1988static bool
1989mep_general_copro_reg (rtx x)
1990{
1991 while (GET_CODE (x) == SUBREG)
1992 x = XEXP (x, 0);
1993 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1994}
1995
1996static bool
1997mep_nonregister (rtx x)
1998{
1999 while (GET_CODE (x) == SUBREG)
2000 x = XEXP (x, 0);
2001 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2002}
2003
 2004#define DEBUG_RELOAD 0
 2005
 2006/* Return the secondary reload class needed for moving value X to or
 2007 from a register in coprocessor register class CLASS. */
 2008
 2009static enum reg_class
 2010mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
 2011{
 2012 if (mep_general_reg (x))
 2013 /* We can do the move directly if mep_have_core_copro_moves_p,
 2014 otherwise we need to go through memory. Either way, no secondary
 2015 register is needed. */
 2016 return NO_REGS;
 2017
 2018 if (mep_general_copro_reg (x))
 2019 {
 2020 /* We can do the move directly if mep_have_copro_copro_moves_p. */
 2021 if (mep_have_copro_copro_moves_p)
 2022 return NO_REGS;
 2023
 2024 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
 2025 if (mep_have_core_copro_moves_p)
 2026 return GENERAL_REGS;
 2027
 2028 /* Otherwise we need to do it through memory. No secondary
 2029 register is needed. */
 2030 return NO_REGS;
 2031 }
 2032
  /* Memory the copro load/store instructions can address directly.  */
 2033 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
 2034 && constraint_satisfied_p (x, CONSTRAINT_U))
 2035 /* X is a memory value that we can access directly. */
 2036 return NO_REGS;
 2037
 2038 /* We have to move X into a GPR first and then copy it to
 2039 the coprocessor register. The move from the GPR to the
 2040 coprocessor might be done directly or through memory,
 2041 depending on mep_have_core_copro_moves_p. */
 2042 return GENERAL_REGS;
 2043}
2044
 2045/* Copying X to register in RCLASS. */
 2046
/* Return the secondary reload class (NO_REGS when none is needed)
   for loading X into a register of RCLASS.  */
7290d950 2047enum reg_class
46222c18 2048mep_secondary_input_reload_class (enum reg_class rclass,
3754d046 2049 machine_mode mode ATTRIBUTE_UNUSED,
46222c18 2050 rtx x)
 2051{
 2052 int rv = NO_REGS;
 2053
 2054#if DEBUG_RELOAD
 2055 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
 2056 debug_rtx (x);
 2057#endif
 2058
 2059 if (reg_class_subset_p (rclass, CR_REGS))
 2060 rv = mep_secondary_copro_reload_class (rclass, x);
  /* Non-general destinations can only be loaded from a GPR.  */
 2061 else if (MEP_NONGENERAL_CLASS (rclass)
 2062 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
 2063 rv = GENERAL_REGS;
 2064
 2065#if DEBUG_RELOAD
 2066 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
 2067#endif
7290d950 2068 return (enum reg_class) rv;
46222c18 2069}
2070
 2071/* Copying register in RCLASS to X. */
 2072
/* Return the secondary reload class (NO_REGS when none is needed)
   for storing a register of RCLASS into X; mirror of the input
   function above.  */
7290d950 2073enum reg_class
46222c18 2074mep_secondary_output_reload_class (enum reg_class rclass,
3754d046 2075 machine_mode mode ATTRIBUTE_UNUSED,
46222c18 2076 rtx x)
 2077{
 2078 int rv = NO_REGS;
 2079
 2080#if DEBUG_RELOAD
 2081 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
 2082 debug_rtx (x);
 2083#endif
 2084
 2085 if (reg_class_subset_p (rclass, CR_REGS))
 2086 rv = mep_secondary_copro_reload_class (rclass, x);
 2087 else if (MEP_NONGENERAL_CLASS (rclass)
 2088 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
 2089 rv = GENERAL_REGS;
 2090
 2091#if DEBUG_RELOAD
 2092 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
 2093#endif
 2094
7290d950 2095 return (enum reg_class) rv;
46222c18 2096}
2097
2098/* Implement SECONDARY_MEMORY_NEEDED. */
2099
2100bool
2101mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
3754d046 2102 machine_mode mode ATTRIBUTE_UNUSED)
46222c18 2103{
2104 if (!mep_have_core_copro_moves_p)
2105 {
2106 if (reg_classes_intersect_p (rclass1, CR_REGS)
2107 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2108 return true;
2109 if (reg_classes_intersect_p (rclass2, CR_REGS)
2110 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2111 return true;
2112 if (!mep_have_copro_copro_moves_p
2113 && reg_classes_intersect_p (rclass1, CR_REGS)
2114 && reg_classes_intersect_p (rclass2, CR_REGS))
2115 return true;
2116 }
2117 return false;
2118}
2119
/* Emit the reload sequence selected by the operand kinds; OPERANDS[2]
   is the scratch register provided by the reload pattern.  */
 2120void
3754d046 2121mep_expand_reload (rtx *operands, machine_mode mode)
46222c18 2122{
 2123 /* There are three cases for each direction:
 2124 register, farsym
 2125 control, farsym
 2126 control, nearsym */
 2127
 2128 int s0 = mep_section_tag (operands[0]) == 'f';
 2129 int s1 = mep_section_tag (operands[1]) == 'f';
 2130 int c0 = mep_nongeneral_reg (operands[0]);
 2131 int c1 = mep_nongeneral_reg (operands[1]);
  /* Two decimal digits: tens digit describes the destination
     (2 = far, 1 = control reg), units digit the source.  */
 2132 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
 2133
 2134#if DEBUG_RELOAD
 2135 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
 2136 debug_rtx (operands[0]);
 2137 debug_rtx (operands[1]);
 2138#endif
 2139
 2140 switch (which)
 2141 {
  /* NB: the leading-zero labels below are octal constants, but since
     every digit is < 8 they equal their decimal reading.  */
 2142 case 00: /* Don't know why this gets here. */
 2143 case 02: /* general = far */
 2144 emit_move_insn (operands[0], operands[1]);
 2145 return;
 2146
 2147 case 10: /* cr = mem */
 2148 case 11: /* cr = cr */
 2149 case 01: /* mem = cr */
 2150 case 12: /* cr = far */
 2151 emit_move_insn (operands[2], operands[1]);
 2152 emit_move_insn (operands[0], operands[2]);
 2153 return;
 2154
 2155 case 20: /* far = general */
 2156 emit_move_insn (operands[2], XEXP (operands[1], 0));
 2157 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
 2158 return;
 2159
 2160 case 21: /* far = cr */
 2161 case 22: /* far = far */
 2162 default:
 2163 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
 2164 which, mode_name[mode]);
 2165 debug_rtx (operands[0]);
 2166 debug_rtx (operands[1]);
 2167 gcc_unreachable ();
 2168 }
 2169}
2170
2171/* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2172 can be moved directly into registers 0 to 7, but not into the rest.
2173 If so, and if the required class includes registers 0 to 7, restrict
2174 it to those registers. */
2175
2176enum reg_class
2177mep_preferred_reload_class (rtx x, enum reg_class rclass)
2178{
2179 switch (GET_CODE (x))
2180 {
2181 case CONST_INT:
2182 if (INTVAL (x) >= 0x10000
2183 && INTVAL (x) < 0x01000000
2184 && (INTVAL (x) & 0xffff) != 0
2185 && reg_class_subset_p (TPREL_REGS, rclass))
2186 rclass = TPREL_REGS;
2187 break;
2188
2189 case CONST:
2190 case SYMBOL_REF:
2191 case LABEL_REF:
2192 if (mep_section_tag (x) != 'f'
2193 && reg_class_subset_p (TPREL_REGS, rclass))
2194 rclass = TPREL_REGS;
2195 break;
2196
2197 default:
2198 break;
2199 }
2200 return rclass;
2201}
2202\f
 2203/* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
 2204 moves, 4 for direct double-register moves, and 1000 for anything
 2205 that requires a temporary register or temporary stack slot. */
 2206
 2207int
3754d046 2208mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
46222c18 2209{
  /* Copro <-> copro with direct moves available.  */
 2210 if (mep_have_copro_copro_moves_p
 2211 && reg_class_subset_p (from, CR_REGS)
 2212 && reg_class_subset_p (to, CR_REGS))
 2213 {
 2214 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
 2215 return 4;
 2216 return 2;
 2217 }
  /* Copro <-> copro without direct moves: twice the cost.  */
 2218 if (reg_class_subset_p (from, CR_REGS)
 2219 && reg_class_subset_p (to, CR_REGS))
 2220 {
 2221 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
 2222 return 8;
 2223 return 4;
 2224 }
  /* Copro <-> core.  */
 2225 if (reg_class_subset_p (from, CR_REGS)
 2226 || reg_class_subset_p (to, CR_REGS))
 2227 {
 2228 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
 2229 return 4;
 2230 return 2;
 2231 }
 2232 if (mep_secondary_memory_needed (from, to, mode))
 2233 return 1000;
 2234 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
 2235 return 1000;
 2236
 2237 if (GET_MODE_SIZE (mode) > 4)
 2238 return 4;
 2239
 2240 return 2;
 2241}
2242
2243\f
 2244/* Functions to save and restore machine-specific function data. */
 2245
/* init_machine_status hook: allocate a zero-initialized, GC-managed
   machine_function for the current function.  */
 2246static struct machine_function *
 2247mep_init_machine_status (void)
 2248{
25a27413 2249 return ggc_cleared_alloc<machine_function> ();
46222c18 2250}
2251
/* allocate_initial_value hook for hard register REG: return the stack
   slot (relative to the arg pointer) where REG's incoming value is
   saved, lazily assigning a new 4-byte slot on first use; NULL_RTX
   when REG gets no slot.  */
 2252static rtx
 2253mep_allocate_initial_value (rtx reg)
 2254{
 2255 int rss;
 2256
 2257 if (GET_CODE (reg) != REG)
 2258 return NULL_RTX;
 2259
 2260 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
 2261 return NULL_RTX;
 2262
 2263 /* In interrupt functions, the "initial" values of $gp and $tp are
 2264 provided by the prologue. They are not necessarily the same as
 2265 the values that the caller was using. */
 2266 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
 2267 if (mep_interrupt_p ())
 2268 return NULL_RTX;
 2269
 2270 if (! cfun->machine->reg_save_slot[REGNO(reg)])
 2271 {
 2272 cfun->machine->reg_save_size += 4;
 2273 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
 2274 }
 2275
 2276 rss = cfun->machine->reg_save_slot[REGNO(reg)];
29c05e22 2277 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
46222c18 2278}
2279
2280rtx
2281mep_return_addr_rtx (int count)
2282{
2283 if (count != 0)
2284 return const0_rtx;
2285
2286 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2287}
2288
/* Return an rtx for the entry value of $tp (the tiny-data base).  */
static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2294
/* Return an rtx for the entry value of $gp (the small-data base).  */
static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2300
/* Return true if the current function carries the "interrupt"
   attribute.  The result is cached in
   cfun->machine->interrupt_handler: 0 = not yet computed, 1 = no,
   2 = yes.  */
static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
2314
/* Return true if the current function carries the "disinterrupt"
   attribute (run with interrupts disabled).  Cached in
   cfun->machine->disable_interrupts with the same 0/1/2 encoding as
   mep_interrupt_p.  */
static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
	= (lookup_attribute ("disinterrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2328
2329\f
2330/* Frame/Epilog/Prolog Related. */
2331
/* Return true if INSN may set REG.  Similar to reg_set_p in rtlanal.c,
   but calls are ignored and a self-move (REG copied to itself) does
   not count as a set.  */
static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* An auto-increment of REG counts as a set.  */
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A no-op register-to-itself move is not a real set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2351
2352
/* Cache states used in cfun->machine->reg_saved[] by
   mep_call_saves_register: whether a register must be saved by the
   prologue.  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2357
/* Return true if hard register REGNO is (or must conservatively be
   assumed to be) written somewhere in the current function.  */
static bool
mep_reg_set_in_function (int regno)
{
  rtx reg;
  rtx_insn *insn;

  /* In an interrupt handler, any live register counts as set.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers $lp.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  /* Scan the entire insn stream for a real set of REG.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2384
/* Return true if the current function contains an asm statement with
   no operands (such asms may touch any register, so interrupt
   handlers must save everything).  Cached in
   cfun->machine->asms_without_operands: 0 = unknown, 1 = no, 2 = yes.  */
static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx_insn *insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  /* A bare asm appears as a top-level ASM_INPUT pattern.  */
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}

    }
  return cfun->machine->asms_without_operands == 2;
}
2411
2412/* Interrupt functions save/restore every call-preserved register, and
2413 any call-used register it uses (or all if it calls any function,
2414 since they may get clobbered there too). Here we check to see
2415 which call-used registers need saving. */
2416
aecef6a2 2417#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2418 && (r == FIRST_CCR_REGNO + 1 \
2419 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2420 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2421
46222c18 2422static bool
2423mep_interrupt_saved_reg (int r)
2424{
2425 if (!mep_interrupt_p ())
2426 return false;
2427 if (r == REGSAVE_CONTROL_TEMP
2428 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2429 return true;
2430 if (mep_asm_without_operands_p ()
2431 && (!fixed_regs[r]
aecef6a2 2432 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2433 || IVC2_ISAVED_REG (r)))
46222c18 2434 return true;
d5bf7b64 2435 if (!crtl->is_leaf)
46222c18 2436 /* Function calls mean we need to save $lp. */
aecef6a2 2437 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
46222c18 2438 return true;
d5bf7b64 2439 if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
46222c18 2440 /* The interrupt handler might use these registers for repeat blocks,
2441 or it might call a function that does so. */
2442 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2443 return true;
d5bf7b64 2444 if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
46222c18 2445 return false;
2446 /* Functions we call might clobber these. */
2447 if (call_used_regs[r] && !fixed_regs[r])
2448 return true;
32a5f905 2449 /* Additional registers that need to be saved for IVC2. */
aecef6a2 2450 if (IVC2_ISAVED_REG (r))
32a5f905 2451 return true;
2452
46222c18 2453 return false;
2454}
2455
/* Return true if register R must be saved by this function's prologue.
   The answer is computed once and cached in
   cfun->machine->reg_saved[]; after the frame layout has been locked
   (cfun->machine->frame_locked) only the cache is consulted.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling code clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2481
/* Return true if epilogue uses register REGNO. */

bool
mep_epilogue_uses (int regno)
{
  /* Since $lp is a call-saved register, the generic code will normally
     mark it used in the epilogue if it needs to be saved and restored.
     However, when profiling is enabled, the profiling code will implicitly
     clobber $11.  This case has to be handled specially both here and in
     mep_call_saves_register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;
  /* Interrupt functions save/restore pretty much everything.  */
  return (reload_completed && mep_interrupt_saved_reg (regno));
}
2497
2498static int
2499mep_reg_size (int regno)
2500{
2501 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2502 return 8;
2503 return 4;
2504}
2505
cd90919d 2506/* Worker function for TARGET_CAN_ELIMINATE. */
2507
2508bool
2509mep_can_eliminate (const int from, const int to)
2510{
2511 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2512 ? ! frame_pointer_needed
2513 : true);
2514}
2515
46222c18 2516int
2517mep_elimination_offset (int from, int to)
2518{
2519 int reg_save_size;
2520 int i;
2521 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2522 int total_size;
2523
142c2869 2524 if (!cfun->machine->frame_locked)
2525 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
46222c18 2526
2527 /* We don't count arg_regs_to_save in the arg pointer offset, because
2528 gcc thinks the arg pointer has moved along with the saved regs.
2529 However, we do count it when we adjust $sp in the prologue. */
2530 reg_save_size = 0;
2531 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2532 if (mep_call_saves_register (i))
2533 reg_save_size += mep_reg_size (i);
2534
2535 if (reg_save_size % 8)
2536 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2537 else
2538 cfun->machine->regsave_filler = 0;
2539
2540 /* This is what our total stack adjustment looks like. */
2541 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2542
2543 if (total_size % 8)
2544 cfun->machine->frame_filler = 8 - (total_size % 8);
2545 else
2546 cfun->machine->frame_filler = 0;
2547
2548
2549 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2550 return reg_save_size + cfun->machine->regsave_filler;
2551
2552 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2553 return cfun->machine->frame_filler + frame_size;
2554
2555 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2556 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2557
2558 gcc_unreachable ();
2559}
2560
d8691ecc 2561static rtx_insn *
2562F (rtx_insn *x)
46222c18 2563{
2564 RTX_FRAME_RELATED_P (x) = 1;
2565 return x;
2566}
2567
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.

   Emits insns for DEST = SRC + VALUE (all hard register numbers).
   When MARK_FRAME is nonzero the emitted insns are flagged
   RTX_FRAME_RELATED_P for unwind info.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  /* Nothing to do at all.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* The constant fits in a single add immediate.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* Tell the unwinder the net effect, since the temp-register
	 dance above is not a form it can parse directly.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2631
46222c18 2632/* Move SRC to DEST. Mark the move as being potentially dead if
2633 MAYBE_DEAD_P. */
2634
d8691ecc 2635static rtx_insn *
46222c18 2636maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2637{
d8691ecc 2638 rtx_insn *insn = emit_move_insn (dest, src);
46222c18 2639#if 0
2640 if (maybe_dead_p)
2641 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2642#endif
2643 return insn;
2644}
2645
/* Used for interrupt functions, which can't assume that $tp and $gp
   contain the correct pointers.  Reload hard register REGNO with the
   address of SYMBOL via a hi/lo symbol pair; skipped when the register
   is provably unused in a leaf function.  */

static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
    return;

  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2662
142c2869 2663/* Assign save slots for any register not already saved. DImode
2664 registers go at the end of the reg save area; the rest go at the
2665 beginning. This is for alignment purposes. Returns true if a frame
2666 is really needed. */
2667static bool
2668mep_assign_save_slots (int reg_save_size)
46222c18 2669{
142c2869 2670 bool really_need_stack_frame = false;
46222c18 2671 int di_ofs = 0;
142c2869 2672 int i;
46222c18 2673
46222c18 2674 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2675 if (mep_call_saves_register(i))
2676 {
2677 int regsize = mep_reg_size (i);
2678
2679 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2680 || mep_reg_set_in_function (i))
142c2869 2681 really_need_stack_frame = true;
46222c18 2682
2683 if (cfun->machine->reg_save_slot[i])
2684 continue;
2685
2686 if (regsize < 8)
2687 {
2688 cfun->machine->reg_save_size += regsize;
2689 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2690 }
2691 else
2692 {
2693 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2694 di_ofs += 8;
2695 }
2696 }
142c2869 2697 cfun->machine->frame_locked = 1;
2698 return really_need_stack_frame;
2699}
2700
/* Expand the function prologue: disable interrupts if requested,
   adjust $sp, store every register that mep_call_saves_register says
   needs saving, set up the frame pointer, and (for interrupt handlers)
   reload $gp/$tp.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* When the whole frame is small, do the full $sp adjustment up
     front so the saves below use short offsets.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need no store here if the function never
	   changes them (non-interrupt case).  */
  	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control registers are stored as two word halves
	       through the REGSAVE_CONTROL_TEMP register pair.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Describe the full 64-bit save to the unwinder.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Non-loadable control register: bounce through the temp.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Perform whatever part of the $sp adjustment is still outstanding.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2827
/* Emit a human-readable summary of the frame layout as assembler
   comments at the start of the function.  FILE is the assembly output;
   HWI_LOCAL is the size of local variables.  Also picks the name used
   for register 8 ($fp vs $8) depending on whether this function uses a
   frame pointer.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  /* NOTE(review): this initializer is dead — frame_size is
     unconditionally recomputed below from mep_elimination_offset.  */
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, emit only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Mirror the skip condition used when actually emitting saves.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2925
2926
/* Set while expanding an eh_return epilogue, which must not restore
   $lp, and while expanding a sibcall epilogue, which must not emit the
   final return, respectively.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
2929
2930void
2931mep_expand_epilogue (void)
2932{
2933 int i, sp_offset = 0;
2934 int reg_save_size = 0;
2935 int frame_size;
2936 int lp_temp = LP_REGNO, lp_slot = -1;
2937 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2938 int interrupt_handler = mep_interrupt_p ();
2939
2940 if (profile_arc_flag == 2)
2941 emit_insn (gen_mep_bb_trace_ret ());
2942
2943 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2944 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2945
142c2869 2946 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
46222c18 2947
2948 if (frame_pointer_needed)
2949 {
2950 /* If we have a frame pointer, we won't have a reliable stack
2951 pointer (alloca, you know), so rebase SP from FP */
2952 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
2953 gen_rtx_REG (SImode, FP_REGNO));
2954 sp_offset = reg_save_size;
2955 }
2956 else
2957 {
2958 /* SP is right under our local variable space. Adjust it if
2959 needed. */
2960 sp_offset = reg_save_size + frame_size;
2961 if (sp_offset >= 128)
2962 {
2963 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
2964 sp_offset -= frame_size;
2965 }
2966 }
2967
2968 /* This is backwards so that we restore the control and coprocessor
2969 registers before the temporary registers we use to restore
2970 them. */
2971 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
2972 if (mep_call_saves_register (i))
2973 {
3754d046 2974 machine_mode rmode;
46222c18 2975 int rss = cfun->machine->reg_save_slot[i];
2976
2977 if (mep_reg_size (i) == 8)
2978 rmode = DImode;
2979 else
2980 rmode = SImode;
2981
2982 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2983 && !(mep_reg_set_in_function (i) || interrupt_handler))
2984 continue;
2985 if (mep_prevent_lp_restore && i == LP_REGNO)
2986 continue;
2987 if (!mep_prevent_lp_restore
2988 && !interrupt_handler
2989 && (i == 10 || i == 11))
2990 continue;
2991
2992 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2993 emit_move_insn (gen_rtx_REG (rmode, i),
2994 gen_rtx_MEM (rmode,
29c05e22 2995 plus_constant (Pmode, stack_pointer_rtx,
2996 sp_offset - rss)));
46222c18 2997 else
2998 {
2999 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3000 /* Defer this one so we can jump indirect rather than
3001 copying the RA to $lp and "ret". EH epilogues
3002 automatically skip this anyway. */
3003 lp_slot = sp_offset-rss;
3004 else
3005 {
3006 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3007 gen_rtx_MEM (rmode,
29c05e22 3008 plus_constant (Pmode,
3009 stack_pointer_rtx,
46222c18 3010 sp_offset-rss)));
3011 emit_move_insn (gen_rtx_REG (rmode, i),
3012 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3013 }
3014 }
3015 }
3016 if (lp_slot != -1)
3017 {
3018 /* Restore this one last so we know it will be in the temp
3019 register when we return by jumping indirectly via the temp. */
3020 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3021 gen_rtx_MEM (SImode,
29c05e22 3022 plus_constant (Pmode, stack_pointer_rtx,
46222c18 3023 lp_slot)));
3024 lp_temp = REGSAVE_CONTROL_TEMP;
3025 }
3026
3027
3028 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3029
3030 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3031 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3032 gen_rtx_REG (SImode, SP_REGNO),
3033 cfun->machine->eh_stack_adjust));
3034
3035 if (mep_sibcall_epilogue)
3036 return;
3037
3038 if (mep_disinterrupt_p ())
3039 emit_insn (gen_mep_enable_int ());
3040
3041 if (mep_prevent_lp_restore)
3042 {
3043 emit_jump_insn (gen_eh_return_internal ());
3044 emit_barrier ();
3045 }
3046 else if (interrupt_handler)
3047 emit_jump_insn (gen_mep_reti ());
3048 else
3049 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3050}
3051
3052void
3053mep_expand_eh_return (rtx *operands)
3054{
3055 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3056 {
3057 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3058 emit_move_insn (ra, operands[0]);
3059 operands[0] = ra;
3060 }
3061
3062 emit_insn (gen_eh_epilogue (operands[0]));
3063}
3064
/* Expand the body of the eh_epilogue pattern: a normal epilogue except
   that $lp is left untouched and the EH stack adjustment (in $0) is
   applied.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3073
/* Expand the epilogue emitted just before a sibling call: unwind the
   frame but emit no return (the sibcall jump follows).  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3081
3082static bool
3083mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3084{
3085 if (decl == NULL)
3086 return false;
3087
3088 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3089 return false;
3090
3091 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3092 if (mep_interrupt_p () || mep_disinterrupt_p ())
3093 return false;
3094
3095 return true;
3096}
3097
/* Implement EH_RETURN_STACKADJ_RTX: the EH stack adjustment is passed
   in register $10.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3103
/* Return the rtx for the return-address register, $lp.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3109
/* Implement FUNCTION_PROFILER: emit the call to __mep_mcount,
   preserving $0 and $lp (saved to and reloaded from a temporary
   8-byte stack area) around the call.  */
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3125
/* Emit the basic-block-profiling return hook: call __bb_trace_ret,
   preserving $0 and $lp around the call.  Returns an empty template
   string for use from an output statement.  */
const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3141
3142#undef SAVE
3143#undef RESTORE
3144\f
3145/* Operand Printing. */
3146
3147void
3148mep_print_operand_address (FILE *stream, rtx address)
3149{
3150 if (GET_CODE (address) == MEM)
3151 address = XEXP (address, 0);
3152 else
3153 /* cf: gcc.dg/asm-4.c. */
3154 gcc_assert (GET_CODE (address) == REG);
3155
3156 mep_print_operand (stream, address, 0);
3157}
3158
/* Table driving mep_print_operand.  CODE is the operand-letter an
   entry applies to (0 = plain operand); PATTERN is the operand shape
   as produced by encode_pattern; FORMAT is the text to emit, where a
   digit N stands for the sub-rtx patternr[N] and '\\' escapes the
   following character.  The first matching entry wins.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3204
3205static int
3206unique_bit_in (HOST_WIDE_INT i)
3207{
3208 switch (i & 0xff)
3209 {
3210 case 0x01: case 0xfe: return 0;
3211 case 0x02: case 0xfd: return 1;
3212 case 0x04: case 0xfb: return 2;
3213 case 0x08: case 0xf7: return 3;
3214 case 0x10: case 0x7f: return 4;
3215 case 0x20: case 0xbf: return 5;
3216 case 0x40: case 0xdf: return 6;
3217 case 0x80: case 0xef: return 7;
3218 default:
3219 gcc_unreachable ();
3220 }
3221}
3222
3223static int
3224bit_size_for_clip (HOST_WIDE_INT i)
3225{
3226 int rv;
3227
3228 for (rv = 0; rv < 31; rv ++)
3229 if (((HOST_WIDE_INT) 1 << rv) > i)
3230 return rv + 1;
3231 gcc_unreachable ();
3232}
3233
/* Print an operand to an assembler instruction.  X is the operand,
   CODE the operand-letter ('<', 'L' and 'M' are handled specially;
   everything else is looked up in the conversions[] table against the
   encoded shape of X).  */

void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-operation suffix matching the rtx code.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven case: encode X's shape into the file-global `pattern'
     / `patternr' buffers, find the matching conversion, and interpret
     its format string.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped literal character.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* A digit selects one of the recorded sub-rtxes.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Print round byte multiples in hex, else decimal.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal character; suppress '+' before a negative
		 offset (the '-' sign will be printed with it).  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3390
3391void
d8691ecc 3392mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
46222c18 3393 int noperands ATTRIBUTE_UNUSED)
3394{
3395 /* Despite the fact that MeP is perfectly capable of branching and
3396 doing something else in the same bundle, gcc does jump
3397 optimization *after* scheduling, so we cannot trust the bundling
3398 flags on jump instructions. */
3399 if (GET_MODE (insn) == BImode
3400 && get_attr_slots (insn) != SLOTS_CORE)
3401 fputc ('+', asm_out_file);
3402}
3403
3404/* Function args in registers. */
3405
3406static void
39cba157 3407mep_setup_incoming_varargs (cumulative_args_t cum,
3754d046 3408 machine_mode mode ATTRIBUTE_UNUSED,
46222c18 3409 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3410 int second_time ATTRIBUTE_UNUSED)
3411{
39cba157 3412 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
46222c18 3413
3414 if (nsave > 0)
3415 cfun->machine->arg_regs_to_save = nsave;
3416 *pretend_size = nsave * 4;
3417}
3418
3419static int
3754d046 3420bytesize (const_tree type, machine_mode mode)
46222c18 3421{
3422 if (mode == BLKmode)
3423 return int_size_in_bytes (type);
3424 return GET_MODE_SIZE (mode);
3425}
3426
3427static rtx
3428mep_expand_builtin_saveregs (void)
3429{
3430 int bufsize, i, ns;
3431 rtx regbuf;
3432
3433 ns = cfun->machine->arg_regs_to_save;
09bb2dae 3434 if (TARGET_IVC2)
3435 {
3436 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3437 regbuf = assign_stack_local (SImode, bufsize, 64);
3438 }
3439 else
3440 {
3441 bufsize = ns * 4;
3442 regbuf = assign_stack_local (SImode, bufsize, 32);
3443 }
46222c18 3444
3445 move_block_from_reg (5-ns, regbuf, ns);
3446
3447 if (TARGET_IVC2)
3448 {
3449 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
09bb2dae 3450 int ofs = 8 * ((ns+1)/2);
46222c18 3451
3452 for (i=0; i<ns; i++)
3453 {
3454 int rn = (4-ns) + i + 49;
3455 rtx ptr;
3456
3457 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3458 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3459 ofs += 8;
3460 }
3461 }
3462 return XEXP (regbuf, 0);
3463}
3464
46222c18 3465static tree
3466mep_build_builtin_va_list (void)
3467{
3468 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3469 tree record;
3470
3471
3472 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3473
3474 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3475 get_identifier ("__va_next_gp"), ptr_type_node);
3476 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3477 get_identifier ("__va_next_gp_limit"),
3478 ptr_type_node);
3479 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3480 ptr_type_node);
3481 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3482 ptr_type_node);
3483
3484 DECL_FIELD_CONTEXT (f_next_gp) = record;
3485 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3486 DECL_FIELD_CONTEXT (f_next_cop) = record;
3487 DECL_FIELD_CONTEXT (f_next_stack) = record;
3488
3489 TYPE_FIELDS (record) = f_next_gp;
1767a056 3490 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3491 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3492 DECL_CHAIN (f_next_cop) = f_next_stack;
46222c18 3493
3494 layout_type (record);
3495
3496 return record;
3497}
3498
3499static void
3500mep_expand_va_start (tree valist, rtx nextarg)
3501{
3502 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3503 tree next_gp, next_gp_limit, next_cop, next_stack;
3504 tree t, u;
3505 int ns;
3506
3507 ns = cfun->machine->arg_regs_to_save;
3508
3509 f_next_gp = TYPE_FIELDS (va_list_type_node);
1767a056 3510 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3511 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3512 f_next_stack = DECL_CHAIN (f_next_cop);
46222c18 3513
3514 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3515 NULL_TREE);
3516 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3517 valist, f_next_gp_limit, NULL_TREE);
3518 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3519 NULL_TREE);
3520 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3521 valist, f_next_stack, NULL_TREE);
3522
3523 /* va_list.next_gp = expand_builtin_saveregs (); */
3524 u = make_tree (sizetype, expand_builtin_saveregs ());
3525 u = fold_convert (ptr_type_node, u);
3526 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3527 TREE_SIDE_EFFECTS (t) = 1;
3528 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3529
3530 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
2cc66f2a 3531 u = fold_build_pointer_plus_hwi (u, 4 * ns);
46222c18 3532 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3533 TREE_SIDE_EFFECTS (t) = 1;
3534 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3535
2cc66f2a 3536 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
09bb2dae 3537 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
46222c18 3538 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3539 TREE_SIDE_EFFECTS (t) = 1;
3540 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3541
3542 /* va_list.next_stack = nextarg; */
3543 u = make_tree (ptr_type_node, nextarg);
3544 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3545 TREE_SIDE_EFFECTS (t) = 1;
3546 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3547}
3548
3549static tree
3550mep_gimplify_va_arg_expr (tree valist, tree type,
260f365f 3551 gimple_seq *pre_p,
3552 gimple_seq *post_p ATTRIBUTE_UNUSED)
46222c18 3553{
3554 HOST_WIDE_INT size, rsize;
3555 bool by_reference, ivc2_vec;
3556 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3557 tree next_gp, next_gp_limit, next_cop, next_stack;
3558 tree label_sover, label_selse;
3559 tree tmp, res_addr;
3560
3561 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3562
3563 size = int_size_in_bytes (type);
3564 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3565
3566 if (by_reference)
3567 {
3568 type = build_pointer_type (type);
3569 size = 4;
3570 }
3571 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3572
3573 f_next_gp = TYPE_FIELDS (va_list_type_node);
1767a056 3574 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3575 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3576 f_next_stack = DECL_CHAIN (f_next_cop);
46222c18 3577
3578 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3579 NULL_TREE);
3580 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3581 valist, f_next_gp_limit, NULL_TREE);
3582 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3583 NULL_TREE);
3584 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3585 valist, f_next_stack, NULL_TREE);
3586
3587 /* if f_next_gp < f_next_gp_limit
3588 IF (VECTOR_P && IVC2)
3589 val = *f_next_cop;
3590 ELSE
3591 val = *f_next_gp;
3592 f_next_gp += 4;
3593 f_next_cop += 8;
3594 else
3595 label_selse:
3596 val = *f_next_stack;
3597 f_next_stack += rsize;
3598 label_sover:
3599 */
3600
3601 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3602 label_selse = create_artificial_label (UNKNOWN_LOCATION);
98cfaaca 3603 res_addr = create_tmp_var (ptr_type_node);
46222c18 3604
3605 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3606 unshare_expr (next_gp_limit));
3607 tmp = build3 (COND_EXPR, void_type_node, tmp,
3608 build1 (GOTO_EXPR, void_type_node,
3609 unshare_expr (label_selse)),
3610 NULL_TREE);
3611 gimplify_and_add (tmp, pre_p);
3612
3613 if (ivc2_vec)
3614 {
3615 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3616 gimplify_and_add (tmp, pre_p);
3617 }
3618 else
3619 {
3620 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3621 gimplify_and_add (tmp, pre_p);
3622 }
3623
2cc66f2a 3624 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
46222c18 3625 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3626
2cc66f2a 3627 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
46222c18 3628 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3629
3630 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3631 gimplify_and_add (tmp, pre_p);
3632
3633 /* - - */
3634
3635 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3636 gimplify_and_add (tmp, pre_p);
3637
3638 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3639 gimplify_and_add (tmp, pre_p);
3640
2cc66f2a 3641 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
46222c18 3642 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3643
3644 /* - - */
3645
3646 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3647 gimplify_and_add (tmp, pre_p);
3648
3649 res_addr = fold_convert (build_pointer_type (type), res_addr);
3650
3651 if (by_reference)
3652 res_addr = build_va_arg_indirect_ref (res_addr);
3653
3654 return build_va_arg_indirect_ref (res_addr);
3655}
3656
3657void
3658mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3659 rtx libname ATTRIBUTE_UNUSED,
3660 tree fndecl ATTRIBUTE_UNUSED)
3661{
3662 pcum->nregs = 0;
3663
3664 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3665 pcum->vliw = 1;
3666 else
3667 pcum->vliw = 0;
3668}
3669
4f6b272a 3670/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3671 larger than 4 bytes are passed indirectly. Return value in 0,
3672 unless bigger than 4 bytes, then the caller passes a pointer as the
3673 first arg. For varargs, we copy $1..$4 to the stack. */
3674
3675static rtx
3754d046 3676mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
4f6b272a 3677 const_tree type ATTRIBUTE_UNUSED,
3678 bool named ATTRIBUTE_UNUSED)
46222c18 3679{
39cba157 3680 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3681
46222c18 3682 /* VOIDmode is a signal for the backend to pass data to the call
3683 expander via the second operand to the call pattern. We use
3684 this to determine whether to use "jsr" or "jsrv". */
3685 if (mode == VOIDmode)
4f6b272a 3686 return GEN_INT (cum->vliw);
46222c18 3687
3688 /* If we havn't run out of argument registers, return the next. */
4f6b272a 3689 if (cum->nregs < 4)
46222c18 3690 {
3691 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
4f6b272a 3692 return gen_rtx_REG (mode, cum->nregs + 49);
46222c18 3693 else
4f6b272a 3694 return gen_rtx_REG (mode, cum->nregs + 1);
46222c18 3695 }
3696
3697 /* Otherwise the argument goes on the stack. */
3698 return NULL_RTX;
3699}
3700
3701static bool
39cba157 3702mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3754d046 3703 machine_mode mode,
46222c18 3704 const_tree type,
3705 bool named ATTRIBUTE_UNUSED)
3706{
3707 int size = bytesize (type, mode);
142c2869 3708
3709 /* This is non-obvious, but yes, large values passed after we've run
3710 out of registers are *still* passed by reference - we put the
3711 address of the parameter on the stack, as well as putting the
3712 parameter itself elsewhere on the stack. */
3713
3714 if (size <= 0 || size > 8)
3715 return true;
3716 if (size <= 4)
3717 return false;
39cba157 3718 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3719 && type != NULL_TREE && VECTOR_TYPE_P (type))
142c2869 3720 return false;
3721 return true;
46222c18 3722}
3723
4f6b272a 3724static void
39cba157 3725mep_function_arg_advance (cumulative_args_t pcum,
3754d046 3726 machine_mode mode ATTRIBUTE_UNUSED,
4f6b272a 3727 const_tree type ATTRIBUTE_UNUSED,
3728 bool named ATTRIBUTE_UNUSED)
46222c18 3729{
39cba157 3730 get_cumulative_args (pcum)->nregs += 1;
46222c18 3731}
3732
3733bool
3734mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3735{
3736 int size = bytesize (type, BLKmode);
3737 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
142c2869 3738 return size > 0 && size <= 8 ? 0 : 1;
3739 return size > 0 && size <= 4 ? 0 : 1;
46222c18 3740}
3741
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always access volatile
   bit-fields in their declared (narrow) width.  The original body
   had an unreachable "return false;" after "return true;"; the dead
   statement is removed with no behavior change.  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3748
3749/* Implement FUNCTION_VALUE. All values are returned in $0. */
3750
3751rtx
7290d950 3752mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
46222c18 3753{
3754 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3755 return gen_rtx_REG (TYPE_MODE (type), 48);
3756 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3757}
3758
3759/* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3760
3761rtx
3754d046 3762mep_libcall_value (machine_mode mode)
46222c18 3763{
3764 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3765}
3766
/* Handle pipeline hazards.  */

/* Opcode classes that participate in stc/fsft and stc/ret hazards,
   with their printable names (indexed by op_num).  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode (op_none initially).  */
static int prev_opcode = 0;
3773
3774/* This isn't as optimal as it could be, because we don't know what
3775 control register the STC opcode is storing in. We only need to add
9d75589a 3776 the nop if it's the relevant register, but we add it for irrelevant
46222c18 3777 registers also. */
3778
3779void
3780mep_asm_output_opcode (FILE *file, const char *ptr)
3781{
3782 int this_opcode = op_none;
3783 const char *hazard = 0;
3784
3785 switch (*ptr)
3786 {
3787 case 'f':
3788 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3789 this_opcode = op_fsft;
3790 break;
3791 case 'r':
3792 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3793 this_opcode = op_ret;
3794 break;
3795 case 's':
3796 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3797 this_opcode = op_stc;
3798 break;
3799 }
3800
3801 if (prev_opcode == op_stc && this_opcode == op_fsft)
3802 hazard = "nop";
3803 if (prev_opcode == op_stc && this_opcode == op_ret)
3804 hazard = "nop";
3805
3806 if (hazard)
3807 fprintf(file, "%s\t# %s-%s hazard\n\t",
3808 hazard, opnames[prev_opcode], opnames[this_opcode]);
3809
3810 prev_opcode = this_opcode;
3811}
3812
3813/* Handle attributes. */
3814
3815static tree
3816mep_validate_based_tiny (tree *node, tree name, tree args,
3817 int flags ATTRIBUTE_UNUSED, bool *no_add)
3818{
3819 if (TREE_CODE (*node) != VAR_DECL
3820 && TREE_CODE (*node) != POINTER_TYPE
3821 && TREE_CODE (*node) != TYPE_DECL)
3822 {
3823 warning (0, "%qE attribute only applies to variables", name);
3824 *no_add = true;
3825 }
3826 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3827 {
3828 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3829 {
3830 warning (0, "address region attributes not allowed with auto storage class");
3831 *no_add = true;
3832 }
3833 /* Ignore storage attribute of pointed to variable: char __far * x; */
3834 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3835 {
3836 warning (0, "address region attributes on pointed-to types ignored");
3837 *no_add = true;
3838 }
3839 }
3840
3841 return NULL_TREE;
3842}
3843
3844static int
3845mep_multiple_address_regions (tree list, bool check_section_attr)
3846{
3847 tree a;
3848 int count_sections = 0;
3849 int section_attr_count = 0;
3850
3851 for (a = list; a; a = TREE_CHAIN (a))
3852 {
3853 if (is_attribute_p ("based", TREE_PURPOSE (a))
3854 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3855 || is_attribute_p ("near", TREE_PURPOSE (a))
3856 || is_attribute_p ("far", TREE_PURPOSE (a))
3857 || is_attribute_p ("io", TREE_PURPOSE (a)))
3858 count_sections ++;
3859 if (check_section_attr)
3860 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3861 }
3862
3863 if (check_section_attr)
3864 return section_attr_count;
3865 else
3866 return count_sections;
3867}
3868
/* Fetch the attribute list relevant to DECL: type attributes for a
   type node, otherwise the decl's own attributes, falling back to
   the attributes of the decl's type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3874
3875static tree
3876mep_validate_near_far (tree *node, tree name, tree args,
3877 int flags ATTRIBUTE_UNUSED, bool *no_add)
3878{
3879 if (TREE_CODE (*node) != VAR_DECL
3880 && TREE_CODE (*node) != FUNCTION_DECL
3881 && TREE_CODE (*node) != METHOD_TYPE
3882 && TREE_CODE (*node) != POINTER_TYPE
3883 && TREE_CODE (*node) != TYPE_DECL)
3884 {
3885 warning (0, "%qE attribute only applies to variables and functions",
3886 name);
3887 *no_add = true;
3888 }
3889 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3890 {
3891 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3892 {
3893 warning (0, "address region attributes not allowed with auto storage class");
3894 *no_add = true;
3895 }
3896 /* Ignore storage attribute of pointed to variable: char __far * x; */
3897 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3898 {
3899 warning (0, "address region attributes on pointed-to types ignored");
3900 *no_add = true;
3901 }
3902 }
3903 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3904 {
3905 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3906 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3907 DECL_ATTRIBUTES (*node) = NULL_TREE;
3908 }
3909 return NULL_TREE;
3910}
3911
3912static tree
3913mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3914 int flags ATTRIBUTE_UNUSED, bool *no_add)
3915{
3916 if (TREE_CODE (*node) != FUNCTION_DECL
3917 && TREE_CODE (*node) != METHOD_TYPE)
3918 {
3919 warning (0, "%qE attribute only applies to functions", name);
3920 *no_add = true;
3921 }
3922 return NULL_TREE;
3923}
3924
3925static tree
3926mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3927 int flags ATTRIBUTE_UNUSED, bool *no_add)
3928{
3929 tree function_type;
3930
3931 if (TREE_CODE (*node) != FUNCTION_DECL)
3932 {
3933 warning (0, "%qE attribute only applies to functions", name);
3934 *no_add = true;
3935 return NULL_TREE;
3936 }
3937
3938 if (DECL_DECLARED_INLINE_P (*node))
3939 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3940 DECL_UNINLINABLE (*node) = 1;
3941
3942 function_type = TREE_TYPE (*node);
3943
3944 if (TREE_TYPE (function_type) != void_type_node)
3945 error ("interrupt function must have return type of void");
3946
a36cf284 3947 if (prototype_p (function_type)
46222c18 3948 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3949 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3950 error ("interrupt function must have no arguments");
3951
3952 return NULL_TREE;
3953}
3954
3955static tree
3956mep_validate_io_cb (tree *node, tree name, tree args,
3957 int flags ATTRIBUTE_UNUSED, bool *no_add)
3958{
3959 if (TREE_CODE (*node) != VAR_DECL)
3960 {
3961 warning (0, "%qE attribute only applies to variables", name);
3962 *no_add = true;
3963 }
3964
3965 if (args != NULL_TREE)
3966 {
3967 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3968 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3969 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3970 {
3971 warning (0, "%qE attribute allows only an integer constant argument",
3972 name);
3973 *no_add = true;
3974 }
3975 }
3976
3977 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3978 TREE_THIS_VOLATILE (*node) = 1;
3979
3980 return NULL_TREE;
3981}
3982
3983static tree
3984mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3985 int flags ATTRIBUTE_UNUSED, bool *no_add)
3986{
3987 if (TREE_CODE (*node) != FUNCTION_TYPE
3988 && TREE_CODE (*node) != FUNCTION_DECL
3989 && TREE_CODE (*node) != METHOD_TYPE
3990 && TREE_CODE (*node) != FIELD_DECL
3991 && TREE_CODE (*node) != TYPE_DECL)
3992 {
3993 static int gave_pointer_note = 0;
3994 static int gave_array_note = 0;
3995 static const char * given_type = NULL;
3996
f3d35d4d 3997 given_type = get_tree_code_name (TREE_CODE (*node));
46222c18 3998 if (TREE_CODE (*node) == POINTER_TYPE)
3999 given_type = "pointers";
4000 if (TREE_CODE (*node) == ARRAY_TYPE)
4001 given_type = "arrays";
4002
4003 if (given_type)
4004 warning (0, "%qE attribute only applies to functions, not %s",
4005 name, given_type);
4006 else
4007 warning (0, "%qE attribute only applies to functions",
4008 name);
4009 *no_add = true;
4010
4011 if (TREE_CODE (*node) == POINTER_TYPE
4012 && !gave_pointer_note)
4013 {
516bc58f 4014 inform (input_location,
4015 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4016 " typedef int (__vliw *vfuncptr) ();");
46222c18 4017 gave_pointer_note = 1;
4018 }
4019
4020 if (TREE_CODE (*node) == ARRAY_TYPE
4021 && !gave_array_note)
4022 {
516bc58f 4023 inform (input_location,
4024 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4025 " typedef int (__vliw *vfuncptr[]) ();");
46222c18 4026 gave_array_note = 1;
4027 }
4028 }
4029 if (!TARGET_VLIW)
4030 error ("VLIW functions are not allowed without a VLIW configuration");
4031 return NULL_TREE;
4032}
4033
4034static const struct attribute_spec mep_attribute_table[11] =
4035{
ac86af5d 4036 /* name min max decl type func handler
4037 affects_type_identity */
4038 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4039 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4040 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4041 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4042 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4043 false },
4044 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4045 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4046 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4047 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4048 { NULL, 0, 0, false, false, false, NULL, false }
46222c18 4049};
4050
4051static bool
4052mep_function_attribute_inlinable_p (const_tree callee)
4053{
4054 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4055 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4056 return (lookup_attribute ("disinterrupt", attrs) == 0
4057 && lookup_attribute ("interrupt", attrs) == 0);
4058}
4059
a71ff2a7 4060static bool
7c88e513 4061mep_can_inline_p (tree caller, tree callee)
a71ff2a7 4062{
4063 if (TREE_CODE (callee) == ADDR_EXPR)
4064 callee = TREE_OPERAND (callee, 0);
4065
1756c1fe 4066 if (!mep_vliw_function_p (caller)
a71ff2a7 4067 && mep_vliw_function_p (callee))
4068 {
1756c1fe 4069 return false;
a71ff2a7 4070 }
1756c1fe 4071 return true;
a71ff2a7 4072}
4073
46222c18 4074#define FUNC_CALL 1
4075#define FUNC_DISINTERRUPT 2
4076
4077
4078struct GTY(()) pragma_entry {
4079 int used;
4080 int flag;
46222c18 4081};
46222c18 4082
2ef51f0e 4083struct pragma_traits : default_hashmap_traits
46222c18 4084{
2ef51f0e 4085 static hashval_t hash (const char *s) { return htab_hash_string (s); }
4086 static bool
4087 equal_keys (const char *a, const char *b)
4088 {
4089 return strcmp (a, b) == 0;
4090 }
4091};
46222c18 4092
2ef51f0e 4093/* Hash table of farcall-tagged sections. */
4094static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
4095 pragma_htab;
46222c18 4096
4097static void
4098mep_note_pragma_flag (const char *funcname, int flag)
4099{
46222c18 4100 if (!pragma_htab)
2ef51f0e 4101 pragma_htab
4102 = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
46222c18 4103
2ef51f0e 4104 bool existed;
4105 const char *name = ggc_strdup (funcname);
4106 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4107 if (!existed)
46222c18 4108 {
2ef51f0e 4109 slot->flag = 0;
4110 slot->used = 0;
46222c18 4111 }
2ef51f0e 4112 slot->flag |= flag;
46222c18 4113}
4114
4115static bool
4116mep_lookup_pragma_flag (const char *funcname, int flag)
4117{
46222c18 4118 if (!pragma_htab)
4119 return false;
4120
4121 if (funcname[0] == '@' && funcname[2] == '.')
4122 funcname += 3;
4123
2ef51f0e 4124 pragma_entry *slot = pragma_htab->get (funcname);
4125 if (slot && (slot->flag & flag))
46222c18 4126 {
2ef51f0e 4127 slot->used |= flag;
46222c18 4128 return true;
4129 }
4130 return false;
4131}
4132
4133bool
4134mep_lookup_pragma_call (const char *funcname)
4135{
4136 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4137}
4138
4139void
4140mep_note_pragma_call (const char *funcname)
4141{
4142 mep_note_pragma_flag (funcname, FUNC_CALL);
4143}
4144
4145bool
4146mep_lookup_pragma_disinterrupt (const char *funcname)
4147{
4148 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4149}
4150
4151void
4152mep_note_pragma_disinterrupt (const char *funcname)
4153{
4154 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4155}
4156
2ef51f0e 4157bool
4158note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4159 void *)
46222c18 4160{
2ef51f0e 4161 if ((e.flag & FUNC_DISINTERRUPT)
4162 && !(e.used & FUNC_DISINTERRUPT))
4163 warning (0, "\"#pragma disinterrupt %s\" not used", s);
46222c18 4164 return 1;
4165}
4166
4167void
4168mep_file_cleanups (void)
4169{
4170 if (pragma_htab)
2ef51f0e 4171 pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
46222c18 4172}
7d86c715 4173
4174/* These three functions provide a bridge between the pramgas that
4175 affect register classes, and the functions that maintain them. We
4176 can't call those functions directly as pragma handling is part of
4177 the front end and doesn't have direct access to them. */
4178
4179void
4180mep_save_register_info (void)
4181{
4182 save_register_info ();
4183}
4184
4185void
4186mep_reinit_regs (void)
4187{
4188 reinit_regs ();
4189}
4190
4191void
4192mep_init_regs (void)
4193{
4194 init_regs ();
4195}
4196
46222c18 4197
4198
4199static int
4200mep_attrlist_to_encoding (tree list, tree decl)
4201{
4202 if (mep_multiple_address_regions (list, false) > 1)
4203 {
4204 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4205 TREE_PURPOSE (TREE_CHAIN (list)),
4206 DECL_NAME (decl),
4207 DECL_SOURCE_LINE (decl));
4208 TREE_CHAIN (list) = NULL_TREE;
4209 }
4210
4211 while (list)
4212 {
4213 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4214 return 'b';
4215 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4216 return 't';
4217 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4218 return 'n';
4219 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4220 return 'f';
4221 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4222 {
4223 if (TREE_VALUE (list)
4224 && TREE_VALUE (TREE_VALUE (list))
4225 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4226 {
4227 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4228 if (location >= 0
4229 && location <= 0x1000000)
4230 return 'i';
4231 }
4232 return 'I';
4233 }
4234 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4235 return 'c';
4236 list = TREE_CHAIN (list);
4237 }
4238 if (TARGET_TF
4239 && TREE_CODE (decl) == FUNCTION_DECL
4240 && DECL_SECTION_NAME (decl) == 0)
4241 return 'f';
4242 return 0;
4243}
4244
4245static int
4246mep_comp_type_attributes (const_tree t1, const_tree t2)
4247{
4248 int vliw1, vliw2;
4249
4250 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4251 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4252
4253 if (vliw1 != vliw2)
4254 return 0;
4255
4256 return 1;
4257}
4258
4259static void
4260mep_insert_attributes (tree decl, tree *attributes)
4261{
4262 int size;
4263 const char *secname = 0;
4264 tree attrib, attrlist;
4265 char encoding;
4266
4267 if (TREE_CODE (decl) == FUNCTION_DECL)
4268 {
4269 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4270
4271 if (mep_lookup_pragma_disinterrupt (funcname))
4272 {
4273 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4274 *attributes = chainon (*attributes, attrib);
4275 }
4276 }
4277
4278 if (TREE_CODE (decl) != VAR_DECL
4279 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4280 return;
4281
4282 if (TREE_READONLY (decl) && TARGET_DC)
4283 /* -mdc means that const variables default to the near section,
4284 regardless of the size cutoff. */
4285 return;
4286
4287 /* User specified an attribute, so override the default.
4288 Ignore storage attribute of pointed to variable. char __far * x; */
4289 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4290 {
4291 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4292 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4293 else if (DECL_ATTRIBUTES (decl) && *attributes)
4294 DECL_ATTRIBUTES (decl) = NULL_TREE;
4295 }
4296
4297 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4298 encoding = mep_attrlist_to_encoding (attrlist, decl);
4299 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4300 {
4301 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4302 encoding = mep_attrlist_to_encoding (attrlist, decl);
4303 }
4304 if (encoding)
4305 {
4306 /* This means that the declaration has a specific section
4307 attribute, so we should not apply the default rules. */
4308
4309 if (encoding == 'i' || encoding == 'I')
4310 {
4311 tree attr = lookup_attribute ("io", attrlist);
4312 if (attr
4313 && TREE_VALUE (attr)
4314 && TREE_VALUE (TREE_VALUE(attr)))
4315 {
4316 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4317 static tree previous_value = 0;
4318 static int previous_location = 0;
4319 static tree previous_name = 0;
4320
4321 /* We take advantage of the fact that gcc will reuse the
4322 same tree pointer when applying an attribute to a
4323 list of decls, but produce a new tree for attributes
4324 on separate source lines, even when they're textually
4325 identical. This is the behavior we want. */
4326 if (TREE_VALUE (attr) == previous_value
4327 && location == previous_location)
4328 {
4329 warning(0, "__io address 0x%x is the same for %qE and %qE",
4330 location, previous_name, DECL_NAME (decl));
4331 }
4332 previous_name = DECL_NAME (decl);
4333 previous_location = location;
4334 previous_value = TREE_VALUE (attr);
4335 }
4336 }
4337 return;
4338 }
4339
4340
4341 /* Declarations of arrays can change size. Don't trust them. */
4342 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4343 size = 0;
4344 else
4345 size = int_size_in_bytes (TREE_TYPE (decl));
4346
4347 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4348 {
4349 if (TREE_PUBLIC (decl)
4350 || DECL_EXTERNAL (decl)
4351 || TREE_STATIC (decl))
4352 {
4353 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4354 int key = 0;
4355
4356 while (*name)
4357 key += *name++;
4358
4359 switch (key & 3)
4360 {
4361 case 0:
4362 secname = "based";
4363 break;
4364 case 1:
4365 secname = "tiny";
4366 break;
4367 case 2:
4368 secname = "far";
4369 break;
4370 default:
4371 ;
4372 }
4373 }
4374 }
4375 else
4376 {
4377 if (size <= mep_based_cutoff && size > 0)
4378 secname = "based";
4379 else if (size <= mep_tiny_cutoff && size > 0)
4380 secname = "tiny";
4381 else if (TARGET_L)
4382 secname = "far";
4383 }
4384
4385 if (mep_const_section && TREE_READONLY (decl))
4386 {
4387 if (strcmp (mep_const_section, "tiny") == 0)
4388 secname = "tiny";
4389 else if (strcmp (mep_const_section, "near") == 0)
4390 return;
4391 else if (strcmp (mep_const_section, "far") == 0)
4392 secname = "far";
4393 }
4394
4395 if (!secname)
4396 return;
4397
4398 if (!mep_multiple_address_regions (*attributes, true)
4399 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4400 {
4401 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4402
4403 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4404 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4405 and mep_validate_based_tiny. */
4406 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4407 }
4408}
4409
/* TARGET_ENCODE_SECTION_INFO worker.  On the first call for DECL,
   rewrite its SYMBOL_REF name to "@<enc>.<name>", where <enc> is a
   single-character section encoding derived from DECL's MeP
   attributes ('b' = based, 't' = tiny, 'n' = near, 'f' = far, etc.).
   Also warn when DECL is too large for the section the encoding
   selects.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  /* Only encode once; later calls would double-prefix the name.  */
  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Fetch the current assembler name from the RTL.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@" + <enc> + "." + name + NUL == strlen (oldname) + 4.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      /* Preserve weakness and the decl link on the new SYMBOL_REF.  */
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limits imposed by each addressing mode's displacement.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4485
/* TARGET_STRIP_NAME_ENCODING worker.  Return SYM with any leading
   '*' markers and "@<enc>." section-encoding prefixes removed.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
4499
/* TARGET_ASM_SELECT_SECTION worker.  Choose an output section for
   DECL, honoring the "@<enc>." name-encoding prefix installed by
   mep_encode_section_info.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL may live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  /* Functions: pick between core/VLIW text sections and their
     far ('f'-encoded) variants.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* NOTE(review): after mep_unique_section, control falls through
	 to the data/rodata defaults at the bottom — confirm the
	 returned section is ignored once a unique section name is
	 set.  */
      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  /* Variables: dispatch on the encoding letter.  */
  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables live at fixed hardware addresses, so an
	       initializer has nowhere to go.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  /* Unencoded variables fall back to the generic sections.  */
  if (readonly)
    return readonly_data_section;

  return data_section;
}
4590
/* TARGET_ASM_UNIQUE_SECTION worker.  Give DECL its own section named
   after the decl, choosing the section family (text/rodata/data/
   based/tiny/far and VLIW variants) from the decl's kind and its
   "@<enc>." name encoding.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* [family][0] is the normal prefix, [family][1] the prefix used for
     COMDAT (link-once) decls.  Indices match the SEC values below.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly encoded) assembler name from the RTL.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the family using the name encoding, then strip it so the
     section name uses the bare symbol.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4662
4663/* Given a decl, a section name, and whether the decl initializer
4664 has relocs, choose attributes for the section. */
4665
4666#define SECTION_MEP_VLIW SECTION_MACH_DEP
4667
4668static unsigned int
4669mep_section_type_flags (tree decl, const char *name, int reloc)
4670{
4671 unsigned int flags = default_section_type_flags (decl, name, reloc);
4672
4673 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4674 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4675 flags |= SECTION_MEP_VLIW;
4676
4677 return flags;
4678}
4679
4680/* Switch to an arbitrary section NAME with attributes as specified
4681 by FLAGS. ALIGN specifies any known alignment requirements for
4682 the section; 0 if the default should be used.
4683
4684 Differs from the standard ELF version only in support of VLIW mode. */
4685
4686static void
4687mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4688{
4689 char flagchars[8], *f = flagchars;
4690 const char *type;
4691
4692 if (!(flags & SECTION_DEBUG))
4693 *f++ = 'a';
4694 if (flags & SECTION_WRITE)
4695 *f++ = 'w';
4696 if (flags & SECTION_CODE)
4697 *f++ = 'x';
4698 if (flags & SECTION_SMALL)
4699 *f++ = 's';
4700 if (flags & SECTION_MEP_VLIW)
4701 *f++ = 'v';
4702 *f = '\0';
4703
4704 if (flags & SECTION_BSS)
4705 type = "nobits";
4706 else
4707 type = "progbits";
4708
4709 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4710 name, flagchars, type);
4711
4712 if (flags & SECTION_CODE)
4713 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4714 asm_out_file);
4715}
4716
/* Output a common (uninitialized, possibly global) variable NAME of
   SIZE bytes with ALIGN bits of alignment.  Three cases: io/cb
   variables become absolute symbols, based/tiny/far variables get
   explicit BSS-style definitions in their own sections, and anything
   else falls back to .local/.comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb variables live at a fixed address taken from the
	 attribute argument; emit "name = address".  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* No explicit address: nothing to emit.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment to a log2 byte alignment for
	     .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default case: an ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4794
4795/* Trampolines. */
4796
8786c274 4797static void
4798mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
46222c18 4799{
8786c274 4800 rtx addr = XEXP (m_tramp, 0);
4801 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4802
46222c18 4803 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4804 LCT_NORMAL, VOIDmode, 3,
4805 addr, Pmode,
4806 fnaddr, Pmode,
4807 static_chain, Pmode);
4808}
4809
4810/* Experimental Reorg. */
4811
/* Return true if REG (a hard register, or NULL to search for memory
   references instead) is mentioned anywhere inside the rtx IN.  When
   MODES_TOO is nonzero a register only matches if its machine mode
   matches as well.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference only "matches" in the memory-search mode.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaves that can never contain a match.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Otherwise recurse over all sub-expressions and vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4877
4878#define EXPERIMENTAL_REGMOVE_REORG 1
4879
4880#if EXPERIMENTAL_REGMOVE_REORG
4881
/* Return 1 if hard registers R1 and R2 belong to the same register
   family (both general registers or both coprocessor registers),
   0 otherwise.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  if (GR_REGNO_P (r1))
    return GR_REGNO_P (r2) ? 1 : 0;
  if (CR_REGNO_P (r1))
    return CR_REGNO_P (r2) ? 1 : 0;
  return 0;
}
4891
4892static void
d8691ecc 4893mep_reorg_regmove (rtx_insn *insns)
46222c18 4894{
d8691ecc 4895 rtx_insn *insn, *next, *follow;
4896 rtx pat, *where;
46222c18 4897 int count = 0, done = 0, replace, before = 0;
4898
4899 if (dump_file)
4900 for (insn = insns; insn; insn = NEXT_INSN (insn))
aa90bb35 4901 if (NONJUMP_INSN_P (insn))
46222c18 4902 before++;
4903
4904 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4905 set that uses the r2 and r2 dies there. We replace r2 with r1
4906 and see if it's still a valid insn. If so, delete the first set.
4907 Copied from reorg.c. */
4908
4909 while (!done)
4910 {
4911 done = 1;
4912 for (insn = insns; insn; insn = next)
4913 {
6f3836d6 4914 next = next_nonnote_nondebug_insn (insn);
aa90bb35 4915 if (! NONJUMP_INSN_P (insn))
46222c18 4916 continue;
4917 pat = PATTERN (insn);
4918
4919 replace = 0;
4920
4921 if (GET_CODE (pat) == SET
4922 && GET_CODE (SET_SRC (pat)) == REG
4923 && GET_CODE (SET_DEST (pat)) == REG
4924 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4925 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4926 {
6f3836d6 4927 follow = next_nonnote_nondebug_insn (insn);
46222c18 4928 if (dump_file)
4929 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4930
aa90bb35 4931 while (follow && NONJUMP_INSN_P (follow)
46222c18 4932 && GET_CODE (PATTERN (follow)) == SET
4933 && !dead_or_set_p (follow, SET_SRC (pat))
4934 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4935 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4936 {
4937 if (dump_file)
4938 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4939 follow = next_nonnote_insn (follow);
4940 }
4941
4942 if (dump_file)
4943 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
aa90bb35 4944 if (follow && NONJUMP_INSN_P (follow)
46222c18 4945 && GET_CODE (PATTERN (follow)) == SET
4946 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4947 {
4948 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4949 {
4950 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4951 {
4952 replace = 1;
4953 where = & SET_SRC (PATTERN (follow));
4954 }
4955 }
4956 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4957 {
4958 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4959 {
4960 replace = 1;
4961 where = & PATTERN (follow);
4962 }
4963 }
4964 }
4965 }
4966
4967 /* If so, follow is the corresponding insn */
4968 if (replace)
4969 {
4970 if (dump_file)
4971 {
d8691ecc 4972 rtx_insn *x;
46222c18 4973
4974 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4975 for (x = insn; x ;x = NEXT_INSN (x))
4976 {
4977 print_rtl_single (dump_file, x);
4978 if (x == follow)
4979 break;
4980 fprintf (dump_file, "\n");
4981 }
4982 }
4983
4984 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4985 follow, where))
4986 {
4987 count ++;
6f3836d6 4988 delete_insn (insn);
46222c18 4989 if (dump_file)
4990 {
4991 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4992 print_rtl_single (dump_file, follow);
4993 }
4994 done = 0;
4995 }
4996 }
4997 }
4998 }
4999
5000 if (dump_file)
5001 {
5002 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5003 fprintf (dump_file, "=====\n");
5004 }
5005}
5006#endif
5007
5008
5009/* Figure out where to put LABEL, which is the label for a repeat loop.
5010 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5011 the loop ends just before LAST_INSN. If SHARED, insns other than the
5012 "repeat" might use LABEL to jump to the loop's continuation point.
5013
5014 Return the last instruction in the adjusted loop. */
5015
d8691ecc 5016static rtx_insn *
5017mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
5018 bool including, bool shared)
46222c18 5019{
d8691ecc 5020 rtx_insn *next, *prev;
46222c18 5021 int count = 0, code, icode;
5022
5023 if (dump_file)
5024 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5025 INSN_UID (last_insn));
5026
5027 /* Set PREV to the last insn in the loop. */
5028 prev = last_insn;
5029 if (!including)
5030 prev = PREV_INSN (prev);
5031
5032 /* Set NEXT to the next insn after the repeat label. */
5033 next = last_insn;
5034 if (!shared)
5035 while (prev != 0)
5036 {
5037 code = GET_CODE (prev);
5038 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5039 break;
5040
5041 if (INSN_P (prev))
5042 {
5043 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
d8691ecc 5044 prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));
46222c18 5045
5046 /* Other insns that should not be in the last two opcodes. */
5047 icode = recog_memoized (prev);
5048 if (icode < 0
5049 || icode == CODE_FOR_repeat
5050 || icode == CODE_FOR_erepeat
5051 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5052 break;
5053
5054 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5055 is the second instruction in a VLIW bundle. In that case,
5056 loop again: if the first instruction also satisfies the
5057 conditions above then we will reach here again and put
5058 both of them into the repeat epilogue. Otherwise both
5059 should remain outside. */
5060 if (GET_MODE (prev) != BImode)
5061 {
5062 count++;
5063 next = prev;
5064 if (dump_file)
5065 print_rtl_single (dump_file, next);
5066 if (count == 2)
5067 break;
5068 }
5069 }
5070 prev = PREV_INSN (prev);
5071 }
5072
5073 /* See if we're adding the label immediately after the repeat insn.
5074 If so, we need to separate them with a nop. */
5075 prev = prev_real_insn (next);
5076 if (prev)
5077 switch (recog_memoized (prev))
5078 {
5079 case CODE_FOR_repeat:
5080 case CODE_FOR_erepeat:
5081 if (dump_file)
5082 fprintf (dump_file, "Adding nop inside loop\n");
5083 emit_insn_before (gen_nop (), next);
5084 break;
5085
5086 default:
5087 break;
5088 }
5089
5090 /* Insert the label. */
5091 emit_label_before (label, next);
5092
5093 /* Insert the nops. */
5094 if (dump_file && count < 2)
5095 fprintf (dump_file, "Adding %d nop%s\n\n",
5096 2 - count, count == 1 ? "" : "s");
5097
5098 for (; count < 2; count++)
5099 if (including)
5100 last_insn = emit_insn_after (gen_nop (), last_insn);
5101 else
5102 emit_insn_before (gen_nop (), last_insn);
5103
5104 return last_insn;
5105}
5106
5107
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn for OPERANDS.  Matching begins and ends share a
   numeric tag so mep_reorg_repeat can pair them up later.  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  /* Start a new tag the first time through, or whenever we emit the
     same kind (begin/end) twice in a row — i.e. a new loop.  */
  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5126
5127
5128/* Code for converting doloop_begins and doloop_ends into valid
5129 MeP instructions. A doloop_begin is just a placeholder:
5130
5131 $count = unspec ($count)
5132
5133 where $count is initially the number of iterations - 1.
5134 doloop_end has the form:
5135
5136 if ($count-- == 0) goto label
5137
5138 The counter variable is private to the doloop insns, nothing else
5139 relies on its value.
5140
5141 There are three cases, in decreasing order of preference:
5142
5143 1. A loop has exactly one doloop_begin and one doloop_end.
5144 The doloop_end branches to the first instruction after
5145 the doloop_begin.
5146
5147 In this case we can replace the doloop_begin with a repeat
5148 instruction and remove the doloop_end. I.e.:
5149
5150 $count1 = unspec ($count1)
5151 label:
5152 ...
5153 insn1
5154 insn2
5155 if ($count2-- == 0) goto label
5156
5157 becomes:
5158
5159 repeat $count1,repeat_label
5160 label:
5161 ...
5162 repeat_label:
5163 insn1
5164 insn2
5165 # end repeat
5166
5167 2. As for (1), except there are several doloop_ends. One of them
5168 (call it X) falls through to a label L. All the others fall
5169 through to branches to L.
5170
5171 In this case, we remove X and replace the other doloop_ends
5172 with branches to the repeat label. For example:
5173
5174 $count1 = unspec ($count1)
5175 start:
5176 ...
5177 if ($count2-- == 0) goto label
5178 end:
5179 ...
5180 if ($count3-- == 0) goto label
5181 goto end
5182
5183 becomes:
5184
5185 repeat $count1,repeat_label
5186 start:
5187 ...
5188 repeat_label:
5189 nop
5190 nop
5191 # end repeat
5192 end:
5193 ...
5194 goto repeat_label
5195
5196 3. The fallback case. Replace doloop_begins with:
5197
5198 $count = $count + 1
5199
5200 Replace doloop_ends with the equivalent of:
5201
5202 $count = $count - 1
5203 if ($count == 0) goto label
5204
5205 Note that this might need a scratch register if $count
5206 is stored in memory. */
5207
/* A structure describing one doloop_begin.  Records are chained per
   loop tag by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value (iteration count - 1).  This is known
     to be a general register.  */
  rtx counter;
};
5219
/* A structure describing a doloop_end.  Records are chained per loop
   tag by mep_reorg_repeat, most-preferred candidate first.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5244
5245
/* One do-while loop: all the doloop_begin/doloop_end insns that share
   a single tag.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5255
5256
5257/* Return true if LOOP can be converted into repeat/repeat_end form
5258 (that is, if it matches cases (1) or (2) above). */
5259
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  /* Matches case (1) or (2) of the conversion scheme above.  */
  return true;
}
5287
5288
5289/* The main repeat reorg function. See comment above for details. */
5290
static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  Everything here is
     alloca'd, so it lives only for the duration of this pass.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3). First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5437
5438
/* Return true if INSN is a conditional branch whose comparison code
   (EQ, NE, LT or GE) can be inverted and still match an insn pattern.
   INSN is left unchanged: the code is flipped in place, recognition is
   attempted, and the original code is restored.  */
static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* Other comparison codes have no MeP inverse pattern.  */
      return false;
    }
  /* Force re-recognition with the flipped condition...  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* ...then restore the original condition and clear the cached
     insn code again so later passes re-recognize the original.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5476
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   after AFTER.  */
static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Point the branch's LABEL_REF arm at the new label, deleting the
     old target if this branch was its only user, and keep the label
     use counts and JUMP_LABEL in sync.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The rewritten branch must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5522
/* Look for loops that close with an invertible conditional branch (or
   a simple jump) and convert them into hardware erepeat loops.  */
static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch towards its target label,
	   checking that everything in between may live inside an
	   erepeat body.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers can never be inside the loop body.  */
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* An unconditional closing jump simply goes away.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* A conditional branch becomes the loop's exit.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5634
5635/* Replace a jump to a return, with a copy of the return. GCC doesn't
5636 always do this on its own. */
5637
static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to, skipping notes,
	   labels and USEs.  */
	label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return? */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  The target
		   label loses one user and is deleted when unused.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		/* Re-recognize the insn under its new pattern.  */
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5672
5673
/* Combine two consecutive "$rN = $rN + const" (addsi3) insns on the
   same register into a single add of the summed constant, when the sum
   still fits the signed 16-bit immediate range checked below.  */
static void
mep_reorg_addcombine (rtx_insn *insns)
{
  rtx_insn *i, *n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	/* Does the immediately following insn have the same shape and
	   target the same register?  */
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I, then unlink N from the
		   insn chain by splicing the NEXT/PREV pointers
		   around it.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		SET_NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  SET_PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5709
5710/* If this insn adjusts the stack, return the adjustment, else return
5711 zero. */
5712static int
d8691ecc 5713add_sp_insn_p (rtx_insn *insn)
46222c18 5714{
5715 rtx pat;
5716
5717 if (! single_set (insn))
5718 return 0;
5719 pat = PATTERN (insn);
5720 if (GET_CODE (SET_DEST (pat)) != REG)
5721 return 0;
5722 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5723 return 0;
5724 if (GET_CODE (SET_SRC (pat)) != PLUS)
5725 return 0;
5726 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5727 return 0;
5728 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5729 return 0;
5730 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5731 return 0;
5732 return INTVAL (XEXP (SET_SRC (pat), 1));
5733}
5734
5735/* Check for trivial functions that set up an unneeded stack
5736 frame. */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the remaining insns.  Give up if $sp is mentioned, a call is
     made, or more than one further stack adjustment appears.  The last
     real insn (the return) is deliberately not examined: the loop body
     only runs while there is a NEXT insn.  */
  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Only one frame-teardown adjustment is allowed, and it must
	     exactly undo the prologue adjustment.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  /* The frame setup and teardown are provably unused; drop both.  */
  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5789
/* Machine-dependent reorg pass for MeP: runs the local insn-rewriting
   passes defined above, in dependency order.  */
static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat loops are only generated when profiling is off and, in
     interrupt handlers, when the RPB register is saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5821
5822\f
5823
5824/*----------------------------------------------------------------------*/
5825/* Builtins */
5826/*----------------------------------------------------------------------*/
5827
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps one -mconfig= name to the ISA bitmask that should be selected
   for it.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* All known configurations, terminated by a null entry.  Entries come
   from COPROC_SELECTION_TABLE when the target headers provide one.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5855
5856/* Initialize the global intrinsics variables above. */
5857
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries overwrite
     mep_intrinsic_insn[], with the displaced index saved in
     mep_intrinsic_chain[] — so each chain link points at a smaller
     index, as documented on the table.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overwrites the value computed
     just above, making the cmov1/cmov2 availability check dead.  It
     looks deliberate (always claim core<->copro moves work), but
     confirm before relying on the computed value.  */
  mep_have_core_copro_moves_p = 1;
}
5897
/* Declare all available intrinsic functions.  Called once only.  */

/* Tree type nodes for the builtin argument and return types below;
   all are initialized by mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5908
5909static tree
5910mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5911{
5912 switch (cr)
5913 {
5914 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5915 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5916 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5917 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5918 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5919 case cgen_regnum_operand_type_CHAR: return char_type_node;
5920 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5921 case cgen_regnum_operand_type_SI: return intSI_type_node;
5922 case cgen_regnum_operand_type_DI: return intDI_type_node;
5923 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5924 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5925 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5926 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5927 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5928 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5929 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5930 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5931 default:
5932 return void_type_node;
5933 }
5934}
5935
static void
mep_init_builtins (void)
{
  size_t i;

  /* cp_data_bus_int must match the width of the coprocessor registers.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Register the types above under their user-visible names.  */
  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Register each intrinsic only once; adjacent table entries
	   for the same intrinsic share one builtin.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6002
/* Report the unavailability of the given intrinsic.  */
6004
6005#if 1
6006static void
6007mep_intrinsic_unavailable (int intrinsic)
6008{
6009 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6010
6011 if (already_reported_p[intrinsic])
6012 return;
6013
6014 if (mep_intrinsic_insn[intrinsic] < 0)
6015 error ("coprocessor intrinsic %qs is not available in this configuration",
6016 cgen_intrinsics[intrinsic]);
6017 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6018 error ("%qs is not available in VLIW functions",
6019 cgen_intrinsics[intrinsic]);
6020 else
6021 error ("%qs is not available in non-VLIW functions",
6022 cgen_intrinsics[intrinsic]);
6023
6024 already_reported_p[intrinsic] = 1;
6025}
6026#endif
6027
6028
6029/* See if any implementation of INTRINSIC is available to the
6030 current function. If so, store the most general implementation
6031 in *INSN_PTR and return true. Return false otherwise. */
6032
6033static bool
6034mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6035{
6036 int i;
6037
6038 i = mep_intrinsic_insn[intrinsic];
6039 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6040 i = mep_intrinsic_chain[i];
6041
6042 if (i >= 0)
6043 {
6044 *insn_ptr = &cgen_insns[i];
6045 return true;
6046 }
6047 return false;
6048}
6049
6050
6051/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6052 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6053 try using a work-alike instead. In this case, the returned insn
6054 may have three operands rather than two. */
6055
6056static bool
6057mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6058{
6059 size_t i;
6060
6061 if (intrinsic == mep_cmov)
6062 {
6063 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6064 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6065 return true;
6066 return false;
6067 }
6068 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6069}
6070
6071
6072/* If ARG is a register operand that is the same size as MODE, convert it
6073 to MODE using a subreg. Otherwise return ARG as-is. */
6074
6075static rtx
3754d046 6076mep_convert_arg (machine_mode mode, rtx arg)
46222c18 6077{
6078 if (GET_MODE (arg) != mode
6079 && register_operand (arg, VOIDmode)
6080 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6081 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6082 return arg;
6083}
6084
6085
6086/* Apply regnum conversions to ARG using the description given by REGNUM.
6087 Return the new argument on success and null on failure. */
6088
6089static rtx
6090mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6091{
6092 if (regnum->count == 0)
6093 return arg;
6094
6095 if (GET_CODE (arg) != CONST_INT
6096 || INTVAL (arg) < 0
6097 || INTVAL (arg) >= regnum->count)
6098 return 0;
6099
6100 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6101}
6102
6103
6104/* Try to make intrinsic argument ARG match the given operand.
6105 UNSIGNED_P is true if the argument has an unsigned type. */
6106
static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* No legitimization worked; the caller reports the error.  */
  return 0;
}
6161
6162
6163/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6164 function FNNAME. OPERAND describes the operand to which ARGNUM
6165 is mapped. */
6166
static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  /* For a constant argument bound to a known immediate predicate we
     can say exactly which range or alignment constraint failed.  */
  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  /* Otherwise fall back to a generic type mismatch diagnostic.  */
  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6193
/* Expand a call EXP to a MeP coprocessor intrinsic builtin: pick an
   available implementation, legitimize the arguments against its insn
   operands, emit the insn, and copy output operands back.  Returns
   TARGET (possibly unused by the caller) or NULL_RTX after reporting
   an error.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode. */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  if (cgen_insn->cret_p)
    {
      /* The first operand is the (possibly by-reference) return value;
	 shift user-visible arguments along by one.  */
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      /* NOTE(review): the result of this call is discarded — it looks
	 vestigial; confirm before removing.  */
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example. */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  /* By-reference operand: dereference the pointer argument.  */
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Allocate the implicit return-value operand(s).  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6373
/* MeP advertises no vector modes to the middle end; all vector support
   goes through the coprocessor intrinsics instead.  */
static bool
mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6379\f
6380/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6381 a global register. */
6382
be83dbcd 6383static bool
6384global_reg_mentioned_p_1 (const_rtx x)
46222c18 6385{
6386 int regno;
46222c18 6387
6388 switch (GET_CODE (x))
6389 {
6390 case SUBREG:
6391 if (REG_P (SUBREG_REG (x)))
6392 {
6393 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6394 && global_regs[subreg_regno (x)])
be83dbcd 6395 return true;
6396 return false;
46222c18 6397 }
6398 break;
6399
6400 case REG:
6401 regno = REGNO (x);
6402 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
be83dbcd 6403 return true;
6404 return false;
46222c18 6405
6406 case CALL:
6407 /* A non-constant call might use a global register. */
be83dbcd 6408 return true;
46222c18 6409
6410 default:
6411 break;
6412 }
6413
be83dbcd 6414 return false;
46222c18 6415}
6416
6417/* Returns nonzero if X mentions a global register. */
6418
be83dbcd 6419static bool
46222c18 6420global_reg_mentioned_p (rtx x)
6421{
6422 if (INSN_P (x))
6423 {
6424 if (CALL_P (x))
6425 {
6426 if (! RTL_CONST_OR_PURE_CALL_P (x))
be83dbcd 6427 return true;
46222c18 6428 x = CALL_INSN_FUNCTION_USAGE (x);
6429 if (x == 0)
be83dbcd 6430 return false;
46222c18 6431 }
6432 else
6433 x = PATTERN (x);
6434 }
6435
be83dbcd 6436 subrtx_iterator::array_type array;
6437 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6438 if (global_reg_mentioned_p_1 (*iter))
6439 return true;
6440 return false;
46222c18 6441}
6442/* Scheduling hooks for VLIW mode.
6443
6444 Conceptually this is very simple: we have a two-pack architecture
6445 that takes one core insn and one coprocessor insn to make up either
6446 a 32- or 64-bit instruction word (depending on the option bit set in
6447 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6448 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6449 and one 48-bit cop insn or two 32-bit core/cop insns.
6450
6451 In practice, instruction selection will be a bear. Consider in
6452 VL64 mode the following insns
6453
6454 add $1, 1
6455 cmov $cr0, $0
6456
6457 these cannot pack, since the add is a 16-bit core insn and cmov
6458 is a 32-bit cop insn. However,
6459
6460 add3 $1, $1, 1
6461 cmov $cr0, $0
6462
6463 packs just fine. For good VLIW code generation in VL64 mode, we
6464 will have to have 32-bit alternatives for many of the common core
6465 insns. Not implemented. */
6466
/* Adjust the scheduler's view of COST for the dependence LINK between
   INSN and DEP_INSN (presumably the TARGET_SCHED_ADJUST_COST hook —
   the hook registration is outside this chunk; confirm there).  */
static int
mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6513
6514/* ??? We don't properly compute the length of a load/store insn,
6515 taking into account the addressing mode. */
6516
6517static int
6518mep_issue_rate (void)
6519{
6520 return TARGET_IVC2 ? 3 : 2;
6521}
6522
6523/* Return true if function DECL was declared with the vliw attribute. */
6524
6525bool
6526mep_vliw_function_p (tree decl)
6527{
6528 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6529}
6530
b24ef467 6531static rtx_insn *
6532mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6533 int length)
46222c18 6534{
6535 int i;
6536
6537 for (i = nready - 1; i >= 0; --i)
6538 {
b24ef467 6539 rtx_insn *insn = ready[i];
46222c18 6540 if (recog_memoized (insn) >= 0
6541 && get_attr_slot (insn) == slot
6542 && get_attr_length (insn) == length)
6543 return insn;
6544 }
6545
b24ef467 6546 return NULL;
46222c18 6547}
6548
6549static void
b24ef467 6550mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
46222c18 6551{
6552 int i;
6553
6554 for (i = 0; i < nready; ++i)
6555 if (ready[i] == insn)
6556 {
6557 for (; i < nready - 1; ++i)
6558 ready[i] = ready[i + 1];
6559 ready[i] = insn;
6560 return;
6561 }
6562
6563 gcc_unreachable ();
6564}
6565
/* Dump a one-line description of INSN — its insn code, uid, pattern
   name, and the VLIW slots it can occupy — to DUMP, for scheduler
   debugging.  */
static void
mep_print_sched_insn (FILE *dump, rtx_insn *insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unnamed slot combination: print the raw attribute value.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6609
/* Implement TARGET_SCHED_REORDER.  In VLIW functions, try to find a
   core/coprocessor pair in the ready list that can be bundled into a
   single 32-bit (or 64-bit with -mvl64) VLIW word, and move the pair
   to the front so both issue this cycle.  Returns the number of insns
   the scheduler may issue.  (The ATTRIBUTE_UNUSED markers merely
   silence warnings in configurations where the dump code is inactive;
   the parameters are used when dumping.)  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx_insn *core_insn, *cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Bundling only happens in VLIW functions, and needs at least two
     ready insns to have anything to pair.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  /* First preference: 16-bit core insn + coprocessor insn filling the
     rest of the bundle (48-bit cop for VL64, 16-bit cop otherwise).  */
  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  /* VL64 alternative: 32-bit core + 32-bit coprocessor insn.  */
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6658
b85b0430 6659/* Return true if X contains a register that is set by insn PREV. */
46222c18 6660
b85b0430 6661static bool
6662mep_store_find_set (const_rtx x, const rtx_insn *prev)
46222c18 6663{
b85b0430 6664 subrtx_iterator::array_type array;
6665 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6666 if (REG_P (x) && reg_set_p (x, prev))
6667 return true;
6668 return false;
46222c18 6669}
6670
/* Like mep_store_bypass_p, but takes a pattern as the second argument,
   not the containing insn.  Returns true if PAT is a store whose
   address calculation has no true dependence on PREV.  */

static bool
mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* The bypass applies if any element of the PARALLEL qualifies.
	 NOTE(review): the as_a <rtx_insn *> cast treats a PARALLEL
	 element (a pattern) as an insn before mep_store_data_bypass_p
	 re-extracts its pattern -- looks questionable; confirm the
	 elements here are in fact insns.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev,
				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      /* Operand 0 is the store data, so start checking at operand 1:
	 only the address operands may not depend on PREV.  */
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (mep_store_find_set (XVECEXP (src, 0, i), prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !mep_store_find_set (SET_DEST (pat), prev);
}
6715
6716/* Return true if INSN is a store instruction and if the store address
6717 has no true dependence on PREV. */
6718
6719bool
d8691ecc 6720mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
46222c18 6721{
6722 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6723}
6724
/* Return true if, apart from HI/LO, there are no true dependencies
   between multiplication instructions PREV and INSN.  */

bool
mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
{
  rtx pat;

  pat = PATTERN (insn);
  /* For PARALLELs, only the first element (the main SET) is examined;
     the remaining elements are clobbers/side effects.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) != SET)
    return false;
  /* Walk every register read by the source; a write by PREV to any of
     them other than HI/LO defeats the bypass.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x)
	  && REGNO (x) != LO_REGNO
	  && REGNO (x) != HI_REGNO
	  && reg_set_p (x, prev))
	return false;
    }
  return true;
}
6750
/* Return true if INSN is an ldc instruction that issues to the
   MeP-h1 integer pipeline.  This is true for instructions that
   read from PSW, LP, SAR, HI and LO.  */

bool
mep_ipipe_ldc_p (rtx_insn *insn)
{
  rtx pat, src;

  pat = PATTERN (insn);

  /* Cope with instrinsics that set both a hard register and its shadow.
     The set of the hard register comes first.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == SET)
    {
      src = SET_SRC (pat);

      /* Cope with intrinsics.  The first operand to the unspec is
	 the source register.  */
      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
	src = XVECEXP (src, 0, 0);

      if (REG_P (src))
	switch (REGNO (src))
	  {
	  /* Reads of these control registers go down the integer
	     pipeline; any other source register falls through to the
	     "false" return below.  */
	  case PSW_REGNO:
	  case LP_REGNO:
	  case SAR_REGNO:
	  case HI_REGNO:
	  case LO_REGNO:
	    return true;
	  }
    }
  return false;
}
6789
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The SEQUENCE
     insn itself links into the main chain; CORE_INSN and COP link to
     each other and to the insns surrounding the SEQUENCE.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  BImode on an
     insn marks it as the bundled (non-first) half.  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6837
6838/* A helper routine for ms1_insn_dependent_p called through note_stores. */
6839
6840static void
6841mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6842{
6843 rtx * pinsn = (rtx *) data;
6844
6845 if (*pinsn && reg_mentioned_p (x, *pinsn))
6846 *pinsn = NULL_RTX;
6847}
6848
6849/* Return true if anything in insn X is (anti,output,true) dependent on
6850 anything in insn Y. */
6851
6852static int
6853mep_insn_dependent_p (rtx x, rtx y)
6854{
6855 rtx tmp;
6856
6857 gcc_assert (INSN_P (x));
6858 gcc_assert (INSN_P (y));
6859
6860 tmp = PATTERN (y);
6861 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6862 if (tmp == NULL_RTX)
6863 return 1;
6864
6865 tmp = PATTERN (x);
6866 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6867 if (tmp == NULL_RTX)
6868 return 1;
6869
6870 return 0;
6871}
6872
6873static int
d8691ecc 6874core_insn_p (rtx_insn *insn)
46222c18 6875{
6876 if (GET_CODE (PATTERN (insn)) == USE)
6877 return 0;
6878 if (get_attr_slot (insn) == SLOT_CORE)
6879 return 1;
6880 return 0;
6881}
6882
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of its position and re-linking it just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      /* A non-insn (label, jump, call) ends any bundle in progress.  */
      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the rest of this bundle (the following VOIDmode
		 insns) looking for a core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop above doesn't test the final LAST.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      /* Non-IVC2 path: pair each COP insn with the preceding core insn
	 when sizes match and there is no dependence; otherwise pad the
	 COP insn with an explicit core nop as needed.  */
      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* Can't pair with LAST: bundle with a nop of the right
		 size instead (a full-width cop insn needs none).  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7076
7077
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  rtx newop[10];
  int i;

  /* Map the intrinsic onto a target insn; fail if it isn't available
     in the current configuration.  */
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  for (i = 0; i < idata->n_operands; i++)
    {
      /* Convert each argument to the mode the insn expects, then
	 reject the whole intrinsic if any converted operand fails
	 its predicate.  */
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* The generator is always called with nine operand slots; entries
     past n_operands are unused by the generator.
     NOTE(review): slots beyond n_operands are read uninitialized here
     -- presumably harmless since the generator ignores them, but
     confirm.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
7107
7108
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   This is currently a stub: it always reports failure, leaving the
   caller to fall back on its generic expansion.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7120
7121
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Like mep_expand_unary_intrinsic, this is currently a stub that
   always reports failure.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7138
/* Implement TARGET_RTX_COSTS.  Provide MeP-specific costs for
   CONST_INT, SYMBOL_REF and MULT; return false for everything else
   so the generic cost machinery takes over.  */

static bool
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int *total,
	      bool ATTRIBUTE_UNUSED speed_t)
{
  switch (code)
    {
    case CONST_INT:
      /* Small immediates are free; 16-bit-range immediates cost 1;
	 anything larger costs 3.
	 NOTE(review): the "< 127" bound excludes 127 while -128 is
	 included -- looks like an off-by-one for a signed 8-bit range;
	 confirm against the ISA immediate encodings before changing.  */
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
	*total = 0;
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
	*total = 1;
      else
	*total = 3;
      return true;

    case SYMBOL_REF:
      /* Symbols are cheap when optimizing for size (encourage their
	 use over loaded constants), one insn otherwise.  */
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Multiplication by a constant is costed higher than by a
	 register.  */
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
		? COSTS_N_INSNS (3)
		: COSTS_N_INSNS (2));
      return true;
    }
  /* Not handled here; use the default costs.  */
  return false;
}
7167
/* Implement TARGET_ADDRESS_COST.  All legitimate addresses are
   considered equally cheap on MeP.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7176
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections: the "based"/"tiny"/"far" data sections used by the
   section-placement attributes, and the VLIW/core text sections.  */

static void
mep_asm_init_sections (void)
{
  /* Data reachable from the TP base register.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* "Far" data needs full 32-bit addressing.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW code sections carry the "v" flag and switch the assembler
     into .vliw mode.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Far text is ordinary core code.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7d86c715 7221\f
/* Initialize the GCC target structure.  Each pair below overrides one
   target hook with the MeP implementation defined in this file.  */

/* Assembly output and attribute handling.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Scheduling hooks (see mep_adjust_cost/mep_issue_rate/mep_sched_reorder
   above).  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* Calling convention and varargs hooks.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mep.h"
46222c18 7306
7307#include "gt-mep.h"