]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/mep/mep.c
move many gc hashtab to hash_table
[thirdparty/gcc.git] / gcc / config / mep / mep.c
CommitLineData
7acf4da6 1/* Definitions for Toshiba Media Processor
23a5b65a 2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
7acf4da6
DD
3 Contributed by Red Hat, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
d8a2d370
DN
27#include "varasm.h"
28#include "calls.h"
29#include "stringpool.h"
30#include "stor-layout.h"
7acf4da6
DD
31#include "regs.h"
32#include "hard-reg-set.h"
7acf4da6
DD
33#include "insn-config.h"
34#include "conditions.h"
35#include "insn-flags.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "recog.h"
40#include "obstack.h"
41#include "tree.h"
42#include "expr.h"
43#include "except.h"
44#include "function.h"
45#include "optabs.h"
46#include "reload.h"
47#include "tm_p.h"
48#include "ggc.h"
718f9c0f 49#include "diagnostic-core.h"
7acf4da6
DD
50#include "target.h"
51#include "target-def.h"
52#include "langhooks.h"
53#include "df.h"
2fb9a547
AM
54#include "hash-table.h"
55#include "vec.h"
56#include "basic-block.h"
57#include "tree-ssa-alias.h"
58#include "internal-fn.h"
59#include "gimple-fold.h"
60#include "tree-eh.h"
61#include "gimple-expr.h"
62#include "is-a.h"
18f429e2 63#include "gimple.h"
45b0be94 64#include "gimplify.h"
96e45421 65#include "opts.h"
7ee2468b 66#include "dumpfile.h"
9b2b7279 67#include "builtins.h"
7acf4da6
DD
68
69/* Structure of this file:
70
71 + Command Line Option Support
72 + Pattern support - constraints, predicates, expanders
73 + Reload Support
74 + Costs
75 + Functions to save and restore machine-specific function data.
76 + Frame/Epilog/Prolog Related
77 + Operand Printing
78 + Function args in registers
79 + Handle pipeline hazards
80 + Handle attributes
81 + Trampolines
82 + Machine-dependent Reorg
83 + Builtins. */
84
85/* Symbol encodings:
86
87 Symbols are encoded as @ <char> . <name> where <char> is one of these:
88
89 b - based
90 t - tiny
91 n - near
92 f - far
93 i - io, near
94 I - io, far
95 c - cb (control bus) */
96
/* Per-function machine-dependent state, created lazily by
   mep_init_machine_status and reached via cfun->machine.  */
struct GTY(()) machine_function
{
  /* NOTE(review): presumably caches frame_pointer_needed for this
     function -- confirm against the prologue/frame code.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Register-save area bookkeeping: total size, the slot offset
     assigned to each hard register, and whether each is saved.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
135
136#define MEP_CONTROL_REG(x) \
137 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
138
7acf4da6
DD
139static GTY(()) section * based_section;
140static GTY(()) section * tinybss_section;
141static GTY(()) section * far_section;
142static GTY(()) section * farbss_section;
143static GTY(()) section * frodata_section;
144static GTY(()) section * srodata_section;
145
820ca276
DD
146static GTY(()) section * vtext_section;
147static GTY(()) section * vftext_section;
148static GTY(()) section * ftext_section;
149
7acf4da6
DD
150static void mep_set_leaf_registers (int);
151static bool symbol_p (rtx);
152static bool symbolref_p (rtx);
153static void encode_pattern_1 (rtx);
154static void encode_pattern (rtx);
155static bool const_in_range (rtx, int, int);
aa4a0061
DM
156static void mep_rewrite_mult (rtx_insn *, rtx);
157static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
158static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
159static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
7acf4da6
DD
160static bool move_needs_splitting (rtx, rtx, enum machine_mode);
161static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
162static bool mep_nongeneral_reg (rtx);
163static bool mep_general_copro_reg (rtx);
164static bool mep_nonregister (rtx);
165static struct machine_function* mep_init_machine_status (void);
166static rtx mep_tp_rtx (void);
167static rtx mep_gp_rtx (void);
168static bool mep_interrupt_p (void);
169static bool mep_disinterrupt_p (void);
170static bool mep_reg_set_p (rtx, rtx);
171static bool mep_reg_set_in_function (int);
172static bool mep_interrupt_saved_reg (int);
173static bool mep_call_saves_register (int);
aa4a0061 174static rtx_insn *F (rtx_insn *);
7acf4da6 175static void add_constant (int, int, int, int);
aa4a0061 176static rtx_insn *maybe_dead_move (rtx, rtx, bool);
7acf4da6
DD
177static void mep_reload_pointer (int, const char *);
178static void mep_start_function (FILE *, HOST_WIDE_INT);
179static bool mep_function_ok_for_sibcall (tree, tree);
180static int unique_bit_in (HOST_WIDE_INT);
181static int bit_size_for_clip (HOST_WIDE_INT);
182static int bytesize (const_tree, enum machine_mode);
183static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
184static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
185static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
186static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
187static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
188static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
189static bool mep_function_attribute_inlinable_p (const_tree);
5cec9f59 190static bool mep_can_inline_p (tree, tree);
7acf4da6
DD
191static bool mep_lookup_pragma_disinterrupt (const char *);
192static int mep_multiple_address_regions (tree, bool);
193static int mep_attrlist_to_encoding (tree, tree);
194static void mep_insert_attributes (tree, tree *);
195static void mep_encode_section_info (tree, rtx, int);
196static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
197static void mep_unique_section (tree, int);
198static unsigned int mep_section_type_flags (tree, const char *, int);
199static void mep_asm_named_section (const char *, unsigned int, tree);
200static bool mep_mentioned_p (rtx, rtx, int);
aa4a0061
DM
201static void mep_reorg_regmove (rtx_insn *);
202static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
203 bool, bool);
204static void mep_reorg_repeat (rtx_insn *);
205static bool mep_invertable_branch_p (rtx_insn *);
206static void mep_invert_branch (rtx_insn *, rtx_insn *);
207static void mep_reorg_erepeat (rtx_insn *);
208static void mep_jmp_return_reorg (rtx_insn *);
209static void mep_reorg_addcombine (rtx_insn *);
7acf4da6
DD
210static void mep_reorg (void);
211static void mep_init_intrinsics (void);
212static void mep_init_builtins (void);
213static void mep_intrinsic_unavailable (int);
214static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
215static bool mep_get_move_insn (int, const struct cgen_insn **);
216static rtx mep_convert_arg (enum machine_mode, rtx);
217static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
218static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
219static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
220static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
ac44248e 221static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
7acf4da6 222static int mep_issue_rate (void);
ce1ce33a
DM
223static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
224static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
225static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
aa4a0061
DM
226static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
227static void mep_bundle_insns (rtx_insn *);
68f932c4 228static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
b413068c 229static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
d5cc9181 230static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
7acf4da6 231 tree, int *, int);
d5cc9181 232static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
7acf4da6 233 const_tree, bool);
d5cc9181 234static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
0851c6e3 235 const_tree, bool);
d5cc9181 236static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
0851c6e3 237 const_tree, bool);
7acf4da6 238static bool mep_vector_mode_supported_p (enum machine_mode);
7acf4da6
DD
239static rtx mep_allocate_initial_value (rtx);
240static void mep_asm_init_sections (void);
241static int mep_comp_type_attributes (const_tree, const_tree);
242static bool mep_narrow_volatile_bitfield (void);
243static rtx mep_expand_builtin_saveregs (void);
244static tree mep_build_builtin_va_list (void);
245static void mep_expand_va_start (tree, rtx);
12a54f54 246static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
7b5cbb57 247static bool mep_can_eliminate (const int, const int);
5efd84c5 248static void mep_conditional_register_usage (void);
87138d8d 249static void mep_trampoline_init (rtx, tree, rtx);
7acf4da6 250\f
7acf4da6
DD
251#define WANT_GCC_DEFINITIONS
252#include "mep-intrin.h"
253#undef WANT_GCC_DEFINITIONS
254
255\f
256/* Command Line Option Support. */
257
/* Per-hard-register flag array filled in by mep_set_leaf_registers.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
282
7acf4da6
DD
283\f
284static void
285mep_set_leaf_registers (int enable)
286{
287 int i;
288
289 if (mep_leaf_registers[0] != enable)
290 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
291 mep_leaf_registers[i] = enable;
292}
293
5efd84c5 294static void
ce959463 295mep_conditional_register_usage (void)
7acf4da6
DD
296{
297 int i;
298
299 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
300 {
301 fixed_regs[HI_REGNO] = 1;
302 fixed_regs[LO_REGNO] = 1;
303 call_used_regs[HI_REGNO] = 1;
304 call_used_regs[LO_REGNO] = 1;
305 }
306
307 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
308 global_regs[i] = 1;
309}
310
c5387660
JM
/* Implement TARGET_OPTION_OVERRIDE.  Process options deferred from the
   .opt machinery (currently only -mivc2), diagnose unsupported option
   combinations, and derive dependent defaults such as
   mep_tiny_cutoff.  */

static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* Expose hard registers 48..79 (presumably the IVC2
	       coprocessor bank -- confirm against mep.h): unfix them
	       and make them call-used, except 54 and 55 which stay
	       call-saved.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Give the coprocessor control registers their IVC2
	       assembler names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* Diagnose option combinations this target does not support.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* -ms and -mm fix the tiny-section cutoff; -ml defaults it to 0
     unless the user gave -mtiny= explicitly.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
404
405/* Pattern Support - constraints, predicates, expanders. */
406
407/* MEP has very few instructions that can refer to the span of
408 addresses used by symbols, so it's common to check for them. */
409
410static bool
411symbol_p (rtx x)
412{
413 int c = GET_CODE (x);
414
415 return (c == CONST_INT
416 || c == CONST
417 || c == SYMBOL_REF);
418}
419
420static bool
421symbolref_p (rtx x)
422{
423 int c;
424
425 if (GET_CODE (x) != MEM)
426 return false;
427
428 c = GET_CODE (XEXP (x, 0));
429 return (c == CONST_INT
430 || c == CONST
431 || c == SYMBOL_REF);
432}
433
434/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
435
/* True if R is a general register, or (when !STRICT) could still
   become one: the arg pointer or a pseudo.  */
#define GEN_REG(R, STRICT) \
 (GR_REGNO_P (R) \
 || (!STRICT \
 && ((R) == ARG_POINTER_REGNUM \
 || (R) >= FIRST_PSEUDO_REGISTER)))

/* A one-character-per-node encoding of an rtx's shape (filled in by
   encode_pattern_1), plus the rtx found at each position.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
#define RTX_IS(x) (strcmp (pattern, x) == 0)

/* Append a code character for X, and recursively for its operands,
   to `pattern': r=reg, m=mem, +=plus, L=lo_sum, H=high, s=symbol_ref,
   l=label_ref, i=int/double constant, uN=unspec number N, U=use,
   ?=anything else.  Each visited rtx is recorded in patternr.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; mark overflow with '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address is encoded the same way as a
	 CONST's body.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
512
513static void
514encode_pattern (rtx x)
515{
516 patternp = pattern;
517 encode_pattern_1 (x);
518 *patternp = 0;
519}
520
/* Return the section-tag character encoded in X's symbol name (see
   the "Symbol encodings" table at the top of this file), or 0 if X
   contains no encoded symbol.  MEM, CONST, UNSPEC and PLUS-constant
   wrappers are stripped to reach the underlying SYMBOL_REF.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      /* The io tags collapse to the plain near/far tags.  */
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* io, far */
	  return 'n'; /* io, near */
	}
      return name[1];
    }
  return 0;
}
562
/* Return the (smallest useful) register class containing hard
   register REGNO; coprocessor registers are additionally matched
   against the user-definable USER0..USER3 subclasses.  */
int
mep_regno_reg_class (int regno)
{
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are addressable tp-relative.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* Only use class I if none of its subclasses also
		 contains REGNO.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should be left.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
612
7acf4da6
DD
613static bool
614const_in_range (rtx x, int minv, int maxv)
615{
616 return (GET_CODE (x) == CONST_INT
617 && INTVAL (x) >= minv
618 && INTVAL (x) <= maxv);
619}
620
621/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
622 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
623 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
624 at the end of the insn stream. */
625
626rtx
aa4a0061 627mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
7acf4da6
DD
628{
629 if (rtx_equal_p (dest, src1))
630 return src2;
631 else if (rtx_equal_p (dest, src2))
632 return src1;
633 else
634 {
635 if (insn == 0)
636 emit_insn (gen_movsi (copy_rtx (dest), src1));
637 else
638 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
639 return src2;
640 }
641}
642
643/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
644 Change the last element of PATTERN from (clobber (scratch:SI))
645 to (clobber (reg:SI HI_REGNO)). */
646
647static void
aa4a0061 648mep_rewrite_mult (rtx_insn *insn, rtx pattern)
7acf4da6
DD
649{
650 rtx hi_clobber;
651
652 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
653 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
654 PATTERN (insn) = pattern;
655 INSN_CODE (insn) = -1;
656}
657
658/* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
659 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
660 store the result in DEST if nonnull. */
661
662static void
aa4a0061 663mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
7acf4da6
DD
664{
665 rtx lo, pattern;
666
667 lo = gen_rtx_REG (SImode, LO_REGNO);
668 if (dest)
669 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
670 mep_mulr_source (insn, dest, src1, src2));
671 else
672 pattern = gen_mulsi3_lo (lo, src1, src2);
673 mep_rewrite_mult (insn, pattern);
674}
675
676/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
677 SRC3 into $lo, then use either madd or maddr. The move into $lo will
678 be deleted by a peephole2 if SRC3 is already in $lo. */
679
680static void
aa4a0061 681mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
7acf4da6
DD
682{
683 rtx lo, pattern;
684
685 lo = gen_rtx_REG (SImode, LO_REGNO);
686 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
687 if (dest)
688 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
689 mep_mulr_source (insn, dest, src1, src2),
690 copy_rtx (lo));
691 else
692 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
693 mep_rewrite_mult (insn, pattern);
694}
695
696/* Return true if $lo has the same value as integer register GPR when
697 instruction INSN is reached. If necessary, rewrite the instruction
698 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
699 rtx for (reg:SI LO_REGNO).
700
701 This function is intended to be used by the peephole2 pass. Since
702 that pass goes from the end of a basic block to the beginning, and
703 propagates liveness information on the way, there is no need to
704 update register notes here.
705
706 If GPR_DEAD_P is true on entry, and this function returns true,
707 then the caller will replace _every_ use of GPR in and after INSN
708 with LO. This means that if the instruction that sets $lo is a
709 mulr- or maddr-type instruction, we can rewrite it to use mul or
710 madd instead. In combination with the copy progagation pass,
711 this allows us to replace sequences like:
712
713 mov GPR,R1
714 mulr GPR,R2
715
716 with:
717
718 mul R1,R2
719
720 if GPR is no longer used. */
721
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN looking for the instruction that set
     GPR, stopping at the start of the basic block.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR was set by a multiply; rewrite it to target $lo
		   (and GPR too, unless GPR is dead from here on).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Likewise for a multiply-accumulate.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These forms already write $lo; $lo equals GPR exactly
	       when the GPR operand is ours.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       invalidates the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR here means not every use of GPR can be
	       replaced by LO (see the function comment).  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
775
776/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
777
778bool
aa4a0061 779mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
7acf4da6
DD
780{
781 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
782 extract_insn (insn);
783 return result;
784}
785
786/* Return true if SET can be turned into a post-modify load or store
787 that adds OFFSET to GPR. In other words, return true if SET can be
788 changed into:
789
790 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
791
792 It's OK to change SET to an equivalent operation in order to
793 make it match. */
794
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign extension of the loaded value.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     dereference of GPR.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned for the access size.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  Copy first so shared rtl is not modified
     in place.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
856
857/* Return the effect of frame-related instruction INSN. */
858
859static rtx
aa4a0061 860mep_frame_expr (rtx_insn *insn)
7acf4da6
DD
861{
862 rtx note, expr;
863
864 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
865 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
866 RTX_FRAME_RELATED_P (expr) = 1;
867 return expr;
868}
869
870/* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
871 new pattern in INSN1; INSN2 will be deleted by the caller. */
872
static void
mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
{
  rtx expr;

  if (RTX_FRAME_RELATED_P (insn2))
    {
      /* Preserve INSN2's frame effect -- combined with INSN1's, if
	 INSN1 has one too -- as a REG_FRAME_RELATED_EXPR note on the
	 merged insn.  */
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined insn.  */
  INSN_CODE (insn1) = -1;
}
893
894/* SET_INSN is an instruction that adds OFFSET to REG. Go back through
895 the basic block to see if any previous load or store instruction can
896 be persuaded to do SET_INSN as a side-effect. Return true if so. */
897
static bool
mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
{
  rtx_insn *insn;

  /* Scan backwards from SET_INSN to the start of the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Fold SET_INSN's addition into this load/store as a
		 post-modify side effect.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop at any other insn that writes or reads REG, or that
	     may not be reordered around.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
924
925/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
926
927bool
aa4a0061 928mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
7acf4da6
DD
929{
930 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
931 extract_insn (insn);
932 return result;
933}
934
935bool
936mep_allow_clip (rtx ux, rtx lx, int s)
937{
938 HOST_WIDE_INT u = INTVAL (ux);
939 HOST_WIDE_INT l = INTVAL (lx);
940 int i;
941
942 if (!TARGET_OPT_CLIP)
943 return false;
944
945 if (s)
946 {
947 for (i = 0; i < 30; i ++)
948 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
949 && (l == - ((HOST_WIDE_INT) 1 << i)))
950 return true;
951 }
952 else
953 {
954 if (l != 0)
955 return false;
956
957 for (i = 0; i < 30; i ++)
958 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
959 return true;
960 }
961 return false;
962}
963
964bool
965mep_bit_position_p (rtx x, bool looking_for)
966{
967 if (GET_CODE (x) != CONST_INT)
968 return false;
969 switch ((int) INTVAL(x) & 0xff)
970 {
971 case 0x01: case 0x02: case 0x04: case 0x08:
972 case 0x10: case 0x20: case 0x40: case 0x80:
973 return looking_for;
974 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
975 case 0xef: case 0xdf: case 0xbf: case 0x7f:
976 return !looking_for;
977 }
978 return false;
979}
980
981static bool
982move_needs_splitting (rtx dest, rtx src,
983 enum machine_mode mode ATTRIBUTE_UNUSED)
984{
985 int s = mep_section_tag (src);
986
987 while (1)
988 {
989 if (GET_CODE (src) == CONST
990 || GET_CODE (src) == MEM)
991 src = XEXP (src, 0);
992 else if (GET_CODE (src) == SYMBOL_REF
993 || GET_CODE (src) == LABEL_REF
994 || GET_CODE (src) == PLUS)
995 break;
996 else
997 return false;
998 }
999 if (s == 'f'
1000 || (GET_CODE (src) == PLUS
1001 && GET_CODE (XEXP (src, 1)) == CONST_INT
1002 && (INTVAL (XEXP (src, 1)) < -65536
1003 || INTVAL (XEXP (src, 1)) > 0xffffff))
1004 || (GET_CODE (dest) == REG
1005 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1006 return true;
1007 return false;
1008}
1009
1010bool
1011mep_split_mov (rtx *operands, int symbolic)
1012{
1013 if (symbolic)
1014 {
1015 if (move_needs_splitting (operands[0], operands[1], SImode))
1016 return true;
1017 return false;
1018 }
1019
1020 if (GET_CODE (operands[1]) != CONST_INT)
1021 return false;
1022
1023 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1024 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1025 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1026 return false;
1027
1028 if (((!reload_completed && !reload_in_progress)
1029 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1030 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1031 return false;
1032
1033 return true;
1034}
1035
1036/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1037 it to one specific value. So the insn chosen depends on whether
1038 the source and destination modes match. */
1039
1040bool
1041mep_vliw_mode_match (rtx tgt)
1042{
1043 bool src_vliw = mep_vliw_function_p (cfun->decl);
1044 bool tgt_vliw = INTVAL (tgt);
1045
1046 return src_vliw == tgt_vliw;
1047}
1048
a9d1723f
DD
1049/* Like the above, but also test for near/far mismatches. */
1050
1051bool
1052mep_vliw_jmp_match (rtx tgt)
1053{
1054 bool src_vliw = mep_vliw_function_p (cfun->decl);
1055 bool tgt_vliw = INTVAL (tgt);
1056
1057 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1058 return false;
1059
1060 return src_vliw == tgt_vliw;
1061}
1062
7acf4da6 1063bool
84034c69 1064mep_multi_slot (rtx_insn *x)
7acf4da6
DD
1065{
1066 return get_attr_slot (x) == SLOT_MULTI;
1067}
1068
1a627b35 1069/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
7acf4da6 1070
1a627b35
RS
1071static bool
1072mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
5ba863d7
DD
1073{
1074 /* We can't convert symbol values to gp- or tp-rel values after
1075 reload, as reload might have used $gp or $tp for other
1076 purposes. */
1077 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1078 {
1079 char e = mep_section_tag (x);
1080 return (e != 't' && e != 'b');
1081 }
1082 return 1;
1083}
1084
7acf4da6
DD
/* Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

/* Worker for TARGET_LEGITIMATE_ADDRESS_P: return true if X is a
   valid address for MODE.  STRICT is nonzero when only hard
   registers may serve as base registers.  Accepted forms are
   %lo(sym)[reg], plain [reg], [reg + 16-bit const],
   [reg + gp/tp-rel unspec], bare call symbols (VOIDmode), and a
   restricted set of absolute constant addresses.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- only for accesses that fit in a single insn.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base register plus signed 16-bit displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base register plus a gp-rel/tp-rel UNSPEC, optionally with an
     extra constant offset folded inside the CONST.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far ('f') symbols must always go through a register.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses arrive with VOIDmode; a bare symbol is fine.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized absolute addresses: any legitimate non-gp/tp-rel
     symbolic constant, or an integer that fits the word-aligned
     20-bit absolute form.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1195
/* Worker for LEGITIMIZE_RELOAD_ADDRESS.  Return 1 (after pushing a
   reload) if *X was handled here, 0 to let the generic reload code
   have a go.  TYPE_I is really an enum reload_type passed as int.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1237
/* Return the encoded length in bytes (2 or 4) of the address part of
   core load/store INSN, whose memory operand is operand OPN of its
   single SET.  Short (2-byte) encodings exist for plain [reg],
   small SP-relative word accesses, and TP-relative accesses whose
   data register is $0-$7.  */

int
mep_core_address_length (rtx_insn *insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);    /* The data (non-memory) operand.  */
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* Word access with a 7-bit, 4-aligned displacement.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative forms only encode data registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A tp-rel unspec displacement.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* An unsigned 7-bit displacement aligned to the access size.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1284
1285int
aa4a0061 1286mep_cop_address_length (rtx_insn *insn, int opn)
7acf4da6
DD
1287{
1288 rtx set = single_set (insn);
1289 rtx mem = XEXP (set, opn);
1290 rtx addr = XEXP (mem, 0);
1291
1292 if (GET_CODE (mem) != MEM)
1293 return 2;
1294 if (register_operand (addr, Pmode))
1295 return 2;
1296 if (GET_CODE (addr) == POST_INC)
1297 return 2;
1298
1299 return 4;
1300}
1301
#define DEBUG_EXPAND_MOV 0

/* Expand a move of MODE.  Handles rewriting gp-rel ('t') and tp-rel
   ('b') tagged symbols relative to $gp/$tp, far ('f') symbols,
   control-register staging, and the movh top/bottom symbol split.
   Returns true if the move was fully emitted here, false if the
   caller should emit the ordinary move insn.  */

bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, mem-to-mem moves must go through a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* Once reload may have repurposed $gp or $tp, tagged symbols
	 can no longer be rewritten relative to them; note that and
	 fall through to treating them as plain symbols below.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {
	      /* Rewrite the symbolic source as $tp/$gp plus a
		 TPREL/GPREL unspec, keeping any constant offset.  */
	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already in the rewritten form.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite tagged MEM addresses on either side.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control-register <-> memory moves must stage the value in a
     general register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols, or of non-word size, go through a
     register holding the address.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* gp/tp-rel sources were already rewritten above; don't split them.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load remaining symbolic sources with the movh top/bottom pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Same for symbolic MEM addresses, loading the address into a
     temporary first.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1494
/* Cases where the pattern can't be made to use at all.  */

/* Predicate for the mov patterns: return true only if this operand
   pair is acceptable as a single move insn.  HIGH sources, far
   symbols, (before reload) gp/tp-tagged symbols, and moves not
   involving at least one general register are rejected.  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1561
#define DEBUG_SPLIT_WIDE_MOVE 0

/* Split a wide (double-word) move into two word moves.  On entry,
   operands[0]/operands[1] are the destination and source; on exit,
   operands[2]/operands[3] are the halves to move first (dest/src)
   and operands[4]/operands[5] the halves to move second, swapped
   when needed so a source half is never clobbered before it is
   read.  */

void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A single 64-bit coprocessor register: the low word
		   is the register in SImode, the high word is bits
		   32..63 extracted from the DImode register.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; endianness selects which regno
		   holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1643
1644/* Emit a setcc instruction in its entirity. */
1645
1646static bool
1647mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1648{
1649 rtx tmp;
1650
1651 switch (code)
1652 {
1653 case GT:
1654 case GTU:
1655 tmp = op1, op1 = op2, op2 = tmp;
1656 code = swap_condition (code);
1657 /* FALLTHRU */
1658
1659 case LT:
1660 case LTU:
1661 op1 = force_reg (SImode, op1);
1662 emit_insn (gen_rtx_SET (VOIDmode, dest,
1663 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1664 return true;
1665
1666 case EQ:
1667 if (op2 != const0_rtx)
1668 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1669 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1670 return true;
1671
1672 case NE:
1673 /* Branchful sequence:
1674 mov dest, 0 16-bit
1675 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1676 mov dest, 1 16-bit
1677
1678 Branchless sequence:
1679 add3 tmp, op1, -op2 32-bit (or mov + sub)
1680 sltu3 tmp, tmp, 1 16-bit
1681 xor3 dest, tmp, 1 32-bit
1682 */
1683 if (optimize_size && op2 != const0_rtx)
1684 return false;
1685
1686 if (op2 != const0_rtx)
1687 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1688
1689 op2 = gen_reg_rtx (SImode);
1690 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1691
1692 emit_insn (gen_rtx_SET (VOIDmode, dest,
1693 gen_rtx_XOR (SImode, op2, const1_rtx)));
1694 return true;
1695
1696 case LE:
1697 if (GET_CODE (op2) != CONST_INT
1698 || INTVAL (op2) == 0x7ffffff)
1699 return false;
1700 op2 = GEN_INT (INTVAL (op2) + 1);
1701 return mep_expand_setcc_1 (LT, dest, op1, op2);
1702
1703 case LEU:
1704 if (GET_CODE (op2) != CONST_INT
1705 || INTVAL (op2) == -1)
1706 return false;
1707 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1708 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1709
1710 case GE:
1711 if (GET_CODE (op2) != CONST_INT
1712 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1713 return false;
1714 op2 = GEN_INT (INTVAL (op2) - 1);
1715 return mep_expand_setcc_1 (GT, dest, op1, op2);
1716
1717 case GEU:
1718 if (GET_CODE (op2) != CONST_INT
1719 || op2 == const0_rtx)
1720 return false;
1721 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1722 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1723
1724 default:
1725 gcc_unreachable ();
1726 }
1727}
1728
1729bool
1730mep_expand_setcc (rtx *operands)
1731{
1732 rtx dest = operands[0];
1733 enum rtx_code code = GET_CODE (operands[1]);
1734 rtx op0 = operands[2];
1735 rtx op1 = operands[3];
1736
1737 return mep_expand_setcc_1 (code, dest, op0, op1);
1738}
1739
/* Expand a conditional-branch comparison.  Rewrite the comparison
   operands[0] of operands[1] and operands[2] into a form the
   hardware branches handle (beq/bne against a register or small
   immediate), materializing slt3/sltu3 results into temporaries
   where needed, and return the comparison rtx to branch on.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* tmp = op0 < op1, then branch on tmp != 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* tmp = op0 < op1, then branch on tmp == 0.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= b is a < b+1 and a > b is a >= b+1, as long as b+1
	 doesn't overflow (b != INT_MAX).  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise compute op1 < op0 and branch on the result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* (unsigned) a < 1 is simply a == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try a <= b directly; otherwise use !(b < a).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try a >= b directly; otherwise use !(a < b).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1852
1853const char *
1854mep_emit_cbranch (rtx *operands, int ne)
1855{
1856 if (GET_CODE (operands[1]) == REG)
1857 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
d839f1eb 1858 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
7acf4da6
DD
1859 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1860 else
1861 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1862}
1863
/* Emit a call insn.  OPERANDS is the operand vector of the call (or
   call_value) pattern; RETURNS_VALUE is 1 when operand 0 is the
   value destination, shifting the address and argument operands up
   by one.  Both $tp and $gp are passed along so their entry values
   stay live across the call.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* Normalize a missing "next arg" operand to zero.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1888\f
/* Aliasing Support. */

/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Only the ($tp + tprel(sym)) and ($gp + gprel(sym)) forms are
     machine specific here.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol wrapped inside the unspec is the base term.  */
  return XVECEXP (term, 0, 0);
}
1925\f
1926/* Reload Support. */
1927
1928/* Return true if the registers in CLASS cannot represent the change from
1929 modes FROM to TO. */
1930
1931bool
1932mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1933 enum reg_class regclass)
1934{
1935 if (from == to)
1936 return false;
1937
1938 /* 64-bit COP regs must remain 64-bit COP regs. */
1939 if (TARGET_64BIT_CR_REGS
1940 && (regclass == CR_REGS
1941 || regclass == LOADABLE_CR_REGS)
1942 && (GET_MODE_SIZE (to) < 8
1943 || GET_MODE_SIZE (from) < 8))
1944 return true;
1945
1946 return false;
1947}
1948
1949#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1950
1951static bool
1952mep_general_reg (rtx x)
1953{
1954 while (GET_CODE (x) == SUBREG)
1955 x = XEXP (x, 0);
1956 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1957}
1958
1959static bool
1960mep_nongeneral_reg (rtx x)
1961{
1962 while (GET_CODE (x) == SUBREG)
1963 x = XEXP (x, 0);
1964 return (GET_CODE (x) == REG
1965 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1966}
1967
1968static bool
1969mep_general_copro_reg (rtx x)
1970{
1971 while (GET_CODE (x) == SUBREG)
1972 x = XEXP (x, 0);
1973 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1974}
1975
1976static bool
1977mep_nonregister (rtx x)
1978{
1979 while (GET_CODE (x) == SUBREG)
1980 x = XEXP (x, 0);
1981 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1982}
1983
#define DEBUG_RELOAD 0

/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
2024
2025/* Copying X to register in RCLASS. */
2026
77b0efff 2027enum reg_class
7acf4da6
DD
2028mep_secondary_input_reload_class (enum reg_class rclass,
2029 enum machine_mode mode ATTRIBUTE_UNUSED,
2030 rtx x)
2031{
2032 int rv = NO_REGS;
2033
2034#if DEBUG_RELOAD
2035 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2036 debug_rtx (x);
2037#endif
2038
2039 if (reg_class_subset_p (rclass, CR_REGS))
2040 rv = mep_secondary_copro_reload_class (rclass, x);
2041 else if (MEP_NONGENERAL_CLASS (rclass)
2042 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2043 rv = GENERAL_REGS;
2044
2045#if DEBUG_RELOAD
2046 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2047#endif
77b0efff 2048 return (enum reg_class) rv;
7acf4da6
DD
2049}
2050
2051/* Copying register in RCLASS to X. */
2052
77b0efff 2053enum reg_class
7acf4da6
DD
2054mep_secondary_output_reload_class (enum reg_class rclass,
2055 enum machine_mode mode ATTRIBUTE_UNUSED,
2056 rtx x)
2057{
2058 int rv = NO_REGS;
2059
2060#if DEBUG_RELOAD
2061 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2062 debug_rtx (x);
2063#endif
2064
2065 if (reg_class_subset_p (rclass, CR_REGS))
2066 rv = mep_secondary_copro_reload_class (rclass, x);
2067 else if (MEP_NONGENERAL_CLASS (rclass)
2068 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2069 rv = GENERAL_REGS;
2070
2071#if DEBUG_RELOAD
2072 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2073#endif
2074
77b0efff 2075 return (enum reg_class) rv;
7acf4da6
DD
2076}
2077
2078/* Implement SECONDARY_MEMORY_NEEDED. */
2079
2080bool
2081mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2082 enum machine_mode mode ATTRIBUTE_UNUSED)
2083{
2084 if (!mep_have_core_copro_moves_p)
2085 {
2086 if (reg_classes_intersect_p (rclass1, CR_REGS)
2087 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2088 return true;
2089 if (reg_classes_intersect_p (rclass2, CR_REGS)
2090 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2091 return true;
2092 if (!mep_have_copro_copro_moves_p
2093 && reg_classes_intersect_p (rclass1, CR_REGS)
2094 && reg_classes_intersect_p (rclass2, CR_REGS))
2095 return true;
2096 }
2097 return false;
2098}
2099
/* Expand a secondary reload for MODE.  operands[0] is the
   destination, operands[1] the source, operands[2] the scratch
   register supplied by the reload pattern.  The selector WHICH
   encodes each side as a decimal digit: +2 for a far ('f') symbol,
   +1 for a non-general (control/coprocessor) register.  (The 00/01/02
   case labels are octal literals but equal their decimal values.)  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here. */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the general-purpose scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2150
2151/* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2152 can be moved directly into registers 0 to 7, but not into the rest.
2153 If so, and if the required class includes registers 0 to 7, restrict
2154 it to those registers. */
2155
2156enum reg_class
2157mep_preferred_reload_class (rtx x, enum reg_class rclass)
2158{
2159 switch (GET_CODE (x))
2160 {
2161 case CONST_INT:
2162 if (INTVAL (x) >= 0x10000
2163 && INTVAL (x) < 0x01000000
2164 && (INTVAL (x) & 0xffff) != 0
2165 && reg_class_subset_p (TPREL_REGS, rclass))
2166 rclass = TPREL_REGS;
2167 break;
2168
2169 case CONST:
2170 case SYMBOL_REF:
2171 case LABEL_REF:
2172 if (mep_section_tag (x) != 'f'
2173 && reg_class_subset_p (TPREL_REGS, rclass))
2174 rclass = TPREL_REGS;
2175 break;
2176
2177 default:
2178 break;
2179 }
2180 return rclass;
2181}
2182\f
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Copro <-> copro with direct copro moves available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Copro <-> copro without direct moves: twice as expensive.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Core <-> copro.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything forced through memory, or between two non-general
     classes, is prohibitively expensive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2222
2223\f
/* Functions to save and restore machine-specific function data. */

/* Hook for init_machine_status: allocate a zeroed, GC-managed
   machine_function record for the function being compiled.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
2231
/* Implement TARGET_ALLOCATE_INITIAL_VALUE: return a stack slot
   (addressed relative to the arg pointer) in which the entry value
   of hard register REG will be saved, reserving a new 4-byte slot on
   first use.  Return NULL_RTX when no slot should be used.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      /* First request for this register: reserve the next slot.  */
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2259
2260rtx
2261mep_return_addr_rtx (int count)
2262{
2263 if (count != 0)
2264 return const0_rtx;
2265
2266 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2267}
2268
/* Return a pseudo holding the function-entry value of $tp, creating
   it on first use.  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2274
/* Return a pseudo holding the function-entry value of $gp, creating
   it on first use.  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2280
/* True if the current function carries the "interrupt" attribute.  The
   answer is cached in cfun->machine->interrupt_handler: 0 = not yet
   computed, 1 = no, 2 = yes.  */

static bool
mep_interrupt_p (void)
{
  if (cfun->machine->interrupt_handler == 0)
    {
      int interrupt_handler
	= (lookup_attribute ("interrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
    }
  return cfun->machine->interrupt_handler == 2;
}
2294
/* True if the current function carries the "disinterrupt" attribute
   (run with interrupts disabled).  Cached like mep_interrupt_p:
   0 = unknown, 1 = no, 2 = yes.  */

static bool
mep_disinterrupt_p (void)
{
  if (cfun->machine->disable_interrupts == 0)
    {
      int disable_interrupts
	= (lookup_attribute ("disinterrupt",
			     DECL_ATTRIBUTES (current_function_decl))
	   != NULL_TREE);
      cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
    }
  return cfun->machine->disable_interrupts == 2;
}
2308
2309\f
2310/* Frame/Epilog/Prolog Related. */
2311
/* True if INSN (an insn or a bare pattern) sets REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register-to-itself copy is a no-op; don't count it as a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2331
2332
/* States cached in cfun->machine->reg_saved[] by mep_call_saves_register.
   UNKNOWN (0) means "not computed yet" (the array is zero-initialized).  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2337
2338static bool
2339mep_reg_set_in_function (int regno)
2340{
aa4a0061
DM
2341 rtx reg;
2342 rtx_insn *insn;
7acf4da6
DD
2343
2344 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2345 return true;
2346
2347 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2348 return true;
2349
2350 push_topmost_sequence ();
2351 insn = get_insns ();
2352 pop_topmost_sequence ();
2353
2354 if (!insn)
2355 return false;
2356
2357 reg = gen_rtx_REG (SImode, regno);
2358
2359 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2360 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2361 return true;
2362 return false;
2363}
2364
/* True if the current function contains a basic asm (ASM_INPUT: an asm
   with no operands), which could touch any register.  The result is
   cached in cfun->machine->asms_without_operands: 0 = unknown,
   1 = no, 2 = yes.  */

static bool
mep_asm_without_operands_p (void)
{
  if (cfun->machine->asms_without_operands == 0)
    {
      rtx_insn *insn;

      push_topmost_sequence ();
      insn = get_insns ();
      pop_topmost_sequence ();

      cfun->machine->asms_without_operands = 1;
      while (insn)
	{
	  if (INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
	    {
	      cfun->machine->asms_without_operands = 2;
	      break;
	    }
	  insn = NEXT_INSN (insn);
	}

    }
  return cfun->machine->asms_without_operands == 2;
}
2391
2392/* Interrupt functions save/restore every call-preserved register, and
2393 any call-used register it uses (or all if it calls any function,
2394 since they may get clobbered there too). Here we check to see
2395 which call-used registers need saving. */
2396
d1b5afd5
DD
/* On IVC2, these coprocessor control registers must also be preserved
   by interrupt handlers.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2401
7acf4da6
DD
/* True if register R must be saved/restored by the current function's
   interrupt prologue/epilogue.  Always false outside interrupt
   handlers.  */

static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue themselves use these temporaries.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A basic asm could touch anything that isn't fixed.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler, a call-used register the function never touches
     cannot hold live caller state.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2435
/* True if register R needs a save slot in this function.  The answer
   is computed once per register and cached in reg_saved[]; once
   frame_locked is set (by mep_assign_save_slots) only the cache is
   consulted, so the layout cannot change under us.  */

static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2461
2462/* Return true if epilogue uses register REGNO. */
2463
bool
mep_epilogue_uses (int regno)
{
  /* Since $lp is a call-saved register, the generic code will normally
     mark it used in the epilogue if it needs to be saved and restored.
     However, when profiling is enabled, the profiling code will implicitly
     clobber $11.  This case has to be handled specially both here and in
     mep_call_saves_register.  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;
  /* Interrupt functions save/restore pretty much everything.  */
  return (reload_completed && mep_interrupt_saved_reg (regno));
}
2477
/* Size in bytes of one save slot for register REGNO: 8 for 64-bit
   coprocessor registers, 4 for everything else.  */

static int
mep_reg_size (int regno)
{
  if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
    return 8;
  return 4;
}
2485
7b5cbb57
AS
2486/* Worker function for TARGET_CAN_ELIMINATE. */
2487
2488bool
2489mep_can_eliminate (const int from, const int to)
2490{
2491 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2492 ? ! frame_pointer_needed
2493 : true);
2494}
2495
7acf4da6
DD
/* Implement INITIAL_ELIMINATION_OFFSET: distance between register FROM
   and register TO once FROM has been eliminated.  Also computes and
   caches the two 8-byte-alignment filler amounts (regsave_filler and
   frame_filler) used by the prologue/epilogue code.  */

int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the layout is locked, recompute the save set from scratch.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area to a multiple of 8 bytes.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the whole frame to a multiple of 8 bytes as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2540
aa4a0061
DM
2541static rtx_insn *
2542F (rtx_insn *x)
7acf4da6
DD
2543{
2544 RTX_FRAME_RELATED_P (x) = 1;
2545 return x;
2546}
2547
2548/* Since the prologue/epilogue code is generated after optimization,
2549 we can't rely on gcc to split constants for us. So, this code
2550 captures all the ways to add a constant to a register in one logic
2551 chunk, including optimizing away insns we just don't need. This
2552 makes the prolog/epilog code easier to follow. */
/* Emit insns computing DEST = SRC + VALUE (all SImode hard registers),
   splitting large constants through REGSAVE_CONTROL_TEMP.  If
   MARK_FRAME, the insns are flagged as frame-related for CFI.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  if (src == dest && value == 0)
    return;

  /* Plain register copy when there is nothing to add.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constants that fit in 16 signed bits can be added directly.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* The multi-insn sequence is opaque to the CFI machinery, so
	 attach an explicit note describing the net effect.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2612
7acf4da6
DD
2613/* Move SRC to DEST. Mark the move as being potentially dead if
2614 MAYBE_DEAD_P. */
2615
aa4a0061 2616static rtx_insn *
7acf4da6
DD
2617maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2618{
aa4a0061 2619 rtx_insn *insn = emit_move_insn (dest, src);
7acf4da6
DD
2620#if 0
2621 if (maybe_dead_p)
2622 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2623#endif
2624 return insn;
2625}
2626
2627/* Used for interrupt functions, which can't assume that $tp and $gp
2628 contain the correct pointers. */
2629
/* Reload hard register REGNO with the address of SYMBOL using the
   hi/lo symbol move patterns.  Skipped when the register is never
   live in a leaf function.  */

static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
    return;

  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  /* Load the high part of the symbol, then OR in the low part.  */
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2643
e756464b
DD
2644/* Assign save slots for any register not already saved. DImode
2645 registers go at the end of the reg save area; the rest go at the
2646 beginning. This is for alignment purposes. Returns true if a frame
2647 is really needed. */
2648static bool
2649mep_assign_save_slots (int reg_save_size)
7acf4da6 2650{
e756464b 2651 bool really_need_stack_frame = false;
7acf4da6 2652 int di_ofs = 0;
e756464b 2653 int i;
7acf4da6 2654
7acf4da6
DD
2655 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2656 if (mep_call_saves_register(i))
2657 {
2658 int regsize = mep_reg_size (i);
2659
2660 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2661 || mep_reg_set_in_function (i))
e756464b 2662 really_need_stack_frame = true;
7acf4da6
DD
2663
2664 if (cfun->machine->reg_save_slot[i])
2665 continue;
2666
2667 if (regsize < 8)
2668 {
2669 cfun->machine->reg_save_size += regsize;
2670 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2671 }
2672 else
2673 {
2674 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2675 di_ofs += 8;
2676 }
2677 }
e756464b
DD
2678 cfun->machine->frame_locked = 1;
2679 return really_need_stack_frame;
2680}
2681
/* Expand the function prologue: disable interrupts if requested,
   adjust $sp, store every register in the save set, set up the frame
   pointer, and (for interrupt handlers) reload $gp/$tp.  */

void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* If the whole frame is small enough, do the entire $sp adjustment
     up front so the register stores use short offsets.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	enum machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need not be stored unless written here or in an
	   interrupt handler.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control register: move each 32-bit half through
	       the temp registers and store the halves separately.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Control register that cannot be stored directly: bounce
	       it through the temp register.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset. Total $sp change
	 is reg_save_size + frame_size. We want a net change here of
	 just reg_save_size. */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Perform whatever part of the $sp adjustment is still outstanding.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2810
/* Emit a human-readable frame-layout summary as assembler comments at
   the start of the function (full map only when debug info is
   enabled), and pick the name used for register 8 ($fp vs $8).  */

static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, print only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Mirror the prologue's decision to skip unmodified $tp/$gp/$lp.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2908
2909
/* Temporary state for mep_expand_epilogue: mep_prevent_lp_restore
   suppresses the restore of $lp (set around EH-return expansion), and
   mep_sibcall_epilogue marks a sibcall epilogue, which stops before
   emitting any return insn.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
2912
/* Expand the function epilogue: rebase/adjust $sp, restore saved
   registers (in reverse order), then emit the appropriate return
   (EH return, reti, or indirect jump through the restored $lp).  */

void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed. */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Skip registers the prologue did not actually store.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return value; leave them alone on the
	   normal (non-EH, non-interrupt) path.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway. */
	      lp_slot = sp_offset-rss;
	    else
	      {
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp. */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* For eh_return, pop the extra stack adjustment computed by the
     unwinder into $sp.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3034
/* Expand the eh_return pattern: force the new return address into $lp
   and emit the EH epilogue.  */

void
mep_expand_eh_return (rtx *operands)
{
  if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
    {
      rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
      emit_move_insn (ra, operands[0]);
      operands[0] = ra;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}
3047
/* Emit the epilogue for an EH return: $0 holds the extra stack
   adjustment, and the normal $lp restore is suppressed via
   mep_prevent_lp_restore.  */

void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3056
/* Emit the epilogue that precedes a sibling call: restore registers
   but leave the final return to the sibcall itself.  */

void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3064
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */

static bool
mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  /* Indirect calls (no decl) cannot be sibcalls.  */
  if (decl == NULL)
    return false;

  /* Nor can calls into the far-function section.  */
  if (mep_section_tag (DECL_RTL (decl)) == 'f')
    return false;

  /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
  if (mep_interrupt_p () || mep_disinterrupt_p ())
    return false;

  return true;
}
3080
/* Register holding the EH stack adjustment (EH_RETURN_STACKADJ_RTX).  */

rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3086
/* Register holding the EH handler address on return: $lp.  */

rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3092
/* Emit the profiling call sequence: save $0 and $lp around a bsr to
   __mep_mcount, then restore them.  */

void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3108
/* Emit the basic-block profiling exit sequence: save $0 and $lp around
   a bsr to __bb_trace_ret.  Returns an empty template string for use
   from an insn output routine.  */

const char *
mep_emit_bb_trace_ret (void)
{
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3124
3125#undef SAVE
3126#undef RESTORE
3127\f
3128/* Operand Printing. */
3129
/* Output an address operand: strip a MEM wrapper if present, then
   print the inner address through mep_print_operand.  */

void
mep_print_operand_address (FILE *stream, rtx address)
{
  if (GET_CODE (address) == MEM)
    address = XEXP (address, 0);
  else
    /* cf: gcc.dg/asm-4.c. */
    gcc_assert (GET_CODE (address) == REG);

  mep_print_operand (stream, address, 0);
}
3141
/* Output templates for mep_print_operand, keyed by (modifier letter,
   encoded operand pattern).  CODE is the operand-modifier letter, or 0
   for none; PATTERN is the encoded shape produced by encode_pattern;
   FORMAT is the output template, in which a digit N refers to
   patternr[N] and '\\' escapes the following character.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3187
3188static int
3189unique_bit_in (HOST_WIDE_INT i)
3190{
3191 switch (i & 0xff)
3192 {
3193 case 0x01: case 0xfe: return 0;
3194 case 0x02: case 0xfd: return 1;
3195 case 0x04: case 0xfb: return 2;
3196 case 0x08: case 0xf7: return 3;
3197 case 0x10: case 0x7f: return 4;
3198 case 0x20: case 0xbf: return 5;
3199 case 0x40: case 0xdf: return 6;
3200 case 0x80: case 0xef: return 7;
3201 default:
3202 gcc_unreachable ();
3203 }
3204}
3205
3206static int
3207bit_size_for_clip (HOST_WIDE_INT i)
3208{
3209 int rv;
3210
3211 for (rv = 0; rv < 31; rv ++)
3212 if (((HOST_WIDE_INT) 1 << rv) > i)
3213 return rv + 1;
3214 gcc_unreachable ();
3215}
3216
3217/* Print an operand to a assembler instruction. */
3218
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* %L: the bit-op suffix implied by the rtx code of X.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* General case: encode X's shape and look up the matching output
     template in the conversions[] table.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	/* Interpret the template: '\\' escapes the next character,
	   digits index patternr[], everything else is literal.  */
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  /* Constants get modifier-specific formatting.  */
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Suppress a literal '+' before a negative constant so we
		 print e.g. "base-4" rather than "base+-4".  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3373
3374void
aa4a0061 3375mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
7acf4da6
DD
3376 int noperands ATTRIBUTE_UNUSED)
3377{
3378 /* Despite the fact that MeP is perfectly capable of branching and
3379 doing something else in the same bundle, gcc does jump
3380 optimization *after* scheduling, so we cannot trust the bundling
3381 flags on jump instructions. */
3382 if (GET_MODE (insn) == BImode
3383 && get_attr_slots (insn) != SLOTS_CORE)
3384 fputc ('+', asm_out_file);
3385}
3386
3387/* Function args in registers. */
3388
3389static void
d5cc9181 3390mep_setup_incoming_varargs (cumulative_args_t cum,
7acf4da6
DD
3391 enum machine_mode mode ATTRIBUTE_UNUSED,
3392 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3393 int second_time ATTRIBUTE_UNUSED)
3394{
d5cc9181 3395 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
7acf4da6
DD
3396
3397 if (nsave > 0)
3398 cfun->machine->arg_regs_to_save = nsave;
3399 *pretend_size = nsave * 4;
3400}
3401
3402static int
3403bytesize (const_tree type, enum machine_mode mode)
3404{
3405 if (mode == BLKmode)
3406 return int_size_in_bytes (type);
3407 return GET_MODE_SIZE (mode);
3408}
3409
3410static rtx
3411mep_expand_builtin_saveregs (void)
3412{
3413 int bufsize, i, ns;
3414 rtx regbuf;
3415
3416 ns = cfun->machine->arg_regs_to_save;
683a1be6
DD
3417 if (TARGET_IVC2)
3418 {
3419 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3420 regbuf = assign_stack_local (SImode, bufsize, 64);
3421 }
3422 else
3423 {
3424 bufsize = ns * 4;
3425 regbuf = assign_stack_local (SImode, bufsize, 32);
3426 }
7acf4da6
DD
3427
3428 move_block_from_reg (5-ns, regbuf, ns);
3429
3430 if (TARGET_IVC2)
3431 {
3432 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
683a1be6 3433 int ofs = 8 * ((ns+1)/2);
7acf4da6
DD
3434
3435 for (i=0; i<ns; i++)
3436 {
3437 int rn = (4-ns) + i + 49;
3438 rtx ptr;
3439
3440 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3441 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3442 ofs += 8;
3443 }
3444 }
3445 return XEXP (regbuf, 0);
3446}
3447
7acf4da6
DD
3448static tree
3449mep_build_builtin_va_list (void)
3450{
3451 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3452 tree record;
3453
3454
3455 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3456
3457 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3458 get_identifier ("__va_next_gp"), ptr_type_node);
3459 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3460 get_identifier ("__va_next_gp_limit"),
3461 ptr_type_node);
3462 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3463 ptr_type_node);
3464 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3465 ptr_type_node);
3466
3467 DECL_FIELD_CONTEXT (f_next_gp) = record;
3468 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3469 DECL_FIELD_CONTEXT (f_next_cop) = record;
3470 DECL_FIELD_CONTEXT (f_next_stack) = record;
3471
3472 TYPE_FIELDS (record) = f_next_gp;
910ad8de
NF
3473 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3474 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3475 DECL_CHAIN (f_next_cop) = f_next_stack;
7acf4da6
DD
3476
3477 layout_type (record);
3478
3479 return record;
3480}
3481
3482static void
3483mep_expand_va_start (tree valist, rtx nextarg)
3484{
3485 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3486 tree next_gp, next_gp_limit, next_cop, next_stack;
3487 tree t, u;
3488 int ns;
3489
3490 ns = cfun->machine->arg_regs_to_save;
3491
3492 f_next_gp = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
3493 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3494 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3495 f_next_stack = DECL_CHAIN (f_next_cop);
7acf4da6
DD
3496
3497 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3498 NULL_TREE);
3499 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3500 valist, f_next_gp_limit, NULL_TREE);
3501 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3502 NULL_TREE);
3503 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3504 valist, f_next_stack, NULL_TREE);
3505
3506 /* va_list.next_gp = expand_builtin_saveregs (); */
3507 u = make_tree (sizetype, expand_builtin_saveregs ());
3508 u = fold_convert (ptr_type_node, u);
3509 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3510 TREE_SIDE_EFFECTS (t) = 1;
3511 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3512
3513 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
5d49b6a7 3514 u = fold_build_pointer_plus_hwi (u, 4 * ns);
7acf4da6
DD
3515 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3516 TREE_SIDE_EFFECTS (t) = 1;
3517 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3518
5d49b6a7 3519 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
683a1be6 3520 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
7acf4da6
DD
3521 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3522 TREE_SIDE_EFFECTS (t) = 1;
3523 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3524
3525 /* va_list.next_stack = nextarg; */
3526 u = make_tree (ptr_type_node, nextarg);
3527 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3528 TREE_SIDE_EFFECTS (t) = 1;
3529 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3530}
3531
3532static tree
3533mep_gimplify_va_arg_expr (tree valist, tree type,
12a54f54
NC
3534 gimple_seq *pre_p,
3535 gimple_seq *post_p ATTRIBUTE_UNUSED)
7acf4da6
DD
3536{
3537 HOST_WIDE_INT size, rsize;
3538 bool by_reference, ivc2_vec;
3539 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3540 tree next_gp, next_gp_limit, next_cop, next_stack;
3541 tree label_sover, label_selse;
3542 tree tmp, res_addr;
3543
3544 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3545
3546 size = int_size_in_bytes (type);
3547 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3548
3549 if (by_reference)
3550 {
3551 type = build_pointer_type (type);
3552 size = 4;
3553 }
3554 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3555
3556 f_next_gp = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
3557 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3558 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3559 f_next_stack = DECL_CHAIN (f_next_cop);
7acf4da6
DD
3560
3561 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3562 NULL_TREE);
3563 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3564 valist, f_next_gp_limit, NULL_TREE);
3565 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3566 NULL_TREE);
3567 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3568 valist, f_next_stack, NULL_TREE);
3569
3570 /* if f_next_gp < f_next_gp_limit
3571 IF (VECTOR_P && IVC2)
3572 val = *f_next_cop;
3573 ELSE
3574 val = *f_next_gp;
3575 f_next_gp += 4;
3576 f_next_cop += 8;
3577 else
3578 label_selse:
3579 val = *f_next_stack;
3580 f_next_stack += rsize;
3581 label_sover:
3582 */
3583
3584 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3585 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3586 res_addr = create_tmp_var (ptr_type_node, NULL);
3587
3588 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3589 unshare_expr (next_gp_limit));
3590 tmp = build3 (COND_EXPR, void_type_node, tmp,
3591 build1 (GOTO_EXPR, void_type_node,
3592 unshare_expr (label_selse)),
3593 NULL_TREE);
3594 gimplify_and_add (tmp, pre_p);
3595
3596 if (ivc2_vec)
3597 {
3598 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3599 gimplify_and_add (tmp, pre_p);
3600 }
3601 else
3602 {
3603 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3604 gimplify_and_add (tmp, pre_p);
3605 }
3606
5d49b6a7 3607 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
7acf4da6
DD
3608 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3609
5d49b6a7 3610 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
7acf4da6
DD
3611 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3612
3613 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3614 gimplify_and_add (tmp, pre_p);
3615
3616 /* - - */
3617
3618 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3619 gimplify_and_add (tmp, pre_p);
3620
3621 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3622 gimplify_and_add (tmp, pre_p);
3623
5d49b6a7 3624 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
7acf4da6
DD
3625 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3626
3627 /* - - */
3628
3629 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3630 gimplify_and_add (tmp, pre_p);
3631
3632 res_addr = fold_convert (build_pointer_type (type), res_addr);
3633
3634 if (by_reference)
3635 res_addr = build_va_arg_indirect_ref (res_addr);
3636
3637 return build_va_arg_indirect_ref (res_addr);
3638}
3639
3640void
3641mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3642 rtx libname ATTRIBUTE_UNUSED,
3643 tree fndecl ATTRIBUTE_UNUSED)
3644{
3645 pcum->nregs = 0;
3646
3647 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3648 pcum->vliw = 1;
3649 else
3650 pcum->vliw = 0;
3651}
3652
0851c6e3
NF
3653/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3654 larger than 4 bytes are passed indirectly. Return value in 0,
3655 unless bigger than 4 bytes, then the caller passes a pointer as the
3656 first arg. For varargs, we copy $1..$4 to the stack. */
3657
3658static rtx
d5cc9181 3659mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
0851c6e3
NF
3660 const_tree type ATTRIBUTE_UNUSED,
3661 bool named ATTRIBUTE_UNUSED)
7acf4da6 3662{
d5cc9181
JR
3663 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3664
7acf4da6
DD
3665 /* VOIDmode is a signal for the backend to pass data to the call
3666 expander via the second operand to the call pattern. We use
3667 this to determine whether to use "jsr" or "jsrv". */
3668 if (mode == VOIDmode)
0851c6e3 3669 return GEN_INT (cum->vliw);
7acf4da6
DD
3670
3671 /* If we havn't run out of argument registers, return the next. */
0851c6e3 3672 if (cum->nregs < 4)
7acf4da6
DD
3673 {
3674 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
0851c6e3 3675 return gen_rtx_REG (mode, cum->nregs + 49);
7acf4da6 3676 else
0851c6e3 3677 return gen_rtx_REG (mode, cum->nregs + 1);
7acf4da6
DD
3678 }
3679
3680 /* Otherwise the argument goes on the stack. */
3681 return NULL_RTX;
3682}
3683
3684static bool
d5cc9181 3685mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
7acf4da6
DD
3686 enum machine_mode mode,
3687 const_tree type,
3688 bool named ATTRIBUTE_UNUSED)
3689{
3690 int size = bytesize (type, mode);
e756464b
DD
3691
3692 /* This is non-obvious, but yes, large values passed after we've run
3693 out of registers are *still* passed by reference - we put the
3694 address of the parameter on the stack, as well as putting the
3695 parameter itself elsewhere on the stack. */
3696
3697 if (size <= 0 || size > 8)
3698 return true;
3699 if (size <= 4)
3700 return false;
d5cc9181
JR
3701 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3702 && type != NULL_TREE && VECTOR_TYPE_P (type))
e756464b
DD
3703 return false;
3704 return true;
7acf4da6
DD
3705}
3706
0851c6e3 3707static void
d5cc9181 3708mep_function_arg_advance (cumulative_args_t pcum,
0851c6e3
NF
3709 enum machine_mode mode ATTRIBUTE_UNUSED,
3710 const_tree type ATTRIBUTE_UNUSED,
3711 bool named ATTRIBUTE_UNUSED)
7acf4da6 3712{
d5cc9181 3713 get_cumulative_args (pcum)->nregs += 1;
7acf4da6
DD
3714}
3715
3716bool
3717mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3718{
3719 int size = bytesize (type, BLKmode);
3720 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
e756464b
DD
3721 return size > 0 && size <= 8 ? 0 : 1;
3722 return size > 0 && size <= 4 ? 0 : 1;
7acf4da6
DD
3723}
3724
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: MeP always narrows
   volatile bit-field accesses.  (The old body had an unreachable
   "return false;" after the return - dead code, removed.)  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3731
3732/* Implement FUNCTION_VALUE. All values are returned in $0. */
3733
3734rtx
77b0efff 3735mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
7acf4da6
DD
3736{
3737 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3738 return gen_rtx_REG (TYPE_MODE (type), 48);
3739 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3740}
3741
3742/* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3743
3744rtx
3745mep_libcall_value (enum machine_mode mode)
3746{
3747 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3748}
3749
/* Handle pipeline hazards.  */

/* Opcode classes that participate in the stc/fsft and stc/ret hazards,
   with printable names for the hazard comment.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the most recently emitted opcode.  */
static int prev_opcode = 0;
3756
3757/* This isn't as optimal as it could be, because we don't know what
3758 control register the STC opcode is storing in. We only need to add
073a8998 3759 the nop if it's the relevant register, but we add it for irrelevant
7acf4da6
DD
3760 registers also. */
3761
3762void
3763mep_asm_output_opcode (FILE *file, const char *ptr)
3764{
3765 int this_opcode = op_none;
3766 const char *hazard = 0;
3767
3768 switch (*ptr)
3769 {
3770 case 'f':
3771 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3772 this_opcode = op_fsft;
3773 break;
3774 case 'r':
3775 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3776 this_opcode = op_ret;
3777 break;
3778 case 's':
3779 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3780 this_opcode = op_stc;
3781 break;
3782 }
3783
3784 if (prev_opcode == op_stc && this_opcode == op_fsft)
3785 hazard = "nop";
3786 if (prev_opcode == op_stc && this_opcode == op_ret)
3787 hazard = "nop";
3788
3789 if (hazard)
3790 fprintf(file, "%s\t# %s-%s hazard\n\t",
3791 hazard, opnames[prev_opcode], opnames[this_opcode]);
3792
3793 prev_opcode = this_opcode;
3794}
3795
3796/* Handle attributes. */
3797
3798static tree
3799mep_validate_based_tiny (tree *node, tree name, tree args,
3800 int flags ATTRIBUTE_UNUSED, bool *no_add)
3801{
3802 if (TREE_CODE (*node) != VAR_DECL
3803 && TREE_CODE (*node) != POINTER_TYPE
3804 && TREE_CODE (*node) != TYPE_DECL)
3805 {
3806 warning (0, "%qE attribute only applies to variables", name);
3807 *no_add = true;
3808 }
3809 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3810 {
3811 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3812 {
3813 warning (0, "address region attributes not allowed with auto storage class");
3814 *no_add = true;
3815 }
3816 /* Ignore storage attribute of pointed to variable: char __far * x; */
3817 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3818 {
3819 warning (0, "address region attributes on pointed-to types ignored");
3820 *no_add = true;
3821 }
3822 }
3823
3824 return NULL_TREE;
3825}
3826
3827static int
3828mep_multiple_address_regions (tree list, bool check_section_attr)
3829{
3830 tree a;
3831 int count_sections = 0;
3832 int section_attr_count = 0;
3833
3834 for (a = list; a; a = TREE_CHAIN (a))
3835 {
3836 if (is_attribute_p ("based", TREE_PURPOSE (a))
3837 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3838 || is_attribute_p ("near", TREE_PURPOSE (a))
3839 || is_attribute_p ("far", TREE_PURPOSE (a))
3840 || is_attribute_p ("io", TREE_PURPOSE (a)))
3841 count_sections ++;
3842 if (check_section_attr)
3843 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3844 }
3845
3846 if (check_section_attr)
3847 return section_attr_count;
3848 else
3849 return count_sections;
3850}
3851
/* Attribute list governing DECL's address region: type attributes for
   a type, else the decl's own attributes, else its type's attributes.
   The expansion is fully parenthesized so the conditional cannot
   rebind when the macro is used inside a larger expression.  */
#define MEP_ATTRIBUTES(decl)			\
  ((TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)	\
   : DECL_ATTRIBUTES (decl)			\
     ? (DECL_ATTRIBUTES (decl))			\
     : TYPE_ATTRIBUTES (TREE_TYPE (decl)))
3857
3858static tree
3859mep_validate_near_far (tree *node, tree name, tree args,
3860 int flags ATTRIBUTE_UNUSED, bool *no_add)
3861{
3862 if (TREE_CODE (*node) != VAR_DECL
3863 && TREE_CODE (*node) != FUNCTION_DECL
3864 && TREE_CODE (*node) != METHOD_TYPE
3865 && TREE_CODE (*node) != POINTER_TYPE
3866 && TREE_CODE (*node) != TYPE_DECL)
3867 {
3868 warning (0, "%qE attribute only applies to variables and functions",
3869 name);
3870 *no_add = true;
3871 }
3872 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3873 {
3874 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3875 {
3876 warning (0, "address region attributes not allowed with auto storage class");
3877 *no_add = true;
3878 }
3879 /* Ignore storage attribute of pointed to variable: char __far * x; */
3880 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3881 {
3882 warning (0, "address region attributes on pointed-to types ignored");
3883 *no_add = true;
3884 }
3885 }
3886 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3887 {
3888 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3889 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3890 DECL_ATTRIBUTES (*node) = NULL_TREE;
3891 }
3892 return NULL_TREE;
3893}
3894
3895static tree
3896mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3897 int flags ATTRIBUTE_UNUSED, bool *no_add)
3898{
3899 if (TREE_CODE (*node) != FUNCTION_DECL
3900 && TREE_CODE (*node) != METHOD_TYPE)
3901 {
3902 warning (0, "%qE attribute only applies to functions", name);
3903 *no_add = true;
3904 }
3905 return NULL_TREE;
3906}
3907
3908static tree
3909mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3910 int flags ATTRIBUTE_UNUSED, bool *no_add)
3911{
3912 tree function_type;
3913
3914 if (TREE_CODE (*node) != FUNCTION_DECL)
3915 {
3916 warning (0, "%qE attribute only applies to functions", name);
3917 *no_add = true;
3918 return NULL_TREE;
3919 }
3920
3921 if (DECL_DECLARED_INLINE_P (*node))
3922 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3923 DECL_UNINLINABLE (*node) = 1;
3924
3925 function_type = TREE_TYPE (*node);
3926
3927 if (TREE_TYPE (function_type) != void_type_node)
3928 error ("interrupt function must have return type of void");
3929
f4da8dce 3930 if (prototype_p (function_type)
7acf4da6
DD
3931 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3932 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
3933 error ("interrupt function must have no arguments");
3934
3935 return NULL_TREE;
3936}
3937
3938static tree
3939mep_validate_io_cb (tree *node, tree name, tree args,
3940 int flags ATTRIBUTE_UNUSED, bool *no_add)
3941{
3942 if (TREE_CODE (*node) != VAR_DECL)
3943 {
3944 warning (0, "%qE attribute only applies to variables", name);
3945 *no_add = true;
3946 }
3947
3948 if (args != NULL_TREE)
3949 {
3950 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
3951 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
3952 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
3953 {
3954 warning (0, "%qE attribute allows only an integer constant argument",
3955 name);
3956 *no_add = true;
3957 }
3958 }
3959
3960 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
3961 TREE_THIS_VOLATILE (*node) = 1;
3962
3963 return NULL_TREE;
3964}
3965
3966static tree
3967mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3968 int flags ATTRIBUTE_UNUSED, bool *no_add)
3969{
3970 if (TREE_CODE (*node) != FUNCTION_TYPE
3971 && TREE_CODE (*node) != FUNCTION_DECL
3972 && TREE_CODE (*node) != METHOD_TYPE
3973 && TREE_CODE (*node) != FIELD_DECL
3974 && TREE_CODE (*node) != TYPE_DECL)
3975 {
3976 static int gave_pointer_note = 0;
3977 static int gave_array_note = 0;
3978 static const char * given_type = NULL;
3979
5806f481 3980 given_type = get_tree_code_name (TREE_CODE (*node));
7acf4da6
DD
3981 if (TREE_CODE (*node) == POINTER_TYPE)
3982 given_type = "pointers";
3983 if (TREE_CODE (*node) == ARRAY_TYPE)
3984 given_type = "arrays";
3985
3986 if (given_type)
3987 warning (0, "%qE attribute only applies to functions, not %s",
3988 name, given_type);
3989 else
3990 warning (0, "%qE attribute only applies to functions",
3991 name);
3992 *no_add = true;
3993
3994 if (TREE_CODE (*node) == POINTER_TYPE
3995 && !gave_pointer_note)
3996 {
6d9e7c41
PT
3997 inform (input_location,
3998 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3999 " typedef int (__vliw *vfuncptr) ();");
7acf4da6
DD
4000 gave_pointer_note = 1;
4001 }
4002
4003 if (TREE_CODE (*node) == ARRAY_TYPE
4004 && !gave_array_note)
4005 {
6d9e7c41
PT
4006 inform (input_location,
4007 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4008 " typedef int (__vliw *vfuncptr[]) ();");
7acf4da6
DD
4009 gave_array_note = 1;
4010 }
4011 }
4012 if (!TARGET_VLIW)
4013 error ("VLIW functions are not allowed without a VLIW configuration");
4014 return NULL_TREE;
4015}
4016
4017static const struct attribute_spec mep_attribute_table[11] =
4018{
62d784f7
KT
4019 /* name min max decl type func handler
4020 affects_type_identity */
4021 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4022 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4023 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4024 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4025 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4026 false },
4027 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4028 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4029 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4030 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4031 { NULL, 0, 0, false, false, false, NULL, false }
7acf4da6
DD
4032};
4033
4034static bool
4035mep_function_attribute_inlinable_p (const_tree callee)
4036{
4037 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4038 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4039 return (lookup_attribute ("disinterrupt", attrs) == 0
4040 && lookup_attribute ("interrupt", attrs) == 0);
4041}
4042
ae30c1fa 4043static bool
5cec9f59 4044mep_can_inline_p (tree caller, tree callee)
ae30c1fa
DD
4045{
4046 if (TREE_CODE (callee) == ADDR_EXPR)
4047 callee = TREE_OPERAND (callee, 0);
4048
82e45095 4049 if (!mep_vliw_function_p (caller)
ae30c1fa
DD
4050 && mep_vliw_function_p (callee))
4051 {
82e45095 4052 return false;
ae30c1fa 4053 }
82e45095 4054 return true;
ae30c1fa
DD
4055}
4056
7acf4da6
DD
4057#define FUNC_CALL 1
4058#define FUNC_DISINTERRUPT 2
4059
4060
4061struct GTY(()) pragma_entry {
4062 int used;
4063 int flag;
7acf4da6 4064};
7acf4da6 4065
2a22f99c 4066struct pragma_traits : default_hashmap_traits
7acf4da6 4067{
2a22f99c
TS
4068 static hashval_t hash (const char *s) { return htab_hash_string (s); }
4069 static bool
4070 equal_keys (const char *a, const char *b)
4071 {
4072 return strcmp (a, b) == 0;
4073 }
4074};
7acf4da6 4075
2a22f99c
TS
4076/* Hash table of farcall-tagged sections. */
4077static GTY(()) hash_map<const char *, pragma_entry, pragma_traits> *
4078 pragma_htab;
7acf4da6
DD
4079
4080static void
4081mep_note_pragma_flag (const char *funcname, int flag)
4082{
7acf4da6 4083 if (!pragma_htab)
2a22f99c
TS
4084 pragma_htab
4085 = hash_map<const char *, pragma_entry, pragma_traits>::create_ggc (31);
7acf4da6 4086
2a22f99c
TS
4087 bool existed;
4088 const char *name = ggc_strdup (funcname);
4089 pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
4090 if (!existed)
7acf4da6 4091 {
2a22f99c
TS
4092 slot->flag = 0;
4093 slot->used = 0;
7acf4da6 4094 }
2a22f99c 4095 slot->flag |= flag;
7acf4da6
DD
4096}
4097
4098static bool
4099mep_lookup_pragma_flag (const char *funcname, int flag)
4100{
7acf4da6
DD
4101 if (!pragma_htab)
4102 return false;
4103
4104 if (funcname[0] == '@' && funcname[2] == '.')
4105 funcname += 3;
4106
2a22f99c
TS
4107 pragma_entry *slot = pragma_htab->get (funcname);
4108 if (slot && (slot->flag & flag))
7acf4da6 4109 {
2a22f99c 4110 slot->used |= flag;
7acf4da6
DD
4111 return true;
4112 }
4113 return false;
4114}
4115
4116bool
4117mep_lookup_pragma_call (const char *funcname)
4118{
4119 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4120}
4121
4122void
4123mep_note_pragma_call (const char *funcname)
4124{
4125 mep_note_pragma_flag (funcname, FUNC_CALL);
4126}
4127
4128bool
4129mep_lookup_pragma_disinterrupt (const char *funcname)
4130{
4131 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4132}
4133
4134void
4135mep_note_pragma_disinterrupt (const char *funcname)
4136{
4137 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4138}
4139
2a22f99c
TS
4140bool
4141note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4142 void *)
7acf4da6 4143{
2a22f99c
TS
4144 if ((e.flag & FUNC_DISINTERRUPT)
4145 && !(e.used & FUNC_DISINTERRUPT))
4146 warning (0, "\"#pragma disinterrupt %s\" not used", s);
7acf4da6
DD
4147 return 1;
4148}
4149
4150void
4151mep_file_cleanups (void)
4152{
4153 if (pragma_htab)
2a22f99c 4154 pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
7acf4da6 4155}
c28883e6
DD
4156
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4161
/* Pragma-visible wrapper around save_register_info.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}
4167
/* Pragma-visible wrapper around reinit_regs.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}
4173
/* Pragma-visible wrapper around init_regs.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4179
7acf4da6
DD
4180
4181
4182static int
4183mep_attrlist_to_encoding (tree list, tree decl)
4184{
4185 if (mep_multiple_address_regions (list, false) > 1)
4186 {
4187 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4188 TREE_PURPOSE (TREE_CHAIN (list)),
4189 DECL_NAME (decl),
4190 DECL_SOURCE_LINE (decl));
4191 TREE_CHAIN (list) = NULL_TREE;
4192 }
4193
4194 while (list)
4195 {
4196 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4197 return 'b';
4198 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4199 return 't';
4200 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4201 return 'n';
4202 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4203 return 'f';
4204 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4205 {
4206 if (TREE_VALUE (list)
4207 && TREE_VALUE (TREE_VALUE (list))
4208 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4209 {
4210 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4211 if (location >= 0
4212 && location <= 0x1000000)
4213 return 'i';
4214 }
4215 return 'I';
4216 }
4217 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4218 return 'c';
4219 list = TREE_CHAIN (list);
4220 }
4221 if (TARGET_TF
4222 && TREE_CODE (decl) == FUNCTION_DECL
4223 && DECL_SECTION_NAME (decl) == 0)
4224 return 'f';
4225 return 0;
4226}
4227
4228static int
4229mep_comp_type_attributes (const_tree t1, const_tree t2)
4230{
4231 int vliw1, vliw2;
4232
4233 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4234 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4235
4236 if (vliw1 != vliw2)
4237 return 0;
4238
4239 return 1;
4240}
4241
4242static void
4243mep_insert_attributes (tree decl, tree *attributes)
4244{
4245 int size;
4246 const char *secname = 0;
4247 tree attrib, attrlist;
4248 char encoding;
4249
4250 if (TREE_CODE (decl) == FUNCTION_DECL)
4251 {
4252 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4253
4254 if (mep_lookup_pragma_disinterrupt (funcname))
4255 {
4256 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4257 *attributes = chainon (*attributes, attrib);
4258 }
4259 }
4260
4261 if (TREE_CODE (decl) != VAR_DECL
4262 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4263 return;
4264
4265 if (TREE_READONLY (decl) && TARGET_DC)
4266 /* -mdc means that const variables default to the near section,
4267 regardless of the size cutoff. */
4268 return;
4269
4270 /* User specified an attribute, so override the default.
4271 Ignore storage attribute of pointed to variable. char __far * x; */
4272 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4273 {
4274 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4275 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4276 else if (DECL_ATTRIBUTES (decl) && *attributes)
4277 DECL_ATTRIBUTES (decl) = NULL_TREE;
4278 }
4279
4280 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4281 encoding = mep_attrlist_to_encoding (attrlist, decl);
4282 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4283 {
4284 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4285 encoding = mep_attrlist_to_encoding (attrlist, decl);
4286 }
4287 if (encoding)
4288 {
4289 /* This means that the declaration has a specific section
4290 attribute, so we should not apply the default rules. */
4291
4292 if (encoding == 'i' || encoding == 'I')
4293 {
4294 tree attr = lookup_attribute ("io", attrlist);
4295 if (attr
4296 && TREE_VALUE (attr)
4297 && TREE_VALUE (TREE_VALUE(attr)))
4298 {
4299 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4300 static tree previous_value = 0;
4301 static int previous_location = 0;
4302 static tree previous_name = 0;
4303
4304 /* We take advantage of the fact that gcc will reuse the
4305 same tree pointer when applying an attribute to a
4306 list of decls, but produce a new tree for attributes
4307 on separate source lines, even when they're textually
4308 identical. This is the behavior we want. */
4309 if (TREE_VALUE (attr) == previous_value
4310 && location == previous_location)
4311 {
4312 warning(0, "__io address 0x%x is the same for %qE and %qE",
4313 location, previous_name, DECL_NAME (decl));
4314 }
4315 previous_name = DECL_NAME (decl);
4316 previous_location = location;
4317 previous_value = TREE_VALUE (attr);
4318 }
4319 }
4320 return;
4321 }
4322
4323
4324 /* Declarations of arrays can change size. Don't trust them. */
4325 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4326 size = 0;
4327 else
4328 size = int_size_in_bytes (TREE_TYPE (decl));
4329
4330 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4331 {
4332 if (TREE_PUBLIC (decl)
4333 || DECL_EXTERNAL (decl)
4334 || TREE_STATIC (decl))
4335 {
4336 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4337 int key = 0;
4338
4339 while (*name)
4340 key += *name++;
4341
4342 switch (key & 3)
4343 {
4344 case 0:
4345 secname = "based";
4346 break;
4347 case 1:
4348 secname = "tiny";
4349 break;
4350 case 2:
4351 secname = "far";
4352 break;
4353 default:
4354 ;
4355 }
4356 }
4357 }
4358 else
4359 {
4360 if (size <= mep_based_cutoff && size > 0)
4361 secname = "based";
4362 else if (size <= mep_tiny_cutoff && size > 0)
4363 secname = "tiny";
4364 else if (TARGET_L)
4365 secname = "far";
4366 }
4367
4368 if (mep_const_section && TREE_READONLY (decl))
4369 {
4370 if (strcmp (mep_const_section, "tiny") == 0)
4371 secname = "tiny";
4372 else if (strcmp (mep_const_section, "near") == 0)
4373 return;
4374 else if (strcmp (mep_const_section, "far") == 0)
4375 secname = "far";
4376 }
4377
4378 if (!secname)
4379 return;
4380
4381 if (!mep_multiple_address_regions (*attributes, true)
4382 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4383 {
4384 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4385
4386 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4387 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4388 and mep_validate_based_tiny. */
4389 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4390 }
4391}
4392
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for a
   variable or function DECL, translate its MeP section attributes
   into a one-character tag and prepend an "@<tag>." prefix to the
   symbol name in RTL, so that later passes (section selection,
   name stripping) can recover the placement from the name alone.
   Also diagnose objects too large for their addressing mode.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  /* Encode only once, when the symbol RTL is first created.  */
  if (! first)
    return;

  /* Only variables and functions carry section encodings.  */
  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Fetch the current assembler name; it may be wrapped in a MEM.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  /* Map the attribute list ("based", "tiny", "near", "io", ...) to a
     single tag character; 0 means no encoding is needed.  */
  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* '@' + tag + '.' + NUL accounts for the extra 4 bytes.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      /* Replace the symbol, preserving weakness and the decl link.  */
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Each short-addressing section has a reachability limit; warn
	 when the object cannot fit.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4468
/* Implement TARGET_STRIP_NAME_ENCODING.  Return SYM with any leading
   '*' user-label markers and "@<tag>." MeP section encodings removed.
   Both prefixes may repeat, so peel them off until neither applies.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
4482
/* Implement TARGET_ASM_SELECT_SECTION.  Choose an output section for
   DECL based on the "@<tag>." encoding in its assembler name (see
   mep_encode_section_info) and on whether the object is read-only.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section: it must be
     TREE_READONLY, side-effect free, and have a constant initializer.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the section-encoding tag, if any ('f' = far).  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* NOTE(review): when -ffunction-sections or COMDAT applies, this
	 calls mep_unique_section and then falls through to the generic
	 data-section returns below rather than returning a text
	 section; presumably the unique section name set on the decl
	 takes precedence — confirm against varasm.c callers.  */
      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions get their own text sections.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':		/* based */
	    return based_section;

	  case 't':		/* tiny: rodata / data / bss variants */
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':		/* far */
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':		/* io: address aliases, never initialized */
	  case 'I':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':		/* cb: likewise */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  /* No encoding: fall back to the generic read-only/data split.  */
  if (readonly)
    return readonly_data_section;

  return data_section;
}
4573
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name
   of the form "<prefix><symbol>", where the prefix is chosen from the
   table below by section kind (index) and by whether the decl is in a
   COMDAT group (column: plain vs .gnu.linkonce).  */
static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly "@<tag>."-encoded) RTL symbol name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* Pick the base section kind.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the kind from the "@<tag>." name encoding, then strip the
     encoding so it does not appear in the section name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4645
4646/* Given a decl, a section name, and whether the decl initializer
4647 has relocs, choose attributes for the section. */
4648
4649#define SECTION_MEP_VLIW SECTION_MACH_DEP
4650
4651static unsigned int
4652mep_section_type_flags (tree decl, const char *name, int reloc)
4653{
4654 unsigned int flags = default_section_type_flags (decl, name, reloc);
4655
4656 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4657 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4658 flags |= SECTION_MEP_VLIW;
4659
4660 return flags;
4661}
4662
4663/* Switch to an arbitrary section NAME with attributes as specified
4664 by FLAGS. ALIGN specifies any known alignment requirements for
4665 the section; 0 if the default should be used.
4666
4667 Differs from the standard ELF version only in support of VLIW mode. */
4668
4669static void
4670mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4671{
4672 char flagchars[8], *f = flagchars;
4673 const char *type;
4674
4675 if (!(flags & SECTION_DEBUG))
4676 *f++ = 'a';
4677 if (flags & SECTION_WRITE)
4678 *f++ = 'w';
4679 if (flags & SECTION_CODE)
4680 *f++ = 'x';
4681 if (flags & SECTION_SMALL)
4682 *f++ = 's';
4683 if (flags & SECTION_MEP_VLIW)
4684 *f++ = 'v';
4685 *f = '\0';
4686
4687 if (flags & SECTION_BSS)
4688 type = "nobits";
4689 else
4690 type = "progbits";
4691
4692 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4693 name, flagchars, type);
4694
4695 if (flags & SECTION_CODE)
4696 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4697 asm_out_file);
4698}
4699
/* Output an aligned common (or local common) symbol NAME of SIZE bytes
   and ALIGN bits.  Handles three cases, keyed off the "@<tag>."
   encoding in NAME: io/cb objects become absolute-address aliases,
   based/tiny/far objects get explicit zero-filled definitions in their
   own bss-style sections, and everything else uses .comm/.local.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io ('i'/'I') and cb ('c') objects: emit "name = address"
	 using the address from the corresponding attribute.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      /* Encoded objects go into a matching bss-like section.  */
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the alignment in bits to a power-of-two bytes.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  /* Emit the symbol under its unencoded name.  */
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4777
4778/* Trampolines. */
4779
87138d8d
RH
4780static void
4781mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
7acf4da6 4782{
87138d8d
RH
4783 rtx addr = XEXP (m_tramp, 0);
4784 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4785
7acf4da6
DD
4786 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4787 LCT_NORMAL, VOIDmode, 3,
4788 addr, Pmode,
4789 fnaddr, Pmode,
4790 static_chain, Pmode);
4791}
4792
4793/* Experimental Reorg. */
4794
/* Return true if REG (or, when REG is NULL, any MEM or LABEL_REF) is
   mentioned anywhere inside the rtx IN.  If MODES_TOO is nonzero, a
   register only matches when its mode matches REG's mode as well.
   Recurses over the full rtx structure.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  /* A non-REG "reg" argument can never match anything.  */
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* In MEM-search mode (reg == NULL), label references count.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* Searching for a register: look inside the address.
	 Searching for a MEM: found one.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaves that can never contain a match.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only. */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over the rtx's operands and vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4860
4861#define EXPERIMENTAL_REGMOVE_REORG 1
4862
4863#if EXPERIMENTAL_REGMOVE_REORG
4864
/* Return 1 if registers R1 and R2 belong to the same MeP register
   class (both general registers or both coprocessor registers),
   0 otherwise.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4874
/* Remove superfluous register-to-register moves.  For each
   (set r2 r1) where r1 dies, find a later set that both uses and
   kills r2, try substituting r1 for r2 there, and if the result is
   still a recognizable insn delete the move.  Iterates to a fixed
   point.  INSNS is the head of the function's insn list.  */
static void
mep_reorg_regmove (rtx_insn *insns)
{
  rtx_insn *insn, *next, *follow;
  rtx pat, *where;
  int count = 0, done = 0, replace, before = 0;

  /* Count insns up front, for the statistics printed at the end.  */
  if (dump_file)
    for (insn = insns; insn; insn = NEXT_INSN (insn))
      if (NONJUMP_INSN_P (insn))
	before++;

  /* We're looking for (set r2 r1) moves where r1 dies, followed by a
     set that uses the r2 and r2 dies there.  We replace r2 with r1
     and see if it's still a valid insn.  If so, delete the first set.
     Copied from reorg.c.  */

  while (!done)
    {
      done = 1;
      for (insn = insns; insn; insn = next)
	{
	  next = next_nonnote_nondebug_insn (insn);
	  if (! NONJUMP_INSN_P (insn))
	    continue;
	  pat = PATTERN (insn);

	  replace = 0;

	  /* Candidate move: reg = reg, source dies here, and both
	     registers are in the same register class.  */
	  if (GET_CODE (pat) == SET
	      && GET_CODE (SET_SRC (pat)) == REG
	      && GET_CODE (SET_DEST (pat)) == REG
	      && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
	      && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
	    {
	      follow = next_nonnote_nondebug_insn (insn);
	      if (dump_file)
		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));

	      /* Skip forward over sets that touch neither register.
		 NOTE(review): the loop advances with next_nonnote_insn
		 while the initial step used the _nondebug variant —
		 presumably debug insns are then visited and stop the
		 scan harmlessly; confirm intent.  */
	      while (follow && NONJUMP_INSN_P (follow)
		     && GET_CODE (PATTERN (follow)) == SET
		     && !dead_or_set_p (follow, SET_SRC (pat))
		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
		     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
		{
		  if (dump_file)
		    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
		  follow = next_nonnote_insn (follow);
		}

	      if (dump_file)
		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
	      /* The destination register must die in FOLLOW for the
		 substitution to be safe.  */
	      if (follow && NONJUMP_INSN_P (follow)
		  && GET_CODE (PATTERN (follow)) == SET
		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
		{
		  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
		    {
		      /* Substitute only inside the source.  */
		      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & SET_SRC (PATTERN (follow));
			}
		    }
		  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
		    {
		      /* A store may use the register in its address
			 too, so substitute in the whole pattern.  */
		      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
			{
			  replace = 1;
			  where = & PATTERN (follow);
			}
		    }
		}
	    }

	  /* If so, follow is the corresponding insn */
	  if (replace)
	    {
	      if (dump_file)
		{
		  rtx_insn *x;

		  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
		  for (x = insn; x ;x = NEXT_INSN (x))
		    {
		      print_rtl_single (dump_file, x);
		      if (x == follow)
			break;
		      fprintf (dump_file, "\n");
		    }
		}

	      /* Validate the substitution; on success delete the move
		 and restart the fixed-point loop.  */
	      if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
					       follow, where))
		{
		  count ++;
		  delete_insn (insn);
		  if (dump_file)
		    {
		      fprintf (dump_file, "\n----- Success! new insn:\n\n");
		      print_rtl_single (dump_file, follow);
		    }
		  done = 0;
		}
	    }
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
      fprintf (dump_file, "=====\n");
    }
}
4989#endif
4990
4991
4992/* Figure out where to put LABEL, which is the label for a repeat loop.
4993 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
4994 the loop ends just before LAST_INSN. If SHARED, insns other than the
4995 "repeat" might use LABEL to jump to the loop's continuation point.
4996
4997 Return the last instruction in the adjusted loop. */
4998
aa4a0061
DM
/* See the block comment above: place LABEL for a repeat loop ending at
   (or just before) LAST_INSN, pulling up to two trailing insns into
   the repeat epilogue and padding with nops as needed.  Returns the
   last insn of the adjusted loop.  */
static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Walk backwards looking for up to two insns that can be moved
       into the repeat epilogue (the two slots after the label).  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Calls, labels and barriers end the search.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  /* The repeat epilogue must contain exactly two insns; pad with
     nops if fewer real insns were pulled in.  */
  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5089
5090
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder for OPERANDS.  Each loop is identified by a tag shared
   between its begin and end insns; a fresh tag is allocated whenever
   this is the first doloop insn or the previous insn emitted was of
   the same kind (so begin/end pairs alternate onto one tag).  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      /* Start a new loop tag.  */
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  /* Tags are 0-based; the current loop uses the latest one.  */
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5109
5110
5111/* Code for converting doloop_begins and doloop_ends into valid
5112 MeP instructions. A doloop_begin is just a placeholder:
5113
5114 $count = unspec ($count)
5115
5116 where $count is initially the number of iterations - 1.
5117 doloop_end has the form:
5118
5119 if ($count-- == 0) goto label
5120
5121 The counter variable is private to the doloop insns, nothing else
5122 relies on its value.
5123
5124 There are three cases, in decreasing order of preference:
5125
5126 1. A loop has exactly one doloop_begin and one doloop_end.
5127 The doloop_end branches to the first instruction after
5128 the doloop_begin.
5129
5130 In this case we can replace the doloop_begin with a repeat
5131 instruction and remove the doloop_end. I.e.:
5132
5133 $count1 = unspec ($count1)
5134 label:
5135 ...
5136 insn1
5137 insn2
5138 if ($count2-- == 0) goto label
5139
5140 becomes:
5141
5142 repeat $count1,repeat_label
5143 label:
5144 ...
5145 repeat_label:
5146 insn1
5147 insn2
5148 # end repeat
5149
5150 2. As for (1), except there are several doloop_ends. One of them
5151 (call it X) falls through to a label L. All the others fall
5152 through to branches to L.
5153
5154 In this case, we remove X and replace the other doloop_ends
5155 with branches to the repeat label. For example:
5156
5157 $count1 = unspec ($count1)
5158 start:
5159 ...
5160 if ($count2-- == 0) goto label
5161 end:
5162 ...
5163 if ($count3-- == 0) goto label
5164 goto end
5165
5166 becomes:
5167
5168 repeat $count1,repeat_label
5169 start:
5170 ...
5171 repeat_label:
5172 nop
5173 nop
5174 # end repeat
5175 end:
5176 ...
5177 goto repeat_label
5178
5179 3. The fallback case. Replace doloop_begins with:
5180
5181 $count = $count + 1
5182
5183 Replace doloop_ends with the equivalent of:
5184
5185 $count = $count - 1
5186 if ($count == 0) goto label
5187
5188 Note that this might need a scratch register if $count
5189 is stored in memory. */
5190
/* A structure describing one doloop_begin.  These are chained into a
   per-tag list by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag. */
  struct mep_doloop_begin *next;

  /* The instruction itself. */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register. */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag. */
  struct mep_doloop_end *next;

  /* The instruction itself. */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken. */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location). */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero). */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register. */
  rtx scratch;
};


/* One do-while loop, as grouped by tag in mep_reorg_repeat.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order). */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above. */
  struct mep_doloop_end *end;
};
5238
5239
5240/* Return true if LOOP can be converted into repeat/repeat_end form
5241 (that is, if it matches cases (1) or (2) above). */
5242
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */
static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5270
5271
5272/* The main repeat reorg function. See comment above for details. */
5273
/* The main repeat reorg function.  See comment above for details.
   Groups doloop_begin/doloop_end placeholders by tag, converts loops
   matching cases (1)/(2) into hardware repeat form, and rewrites the
   rest as explicit decrement-and-branch sequences (case (3)).  */
static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* Operand 2 is the loop tag (see mep_emit_doloop).  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5420
5421
/* Return true if the conditional branch INSN would still be a valid
   insn with its condition inverted.  Only EQ/NE/LT/GE conditions are
   considered.  The check is done by temporarily flipping the condition
   code in place, re-running recog, and then restoring the original
   condition (so INSN is unchanged on return).  */
static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Flip the comparison to its inverse; bail out for codes that have
     no directly-invertible MeP branch.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition of the modified pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  /* Restore the original condition and invalidate the cached code
     again, since it was computed for the inverted form.  */
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5459
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it to a fresh label emitted
   after AFTER.  The old target label is deleted if this was its only
   use.  */
static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code in place.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  /* Emit the new target label and point whichever arm of the
     IF_THEN_ELSE holds a LABEL_REF at it, fixing use counts.  */
  label = gen_label_rtx ();
  emit_label_after (label, after);
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Re-recognize; the inverted form must be valid.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5505
/* Convert suitable backwards conditional branches into erepeat
   (conditional hardware repeat) loops.  For each invertible branch,
   scan backwards for its target label; if the label's uses can all be
   accounted for, emit an erepeat at the loop top, insert the erepeat
   label/epilogue at the bottom, and invert or delete the original
   branch.  */
static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself occupies one slot in the loop.  */
	count = simplejump_p (insn) ? 0 : 1;
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers end the candidate region.  */
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		/* Found the loop top.  Now decide whether the label's
		   other uses make the transformation unsafe.  */
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop: the jump is now redundant.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop: invert the branch so it exits
		       the loop, and close with erepeat_end.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5617
5618/* Replace a jump to a return, with a copy of the return. GCC doesn't
5619 always do this on its own. */
5620
5621static void
aa4a0061 5622mep_jmp_return_reorg (rtx_insn *insns)
7acf4da6 5623{
647d790d 5624 rtx_insn *insn, *label, *ret;
7acf4da6
DD
5625 int ret_code;
5626
5627 for (insn = insns; insn; insn = NEXT_INSN (insn))
5628 if (simplejump_p (insn))
5629 {
5630 /* Find the fist real insn the jump jumps to. */
647d790d 5631 label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
7acf4da6 5632 while (ret
b64925dc
SB
5633 && (NOTE_P (ret)
5634 || LABEL_P (ret)
7acf4da6 5635 || GET_CODE (PATTERN (ret)) == USE))
647d790d 5636 ret = NEXT_INSN (ret);
7acf4da6
DD
5637
5638 if (ret)
5639 {
5640 /* Is it a return? */
5641 ret_code = recog_memoized (ret);
5642 if (ret_code == CODE_FOR_return_internal
5643 || ret_code == CODE_FOR_eh_return_internal)
5644 {
5645 /* It is. Replace the jump with a return. */
5646 LABEL_NUSES (label) --;
5647 if (LABEL_NUSES (label) == 0)
5648 delete_insn (label);
5649 PATTERN (insn) = copy_rtx (PATTERN (ret));
5650 INSN_CODE (insn) = -1;
5651 }
5652 }
5653 }
5654}
5655
5656
5657static void
aa4a0061 5658mep_reorg_addcombine (rtx_insn *insns)
7acf4da6 5659{
aa4a0061 5660 rtx_insn *i, *n;
7acf4da6
DD
5661
5662 for (i = insns; i; i = NEXT_INSN (i))
5663 if (INSN_P (i)
5664 && INSN_CODE (i) == CODE_FOR_addsi3
5665 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5666 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5667 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5668 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5669 {
5670 n = NEXT_INSN (i);
5671 if (INSN_P (n)
5672 && INSN_CODE (n) == CODE_FOR_addsi3
5673 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5674 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5675 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5676 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5677 {
5678 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5679 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5680 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5681 && ic + nc < 32767
5682 && ic + nc > -32768)
5683 {
5684 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
0f82e5c9 5685 SET_NEXT_INSN (i) = NEXT_INSN (n);
7acf4da6 5686 if (NEXT_INSN (i))
0f82e5c9 5687 SET_PREV_INSN (NEXT_INSN (i)) = i;
7acf4da6
DD
5688 }
5689 }
5690 }
5691}
5692
5693/* If this insn adjusts the stack, return the adjustment, else return
5694 zero. */
5695static int
aa4a0061 5696add_sp_insn_p (rtx_insn *insn)
7acf4da6
DD
5697{
5698 rtx pat;
5699
5700 if (! single_set (insn))
5701 return 0;
5702 pat = PATTERN (insn);
5703 if (GET_CODE (SET_DEST (pat)) != REG)
5704 return 0;
5705 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5706 return 0;
5707 if (GET_CODE (SET_SRC (pat)) != PLUS)
5708 return 0;
5709 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5710 return 0;
5711 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5712 return 0;
5713 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5714 return 0;
5715 return INTVAL (XEXP (SET_SRC (pat), 1));
5716}
5717
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function's only stack references are the prologue's
   "$sp = $sp - N" and a matching epilogue "$sp = $sp + N", with no
   calls and no other mention of $sp in between, both adjustments are
   deleted.  */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;  /* The $sp REG rtx, taken from the prologue adjustment.  */

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      /* NOTE: the last real insn (presumably the return) is never
	 examined — the loop stops once there is no following insn.  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* A second stack adjustment: it must be the only one and it
	     must exactly undo the prologue's adjustment.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	/* Any other use of $sp means the frame is needed.  */
	return;
      else if (CALL_P (insns))
	/* Calls may use the stack; keep the frame.  */
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5772
/* The MeP machine-dependent reorg pass.  Runs the individual reorg
   subpasses in a fixed order; the ordering matters (see comments
   below).  */
static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat conversion is skipped when profiling (the instrumentation
     presumably breaks the loop-body assumptions) and when an interrupt
     handler does not preserve RPB — TODO(review): confirm the
     interrupt-register rationale against mep_interrupt_saved_reg.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5804
5805\f
5806
/*----------------------------------------------------------------------*/
/* Builtins								*/
/*----------------------------------------------------------------------*/

/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  Filled in by mep_init_intrinsics.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name to the ISA bitmask for that configuration.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Known configurations; terminated by a null entry.  The table body is
   generated into COPROC_SELECTION_TABLE when available.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5838
5839/* Initialize the global intrinsics variables above. */
5840
5841static void
5842mep_init_intrinsics (void)
5843{
5844 size_t i;
5845
5846 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5847 mep_selected_isa = mep_configs[0].isa;
5848 if (mep_config_string != 0)
5849 for (i = 0; mep_configs[i].config_name; i++)
5850 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5851 {
5852 mep_selected_isa = mep_configs[i].isa;
5853 break;
5854 }
5855
5856 /* Assume all intrinsics are unavailable. */
5857 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5858 mep_intrinsic_insn[i] = -1;
5859
5860 /* Build up the global intrinsic tables. */
5861 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5862 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5863 {
5864 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5865 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5866 }
5867 /* See whether we can directly move values between one coprocessor
5868 register and another. */
5869 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5870 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5871 mep_have_copro_copro_moves_p = true;
5872
5873 /* See whether we can directly move values between core and
5874 coprocessor registers. */
5875 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5876 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5877
5878 mep_have_core_copro_moves_p = 1;
5879}
5880
/* Declare all available intrinsic functions.  Called once only.  */

/* Cached tree nodes for the coprocessor builtin types; created in
   mep_init_builtins and looked up by mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;	/* as wide as the copro data bus */
static tree opaque_vector_type_node;	/* generic 8-byte opaque vector */
static tree v8qi_type_node;		/* 8 x signed char */
static tree v4hi_type_node;		/* 4 x signed short */
static tree v2si_type_node;		/* 2 x signed int */
static tree v8uqi_type_node;		/* 8 x unsigned char */
static tree v4uhi_type_node;		/* 4 x unsigned short */
static tree v2usi_type_node;		/* 2 x unsigned int */
5891
5892static tree
5893mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5894{
5895 switch (cr)
5896 {
5897 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5898 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5899 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5900 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5901 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5902 case cgen_regnum_operand_type_CHAR: return char_type_node;
5903 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5904 case cgen_regnum_operand_type_SI: return intSI_type_node;
5905 case cgen_regnum_operand_type_DI: return intDI_type_node;
5906 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5907 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5908 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5909 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5910 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5911 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5912 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5913 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5914 default:
5915 return void_type_node;
5916 }
5917}
5918
5919static void
5920mep_init_builtins (void)
5921{
5922 size_t i;
5923
5924 if (TARGET_64BIT_CR_REGS)
5925 cp_data_bus_int_type_node = long_long_integer_type_node;
5926 else
5927 cp_data_bus_int_type_node = long_integer_type_node;
5928
5929 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5930 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5931 v4hi_type_node = build_vector_type (intHI_type_node, 4);
5932 v2si_type_node = build_vector_type (intSI_type_node, 2);
5933 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5934 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5935 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
5936
c0814136
SB
5937 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);
5938
5939 add_builtin_type ("cp_vector", opaque_vector_type_node);
5940
5941 add_builtin_type ("cp_v8qi", v8qi_type_node);
5942 add_builtin_type ("cp_v4hi", v4hi_type_node);
5943 add_builtin_type ("cp_v2si", v2si_type_node);
5944
5945 add_builtin_type ("cp_v8uqi", v8uqi_type_node);
5946 add_builtin_type ("cp_v4uhi", v4uhi_type_node);
5947 add_builtin_type ("cp_v2usi", v2usi_type_node);
7acf4da6
DD
5948
5949 /* Intrinsics like mep_cadd3 are implemented with two groups of
5950 instructions, one which uses UNSPECs and one which uses a specific
5951 rtl code such as PLUS. Instructions in the latter group belong
5952 to GROUP_KNOWN_CODE.
5953
5954 In such cases, the intrinsic will have two entries in the global
5955 tables above. The unspec form is accessed using builtin functions
5956 while the specific form is accessed using the mep_* enum in
5957 mep-intrin.h.
5958
5959 The idea is that __cop arithmetic and builtin functions have
5960 different optimization requirements. If mep_cadd3() appears in
5961 the source code, the user will surely except gcc to use cadd3
5962 rather than a work-alike such as add3. However, if the user
5963 just writes "a + b", where a or b are __cop variables, it is
5964 reasonable for gcc to choose a core instruction rather than
5965 cadd3 if it believes that is more optimal. */
5966 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5967 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
5968 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
5969 {
5970 tree ret_type = void_type_node;
5971 tree bi_type;
5972
5973 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
5974 continue;
5975
5976 if (cgen_insns[i].cret_p)
5977 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
5978
34149ed5 5979 bi_type = build_function_type_list (ret_type, NULL_TREE);
7acf4da6
DD
5980 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
5981 bi_type,
5982 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
5983 }
5984}
5985
5986/* Report the unavailablity of the given intrinsic. */
5987
5988#if 1
5989static void
5990mep_intrinsic_unavailable (int intrinsic)
5991{
5992 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
5993
5994 if (already_reported_p[intrinsic])
5995 return;
5996
5997 if (mep_intrinsic_insn[intrinsic] < 0)
5998 error ("coprocessor intrinsic %qs is not available in this configuration",
5999 cgen_intrinsics[intrinsic]);
6000 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6001 error ("%qs is not available in VLIW functions",
6002 cgen_intrinsics[intrinsic]);
6003 else
6004 error ("%qs is not available in non-VLIW functions",
6005 cgen_intrinsics[intrinsic]);
6006
6007 already_reported_p[intrinsic] = 1;
6008}
6009#endif
6010
6011
6012/* See if any implementation of INTRINSIC is available to the
6013 current function. If so, store the most general implementation
6014 in *INSN_PTR and return true. Return false otherwise. */
6015
6016static bool
6017mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6018{
6019 int i;
6020
6021 i = mep_intrinsic_insn[intrinsic];
6022 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6023 i = mep_intrinsic_chain[i];
6024
6025 if (i >= 0)
6026 {
6027 *insn_ptr = &cgen_insns[i];
6028 return true;
6029 }
6030 return false;
6031}
6032
6033
6034/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6035 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6036 try using a work-alike instead. In this case, the returned insn
6037 may have three operands rather than two. */
6038
6039static bool
6040mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6041{
6042 size_t i;
6043
6044 if (intrinsic == mep_cmov)
6045 {
6046 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6047 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6048 return true;
6049 return false;
6050 }
6051 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6052}
6053
6054
6055/* If ARG is a register operand that is the same size as MODE, convert it
6056 to MODE using a subreg. Otherwise return ARG as-is. */
6057
6058static rtx
6059mep_convert_arg (enum machine_mode mode, rtx arg)
6060{
6061 if (GET_MODE (arg) != mode
6062 && register_operand (arg, VOIDmode)
6063 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6064 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6065 return arg;
6066}
6067
6068
6069/* Apply regnum conversions to ARG using the description given by REGNUM.
6070 Return the new argument on success and null on failure. */
6071
6072static rtx
6073mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6074{
6075 if (regnum->count == 0)
6076 return arg;
6077
6078 if (GET_CODE (arg) != CONST_INT
6079 || INTVAL (arg) < 0
6080 || INTVAL (arg) >= regnum->count)
6081 return 0;
6082
6083 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6084}
6085
6086
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.  Returns
   the (possibly converted) argument on success and null when ARG
   cannot be made to fit the operand.  The fallback steps below are
   tried in order; their sequence is significant.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* Nothing worked; the caller reports the error.  */
  return 0;
}
6144
6145
6146/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6147 function FNNAME. OPERAND describes the operand to which ARGNUM
6148 is mapped. */
6149
6150static void
6151mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6152 int argnum, tree fnname)
6153{
6154 size_t i;
6155
6156 if (GET_CODE (arg) == CONST_INT)
6157 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6158 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6159 {
6160 const struct cgen_immediate_predicate *predicate;
6161 HOST_WIDE_INT argval;
6162
6163 predicate = &cgen_immediate_predicates[i];
6164 argval = INTVAL (arg);
6165 if (argval < predicate->lower || argval >= predicate->upper)
6166 error ("argument %d of %qE must be in the range %d...%d",
6167 argnum, fnname, predicate->lower, predicate->upper - 1);
6168 else
6169 error ("argument %d of %qE must be a multiple of %d",
6170 argnum, fnname, predicate->align);
6171 return;
6172 }
6173
6174 error ("incompatible type for argument %d of %qE", argnum, fnname);
6175}
6176
/* Expand a call to a MeP coprocessor builtin.  EXP is the CALL_EXPR;
   TARGET, if non-null, is a suggested place for the result.  Returns
   the result rtx, or NULL_RTX after issuing a diagnostic.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];	/* legitimized operands / raw arguments */
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;			/* IDENTIFIER_NODE, used with %qE */
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  unsigned int first_arg = 0;	/* 1 when operand 0 is the return value */
  unsigned int builtin_n_args;	/* number of user-supplied arguments */

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  if (cgen_insn->cret_p)
    {
      /* Operand 0 carries the return value rather than a user
	 argument; adjust the expected argument count accordingly.
	 NOTE(review): the mep_cgen_regnum_to_type call's result is
	 discarded — it appears to have no effect here.  */
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* Map constant register indices to hard registers when the
	 operand names a register group.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  /* Reference parameters: wrap the address in a MEM of the
	     pointed-to mode.  */
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the return-value operand (if any) to TARGET when its mode
     matches, otherwise to a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6356
6357static bool
6358mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6359{
6360 return false;
6361}
6362\f
6363/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6364 a global register. */
6365
6366static int
6367global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6368{
6369 int regno;
6370 rtx x = *loc;
6371
6372 if (! x)
6373 return 0;
6374
6375 switch (GET_CODE (x))
6376 {
6377 case SUBREG:
6378 if (REG_P (SUBREG_REG (x)))
6379 {
6380 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6381 && global_regs[subreg_regno (x)])
6382 return 1;
6383 return 0;
6384 }
6385 break;
6386
6387 case REG:
6388 regno = REGNO (x);
6389 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6390 return 1;
6391 return 0;
6392
6393 case SCRATCH:
6394 case PC:
6395 case CC0:
6396 case CONST_INT:
6397 case CONST_DOUBLE:
6398 case CONST:
6399 case LABEL_REF:
6400 return 0;
6401
6402 case CALL:
6403 /* A non-constant call might use a global register. */
6404 return 1;
6405
6406 default:
6407 break;
6408 }
6409
6410 return 0;
6411}
6412
6413/* Returns nonzero if X mentions a global register. */
6414
6415static int
6416global_reg_mentioned_p (rtx x)
6417{
6418 if (INSN_P (x))
6419 {
6420 if (CALL_P (x))
6421 {
6422 if (! RTL_CONST_OR_PURE_CALL_P (x))
6423 return 1;
6424 x = CALL_INSN_FUNCTION_USAGE (x);
6425 if (x == 0)
6426 return 0;
6427 }
6428 else
6429 x = PATTERN (x);
6430 }
6431
6432 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6433}
6434/* Scheduling hooks for VLIW mode.
6435
6436 Conceptually this is very simple: we have a two-pack architecture
6437 that takes one core insn and one coprocessor insn to make up either
6438 a 32- or 64-bit instruction word (depending on the option bit set in
6439 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6440 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6441 and one 48-bit cop insn or two 32-bit core/cop insns.
6442
6443 In practice, instruction selection will be a bear. Consider in
6444 VL64 mode the following insns
6445
6446 add $1, 1
6447 cmov $cr0, $0
6448
6449 these cannot pack, since the add is a 16-bit core insn and cmov
6450 is a 32-bit cop insn. However,
6451
6452 add3 $1, $1, 1
6453 cmov $cr0, $0
6454
6455 packs just fine. For good VLIW code generation in VL64 mode, we
6456 will have to have 32-bit alternatives for many of the common core
6457 insns. Not implemented. */
6458
/* Implement TARGET_SCHED_ADJUST_COST.  Adjust the scheduling cost of
   the dependence LINK between INSN and DEP_INSN, whose default cost
   is COST.  */
static int
mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6505
6506/* ??? We don't properly compute the length of a load/store insn,
6507 taking into account the addressing mode. */
6508
6509static int
6510mep_issue_rate (void)
6511{
6512 return TARGET_IVC2 ? 3 : 2;
6513}
6514
6515/* Return true if function DECL was declared with the vliw attribute. */
6516
6517bool
6518mep_vliw_function_p (tree decl)
6519{
6520 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6521}
6522
ce1ce33a
DM
6523static rtx_insn *
6524mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6525 int length)
7acf4da6
DD
6526{
6527 int i;
6528
6529 for (i = nready - 1; i >= 0; --i)
6530 {
ce1ce33a 6531 rtx_insn *insn = ready[i];
7acf4da6
DD
6532 if (recog_memoized (insn) >= 0
6533 && get_attr_slot (insn) == slot
6534 && get_attr_length (insn) == length)
6535 return insn;
6536 }
6537
ce1ce33a 6538 return NULL;
7acf4da6
DD
6539}
6540
6541static void
ce1ce33a 6542mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
7acf4da6
DD
6543{
6544 int i;
6545
6546 for (i = 0; i < nready; ++i)
6547 if (ready[i] == insn)
6548 {
6549 for (; i < nready - 1; ++i)
6550 ready[i] = ready[i + 1];
6551 ready[i] = insn;
6552 return;
6553 }
6554
6555 gcc_unreachable ();
6556}
6557
6558static void
ce1ce33a 6559mep_print_sched_insn (FILE *dump, rtx_insn *insn)
7acf4da6
DD
6560{
6561 const char *slots = "none";
6562 const char *name = NULL;
6563 int code;
6564 char buf[30];
6565
6566 if (GET_CODE (PATTERN (insn)) == SET
6567 || GET_CODE (PATTERN (insn)) == PARALLEL)
6568 {
6569 switch (get_attr_slots (insn))
6570 {
6571 case SLOTS_CORE: slots = "core"; break;
6572 case SLOTS_C3: slots = "c3"; break;
6573 case SLOTS_P0: slots = "p0"; break;
6574 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6575 case SLOTS_P0_P1: slots = "p0,p1"; break;
6576 case SLOTS_P0S: slots = "p0s"; break;
6577 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6578 case SLOTS_P1: slots = "p1"; break;
6579 default:
6580 sprintf(buf, "%d", get_attr_slots (insn));
6581 slots = buf;
6582 break;
6583 }
6584 }
6585 if (GET_CODE (PATTERN (insn)) == USE)
6586 slots = "use";
6587
6588 code = INSN_CODE (insn);
6589 if (code >= 0)
6590 name = get_insn_name (code);
6591 if (!name)
6592 name = "{unknown}";
6593
6594 fprintf (dump,
6595 "insn %4d %4d %8s %s\n",
6596 code,
6597 INSN_UID (insn),
6598 name,
6599 slots);
6600}
6601
/* Implement TARGET_SCHED_REORDER.  Try to pair a core insn with a
   coprocessor insn at the end of the ready list so they issue in the
   same cycle, and return the number of insns to issue (1 or 2).  */

static int
mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
		   int *pnready, int clock ATTRIBUTE_UNUSED)
{
  int nready = *pnready;
  rtx_insn *core_insn, *cop_insn;
  int i;

  if (dump && sched_verbose > 1)
    {
      fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
      for (i=0; i<nready; i++)
	mep_print_sched_insn (dump, ready[i]);
      fprintf (dump, "\n");
    }

  /* Pairing only applies to VLIW functions with at least two
     candidates; otherwise issue one insn as usual.  */
  if (!mep_vliw_function_p (cfun->decl))
    return 1;
  if (nready < 2)
    return 1;

  /* IVC2 uses a DFA to determine what's ready and what's not. */
  if (TARGET_IVC2)
    return nready;

  /* We can issue either a core or coprocessor instruction.
     Look for a matched pair of insns to reorder.  If we don't
     find any, don't second-guess the scheduler's priorities.  */

  /* First preference: 16-bit core insn plus a cop insn sized to fill
     the bundle (48 bits in VL64 mode, 16 bits otherwise).  */
  if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
      && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
					  TARGET_OPT_VL64 ? 6 : 2)))
    ;
  /* In VL64 mode a 32-bit core insn can pair with a 32-bit cop insn.  */
  else if (TARGET_OPT_VL64
	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
    ;
  else
    /* We didn't find a pair.  Issue the single insn at the head
       of the ready list.  */
    return 1;

  /* Reorder the two insns first.  */
  mep_move_ready_insn (ready, nready, core_insn);
  mep_move_ready_insn (ready, nready - 1, cop_insn);
  return 2;
}
6650
6651/* A for_each_rtx callback. Return true if *X is a register that is
6652 set by insn PREV. */
6653
6654static int
6655mep_store_find_set (rtx *x, void *prev)
6656{
6657 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6658}
6659
/* Like mep_store_data_bypass_p, but takes a pattern as the second
   argument, not the containing insn.  Returns true if PAT is a store
   whose address does not depend on any register set by PREV.  */

static bool
mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  Recurse into each element of
     the PARALLEL; the bypass applies if any element qualifies.
     NOTE(review): each element is cast to rtx_insn *, which assumes
     the PARALLEL body elements are insn-like — confirm against the
     intrinsic patterns this is used with.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev,
				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  Hence the loop starts at 1:
     operand 0 (the store data) is allowed to depend on PREV.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
}
6704
6705/* Return true if INSN is a store instruction and if the store address
6706 has no true dependence on PREV. */
6707
6708bool
aa4a0061 6709mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
7acf4da6
DD
6710{
6711 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6712}
6713
6714/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6715 is a register other than LO or HI and if PREV sets *X. */
6716
6717static int
6718mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6719{
6720 return (REG_P (*x)
6721 && REGNO (*x) != LO_REGNO
6722 && REGNO (*x) != HI_REGNO
6723 && reg_set_p (*x, (const_rtx) prev));
6724}
6725
6726/* Return true if, apart from HI/LO, there are no true dependencies
6727 between multiplication instructions PREV and INSN. */
6728
6729bool
aa4a0061 6730mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
7acf4da6
DD
6731{
6732 rtx pat;
6733
6734 pat = PATTERN (insn);
6735 if (GET_CODE (pat) == PARALLEL)
6736 pat = XVECEXP (pat, 0, 0);
6737 return (GET_CODE (pat) == SET
6738 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6739}
6740
6741/* Return true if INSN is an ldc instruction that issues to the
6742 MeP-h1 integer pipeline. This is true for instructions that
6743 read from PSW, LP, SAR, HI and LO. */
6744
6745bool
aa4a0061 6746mep_ipipe_ldc_p (rtx_insn *insn)
7acf4da6
DD
6747{
6748 rtx pat, src;
6749
6750 pat = PATTERN (insn);
6751
6752 /* Cope with instrinsics that set both a hard register and its shadow.
6753 The set of the hard register comes first. */
6754 if (GET_CODE (pat) == PARALLEL)
6755 pat = XVECEXP (pat, 0, 0);
6756
6757 if (GET_CODE (pat) == SET)
6758 {
6759 src = SET_SRC (pat);
6760
6761 /* Cope with intrinsics. The first operand to the unspec is
6762 the source register. */
6763 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6764 src = XVECEXP (src, 0, 0);
6765
6766 if (REG_P (src))
6767 switch (REGNO (src))
6768 {
6769 case PSW_REGNO:
6770 case LP_REGNO:
6771 case SAR_REGNO:
6772 case HI_REGNO:
6773 case LO_REGNO:
6774 return true;
6775 }
6776 }
6777 return false;
6778}
6779
/* Create a VLIW bundle from core instruction CORE_INSN_OR_PAT and
   coprocessor instruction COP.  COP always satisfies INSN_P, but
   CORE_INSN_OR_PAT can be either a new pattern or an existing
   instruction.

   Emit the bundle (a SEQUENCE) in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE: the sequence
     members are chained between the SEQUENCE insn's neighbors.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  (The mode of
     an insn is reused here as a bundling marker: VOIDmode for the
     core insn, BImode for the bundled cop insn.)  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  Prefer the core insn's location if it
     has one, else fall back to COP's.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6827
6828/* A helper routine for ms1_insn_dependent_p called through note_stores. */
6829
6830static void
6831mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6832{
6833 rtx * pinsn = (rtx *) data;
6834
6835 if (*pinsn && reg_mentioned_p (x, *pinsn))
6836 *pinsn = NULL_RTX;
6837}
6838
6839/* Return true if anything in insn X is (anti,output,true) dependent on
6840 anything in insn Y. */
6841
6842static int
6843mep_insn_dependent_p (rtx x, rtx y)
6844{
6845 rtx tmp;
6846
6847 gcc_assert (INSN_P (x));
6848 gcc_assert (INSN_P (y));
6849
6850 tmp = PATTERN (y);
6851 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6852 if (tmp == NULL_RTX)
6853 return 1;
6854
6855 tmp = PATTERN (x);
6856 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6857 if (tmp == NULL_RTX)
6858 return 1;
6859
6860 return 0;
6861}
6862
6863static int
aa4a0061 6864core_insn_p (rtx_insn *insn)
7acf4da6
DD
6865{
6866 if (GET_CODE (PATTERN (insn)) == USE)
6867 return 0;
6868 if (get_attr_slot (insn) == SLOT_CORE)
6869 return 1;
6870 return 0;
6871}
6872
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks the start of a scheduled bundle.  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, unlinking each NOTE
	     and re-linking it immediately before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Any other kind of insn terminates the current bundle.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the whole bundle (TImode head plus following
		 VOIDmode insns) looking for a core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop above doesn't test the final member.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A cop insn bundles with the preceding core insn LAST only
	     if their lengths exactly fill the bundle and there is no
	     dependence between them; otherwise pad with a nop.  */
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  /* Fills a VL64 bundle by itself; no padding.  */
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7066
7067
7068/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7069 Return true on success. This function can fail if the intrinsic
7070 is unavailable or if the operands don't satisfy their predicates. */
7071
7072bool
7073mep_emit_intrinsic (int intrinsic, const rtx *operands)
7074{
7075 const struct cgen_insn *cgen_insn;
f12c802a 7076 const struct insn_data_d *idata;
7acf4da6
DD
7077 rtx newop[10];
7078 int i;
7079
7080 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7081 return false;
7082
7083 idata = &insn_data[cgen_insn->icode];
7084 for (i = 0; i < idata->n_operands; i++)
7085 {
7086 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7087 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7088 return false;
7089 }
7090
7091 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7092 newop[3], newop[4], newop[5],
7093 newop[6], newop[7], newop[8]));
7094
7095 return true;
7096}
7097
7098
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: always reports failure, so callers fall back
   to their generic expansion path.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7110
7111
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: always reports failure, so callers fall back
   to their generic expansion path.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7128
/* Implement TARGET_RTX_COSTS.  Fill in *TOTAL with the cost of X and
   return true when this function handles CODE; return false to let
   the generic cost machinery take over.  */

static bool
mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int *total,
	      bool ATTRIBUTE_UNUSED speed_t)
{
  switch (code)
    {
    case CONST_INT:
      /* Small constants are free; medium ones cost one unit.
	 NOTE(review): the upper bounds are asymmetric ("< 127" rather
	 than "<= 127", "< 65536" alongside ">= -32768") — presumably
	 meant to track the signed-8/16-bit immediate encodings; confirm
	 against the MeP immediate ranges before changing.  */
      if (INTVAL (x) >= -128 && INTVAL (x) < 127)
	*total = 0;
      else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
	*total = 1;
      else
	*total = 3;
      return true;

    case SYMBOL_REF:
      /* When optimizing for size, treat symbol references as free so
	 they are not expanded away.  */
      *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Multiplication by a constant is costed higher than by a
	 register.  */
      *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
		? COSTS_N_INSNS (3)
		: COSTS_N_INSNS (2));
      return true;
    }
  return false;
}
7157
/* Implement TARGET_ADDRESS_COST.  All addressing modes are treated as
   equally cheap; the arguments are ignored.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7166
7acf4da6
DD
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections: "based"/"tiny"/"far" data variants, their read-only
   counterparts, and the VLIW/core code sections.  The section
   directive strings are emitted verbatim by output_section_asm_op.  */

static void
mep_asm_init_sections (void)
{
  /* Writable data addressed off the base register.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Small (tiny) BSS and initialized data.  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* "Far" data and BSS.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only counterparts.  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW code sections: the trailing .vliw directive switches the
     assembler into VLIW mode for everything that follows.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Far core-mode code; .core switches the assembler back.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
c28883e6
DD
7211\f
/* Initialize the GCC target structure.  Each #define below installs
   one of the mep_* functions defined earlier in this file as the
   implementation of the corresponding target hook.  */

/* Attribute and inlining hooks.  */
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p

/* Section handling.  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section

/* Builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin

/* Scheduling.  */
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder

/* More section/symbol handling.  */
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info

/* Calls, costs and reorg.  */
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG 	mep_reorg

/* Argument passing.  */
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance

/* Miscellaneous hooks.  */
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE	mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield

/* Varargs support.  */
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr

/* Frame, registers, trampolines, constants, loops.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P		can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;
7acf4da6
DD
7296
7297#include "gt-mep.h"