]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/mep/mep.c
73bb45d6c66b2920f7d042523d09657b7006d4d6
[thirdparty/gcc.git] / gcc / config / mep / mep.c
1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "obstack.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "except.h"
41 #include "function.h"
42 #include "optabs.h"
43 #include "reload.h"
44 #include "tm_p.h"
45 #include "ggc.h"
46 #include "diagnostic-core.h"
47 #include "integrate.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "df.h"
52 #include "gimple.h"
53
54 /* Structure of this file:
55
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
58 + Reload Support
59 + Costs
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
62 + Operand Printing
63 + Function args in registers
64 + Handle pipeline hazards
65 + Handle attributes
66 + Trampolines
67 + Machine-dependent Reorg
68 + Builtins. */
69
70 /* Symbol encodings:
71
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
73
74 b - based
75 t - tiny
76 n - near
77 f - far
78 i - io, near
79 I - io, far
80 c - cb (control bus) */
81
/* Per-function machine-specific data, presumably installed via
   init_machine_status (see mep_init_machine_status below) — one
   instance per function, garbage-collected via GTY.  */
struct GTY(()) machine_function
{
  /* Machine-specific frame-pointer decision for this function;
     exact semantics set elsewhere in this file.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Register-save bookkeeping: total size, per-register slot
     offsets, and a per-register saved flag.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute. */
  int disable_interrupts;

  /* Number of doloop tags used so far. */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end. */
  bool doloop_tag_from_end;

  /* True if reload changes $TP. */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler. */
  int asms_without_operands;
};
120
121 #define MEP_CONTROL_REG(x) \
122 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
123
124 static GTY(()) section * based_section;
125 static GTY(()) section * tinybss_section;
126 static GTY(()) section * far_section;
127 static GTY(()) section * farbss_section;
128 static GTY(()) section * frodata_section;
129 static GTY(()) section * srodata_section;
130
131 static GTY(()) section * vtext_section;
132 static GTY(()) section * vftext_section;
133 static GTY(()) section * ftext_section;
134
135 static void mep_set_leaf_registers (int);
136 static bool symbol_p (rtx);
137 static bool symbolref_p (rtx);
138 static void encode_pattern_1 (rtx);
139 static void encode_pattern (rtx);
140 static bool const_in_range (rtx, int, int);
141 static void mep_rewrite_mult (rtx, rtx);
142 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
143 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
144 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
145 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
146 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
147 static bool mep_nongeneral_reg (rtx);
148 static bool mep_general_copro_reg (rtx);
149 static bool mep_nonregister (rtx);
150 static struct machine_function* mep_init_machine_status (void);
151 static rtx mep_tp_rtx (void);
152 static rtx mep_gp_rtx (void);
153 static bool mep_interrupt_p (void);
154 static bool mep_disinterrupt_p (void);
155 static bool mep_reg_set_p (rtx, rtx);
156 static bool mep_reg_set_in_function (int);
157 static bool mep_interrupt_saved_reg (int);
158 static bool mep_call_saves_register (int);
159 static rtx F (rtx);
160 static void add_constant (int, int, int, int);
161 static rtx maybe_dead_move (rtx, rtx, bool);
162 static void mep_reload_pointer (int, const char *);
163 static void mep_start_function (FILE *, HOST_WIDE_INT);
164 static bool mep_function_ok_for_sibcall (tree, tree);
165 static int unique_bit_in (HOST_WIDE_INT);
166 static int bit_size_for_clip (HOST_WIDE_INT);
167 static int bytesize (const_tree, enum machine_mode);
168 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
169 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
170 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
171 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
173 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
174 static bool mep_function_attribute_inlinable_p (const_tree);
175 static bool mep_can_inline_p (tree, tree);
176 static bool mep_lookup_pragma_disinterrupt (const char *);
177 static int mep_multiple_address_regions (tree, bool);
178 static int mep_attrlist_to_encoding (tree, tree);
179 static void mep_insert_attributes (tree, tree *);
180 static void mep_encode_section_info (tree, rtx, int);
181 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
182 static void mep_unique_section (tree, int);
183 static unsigned int mep_section_type_flags (tree, const char *, int);
184 static void mep_asm_named_section (const char *, unsigned int, tree);
185 static bool mep_mentioned_p (rtx, rtx, int);
186 static void mep_reorg_regmove (rtx);
187 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
188 static void mep_reorg_repeat (rtx);
189 static bool mep_invertable_branch_p (rtx);
190 static void mep_invert_branch (rtx, rtx);
191 static void mep_reorg_erepeat (rtx);
192 static void mep_jmp_return_reorg (rtx);
193 static void mep_reorg_addcombine (rtx);
194 static void mep_reorg (void);
195 static void mep_init_intrinsics (void);
196 static void mep_init_builtins (void);
197 static void mep_intrinsic_unavailable (int);
198 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
199 static bool mep_get_move_insn (int, const struct cgen_insn **);
200 static rtx mep_convert_arg (enum machine_mode, rtx);
201 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
202 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
203 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
204 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
205 static int mep_adjust_cost (rtx, rtx, rtx, int);
206 static int mep_issue_rate (void);
207 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
208 static void mep_move_ready_insn (rtx *, int, rtx);
209 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
210 static rtx mep_make_bundle (rtx, rtx);
211 static void mep_bundle_insns (rtx);
212 static bool mep_rtx_cost (rtx, int, int, int *, bool);
213 static int mep_address_cost (rtx, bool);
214 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
215 tree, int *, int);
216 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
217 const_tree, bool);
218 static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
219 const_tree, bool);
220 static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
221 const_tree, bool);
222 static bool mep_vector_mode_supported_p (enum machine_mode);
223 static bool mep_handle_option (size_t, const char *, int);
224 static rtx mep_allocate_initial_value (rtx);
225 static void mep_asm_init_sections (void);
226 static int mep_comp_type_attributes (const_tree, const_tree);
227 static bool mep_narrow_volatile_bitfield (void);
228 static rtx mep_expand_builtin_saveregs (void);
229 static tree mep_build_builtin_va_list (void);
230 static void mep_expand_va_start (tree, rtx);
231 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
232 static bool mep_can_eliminate (const int, const int);
233 static void mep_conditional_register_usage (void);
234 static void mep_trampoline_init (rtx, tree, rtx);
235 \f
236 #define WANT_GCC_DEFINITIONS
237 #include "mep-intrin.h"
238 #undef WANT_GCC_DEFINITIONS
239
240 \f
241 /* Command Line Option Support. */
242
/* Per-hard-register leaf-register flags; every entry always holds the
   same value (see mep_set_leaf_registers).  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers. */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers. */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov. */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};

/* Nonzero if -mtiny= was given explicitly on the command line
   (used by mep_option_override to diagnose conflicts with -ms/-mm).  */
static int option_mtiny_specified = 0;
269
270 \f
271 static void
272 mep_set_leaf_registers (int enable)
273 {
274 int i;
275
276 if (mep_leaf_registers[0] != enable)
277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
278 mep_leaf_registers[i] = enable;
279 }
280
/* Adjust register availability based on target options: without both
   the multiply and divide option flags, $hi and $lo are made fixed
   and call-used; shadow registers are always treated as global.  */
static void
mep_conditional_register_usage (void)
{
  int i;

  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
    {
      /* No insn writes $hi/$lo in this configuration, so take them
	 out of allocation entirely.  */
      fixed_regs[HI_REGNO] = 1;
      fixed_regs[LO_REGNO] = 1;
      call_used_regs[HI_REGNO] = 1;
      call_used_regs[LO_REGNO] = 1;
    }

  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
    global_regs[i] = 1;
}
297
298
/* Per-optimization-level default overrides for MeP (presumably wired
   to TARGET_OPTION_OPTIMIZATION_TABLE — confirm against the target
   macro definitions).  */
static const struct default_options mep_option_optimization_table[] =
  {
    /* The first scheduling pass often increases register pressure and
       tends to result in more spill code.  Only run it when
       specifically asked.  */
    { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },

    /* Using $fp doesn't gain us much, even when debugging is
       important.  */
    { OPT_LEVELS_ALL, OPT_fomit_frame_pointer, NULL, 1 },

    /* Table terminator.  */
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
312
/* Validate the command-line option combination and derive dependent
   settings: diagnose unsupported -fpic/-fPIC, mutually exclusive
   -ms/-mm/-ml/-mtiny=, set the tiny-section cutoff, and install the
   per-function machine state allocator.  */
static void
mep_option_override (void)
{
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && option_mtiny_specified)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && option_mtiny_specified)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  /* -mc= only accepts the three named constant sections.  */
  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Memory-model options imply a tiny-section cutoff; -ml keeps an
     explicit -mtiny= value.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! option_mtiny_specified)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
354
355 /* Pattern Support - constraints, predicates, expanders. */
356
357 /* MEP has very few instructions that can refer to the span of
358 addresses used by symbols, so it's common to check for them. */
359
360 static bool
361 symbol_p (rtx x)
362 {
363 int c = GET_CODE (x);
364
365 return (c == CONST_INT
366 || c == CONST
367 || c == SYMBOL_REF);
368 }
369
370 static bool
371 symbolref_p (rtx x)
372 {
373 int c;
374
375 if (GET_CODE (x) != MEM)
376 return false;
377
378 c = GET_CODE (XEXP (x, 0));
379 return (c == CONST_INT
380 || c == CONST
381 || c == SYMBOL_REF);
382 }
383
384 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
385
386 #define GEN_REG(R, STRICT) \
387 (GR_REGNO_P (R) \
388 || (!STRICT \
389 && ((R) == ARG_POINTER_REGNUM \
390 || (R) >= FIRST_PSEUDO_REGISTER)))
391
392 static char pattern[12], *patternp;
393 static GTY(()) rtx patternr[12];
394 #define RTX_IS(x) (strcmp (pattern, x) == 0)
395
/* Append a one-character code for X (and recursively for its
   operands) to the `pattern' buffer, recording each visited rtx in
   patternr[].  Used via encode_pattern () so callers can match
   operand shapes with RTX_IS ().  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Stop one byte short of the end (room for the NUL); mark the
     truncation with '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address operand is encoded just like the
	 operand of a CONST.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Encode the unspec number as a digit after 'u', then all of
	 its operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
462
463 static void
464 encode_pattern (rtx x)
465 {
466 patternp = pattern;
467 encode_pattern_1 (x);
468 *patternp = 0;
469 }
470
/* Return the section-tag character encoded in the symbol underlying
   X (see the "Symbol encodings" table at the top of this file), or 0
   if X does not resolve to an encoded SYMBOL_REF.  The io tags are
   mapped to their plain near/far equivalents: 'i' (io, near) -> 'n'
   and 'I' (io, far) -> 'f'.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers (MEM, CONST, UNSPEC, reg+const PLUS) down to the
     underlying symbol.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
512
/* Return the smallest register class containing hard register REGNO
   (the REGNO_REG_CLASS computation for MeP).  */
int
mep_regno_reg_class (int regno)
{
  /* Singleton classes for the special-purpose core registers.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are reachable with $tp-relative
     addressing.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* Return USERn_REGS only if no proper subclass also
		 contains REGNO.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers remain; they are not allocatable.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
562
563 #if 0
564 int
565 mep_reg_class_from_constraint (int c, const char *str)
566 {
567 switch (c)
568 {
569 case 'a':
570 return SP_REGS;
571 case 'b':
572 return TP_REGS;
573 case 'c':
574 return CONTROL_REGS;
575 case 'd':
576 return HILO_REGS;
577 case 'e':
578 {
579 switch (str[1])
580 {
581 case 'm':
582 return LOADABLE_CR_REGS;
583 case 'x':
584 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
585 case 'r':
586 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
587 default:
588 return NO_REGS;
589 }
590 }
591 case 'h':
592 return HI_REGS;
593 case 'j':
594 return RPC_REGS;
595 case 'l':
596 return LO_REGS;
597 case 't':
598 return TPREL_REGS;
599 case 'v':
600 return GP_REGS;
601 case 'x':
602 return CR_REGS;
603 case 'y':
604 return CCR_REGS;
605 case 'z':
606 return R0_REGS;
607
608 case 'A':
609 case 'B':
610 case 'C':
611 case 'D':
612 {
613 enum reg_class which = c - 'A' + USER0_REGS;
614 return (reg_class_size[which] > 0 ? which : NO_REGS);
615 }
616
617 default:
618 return NO_REGS;
619 }
620 }
621
622 bool
623 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
624 {
625 switch (c)
626 {
627 case 'I': return value >= -32768 && value < 32768;
628 case 'J': return value >= 0 && value < 65536;
629 case 'K': return value >= 0 && value < 0x01000000;
630 case 'L': return value >= -32 && value < 32;
631 case 'M': return value >= 0 && value < 32;
632 case 'N': return value >= 0 && value < 16;
633 case 'O':
634 if (value & 0xffff)
635 return false;
636 return value >= -2147483647-1 && value <= 2147483647;
637 default:
638 gcc_unreachable ();
639 }
640 }
641
642 bool
643 mep_extra_constraint (rtx value, int c)
644 {
645 encode_pattern (value);
646
647 switch (c)
648 {
649 case 'R':
650 /* For near symbols, like what call uses. */
651 if (GET_CODE (value) == REG)
652 return 0;
653 return mep_call_address_operand (value, GET_MODE (value));
654
655 case 'S':
656 /* For signed 8-bit immediates. */
657 return (GET_CODE (value) == CONST_INT
658 && INTVAL (value) >= -128
659 && INTVAL (value) <= 127);
660
661 case 'T':
662 /* For tp/gp relative symbol values. */
663 return (RTX_IS ("u3s") || RTX_IS ("u2s")
664 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
665
666 case 'U':
667 /* Non-absolute memories. */
668 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
669
670 case 'W':
671 /* %hi(sym) */
672 return RTX_IS ("Hs");
673
674 case 'Y':
675 /* Register indirect. */
676 return RTX_IS ("mr");
677
678 case 'Z':
679 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
680 }
681
682 return false;
683 }
684 #endif
685
686 #undef PASS
687 #undef FAIL
688
689 static bool
690 const_in_range (rtx x, int minv, int maxv)
691 {
692 return (GET_CODE (x) == CONST_INT
693 && INTVAL (x) >= minv
694 && INTVAL (x) <= maxv);
695 }
696
697 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
698 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
699 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
700 at the end of the insn stream. */
701
702 rtx
703 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
704 {
705 if (rtx_equal_p (dest, src1))
706 return src2;
707 else if (rtx_equal_p (dest, src2))
708 return src1;
709 else
710 {
711 if (insn == 0)
712 emit_insn (gen_movsi (copy_rtx (dest), src1));
713 else
714 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
715 return src2;
716 }
717 }
718
719 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
720 Change the last element of PATTERN from (clobber (scratch:SI))
721 to (clobber (reg:SI HI_REGNO)). */
722
static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  /* The clobber is the last element of the PARALLEL; point it at the
     real $hi register instead of a scratch.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force the insn to be re-recognized with its new pattern.  */
  INSN_CODE (insn) = -1;
}
733
734 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
735 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
736 store the result in DEST if nonnull. */
737
static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* mulr form: result goes to both $lo and DEST; mep_mulr_source
       arranges for one operand to be in DEST already.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* mul form: result only needed in $lo.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
751
752 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
753 SRC3 into $lo, then use either madd or maddr. The move into $lo will
754 be deleted by a peephole2 if SRC3 is already in $lo. */
755
static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  /* Seed $lo with the addend; a later peephole2 deletes this move
     when SRC3 is already in $lo (see comment above).  */
  lo = gen_rtx_REG (SImode, LO_REGNO);
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* maddr form: result goes to both $lo and DEST.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* madd form: result only needed in $lo.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
771
772 /* Return true if $lo has the same value as integer register GPR when
773 instruction INSN is reached. If necessary, rewrite the instruction
774 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
775 rtx for (reg:SI LO_REGNO).
776
777 This function is intended to be used by the peephole2 pass. Since
778 that pass goes from the end of a basic block to the beginning, and
779 propagates liveness information on the way, there is no need to
780 update register notes here.
781
782 If GPR_DEAD_P is true on entry, and this function returns true,
783 then the caller will replace _every_ use of GPR in and after INSN
784 with LO. This means that if the instruction that sets $lo is a
785 mulr- or maddr-type instruction, we can rewrite it to use mul or
786 madd instead. In combination with the copy progagation pass,
787 this allows us to replace sequences like:
788
789 mov GPR,R1
790 mulr GPR,R2
791
792 with:
793
794 mul R1,R2
795
796 if GPR is no longer used. */
797
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the start of the basic block, looking
     for the instruction that last set $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR holds the product; rewrite to also (or only, if
		   GPR is dead) store it in $lo.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Likewise for multiply-add.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store to $lo; $lo matches GPR iff the
	       insn's GPR destination is our GPR.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any intervening write to $lo or GPR, or a volatile
	       insn, invalidates the reuse.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* GPR is still used before INSN, so the caller may not
	       rewrite its setter to drop the GPR result.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
851
852 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
853
854 bool
855 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
856 {
857 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
858 extract_insn (insn);
859 return result;
860 }
861
862 /* Return true if SET can be turned into a post-modify load or store
863 that adds OFFSET to GPR. In other words, return true if SET can be
864 changed into:
865
866 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
867
868 It's OK to change SET to an equivalent operation in order to
869 make it match. */
870
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain (mem GPR)
     with no displacement.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
932
933 /* Return the effect of frame-related instruction INSN. */
934
935 static rtx
936 mep_frame_expr (rtx insn)
937 {
938 rtx note, expr;
939
940 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
941 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
942 RTX_FRAME_RELATED_P (expr) = 1;
943 return expr;
944 }
945
946 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
947 new pattern in INSN1; INSN2 will be deleted by the caller. */
948
static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* If INSN2 is frame-related, the merged insn must carry a note
     describing its frame effect (combined with INSN1's if that is
     frame-related too).  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined insn.  */
  INSN_CODE (insn1) = -1;
}
969
970 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
971 the basic block to see if any previous load or store instruction can
972 be persuaded to do SET_INSN as a side-effect. Return true if so. */
973
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  /* Scan backwards from SET_INSN to the start of the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Found a load/store that can absorb the REG += OFFSET as a
	     post-modify side effect; merge SET_INSN into it.  */
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any other use or set of REG, or a volatile insn, makes
	     moving the addition across INSN unsafe.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
1000
1001 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1002
1003 bool
1004 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1005 {
1006 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1007 extract_insn (insn);
1008 return result;
1009 }
1010
1011 bool
1012 mep_allow_clip (rtx ux, rtx lx, int s)
1013 {
1014 HOST_WIDE_INT u = INTVAL (ux);
1015 HOST_WIDE_INT l = INTVAL (lx);
1016 int i;
1017
1018 if (!TARGET_OPT_CLIP)
1019 return false;
1020
1021 if (s)
1022 {
1023 for (i = 0; i < 30; i ++)
1024 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1025 && (l == - ((HOST_WIDE_INT) 1 << i)))
1026 return true;
1027 }
1028 else
1029 {
1030 if (l != 0)
1031 return false;
1032
1033 for (i = 0; i < 30; i ++)
1034 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1035 return true;
1036 }
1037 return false;
1038 }
1039
1040 bool
1041 mep_bit_position_p (rtx x, bool looking_for)
1042 {
1043 if (GET_CODE (x) != CONST_INT)
1044 return false;
1045 switch ((int) INTVAL(x) & 0xff)
1046 {
1047 case 0x01: case 0x02: case 0x04: case 0x08:
1048 case 0x10: case 0x20: case 0x40: case 0x80:
1049 return looking_for;
1050 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1051 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1052 return !looking_for;
1053 }
1054 return false;
1055 }
1056
/* Return true if moving SRC into DEST cannot be done with a single
   insn: far-section symbols, out-of-range symbol+offset sums, and
   symbolic moves into the upper core registers all need splitting.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip CONST/MEM wrappers until we reach something symbolic; any
     other code means SRC is not symbolic at all.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  /* Split far symbols, sym+offset with the offset outside
     [-65536, 0xffffff], and symbolic moves into registers above $7.  */
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1085
/* Return true if a move of OPERANDS[1] into OPERANDS[0] must be split
   into multiple insns.  SYMBOLIC selects the symbolic-source check;
   otherwise only CONST_INT sources can require a split.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Immediates satisfying constraints I, J or O fit a single insn
     (presumably the signed-16-bit, unsigned-16-bit and high-part
     forms — confirm against constraints.md).  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* Constraint-K immediates are OK before register allocation, or
     afterwards when the destination is one of the low registers.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1111
1112 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1113 it to one specific value. So the insn chosen depends on whether
1114 the source and destination modes match. */
1115
1116 bool
1117 mep_vliw_mode_match (rtx tgt)
1118 {
1119 bool src_vliw = mep_vliw_function_p (cfun->decl);
1120 bool tgt_vliw = INTVAL (tgt);
1121
1122 return src_vliw == tgt_vliw;
1123 }
1124
1125 /* Like the above, but also test for near/far mismatches. */
1126
1127 bool
1128 mep_vliw_jmp_match (rtx tgt)
1129 {
1130 bool src_vliw = mep_vliw_function_p (cfun->decl);
1131 bool tgt_vliw = INTVAL (tgt);
1132
1133 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1134 return false;
1135
1136 return src_vliw == tgt_vliw;
1137 }
1138
1139 bool
1140 mep_multi_slot (rtx x)
1141 {
1142 return get_attr_slot (x) == SLOT_MULTI;
1143 }
1144
1145
1146 bool
1147 mep_legitimate_constant_p (rtx x)
1148 {
1149 /* We can't convert symbol values to gp- or tp-rel values after
1150 reload, as reload might have used $gp or $tp for other
1151 purposes. */
1152 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1153 {
1154 char e = mep_section_tag (x);
1155 return (e != 't' && e != 'b');
1156 }
1157 return 1;
1158 }
1159
1160 /* Be careful not to use macros that need to be compiled one way for
1161 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1162
/* Return true if X is a legitimate address for a MODE-sized access.
   STRICT is nonzero when only hard registers are acceptable as base
   registers (post-reload checking).  */
bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- allowed only for accesses of at most 4 bytes,
     since wider accesses would be split and lo_sums are not
     offsettable for us.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Register plus 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Register plus a tp-/gp-relative UNSPEC (optionally with an added
     constant), as produced by mep_expand_mov.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* VOIDmode is used for call addresses; bare symbols are fine there.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized accesses to absolute constants, excluding tp-/gp-rel
     symbols; literal addresses must be non-negative, word-aligned and
     below 0x100000.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && LEGITIMATE_CONSTANT_P (x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1270
/* Fix up addresses that reload cannot handle directly.  Returns
   nonzero when a reload has been pushed for *X, zero to let reload
   proceed normally.  */
int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG. */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-/gp-relative symbols ('t'/'b') are left for normal
	 handling; everything else gets the symbol reloaded.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1312
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN whose memory operand is operand OPN of its single SET.  */
int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The non-memory operand.  */
  rtx addr = XEXP (mem, 0);

  /* Plain register-indirect has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* $sp-relative word accesses with a small scaled offset
	     (imm7a4) have a short form.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short $tp-relative forms only reach r0-r7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A tp-relative UNSPEC offset fits the short form.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* So do small non-negative, naturally-aligned literal
	     offsets.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1359
1360 int
1361 mep_cop_address_length (rtx insn, int opn)
1362 {
1363 rtx set = single_set (insn);
1364 rtx mem = XEXP (set, opn);
1365 rtx addr = XEXP (mem, 0);
1366
1367 if (GET_CODE (mem) != MEM)
1368 return 2;
1369 if (register_operand (addr, Pmode))
1370 return 2;
1371 if (GET_CODE (addr) == POST_INC)
1372 return 2;
1373
1374 return 4;
1375 }
1376
#define DEBUG_EXPAND_MOV 0
/* Expand a MODE-sized move between OPERANDS[0] and OPERANDS[1].
   Returns true when the move has been fully emitted here, false when
   the caller should fall through to the ordinary move pattern.
   Handles: forcing mem-to-mem moves through a register before reload,
   rewriting tp-/gp-relative symbols into $tp/$gp plus UNSPEC offsets,
   staging control-register moves through a general register, and
   splitting far-symbol accesses into top/bottom symbol pairs.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;	/* Nonzero when $gp/$tp can no longer be used.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Mem-to-mem moves need an intermediate register; create one while
     new pseudos are still allowed.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Double-word moves are handled elsewhere.  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* If the initial value of $gp/$tp is no longer live in the
	 expected hard register, we cannot base addresses on it.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Rewrite tp-/gp-relative symbolic sources into
	 $tp/$gp + (const (unspec [sym])) form.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
                  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
                  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus sym const_int)) -- wrap only the
		     symbol in the unspec and re-add the offset.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already wrapped; nothing to do here.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite tp-/gp-relative MEM addresses in place.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control-register moves with a non-register on the other side must
     be staged through a general register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols, or of non-word size, need the address in a
     register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before post_reload is set, 't'/'b' sources were handled above, so
     clear the tag to skip the symbol cases below.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load a far (or, post-reload, tp-/gp-rel) symbol with a top/bottom
     pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Same for a MEM whose address is such a symbol: build the address
     with a top/bottom pair, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1569
1570 /* Cases where the pattern can't be made to use at all. */
1571
/* Return true if a move between OPERANDS can be handled by the plain
   move patterns at all.  */
bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these. */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1636
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a double-word move: for each of OPERANDS[0] and OPERANDS[1],
   store its high part in OPERANDS[2]/[3] and its low part in
   OPERANDS[4]/[5], swapping the pairs when needed to avoid
   early-clobber of overlapping register pairs.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* One 64-bit copro register: low word is the register
		   itself, high word is bits 32..63 via ZERO_EXTRACT.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; which half is "high" depends on
		   endianness.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1718
1719 /* Emit a setcc instruction in its entirity. */
1720
1721 static bool
1722 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1723 {
1724 rtx tmp;
1725
1726 switch (code)
1727 {
1728 case GT:
1729 case GTU:
1730 tmp = op1, op1 = op2, op2 = tmp;
1731 code = swap_condition (code);
1732 /* FALLTHRU */
1733
1734 case LT:
1735 case LTU:
1736 op1 = force_reg (SImode, op1);
1737 emit_insn (gen_rtx_SET (VOIDmode, dest,
1738 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1739 return true;
1740
1741 case EQ:
1742 if (op2 != const0_rtx)
1743 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1744 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1745 return true;
1746
1747 case NE:
1748 /* Branchful sequence:
1749 mov dest, 0 16-bit
1750 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1751 mov dest, 1 16-bit
1752
1753 Branchless sequence:
1754 add3 tmp, op1, -op2 32-bit (or mov + sub)
1755 sltu3 tmp, tmp, 1 16-bit
1756 xor3 dest, tmp, 1 32-bit
1757 */
1758 if (optimize_size && op2 != const0_rtx)
1759 return false;
1760
1761 if (op2 != const0_rtx)
1762 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1763
1764 op2 = gen_reg_rtx (SImode);
1765 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1766
1767 emit_insn (gen_rtx_SET (VOIDmode, dest,
1768 gen_rtx_XOR (SImode, op2, const1_rtx)));
1769 return true;
1770
1771 case LE:
1772 if (GET_CODE (op2) != CONST_INT
1773 || INTVAL (op2) == 0x7ffffff)
1774 return false;
1775 op2 = GEN_INT (INTVAL (op2) + 1);
1776 return mep_expand_setcc_1 (LT, dest, op1, op2);
1777
1778 case LEU:
1779 if (GET_CODE (op2) != CONST_INT
1780 || INTVAL (op2) == -1)
1781 return false;
1782 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1783 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1784
1785 case GE:
1786 if (GET_CODE (op2) != CONST_INT
1787 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1788 return false;
1789 op2 = GEN_INT (INTVAL (op2) - 1);
1790 return mep_expand_setcc_1 (GT, dest, op1, op2);
1791
1792 case GEU:
1793 if (GET_CODE (op2) != CONST_INT
1794 || op2 == const0_rtx)
1795 return false;
1796 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1797 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1798
1799 default:
1800 gcc_unreachable ();
1801 }
1802 }
1803
1804 bool
1805 mep_expand_setcc (rtx *operands)
1806 {
1807 rtx dest = operands[0];
1808 enum rtx_code code = GET_CODE (operands[1]);
1809 rtx op0 = operands[2];
1810 rtx op1 = operands[3];
1811
1812 return mep_expand_setcc_1 (code, dest, op0, op1);
1813 }
1814
/* Massage the comparison in OPERANDS[0] (with arguments OPERANDS[1]
   and OPERANDS[2]) into a form the branch patterns accept, emitting
   setcc instructions as needed, and return the resulting comparison
   rtx.  Only EQ/NE against a register or small immediate, and
   LT/GE against a 4-bit immediate, survive untransformed.  */
rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Compute the LT into a temporary and branch on its
	 non-zeroness instead.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* GE is the negation of LT: compute LT, branch on zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Shift the bound by one to turn LE/GT into LT/GE, unless that
	 would overflow INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise compute op1 < op0 and branch on its zero/non-zero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* x <u 1 is simply x == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly, falling back to the negation of op1 <u op0.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly, falling back to the negation of op0 <u op1.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1927
1928 const char *
1929 mep_emit_cbranch (rtx *operands, int ne)
1930 {
1931 if (GET_CODE (operands[1]) == REG)
1932 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1933 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1934 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1935 else
1936 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1937 }
1938
/* Expand a call.  RETURNS_VALUE is nonzero when the call produces a
   value, in which case OPERANDS[0] is the result and the call address
   is OPERANDS[1]; otherwise the address is OPERANDS[0].  $tp and $gp
   are passed along so the call patterns can track their uses.  */
void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  /* Default the trailing operand (beyond the argument-size operand)
     to zero if the caller left it empty.  */
  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1963 \f
1964 /* Aliasing Support. */
1965
1966 /* If X is a machine specific address (i.e. a symbol or label being
1967 referenced as a displacement from the GOT implemented using an
1968 UNSPEC), then return the base term. Otherwise return X. */
1969
rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Only $tp- and $gp-based addresses carry a hidden base symbol.  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  /* The offset must be the matching UNSPEC wrapper; its sole operand
     is the original symbol, which is the base term.  */
  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  return XVECEXP (term, 0, 0);
}
2000 \f
2001 /* Reload Support. */
2002
2003 /* Return true if the registers in CLASS cannot represent the change from
2004 modes FROM to TO. */
2005
2006 bool
2007 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2008 enum reg_class regclass)
2009 {
2010 if (from == to)
2011 return false;
2012
2013 /* 64-bit COP regs must remain 64-bit COP regs. */
2014 if (TARGET_64BIT_CR_REGS
2015 && (regclass == CR_REGS
2016 || regclass == LOADABLE_CR_REGS)
2017 && (GET_MODE_SIZE (to) < 8
2018 || GET_MODE_SIZE (from) < 8))
2019 return true;
2020
2021 return false;
2022 }
2023
2024 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2025
2026 static bool
2027 mep_general_reg (rtx x)
2028 {
2029 while (GET_CODE (x) == SUBREG)
2030 x = XEXP (x, 0);
2031 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2032 }
2033
2034 static bool
2035 mep_nongeneral_reg (rtx x)
2036 {
2037 while (GET_CODE (x) == SUBREG)
2038 x = XEXP (x, 0);
2039 return (GET_CODE (x) == REG
2040 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2041 }
2042
2043 static bool
2044 mep_general_copro_reg (rtx x)
2045 {
2046 while (GET_CODE (x) == SUBREG)
2047 x = XEXP (x, 0);
2048 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2049 }
2050
2051 static bool
2052 mep_nonregister (rtx x)
2053 {
2054 while (GET_CODE (x) == SUBREG)
2055 x = XEXP (x, 0);
2056 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2057 }
2058
2059 #define DEBUG_RELOAD 0
2060
2061 /* Return the secondary reload class needed for moving value X to or
2062 from a register in coprocessor register class CLASS. */
2063
static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  /* Memory satisfying constraint U can be loaded/stored by the copro
     load/store insns directly, but only into the loadable subset of
     the copro registers.  */
  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2099
2100 /* Copying X to register in RCLASS. */
2101
enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  /* RV is an int holding reg_class values; it is cast back to the
     enum on return.  */
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor classes have their own rules; any other non-general
     destination needs the value staged through a general register.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
2125
2126 /* Copying register in RCLASS to X. */
2127
enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  /* RV is an int holding reg_class values; it is cast back to the
     enum on return.  */
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Mirrors mep_secondary_input_reload_class: copro classes have
     their own rules, other non-general sources stage through a
     general register.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
2152
2153 /* Implement SECONDARY_MEMORY_NEEDED. */
2154
2155 bool
2156 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2157 enum machine_mode mode ATTRIBUTE_UNUSED)
2158 {
2159 if (!mep_have_core_copro_moves_p)
2160 {
2161 if (reg_classes_intersect_p (rclass1, CR_REGS)
2162 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2163 return true;
2164 if (reg_classes_intersect_p (rclass2, CR_REGS)
2165 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2166 return true;
2167 if (!mep_have_copro_copro_moves_p
2168 && reg_classes_intersect_p (rclass1, CR_REGS)
2169 && reg_classes_intersect_p (rclass2, CR_REGS))
2170 return true;
2171 }
2172 return false;
2173 }
2174
/* Expand a secondary reload move; OPERANDS[2] is the scratch register
   provided by the reload pattern.  */
void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
  /* WHICH is a two-digit decimal code: tens digit classifies the
     destination (0 = general, 1 = control reg, 2 = far symbol), ones
     digit the source.  Note that the leading zeros below make
     case 00/01/02 octal literals, but their values are below 8 so
     they equal the intended decimal codes.  */

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here. */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Stage the value through the scratch register.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2225
2226 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2227 can be moved directly into registers 0 to 7, but not into the rest.
2228 If so, and if the required class includes registers 0 to 7, restrict
2229 it to those registers. */
2230
enum reg_class
mep_preferred_reload_class (rtx x, enum reg_class rclass)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Constants in [0x10000, 0xffffff] with a nonzero low 16 bits
	 load directly only into registers 0-7 (TPREL_REGS) --
	 NOTE(review): the exact insn rationale is inferred from the
	 range test; confirm against the movsi patterns.  */
      if (INTVAL (x) >= 0x10000
	  && INTVAL (x) < 0x01000000
	  && (INTVAL (x) & 0xffff) != 0
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Near (non-'f') symbolic constants likewise prefer the low
	 registers.  */
      if (mep_section_tag (x) != 'f'
	  && reg_class_subset_p (TPREL_REGS, rclass))
	rclass = TPREL_REGS;
      break;

    default:
      break;
    }
  return rclass;
}
2257 \f
2258 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2259 moves, 4 for direct double-register moves, and 1000 for anything
2260 that requires a temporary register or temporary stack slot. */
2261
int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Copro<->copro with direct copro move support.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Copro<->copro without direct moves -- this branch repeats the
     class test above and is only reached when
     mep_have_copro_copro_moves_p is false, so the costs are doubled.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Core<->copro moves.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything needing a memory intermediary, or between two
     non-general classes, is prohibitively expensive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2297
2298 \f
2299 /* Functions to save and restore machine-specific function data. */
2300
/* Allocate a fresh, zero-initialized machine_function for the
   current function (installed via init_machine_status).  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2306
/* Return a MEM addressing a stack save slot for the entry value of
   hard register REG, or NULL_RTX when no slot should be used.  */
static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign REG a 4-byte slot below the argument pointer.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
}
2334
2335 rtx
2336 mep_return_addr_rtx (int count)
2337 {
2338 if (count != 0)
2339 return const0_rtx;
2340
2341 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2342 }
2343
2344 static rtx
2345 mep_tp_rtx (void)
2346 {
2347 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2348 }
2349
2350 static rtx
2351 mep_gp_rtx (void)
2352 {
2353 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2354 }
2355
2356 static bool
2357 mep_interrupt_p (void)
2358 {
2359 if (cfun->machine->interrupt_handler == 0)
2360 {
2361 int interrupt_handler
2362 = (lookup_attribute ("interrupt",
2363 DECL_ATTRIBUTES (current_function_decl))
2364 != NULL_TREE);
2365 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2366 }
2367 return cfun->machine->interrupt_handler == 2;
2368 }
2369
2370 static bool
2371 mep_disinterrupt_p (void)
2372 {
2373 if (cfun->machine->disable_interrupts == 0)
2374 {
2375 int disable_interrupts
2376 = (lookup_attribute ("disinterrupt",
2377 DECL_ATTRIBUTES (current_function_decl))
2378 != NULL_TREE);
2379 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2380 }
2381 return cfun->machine->disable_interrupts == 2;
2382 }
2383
2384 \f
2385 /* Frame/Epilog/Prolog Related. */
2386
2387 static bool
2388 mep_reg_set_p (rtx reg, rtx insn)
2389 {
2390 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2391 if (INSN_P (insn))
2392 {
2393 if (FIND_REG_INC_NOTE (insn, reg))
2394 return true;
2395 insn = PATTERN (insn);
2396 }
2397
2398 if (GET_CODE (insn) == SET
2399 && GET_CODE (XEXP (insn, 0)) == REG
2400 && GET_CODE (XEXP (insn, 1)) == REG
2401 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2402 return false;
2403
2404 return set_of (reg, insn) != NULL_RTX;
2405 }
2406
2407
/* Tri-state cache values for cfun->machine->reg_saved[]; computed and
   consumed by mep_call_saves_register.  UNKNOWN (0) means "not yet
   computed".  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2412
2413 static bool
2414 mep_reg_set_in_function (int regno)
2415 {
2416 rtx reg, insn;
2417
2418 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2419 return true;
2420
2421 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2422 return true;
2423
2424 push_topmost_sequence ();
2425 insn = get_insns ();
2426 pop_topmost_sequence ();
2427
2428 if (!insn)
2429 return false;
2430
2431 reg = gen_rtx_REG (SImode, regno);
2432
2433 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2434 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2435 return true;
2436 return false;
2437 }
2438
2439 static bool
2440 mep_asm_without_operands_p (void)
2441 {
2442 if (cfun->machine->asms_without_operands == 0)
2443 {
2444 rtx insn;
2445
2446 push_topmost_sequence ();
2447 insn = get_insns ();
2448 pop_topmost_sequence ();
2449
2450 cfun->machine->asms_without_operands = 1;
2451 while (insn)
2452 {
2453 if (INSN_P (insn)
2454 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2455 {
2456 cfun->machine->asms_without_operands = 2;
2457 break;
2458 }
2459 insn = NEXT_INSN (insn);
2460 }
2461
2462 }
2463 return cfun->machine->asms_without_operands == 2;
2464 }
2465
/* Interrupt functions save/restore every call-preserved register, and
   any call-used register it uses (or all if it calls any function,
   since they may get clobbered there too).  Here we check to see
   which call-used registers need saving.  */

/* True if R is one of the extra coprocessor condition-code registers
   that interrupt handlers must also preserve on IVC2.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2475
/* Return true if an interrupt handler must save register R.  The
   checks below are ordered from most to least specific; the early
   returns are load-bearing.  */
static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The prologue/epilogue use this register (two of them for 64-bit
     coprocessor saves) as scratch, so it always needs a slot.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* An asm with no operands may touch any non-fixed register, plus
     the repeat/link registers and the IVC2 extras.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!current_function_is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler a call-used register that is never live needs
     no save.  */
  if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2509
/* Return true if the current function must save register R in its
   prologue.  Until the frame layout is locked the answer is
   recomputed and cached in cfun->machine->reg_saved[] (see the
   MEP_SAVES_* values); afterwards the cached value is authoritative.  */
static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      if (cfun->machine->reg_save_slot[r])
	/* A slot was already assigned, e.g. via ALLOCATE_INITIAL_VALUE.  */
	rv = MEP_SAVES_YES;
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	/* Profiling code implicitly clobbers $lp; see mep_epilogue_uses.  */
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2535
2536 /* Return true if epilogue uses register REGNO. */
2537
2538 bool
2539 mep_epilogue_uses (int regno)
2540 {
2541 /* Since $lp is a call-saved register, the generic code will normally
2542 mark it used in the epilogue if it needs to be saved and restored.
2543 However, when profiling is enabled, the profiling code will implicitly
2544 clobber $11. This case has to be handled specially both here and in
2545 mep_call_saves_register. */
2546 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2547 return true;
2548 /* Interrupt functions save/restore pretty much everything. */
2549 return (reload_completed && mep_interrupt_saved_reg (regno));
2550 }
2551
2552 static int
2553 mep_reg_size (int regno)
2554 {
2555 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2556 return 8;
2557 return 4;
2558 }
2559
2560 /* Worker function for TARGET_CAN_ELIMINATE. */
2561
2562 bool
2563 mep_can_eliminate (const int from, const int to)
2564 {
2565 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2566 ? ! frame_pointer_needed
2567 : true);
2568 }
2569
/* Return the offset between register FROM and register TO for the
   purpose of register elimination (INITIAL_ELIMINATION_OFFSET).  As a
   side effect this computes and caches the two 8-byte alignment
   fillers (regsave_filler between save area and locals, frame_filler
   at the bottom of the frame).  */
int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the layout is locked, force mep_call_saves_register to
     recompute its cached answers.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the save area up to an 8-byte boundary.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the whole frame to an 8-byte boundary as well.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2614
/* Mark X (an insn) as frame-related and return it; convenience
   wrapper used when emitting prologue instructions.  */
static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2621
/* Since the prologue/epilogue code is generated after optimization,
   we can't rely on gcc to split constants for us.  So, this code
   captures all the ways to add a constant to a register in one logic
   chunk, including optimizing away insns we just don't need.  This
   makes the prolog/epilog code easier to follow.  */

/* Emit DEST = SRC + VALUE (all hard register numbers).  When
   MARK_FRAME is set the emitted insns are flagged as frame-related
   for the unwinder.  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* No-op.  */
  if (src == dest && value == 0)
    return;

  if (value == 0)
    {
      /* Plain register copy.  */
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  if (value >= -32768 && value <= 32767)
    {
      /* Fits in a single add immediate.  */
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      RTX_FRAME_RELATED_P(insn) = 1;
      /* The unwinder can't follow the temp-register dance above, so
	 describe the net effect of the addition directly.  */
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2686
/* Move SRC to DEST.  Mark the move as being potentially dead if
   MAYBE_DEAD_P.  */

/* Note: the REG_MAYBE_DEAD note mechanism is currently disabled (see
   the #if 0 below); the parameter is kept so callers still document
   which moves may be deleted as dead.  Returns the emitted insn.  */
static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2700
2701 /* Used for interrupt functions, which can't assume that $tp and $gp
2702 contain the correct pointers. */
2703
2704 static void
2705 mep_reload_pointer (int regno, const char *symbol)
2706 {
2707 rtx reg, sym;
2708
2709 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2710 return;
2711
2712 reg = gen_rtx_REG (SImode, regno);
2713 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2714 emit_insn (gen_movsi_topsym_s (reg, sym));
2715 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2716 }
2717
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  Returns true if a frame
   is really needed.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* $tp, $gp and $lp only force a real frame when actually
	   written in this function.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte registers grow upward from the start of the area.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte registers are allocated downward from the end so
	       they stay 8-byte aligned.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From here on the frame layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2755
/* Expand the function prologue: allocate the frame, save the
   registers selected by mep_call_saves_register, set up the frame
   pointer if needed, and (for interrupt handlers) reload $gp/$tp.  */
void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* For small frames, do the whole $sp adjustment up front so the
     register saves get short offsets.  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  /* Save each selected register into its assigned slot.  */
  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	enum machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (stack_pointer_rtx, sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control/coprocessor register: move it through two
	       core scratch registers and store the halves separately.  */
	    rtx insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (stack_pointer_rtx, sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Tell the unwinder the net effect: the full register was
	       stored to the slot.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* 32-bit register that can't be stored directly: bounce it
	       through a core scratch register.  */
	    rtx insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Allocate whatever part of the frame wasn't covered up front.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2881
/* Emit a human-readable description of the frame layout as assembler
   comments at the start of the function (full version only when debug
   info is enabled), and pick the right name for the frame pointer
   register.  HWI_LOCAL is the size of local variables.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Call $8 "$fp" in the output only when it really is the frame
     pointer for this function.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, emit only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  /* Walk the slots in memory order, reporting each saved register and
     any alignment gaps between them.  */
  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Mirror the skip logic in mep_expand_prologue.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2979
2980
/* Nonzero while expanding an eh_return epilogue, which must not
   restore $lp (the unwinder supplies it), and while expanding a
   sibcall epilogue, which must not emit the final return jump.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
2983
/* Expand the function epilogue: restore saved registers, deallocate
   the frame and emit the appropriate return (eh_return jump, "reti"
   for interrupt handlers, or an indirect jump through the restored
   return address).  Behavior is modified by mep_prevent_lp_restore
   and mep_sibcall_epilogue.  */
void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Mirror the skip logic used when the register was saved.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return values; only restore them in
	   interrupt handlers or when not doing an eh_return.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (stack_pointer_rtx,
						      sp_offset-rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Restore through the core scratch register.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* Apply the unwinder-supplied extra stack adjustment for eh_return.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3104
3105 void
3106 mep_expand_eh_return (rtx *operands)
3107 {
3108 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3109 {
3110 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3111 emit_move_insn (ra, operands[0]);
3112 operands[0] = ra;
3113 }
3114
3115 emit_insn (gen_eh_epilogue (operands[0]));
3116 }
3117
/* Expand the eh_epilogue pattern.  The extra stack adjustment arrives
   in $0; setting mep_prevent_lp_restore makes mep_expand_epilogue
   leave $lp alone (the unwinder has already installed the return
   address).  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3126
/* Expand the epilogue emitted just before a sibling call; the flag
   makes mep_expand_epilogue skip the final return jump.  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3134
3135 static bool
3136 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3137 {
3138 if (decl == NULL)
3139 return false;
3140
3141 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3142 return false;
3143
3144 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3145 if (mep_interrupt_p () || mep_disinterrupt_p ())
3146 return false;
3147
3148 return true;
3149 }
3150
3151 rtx
3152 mep_return_stackadj_rtx (void)
3153 {
3154 return gen_rtx_REG (SImode, 10);
3155 }
3156
3157 rtx
3158 mep_return_handler_rtx (void)
3159 {
3160 return gen_rtx_REG (SImode, LP_REGNO);
3161 }
3162
/* Emit the profiling entry sequence: spill $0 and $lp around a call
   to __mep_mcount.  Always right at the beginning of the function.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler",
    "\tadd\t$sp, -8",
    "\tsw\t$0, ($sp)",
    "\tldc\t$0, $lp",
    "\tsw\t$0, 4($sp)",
    "\tbsr\t__mep_mcount",
    "\tlw\t$0, 4($sp)",
    "\tstc\t$0, $lp",
    "\tlw\t$0, ($sp)",
    "\tadd\t$sp, 8\n"
  };
  size_t i;

  for (i = 0; i < sizeof mcount_seq / sizeof mcount_seq[0]; i++)
    fprintf (file, "%s\n", mcount_seq[i]);
}
3178
3179 const char *
3180 mep_emit_bb_trace_ret (void)
3181 {
3182 fprintf (asm_out_file, "\t# end of block profiling\n");
3183 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3184 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3185 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3186 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3187 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3188 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3189 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3190 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3191 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3192 return "";
3193 }
3194
3195 #undef SAVE
3196 #undef RESTORE
3197 \f
3198 /* Operand Printing. */
3199
3200 void
3201 mep_print_operand_address (FILE *stream, rtx address)
3202 {
3203 if (GET_CODE (address) == MEM)
3204 address = XEXP (address, 0);
3205 else
3206 /* cf: gcc.dg/asm-4.c. */
3207 gcc_assert (GET_CODE (address) == REG);
3208
3209 mep_print_operand (stream, address, 0);
3210 }
3211
/* Operand-printing conversion table used by mep_print_operand.
   `code' is the operand modifier letter (0 for none), `pattern' is
   the encoded shape of the operand as produced by encode_pattern,
   and `format' is the template to emit: a digit N prints patternr[N],
   a backslash escapes the next character, and any other character is
   printed literally.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3257
3258 static int
3259 unique_bit_in (HOST_WIDE_INT i)
3260 {
3261 switch (i & 0xff)
3262 {
3263 case 0x01: case 0xfe: return 0;
3264 case 0x02: case 0xfd: return 1;
3265 case 0x04: case 0xfb: return 2;
3266 case 0x08: case 0xf7: return 3;
3267 case 0x10: case 0x7f: return 4;
3268 case 0x20: case 0xbf: return 5;
3269 case 0x40: case 0xdf: return 6;
3270 case 0x80: case 0xef: return 7;
3271 default:
3272 gcc_unreachable ();
3273 }
3274 }
3275
3276 static int
3277 bit_size_for_clip (HOST_WIDE_INT i)
3278 {
3279 int rv;
3280
3281 for (rv = 0; rv < 31; rv ++)
3282 if (((HOST_WIDE_INT) 1 << rv) > i)
3283 return rv + 1;
3284 gcc_unreachable ();
3285 }
3286
3287 /* Print an operand to a assembler instruction. */
3288
/* Print operand X to FILE under modifier CODE (0 for none).  Codes
   '<', 'L' and 'M' are handled specially for coprocessor moves; every
   other operand is matched against the conversions[] table via its
   encoded pattern, and the table's format template is emitted (digits
   in the template index patternr[]).  */
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-operation suffix implied by the rtx code.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven path: encode X into the global `pattern'/`patternr'
     and find the first matching conversion.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped character: emit the next byte literally.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* Digit N: print sub-rtx patternr[N].  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Large round constants read better in hex.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      /* Annotate large standalone constants with their
			 low 16 bits in hex as an assembler comment.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal character; suppress a '+' that would precede a
		 negative constant (the '-' sign suffices).  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3443
/* Implement FINAL_PRESCAN_INSN.  Emit a '+' before INSN when it is
   flagged as occupying a non-core VLIW slot, so the assembler bundles
   it with the preceding insn.  OPERANDS/NOPERANDS are unused.
   NOTE(review): BImode on the insn appears to be this port's "bundled"
   marker — confirm against the scheduler code elsewhere in this file.  */
void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3456
3457 /* Function args in registers. */
3458
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Record how many of the
   four argument registers ($1..$4) remain unnamed and must be spilled
   for va_arg, and report the pretend-args size to the caller via
   *PRETEND_SIZE.  MODE/TYPE/SECOND_TIME are unused.  */
static void
mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  /* cum->nregs counts named args already assigned; the +1 skips the
     register holding the last named argument.  */
  int nsave = 4 - (cum->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  /* NOTE(review): when nsave <= 0 this stores a zero or negative
     pretend size without clamping — presumably callers tolerate that;
     verify against the generic varargs machinery.  */
  *pretend_size = nsave * 4;
}
3471
3472 static int
3473 bytesize (const_tree type, enum machine_mode mode)
3474 {
3475 if (mode == BLKmode)
3476 return int_size_in_bytes (type);
3477 return GET_MODE_SIZE (mode);
3478 }
3479
/* Implement TARGET_EXPAND_BUILTIN_SAVEREGS.  Allocate a stack buffer
   and spill the unnamed argument registers into it, returning the
   address of the buffer.  For IVC2 the buffer also holds the unnamed
   coprocessor registers ($c49..), stored 8 bytes each after the
   core-register area.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Core regs rounded up to an 8-byte multiple, plus 8 bytes for
	 each coprocessor register.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Spill the last NS core argument registers ($1..$4).  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* Coprocessor argument registers start at hard reg 49.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3517
/* True iff tree T is a vector type (IVC2 passes vectors in coprocessor
   registers, so several hooks below special-case this).  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3519
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  The MeP va_list is a
   four-pointer record:
     __va_next_gp       next unread core argument register slot
     __va_next_gp_limit end of the saved core register area
     __va_next_cop      next unread coprocessor register slot (IVC2)
     __va_next_stack    next stack-passed argument
   mep_expand_va_start and mep_gimplify_va_arg_expr rely on this exact
   field order.  */
static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3553
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  Initialize the four
   fields of VALIST (see mep_build_builtin_va_list): next_gp points at
   the register-save buffer, next_gp_limit just past the saved core
   registers, next_cop past the (8-byte aligned) core area, and
   next_stack at NEXTARG.  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Field decls, in the order mep_build_builtin_va_list chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (4 * ns));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* The core area is rounded to 8 bytes, matching the IVC2 layout in
     mep_expand_builtin_saveregs.  */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (8 * ((ns+1)/2)));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3605
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple into *PRE_P
   that fetches the next variadic argument of TYPE from VALIST (the
   four-field record built by mep_build_builtin_va_list) and return an
   expression for its value.  Values larger than 4 bytes (8 for IVC2
   vectors) or of unknown size are passed by reference, so for those we
   load a pointer and add an extra dereference.  POST_P is unused.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  /* IVC2 vector arguments live in the coprocessor save area.  */
  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* What was actually passed is a 4-byte pointer to the value.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are rounded up to whole words.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Registers exhausted?  Jump to the stack-argument path.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both register cursors advance together so they stay in step.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference values, one dereference yields the pointer and a
     second (below) yields the value.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3716
3717 void
3718 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3719 rtx libname ATTRIBUTE_UNUSED,
3720 tree fndecl ATTRIBUTE_UNUSED)
3721 {
3722 pcum->nregs = 0;
3723
3724 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3725 pcum->vliw = 1;
3726 else
3727 pcum->vliw = 0;
3728 }
3729
3730 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3731 larger than 4 bytes are passed indirectly. Return value in 0,
3732 unless bigger than 4 bytes, then the caller passes a pointer as the
3733 first arg. For varargs, we copy $1..$4 to the stack. */
3734
/* Implement TARGET_FUNCTION_ARG.  Return the register in which to pass
   the next argument, or NULL_RTX for a stack argument.  CUM counts
   registers already used; TYPE and NAMED are unused except for the
   IVC2 vector check.  */
static rtx
mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  /* VOIDmode is a signal for the backend to pass data to the call
     expander via the second operand to the call pattern.  We use
     this to determine whether to use "jsr" or "jsrv".  */
  if (mode == VOIDmode)
    return GEN_INT (cum->vliw);

  /* If we haven't run out of argument registers, return the next.  */
  if (cum->nregs < 4)
    {
      /* IVC2 vectors go in coprocessor registers ($c49+); everything
	 else goes in core registers $1..$4.  */
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
	return gen_rtx_REG (mode, cum->nregs + 49);
      else
	return gen_rtx_REG (mode, cum->nregs + 1);
    }

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
3758
3759 static bool
3760 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3761 enum machine_mode mode,
3762 const_tree type,
3763 bool named ATTRIBUTE_UNUSED)
3764 {
3765 int size = bytesize (type, mode);
3766
3767 /* This is non-obvious, but yes, large values passed after we've run
3768 out of registers are *still* passed by reference - we put the
3769 address of the parameter on the stack, as well as putting the
3770 parameter itself elsewhere on the stack. */
3771
3772 if (size <= 0 || size > 8)
3773 return true;
3774 if (size <= 4)
3775 return false;
3776 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3777 return false;
3778 return true;
3779 }
3780
3781 static void
3782 mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
3783 enum machine_mode mode ATTRIBUTE_UNUSED,
3784 const_tree type ATTRIBUTE_UNUSED,
3785 bool named ATTRIBUTE_UNUSED)
3786 {
3787 pcum->nregs += 1;
3788 }
3789
3790 bool
3791 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3792 {
3793 int size = bytesize (type, BLKmode);
3794 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3795 return size > 0 && size <= 8 ? 0 : 1;
3796 return size > 0 && size <= 4 ? 0 : 1;
3797 }
3798
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always answer true.
   The original body had an unreachable "return false;" after the
   "return true;" — dead code, now removed.  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3805
3806 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3807
/* Implement FUNCTION_VALUE.  All values are returned in $0, except
   IVC2 vectors, which come back in coprocessor register 48.
   FUNC is unused.  */
rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3815
3816 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3817
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return vectors, so only the $0 case applies.  */
rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3823
3824 /* Handle pipeline hazards. */
3825
/* Opcode classes involved in pipeline hazards; opnames[] is indexed by
   op_num and used only for the diagnostic comment emitted with the
   workaround nop.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously emitted opcode; persists across calls to
   mep_asm_output_opcode, which updates it on every insn.  */
static int prev_opcode = 0;
3830
/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */
3835
3836 void
3837 mep_asm_output_opcode (FILE *file, const char *ptr)
3838 {
3839 int this_opcode = op_none;
3840 const char *hazard = 0;
3841
3842 switch (*ptr)
3843 {
3844 case 'f':
3845 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3846 this_opcode = op_fsft;
3847 break;
3848 case 'r':
3849 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3850 this_opcode = op_ret;
3851 break;
3852 case 's':
3853 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3854 this_opcode = op_stc;
3855 break;
3856 }
3857
3858 if (prev_opcode == op_stc && this_opcode == op_fsft)
3859 hazard = "nop";
3860 if (prev_opcode == op_stc && this_opcode == op_ret)
3861 hazard = "nop";
3862
3863 if (hazard)
3864 fprintf(file, "%s\t# %s-%s hazard\n\t",
3865 hazard, opnames[prev_opcode], opnames[this_opcode]);
3866
3867 prev_opcode = this_opcode;
3868 }
3869
3870 /* Handle attributes. */
3871
/* Attribute handler for "based" and "tiny".  *NODE is the decl or type
   being annotated, NAME the attribute identifier, ARGS its arguments.
   Sets *NO_ADD when the attribute must not be attached.  Always
   returns NULL_TREE (no replacement attribute).  */
static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3900
3901 static int
3902 mep_multiple_address_regions (tree list, bool check_section_attr)
3903 {
3904 tree a;
3905 int count_sections = 0;
3906 int section_attr_count = 0;
3907
3908 for (a = list; a; a = TREE_CHAIN (a))
3909 {
3910 if (is_attribute_p ("based", TREE_PURPOSE (a))
3911 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3912 || is_attribute_p ("near", TREE_PURPOSE (a))
3913 || is_attribute_p ("far", TREE_PURPOSE (a))
3914 || is_attribute_p ("io", TREE_PURPOSE (a)))
3915 count_sections ++;
3916 if (check_section_attr)
3917 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3918 }
3919
3920 if (check_section_attr)
3921 return section_attr_count;
3922 else
3923 return count_sections;
3924 }
3925
/* Fetch the relevant attribute list for DECL: the type's own list for
   a type, else the decl's list, else the list of the decl's type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
		: DECL_ATTRIBUTES (decl) \
		  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3931
/* Attribute handler for "near" and "far".  Like mep_validate_based_tiny
   but also permitted on functions, and additionally rejects a second
   address-region attribute on the same declaration.  Sets *NO_ADD to
   suppress attachment; always returns NULL_TREE.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* Duplicate region attribute: drop the earlier ones entirely.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3968
3969 static tree
3970 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3971 int flags ATTRIBUTE_UNUSED, bool *no_add)
3972 {
3973 if (TREE_CODE (*node) != FUNCTION_DECL
3974 && TREE_CODE (*node) != METHOD_TYPE)
3975 {
3976 warning (0, "%qE attribute only applies to functions", name);
3977 *no_add = true;
3978 }
3979 return NULL_TREE;
3980 }
3981
/* Attribute handler for "interrupt".  Only function decls qualify;
   an interrupt handler must be non-inline, return void, and take no
   arguments.  Sets *NO_ADD on a misplaced attribute; always returns
   NULL_TREE.  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Inlining would copy the special prologue/epilogue into callers,
     so forbid it outright.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
4011
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional single argument must be an integer constant (the I/O or
   control-bus address).  Accepted __io variables are made volatile
   unless -mio-no-volatile is in effect.  Always returns NULL_TREE.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so a parenthesized constant
	 still looks like an INTEGER_CST.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
4039
/* Attribute handler for "vliw".  Valid on function types/decls,
   method types, fields, and typedefs; anything else gets a warning
   plus, for the common pointer/array mistakes, a one-time hint on the
   correct syntax.  Errors out entirely when the configuration has no
   VLIW support.  Always returns NULL_TREE.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      /* The notes are emitted at most once per compilation.  */
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location, "to describe a pointer to a VLIW function, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location, "to describe an array of VLIW function pointers, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4088
/* Table of machine attributes, terminated by the NULL entry.  Handlers
   above do the per-attribute validation.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",   0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",    0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",    0, 0, false, false, false, mep_validate_near_far, false },
  { "far",     0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",      0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",      0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",    0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,      0, 0, false, false, false, NULL, false }
};
4105
4106 static bool
4107 mep_function_attribute_inlinable_p (const_tree callee)
4108 {
4109 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4110 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4111 return (lookup_attribute ("disinterrupt", attrs) == 0
4112 && lookup_attribute ("interrupt", attrs) == 0);
4113 }
4114
4115 static bool
4116 mep_can_inline_p (tree caller, tree callee)
4117 {
4118 if (TREE_CODE (callee) == ADDR_EXPR)
4119 callee = TREE_OPERAND (callee, 0);
4120
4121 if (!mep_vliw_function_p (caller)
4122 && mep_vliw_function_p (callee))
4123 {
4124 return false;
4125 }
4126 return true;
4127 }
4128
/* Flag bits recorded per function name for #pragma call and
   #pragma disinterrupt.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* One record per function named in a #pragma; `flag' holds the FUNC_*
   bits seen, `used' the bits actually consumed by lookups.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table mapping function names to their #pragma call /
   #pragma disinterrupt flags; created lazily by mep_note_pragma_flag.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4142
4143 static int
4144 pragma_entry_eq (const void *p1, const void *p2)
4145 {
4146 const pragma_entry *old = (const pragma_entry *) p1;
4147 const char *new_name = (const char *) p2;
4148
4149 return strcmp (old->funcname, new_name) == 0;
4150 }
4151
4152 static hashval_t
4153 pragma_entry_hash (const void *p)
4154 {
4155 const pragma_entry *old = (const pragma_entry *) p;
4156 return htab_hash_string (old->funcname);
4157 }
4158
/* Record that FUNCNAME was named in a #pragma of kind FLAG (FUNC_CALL
   or FUNC_DISINTERRUPT), creating the hash table and the entry on
   first use.  FUNCNAME is copied into GC memory.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4181
/* Return true if FUNCNAME was tagged with a #pragma of kind FLAG, and
   mark the flag as used (so unused pragmas can be warned about at end
   of file).  */
static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  /* Strip the "@X." section-encoding prefix added by
     mep_encode_section_info, so encoded and plain names match.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4203
/* Public wrappers around the flag note/lookup helpers, one pair per
   pragma kind.  */

/* True if FUNCNAME was named in a #pragma call.  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record a #pragma call for FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* True if FUNCNAME was named in a #pragma disinterrupt.  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record a #pragma disinterrupt for FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4227
/* htab_traverse callback: warn about a "#pragma disinterrupt" whose
   function was never looked up.  Always returns 1 so the traversal
   continues.  DATA is unused.  */
static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4238
/* End-of-file hook: emit warnings for any unused
   "#pragma disinterrupt" entries.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
4245
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them. */
4250
/* Bridge for pragma handlers: save the current register info.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Bridge for pragma handlers: recompute derived register info.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Bridge for pragma handlers: reinitialize register info from
   scratch.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4268
4269
4270
/* Map the attribute chain LIST of DECL to a one-character section
   encoding: 'b' based, 't' tiny, 'n' near, 'f' far, 'i' io with an
   in-range constant address, 'I' io otherwise, 'c' cb, or 0 for none.
   As a side effect, warns about and truncates a list carrying more
   than one address-region attribute.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first attribute on the chain.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, unattributed functions without an explicit section
     default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4316
4317 static int
4318 mep_comp_type_attributes (const_tree t1, const_tree t2)
4319 {
4320 int vliw1, vliw2;
4321
4322 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4323 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4324
4325 if (vliw1 != vliw2)
4326 return 0;
4327
4328 return 1;
4329 }
4330
/* Implement TARGET_INSERT_ATTRIBUTES.  Add implicit attributes to
   DECL before *ATTRIBUTES is processed: a "disinterrupt" attribute
   for functions named in a #pragma disinterrupt, and a default
   address-region attribute (based/tiny/far) for global variables
   based on size cutoffs and the -mrand-tpgp / -ml / -mconst options —
   unless the user already gave an explicit region or section
   attribute.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only global/static/external variables get a default region.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      /* Fall back to region attributes on the variable's type.  */
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  /* Scatter small globals pseudo-randomly (by name hash)
	     across the based/tiny/far regions.  */
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section overrides the size-based choice for const data.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4481
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for DECL,
   prepend an "@X." region-encoding prefix (X from
   mep_attrlist_to_encoding) to the symbol name in RTL, and warn when
   the object is too large for its chosen region.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@" + encoding char + "." + name + NUL = strlen + 4.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limits for each addressable region; 0 means unlimited.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4557
/* TARGET_STRIP_NAME_ENCODING hook.  Return SYM with any leading
   assembler '*' markers and MeP "@<tag>." region prefixes removed,
   however many of each are stacked at the front.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
4571
/* TARGET_ASM_SELECT_SECTION hook.  Choose the output section for DECL
   based on the "@<tag>." region prefix that mep_encode_section_info
   put into its assembler name, plus its read-only status.  */

static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can live in a read-only section: it must be
     declared readonly, have no side effects, and have an initializer
     that is a compile-time constant (or error_mark_node).  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Region tag letter from the "@<tag>." prefix, 0 if none.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* NOTE(review): when a unique section is assigned, control falls
	 through to the generic data/readonly selection at the bottom;
	 the per-decl section name set by mep_unique_section is what
	 actually takes effect — confirm against callers.  */
      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions go to the vtext sections; 'f' (far) picks
	     the far variant.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    /* Tiny data: readonly -> .srodata, initialized -> .sdata,
	       otherwise the tiny bss section.  */
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io variables are at fixed addresses; an initialized one
	       is a user error.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  /* No region tag: plain read-only or read-write data.  */
  if (readonly)
    return readonly_data_section;

  return data_section;
}
4662
/* TARGET_ASM_UNIQUE_SECTION hook.  Give DECL its own section whose
   name is a region-specific prefix (linkonce variant when
   DECL_ONE_ONLY) followed by the decl's stripped symbol name.  The
   region is taken from the "@<tag>." prefix in the assembler name,
   falling back to .text/.rodata/.data classification.  */

static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by the SEC codes assigned below; column 0 is the normal
     prefix, column 1 the .gnu.linkonce variant.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the (possibly region-encoded) name from the decl's rtl
     over the plain assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* Base classification: code (core or VLIW) or read-only data.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine by region tag, then strip the "@<tag>." prefix so it does
     not appear in the section name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4734
4735 /* Given a decl, a section name, and whether the decl initializer
4736 has relocs, choose attributes for the section. */
4737
4738 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4739
4740 static unsigned int
4741 mep_section_type_flags (tree decl, const char *name, int reloc)
4742 {
4743 unsigned int flags = default_section_type_flags (decl, name, reloc);
4744
4745 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4746 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4747 flags |= SECTION_MEP_VLIW;
4748
4749 return flags;
4750 }
4751
4752 /* Switch to an arbitrary section NAME with attributes as specified
4753 by FLAGS. ALIGN specifies any known alignment requirements for
4754 the section; 0 if the default should be used.
4755
4756 Differs from the standard ELF version only in support of VLIW mode. */
4757
4758 static void
4759 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4760 {
4761 char flagchars[8], *f = flagchars;
4762 const char *type;
4763
4764 if (!(flags & SECTION_DEBUG))
4765 *f++ = 'a';
4766 if (flags & SECTION_WRITE)
4767 *f++ = 'w';
4768 if (flags & SECTION_CODE)
4769 *f++ = 'x';
4770 if (flags & SECTION_SMALL)
4771 *f++ = 's';
4772 if (flags & SECTION_MEP_VLIW)
4773 *f++ = 'v';
4774 *f = '\0';
4775
4776 if (flags & SECTION_BSS)
4777 type = "nobits";
4778 else
4779 type = "progbits";
4780
4781 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4782 name, flagchars, type);
4783
4784 if (flags & SECTION_CODE)
4785 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4786 asm_out_file);
4787 }
4788
/* Emit assembly for a common (uninitialized) variable NAME of SIZE
   bytes, aligned to ALIGN bits, with external linkage iff GLOBAL.
   io/cb variables with a known address become absolute symbols;
   based/tiny/far variables become zero-filled objects in their own
   bss-style sections; anything else falls back to .comm/.local.  */

void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io ('i'/'I') or cb ('c') variable: if the attribute supplies
	 an address, define the symbol as that absolute value.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* No address given: emit nothing at all for this variable.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      /* Region-tagged variable: pick the matching bss-like section.  */
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment to a power-of-two byte count
	     for .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  /* Emit the object under its un-encoded name.  */
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4866
/* Trampolines.  */

/* TARGET_TRAMPOLINE_INIT hook.  Instead of emitting code to fill in
   the trampoline inline, call the runtime helper
   __mep_trampoline_helper with the trampoline address, the nested
   function's address, and the static chain value; the helper writes
   the trampoline contents.  */

static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4881
/* Experimental Reorg.  */

/* Return true if register REG is referenced anywhere inside the rtx
   IN; with REG == NULL, return true if IN contains any MEM instead.
   If MODES_TOO is nonzero, a register only matches when its mode
   equals REG's mode as well as its number.  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference only "mentions" something in MEM-search mode.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* When searching for a register, look inside the address;
	 when searching for a MEM, we just found one.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf rtxes that can never mention a register or MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over the rtx's operands and vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4949
4950 #define EXPERIMENTAL_REGMOVE_REORG 1
4951
4952 #if EXPERIMENTAL_REGMOVE_REORG
4953
/* Return nonzero if hard registers R1 and R2 are in the same MeP
   register family — both satisfy GR_REGNO_P or both satisfy
   CR_REGNO_P — so a move between them is a candidate for removal.  */

static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4963
4964 static void
4965 mep_reorg_regmove (rtx insns)
4966 {
4967 rtx insn, next, pat, follow, *where;
4968 int count = 0, done = 0, replace, before = 0;
4969
4970 if (dump_file)
4971 for (insn = insns; insn; insn = NEXT_INSN (insn))
4972 if (GET_CODE (insn) == INSN)
4973 before++;
4974
4975 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4976 set that uses the r2 and r2 dies there. We replace r2 with r1
4977 and see if it's still a valid insn. If so, delete the first set.
4978 Copied from reorg.c. */
4979
4980 while (!done)
4981 {
4982 done = 1;
4983 for (insn = insns; insn; insn = next)
4984 {
4985 next = NEXT_INSN (insn);
4986 if (GET_CODE (insn) != INSN)
4987 continue;
4988 pat = PATTERN (insn);
4989
4990 replace = 0;
4991
4992 if (GET_CODE (pat) == SET
4993 && GET_CODE (SET_SRC (pat)) == REG
4994 && GET_CODE (SET_DEST (pat)) == REG
4995 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4996 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4997 {
4998 follow = next_nonnote_insn (insn);
4999 if (dump_file)
5000 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5001
5002 while (follow && GET_CODE (follow) == INSN
5003 && GET_CODE (PATTERN (follow)) == SET
5004 && !dead_or_set_p (follow, SET_SRC (pat))
5005 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5006 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5007 {
5008 if (dump_file)
5009 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5010 follow = next_nonnote_insn (follow);
5011 }
5012
5013 if (dump_file)
5014 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5015 if (follow && GET_CODE (follow) == INSN
5016 && GET_CODE (PATTERN (follow)) == SET
5017 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5018 {
5019 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5020 {
5021 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5022 {
5023 replace = 1;
5024 where = & SET_SRC (PATTERN (follow));
5025 }
5026 }
5027 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5028 {
5029 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5030 {
5031 replace = 1;
5032 where = & PATTERN (follow);
5033 }
5034 }
5035 }
5036 }
5037
5038 /* If so, follow is the corresponding insn */
5039 if (replace)
5040 {
5041 if (dump_file)
5042 {
5043 rtx x;
5044
5045 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5046 for (x = insn; x ;x = NEXT_INSN (x))
5047 {
5048 print_rtl_single (dump_file, x);
5049 if (x == follow)
5050 break;
5051 fprintf (dump_file, "\n");
5052 }
5053 }
5054
5055 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5056 follow, where))
5057 {
5058 count ++;
5059 next = delete_insn (insn);
5060 if (dump_file)
5061 {
5062 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5063 print_rtl_single (dump_file, follow);
5064 }
5065 done = 0;
5066 }
5067 }
5068 }
5069 }
5070
5071 if (dump_file)
5072 {
5073 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5074 fprintf (dump_file, "=====\n");
5075 }
5076 }
5077 #endif
5078
5079
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   Return the last instruction in the adjusted loop.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  The repeat
     hardware executes the final two slots of the loop body after the
     label, so when the label is not SHARED we try to pull up to two
     eligible insns below the label position.  */
  next = last_insn;
  if (!shared)
    while (prev != 0)
      {
	code = GET_CODE (prev);
	/* Never move the label past a call, label or barrier.  */
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  Fewer than two insns were available for the
     epilogue, so pad it out to exactly two with nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5177
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder for OPERANDS, tagging it with a per-function loop id so
   that mep_reorg_repeat can later pair begins with their ends.  A new
   tag is allocated for the first doloop insn of the function and
   whenever two insns of the same kind appear in a row; otherwise the
   insn reuses the current tag (pairing a begin with its end).  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5197
5198
5199 /* Code for converting doloop_begins and doloop_ends into valid
5200 MeP instructions. A doloop_begin is just a placeholder:
5201
5202 $count = unspec ($count)
5203
5204 where $count is initially the number of iterations - 1.
5205 doloop_end has the form:
5206
5207 if ($count-- == 0) goto label
5208
5209 The counter variable is private to the doloop insns, nothing else
5210 relies on its value.
5211
5212 There are three cases, in decreasing order of preference:
5213
5214 1. A loop has exactly one doloop_begin and one doloop_end.
5215 The doloop_end branches to the first instruction after
5216 the doloop_begin.
5217
5218 In this case we can replace the doloop_begin with a repeat
5219 instruction and remove the doloop_end. I.e.:
5220
5221 $count1 = unspec ($count1)
5222 label:
5223 ...
5224 insn1
5225 insn2
5226 if ($count2-- == 0) goto label
5227
5228 becomes:
5229
5230 repeat $count1,repeat_label
5231 label:
5232 ...
5233 repeat_label:
5234 insn1
5235 insn2
5236 # end repeat
5237
5238 2. As for (1), except there are several doloop_ends. One of them
5239 (call it X) falls through to a label L. All the others fall
5240 through to branches to L.
5241
5242 In this case, we remove X and replace the other doloop_ends
5243 with branches to the repeat label. For example:
5244
5245 $count1 = unspec ($count1)
5246 start:
5247 ...
5248 if ($count2-- == 0) goto label
5249 end:
5250 ...
5251 if ($count3-- == 0) goto label
5252 goto end
5253
5254 becomes:
5255
5256 repeat $count1,repeat_label
5257 start:
5258 ...
5259 repeat_label:
5260 nop
5261 nop
5262 # end repeat
5263 end:
5264 ...
5265 goto repeat_label
5266
5267 3. The fallback case. Replace doloop_begins with:
5268
5269 $count = $count + 1
5270
5271 Replace doloop_ends with the equivalent of:
5272
5273 $count = $count - 1
5274 if ($count == 0) goto label
5275
5276 Note that this might need a scratch register if $count
5277 is stored in memory. */
5278
/* A structure describing one doloop_begin.  Built by mep_reorg_repeat
   while scanning the insn stream.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag (singly-linked list).  */
  struct mep_doloop_begin *next;

  /* The doloop_begin_internal instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5290
/* A structure describing a doloop_end.  Built by mep_reorg_repeat
   while scanning the insn stream.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag (singly-linked list).  */
  struct mep_doloop_end *next;

  /* The doloop_end_internal instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5315
5316
/* One do-while loop, grouping all doloop insns that share a tag.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5326
5327
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5358
/* The main repeat reorg function.  See comment above for details.  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, one per tag.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     operand[2] of both patterns is the tag; see mep_emit_doloop.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5507
5508
/* Return true if INSN is a conditional branch whose comparison code
   (EQ/NE/LT/GE) can be inverted such that the result still matches an
   insn pattern.  The condition is temporarily flipped in place to run
   the recognizer, then restored; INSN_CODE is left invalidated so the
   insn will be re-recognized later.  */

static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Only these four comparison codes have a direct inverse here;
     anything else is not invertable.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Try to recognize the inverted form, then undo the change.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5546
/* Invert the sense of conditional branch INSN and retarget it at a
   fresh label emitted after AFTER.  The old target label's use count
   is decremented and the label deleted when it becomes unused.  The
   caller must have checked mep_invertable_branch_p first; the
   inverted insn is asserted to be recognizable.  */

static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code (same four codes handled by
     mep_invertable_branch_p).  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label
     reference at the new label, releasing the old target.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still match a pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5592
/* Scan INSNS for backward branches that close a loop suitable for the
   erepeat instruction (no calls or barriers inside, and a loop-top
   label we can prove is not otherwise jumped to from outside), and
   convert them: an erepeat is inserted after the loop-top label, and
   the closing branch is removed (unconditional) or inverted to exit
   the loop (conditional).  */

static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional closing branch counts as one loop insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch looking for its target label,
	   giving up if we hit anything that disqualifies the loop.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional closing jump: the erepeat loops
		       forever, so the jump itself can go.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional closing branch: invert it so it
		       exits the loop instead of continuing it.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5705
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to, skipping notes,
	   labels and USEs.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, dropping the
		   target label if this was its last use.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5743
5744
5745 static void
5746 mep_reorg_addcombine (rtx insns)
5747 {
5748 rtx i, n;
5749
5750 for (i = insns; i; i = NEXT_INSN (i))
5751 if (INSN_P (i)
5752 && INSN_CODE (i) == CODE_FOR_addsi3
5753 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5754 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5755 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5756 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5757 {
5758 n = NEXT_INSN (i);
5759 if (INSN_P (n)
5760 && INSN_CODE (n) == CODE_FOR_addsi3
5761 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5762 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5763 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5764 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5765 {
5766 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5767 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5768 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5769 && ic + nc < 32767
5770 && ic + nc > -32768)
5771 {
5772 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5773 NEXT_INSN (i) = NEXT_INSN (n);
5774 if (NEXT_INSN (i))
5775 PREV_INSN (NEXT_INSN (i)) = i;
5776 }
5777 }
5778 }
5779 }
5780
5781 /* If this insn adjusts the stack, return the adjustment, else return
5782 zero. */
5783 static int
5784 add_sp_insn_p (rtx insn)
5785 {
5786 rtx pat;
5787
5788 if (! single_set (insn))
5789 return 0;
5790 pat = PATTERN (insn);
5791 if (GET_CODE (SET_DEST (pat)) != REG)
5792 return 0;
5793 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5794 return 0;
5795 if (GET_CODE (SET_SRC (pat)) != PLUS)
5796 return 0;
5797 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5798 return 0;
5799 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5800 return 0;
5801 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5802 return 0;
5803 return INTVAL (XEXP (SET_SRC (pat), 1));
5804 }
5805
/* Check for trivial functions that set up an unneeded stack
   frame.  If the only stack-pointer adjustments are the frame
   allocation and a single matching deallocation, and no other insn
   mentions $sp or makes a call, delete both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body.  The loop deliberately stops before the last real
     insn (NEXT is null), so the trailing return/deallocation sequence
     is not itself scanned past.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Give up on a second adjustment, or one that doesn't
	     exactly undo the frame allocation.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	/* Any other use of $sp means the frame is needed.  */
	return;
      else if (CALL_P (insns))
	/* Calls may rely on the frame being in place.  */
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5860
/* The MeP machine-dependent reorg pass: run the local cleanup and
   bundling passes in a fixed order, bracketed by a dataflow analysis
   that supplies accurate REG_DEAD notes.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* Only use erepeat when not profiling and when RPB is either not an
     interrupt-clobber concern or is saved by the interrupt handler.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5892
5893 \f
5894
5895 /*----------------------------------------------------------------------*/
5896 /* Builtins */
5897 /*----------------------------------------------------------------------*/
5898
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Associates a coprocessor configuration name (as matched against
   mep_config_string) with its ISA bitmask.  */
struct mep_config {
  const char *config_name;	/* Configuration name.  */
  unsigned int isa;		/* ISA bitmask for that configuration.  */
};

/* Table of known configurations, terminated by a null entry.  The real
   entries come from the generated COPROC_SELECTION_TABLE, if any.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5926
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries for the same
     intrinsic chain to earlier ones through mep_intrinsic_chain, so
     mep_intrinsic_insn ends up holding the last (most general) one.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making that computation dead.  Looks like a deliberate
     or leftover force-enable — confirm intent before removing either
     statement.  */
  mep_have_core_copro_moves_p = 1;
}
5968
5969 /* Declare all available intrinsic functions. Called once only. */
5970
/* Cached tree nodes for the coprocessor builtin argument and return
   types; built once in mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5979
5980 static tree
5981 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5982 {
5983 switch (cr)
5984 {
5985 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5986 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5987 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5988 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5989 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5990 case cgen_regnum_operand_type_CHAR: return char_type_node;
5991 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5992 case cgen_regnum_operand_type_SI: return intSI_type_node;
5993 case cgen_regnum_operand_type_DI: return intDI_type_node;
5994 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5995 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5996 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5997 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5998 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5999 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
6000 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
6001 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
6002 default:
6003 return void_type_node;
6004 }
6005 }
6006
/* Build the type nodes above, expose them to user code under "cp_*"
   names, and declare a builtin function for each available unspec-form
   intrinsic.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* Pick the widest type the coprocessor data bus carries.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make each type visible to user code under a "cp_*" name.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
		 cp_data_bus_int_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
		 opaque_vector_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
		 v8qi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
		 v4hi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
		 v2si_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
		 v8uqi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
		 v4uhi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
		 v2usi_type_node));

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicate entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type (ret_type, 0);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6089
6090 /* Report the unavailablity of the given intrinsic. */
6091
6092 #if 1
6093 static void
6094 mep_intrinsic_unavailable (int intrinsic)
6095 {
6096 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6097
6098 if (already_reported_p[intrinsic])
6099 return;
6100
6101 if (mep_intrinsic_insn[intrinsic] < 0)
6102 error ("coprocessor intrinsic %qs is not available in this configuration",
6103 cgen_intrinsics[intrinsic]);
6104 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6105 error ("%qs is not available in VLIW functions",
6106 cgen_intrinsics[intrinsic]);
6107 else
6108 error ("%qs is not available in non-VLIW functions",
6109 cgen_intrinsics[intrinsic]);
6110
6111 already_reported_p[intrinsic] = 1;
6112 }
6113 #endif
6114
6115
6116 /* See if any implementation of INTRINSIC is available to the
6117 current function. If so, store the most general implementation
6118 in *INSN_PTR and return true. Return false otherwise. */
6119
6120 static bool
6121 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6122 {
6123 int i;
6124
6125 i = mep_intrinsic_insn[intrinsic];
6126 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6127 i = mep_intrinsic_chain[i];
6128
6129 if (i >= 0)
6130 {
6131 *insn_ptr = &cgen_insns[i];
6132 return true;
6133 }
6134 return false;
6135 }
6136
6137
6138 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6139 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6140 try using a work-alike instead. In this case, the returned insn
6141 may have three operands rather than two. */
6142
6143 static bool
6144 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6145 {
6146 size_t i;
6147
6148 if (intrinsic == mep_cmov)
6149 {
6150 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6151 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6152 return true;
6153 return false;
6154 }
6155 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6156 }
6157
6158
6159 /* If ARG is a register operand that is the same size as MODE, convert it
6160 to MODE using a subreg. Otherwise return ARG as-is. */
6161
6162 static rtx
6163 mep_convert_arg (enum machine_mode mode, rtx arg)
6164 {
6165 if (GET_MODE (arg) != mode
6166 && register_operand (arg, VOIDmode)
6167 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6168 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6169 return arg;
6170 }
6171
6172
6173 /* Apply regnum conversions to ARG using the description given by REGNUM.
6174 Return the new argument on success and null on failure. */
6175
6176 static rtx
6177 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6178 {
6179 if (regnum->count == 0)
6180 return arg;
6181
6182 if (GET_CODE (arg) != CONST_INT
6183 || INTVAL (arg) < 0
6184 || INTVAL (arg) >= regnum->count)
6185 return 0;
6186
6187 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6188 }
6189
6190
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Return the legitimized rtx, or null if no match is possible
   (the caller reports the error).  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6248
6249
6250 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6251 function FNNAME. OPERAND describes the operand to which ARGNUM
6252 is mapped. */
6253
6254 static void
6255 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6256 int argnum, tree fnname)
6257 {
6258 size_t i;
6259
6260 if (GET_CODE (arg) == CONST_INT)
6261 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6262 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6263 {
6264 const struct cgen_immediate_predicate *predicate;
6265 HOST_WIDE_INT argval;
6266
6267 predicate = &cgen_immediate_predicates[i];
6268 argval = INTVAL (arg);
6269 if (argval < predicate->lower || argval >= predicate->upper)
6270 error ("argument %d of %qE must be in the range %d...%d",
6271 argnum, fnname, predicate->lower, predicate->upper - 1);
6272 else
6273 error ("argument %d of %qE must be a multiple of %d",
6274 argnum, fnname, predicate->align);
6275 return;
6276 }
6277
6278 error ("incompatible type for argument %d of %qE", argnum, fnname);
6279 }
6280
6281 static rtx
6282 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6283 rtx subtarget ATTRIBUTE_UNUSED,
6284 enum machine_mode mode ATTRIBUTE_UNUSED,
6285 int ignore ATTRIBUTE_UNUSED)
6286 {
6287 rtx pat, op[10], arg[10];
6288 unsigned int a;
6289 int opindex, unsigned_p[10];
6290 tree fndecl, args;
6291 unsigned int n_args;
6292 tree fnname;
6293 const struct cgen_insn *cgen_insn;
6294 const struct insn_data_d *idata;
6295 unsigned int first_arg = 0;
6296 unsigned int builtin_n_args;
6297
6298 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6299 fnname = DECL_NAME (fndecl);
6300
6301 /* Find out which instruction we should emit. Note that some coprocessor
6302 intrinsics may only be available in VLIW mode, or only in normal mode. */
6303 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6304 {
6305 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6306 return NULL_RTX;
6307 }
6308 idata = &insn_data[cgen_insn->icode];
6309
6310 builtin_n_args = cgen_insn->num_args;
6311
6312 if (cgen_insn->cret_p)
6313 {
6314 if (cgen_insn->cret_p > 1)
6315 builtin_n_args ++;
6316 first_arg = 1;
6317 mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6318 builtin_n_args --;
6319 }
6320
6321 /* Evaluate each argument. */
6322 n_args = call_expr_nargs (exp);
6323
6324 if (n_args < builtin_n_args)
6325 {
6326 error ("too few arguments to %qE", fnname);
6327 return NULL_RTX;
6328 }
6329 if (n_args > builtin_n_args)
6330 {
6331 error ("too many arguments to %qE", fnname);
6332 return NULL_RTX;
6333 }
6334
6335 for (a = first_arg; a < builtin_n_args + first_arg; a++)
6336 {
6337 tree value;
6338
6339 args = CALL_EXPR_ARG (exp, a - first_arg);
6340
6341 value = args;
6342
6343 #if 0
6344 if (cgen_insn->regnums[a].reference_p)
6345 {
6346 if (TREE_CODE (value) != ADDR_EXPR)
6347 {
6348 debug_tree(value);
6349 error ("argument %d of %qE must be an address", a+1, fnname);
6350 return NULL_RTX;
6351 }
6352 value = TREE_OPERAND (value, 0);
6353 }
6354 #endif
6355
6356 /* If the argument has been promoted to int, get the unpromoted
6357 value. This is necessary when sub-int memory values are bound
6358 to reference parameters. */
6359 if (TREE_CODE (value) == NOP_EXPR
6360 && TREE_TYPE (value) == integer_type_node
6361 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6362 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6363 < TYPE_PRECISION (TREE_TYPE (value))))
6364 value = TREE_OPERAND (value, 0);
6365
6366 /* If the argument has been promoted to double, get the unpromoted
6367 SFmode value. This is necessary for FMAX support, for example. */
6368 if (TREE_CODE (value) == NOP_EXPR
6369 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6370 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6371 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6372 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6373 value = TREE_OPERAND (value, 0);
6374
6375 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6376 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6377 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6378 if (cgen_insn->regnums[a].reference_p)
6379 {
6380 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6381 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6382
6383 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6384 }
6385 if (arg[a] == 0)
6386 {
6387 error ("argument %d of %qE must be in the range %d...%d",
6388 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6389 return NULL_RTX;
6390 }
6391 }
6392
6393 for (a = 0; a < first_arg; a++)
6394 {
6395 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6396 arg[a] = target;
6397 else
6398 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6399 }
6400
6401 /* Convert the arguments into a form suitable for the intrinsic.
6402 Report an error if this isn't possible. */
6403 for (opindex = 0; opindex < idata->n_operands; opindex++)
6404 {
6405 a = cgen_insn->op_mapping[opindex];
6406 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6407 arg[a], unsigned_p[a]);
6408 if (op[opindex] == 0)
6409 {
6410 mep_incompatible_arg (&idata->operand[opindex],
6411 arg[a], a + 1 - first_arg, fnname);
6412 return NULL_RTX;
6413 }
6414 }
6415
6416 /* Emit the instruction. */
6417 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6418 op[5], op[6], op[7], op[8], op[9]);
6419
6420 if (GET_CODE (pat) == SET
6421 && GET_CODE (SET_DEST (pat)) == PC
6422 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6423 emit_jump_insn (pat);
6424 else
6425 emit_insn (pat);
6426
6427 /* Copy lvalues back to their final locations. */
6428 for (opindex = 0; opindex < idata->n_operands; opindex++)
6429 if (idata->operand[opindex].constraint[0] == '=')
6430 {
6431 a = cgen_insn->op_mapping[opindex];
6432 if (a >= first_arg)
6433 {
6434 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6435 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6436 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6437 op[opindex]));
6438 else
6439 {
6440 /* First convert the operand to the right mode, then copy it
6441 into the destination. Doing the conversion as a separate
6442 step (rather than using convert_move) means that we can
6443 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6444 refer to the same register. */
6445 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6446 op[opindex], unsigned_p[a]);
6447 if (!rtx_equal_p (arg[a], op[opindex]))
6448 emit_move_insn (arg[a], op[opindex]);
6449 }
6450 }
6451 }
6452
6453 if (first_arg > 0 && target && target != op[0])
6454 {
6455 emit_move_insn (target, op[0]);
6456 }
6457
6458 return target;
6459 }
6460
/* Presumably the TARGET_VECTOR_MODE_SUPPORTED_P hook — TODO confirm.
   MeP never reports vector modes as supported here; vector operations
   are reached only through the coprocessor builtins above.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6466 \f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Used as a for_each_rtx callback; DATA is
   unused.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* Only hard registers can be global; check the hard
	     register the subreg actually accesses.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* None of these can mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6516
6517 /* Returns nonzero if X mentions a global register. */
6518
6519 static int
6520 global_reg_mentioned_p (rtx x)
6521 {
6522 if (INSN_P (x))
6523 {
6524 if (CALL_P (x))
6525 {
6526 if (! RTL_CONST_OR_PURE_CALL_P (x))
6527 return 1;
6528 x = CALL_INSN_FUNCTION_USAGE (x);
6529 if (x == 0)
6530 return 0;
6531 }
6532 else
6533 x = PATTERN (x);
6534 }
6535
6536 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6537 }
6538 /* Scheduling hooks for VLIW mode.
6539
6540 Conceptually this is very simple: we have a two-pack architecture
6541 that takes one core insn and one coprocessor insn to make up either
6542 a 32- or 64-bit instruction word (depending on the option bit set in
6543 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6544 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6545 and one 48-bit cop insn or two 32-bit core/cop insns.
6546
6547 In practice, instruction selection will be a bear. Consider in
6548 VL64 mode the following insns
6549
6550 add $1, 1
6551 cmov $cr0, $0
6552
6553 these cannot pack, since the add is a 16-bit core insn and cmov
6554 is a 32-bit cop insn. However,
6555
6556 add3 $1, $1, 1
6557 cmov $cr0, $0
6558
6559 packs just fine. For good VLIW code generation in VL64 mode, we
6560 will have to have 32-bit alternatives for many of the common core
6561 insns. Not implemented. */
6562
/* Adjust the scheduler's cost for the dependence LINK between INSN
   and DEP_INSN; COST is the default.  Presumably the
   TARGET_SCHED_ADJUST_COST hook — TODO confirm.  */
static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND marks an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6609
6610 /* ??? We don't properly compute the length of a load/store insn,
6611 taking into account the addressing mode. */
6612
6613 static int
6614 mep_issue_rate (void)
6615 {
6616 return TARGET_IVC2 ? 3 : 2;
6617 }
6618
6619 /* Return true if function DECL was declared with the vliw attribute. */
6620
6621 bool
6622 mep_vliw_function_p (tree decl)
6623 {
6624 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6625 }
6626
6627 static rtx
6628 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6629 {
6630 int i;
6631
6632 for (i = nready - 1; i >= 0; --i)
6633 {
6634 rtx insn = ready[i];
6635 if (recog_memoized (insn) >= 0
6636 && get_attr_slot (insn) == slot
6637 && get_attr_length (insn) == length)
6638 return insn;
6639 }
6640
6641 return NULL_RTX;
6642 }
6643
/* Move INSN to the end of READY (presumably the position the
   scheduler issues first — see mep_sched_reorder), shifting the
   intervening entries down one place.  INSN must be present in
   READY.  */
static void
mep_move_ready_insn (rtx *ready, int nready, rtx insn)
{
  int i;

  for (i = 0; i < nready; ++i)
    if (ready[i] == insn)
      {
	/* Slide the later entries down and re-append INSN.  */
	for (; i < nready - 1; ++i)
	  ready[i] = ready[i + 1];
	ready[i] = insn;
	return;
      }

  /* Callers only pass insns taken from READY.  */
  gcc_unreachable ();
}
6660
/* Print a one-line description of INSN (insn code, uid, pattern name
   and slot attribute) to DUMP, for scheduler debugging.  */
static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unrecognized slot value: print it numerically.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6704
6705 static int
6706 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6707 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6708 int *pnready, int clock ATTRIBUTE_UNUSED)
6709 {
6710 int nready = *pnready;
6711 rtx core_insn, cop_insn;
6712 int i;
6713
6714 if (dump && sched_verbose > 1)
6715 {
6716 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6717 for (i=0; i<nready; i++)
6718 mep_print_sched_insn (dump, ready[i]);
6719 fprintf (dump, "\n");
6720 }
6721
6722 if (!mep_vliw_function_p (cfun->decl))
6723 return 1;
6724 if (nready < 2)
6725 return 1;
6726
6727 /* IVC2 uses a DFA to determine what's ready and what's not. */
6728 if (TARGET_IVC2)
6729 return nready;
6730
6731 /* We can issue either a core or coprocessor instruction.
6732 Look for a matched pair of insns to reorder. If we don't
6733 find any, don't second-guess the scheduler's priorities. */
6734
6735 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6736 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6737 TARGET_OPT_VL64 ? 6 : 2)))
6738 ;
6739 else if (TARGET_OPT_VL64
6740 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6741 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6742 ;
6743 else
6744 /* We didn't find a pair. Issue the single insn at the head
6745 of the ready list. */
6746 return 1;
6747
6748 /* Reorder the two insns first. */
6749 mep_move_ready_insn (ready, nready, core_insn);
6750 mep_move_ready_insn (ready, nready - 1, cop_insn);
6751 return 2;
6752 }
6753
6754 /* A for_each_rtx callback. Return true if *X is a register that is
6755 set by insn PREV. */
6756
6757 static int
6758 mep_store_find_set (rtx *x, void *prev)
6759 {
6760 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6761 }
6762
6763 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6764 not the containing insn. */
6765
6766 static bool
6767 mep_store_data_bypass_1 (rtx prev, rtx pat)
6768 {
6769 /* Cope with intrinsics like swcpa. */
6770 if (GET_CODE (pat) == PARALLEL)
6771 {
6772 int i;
6773
6774 for (i = 0; i < XVECLEN (pat, 0); i++)
6775 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6776 return true;
6777
6778 return false;
6779 }
6780
6781 /* Check for some sort of store. */
6782 if (GET_CODE (pat) != SET
6783 || GET_CODE (SET_DEST (pat)) != MEM)
6784 return false;
6785
6786 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6787 The first operand to the unspec is the store data and the other operands
6788 are used to calculate the address. */
6789 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6790 {
6791 rtx src;
6792 int i;
6793
6794 src = SET_SRC (pat);
6795 for (i = 1; i < XVECLEN (src, 0); i++)
6796 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6797 return false;
6798
6799 return true;
6800 }
6801
6802 /* Otherwise just check that PREV doesn't modify any register mentioned
6803 in the memory destination. */
6804 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6805 }
6806
6807 /* Return true if INSN is a store instruction and if the store address
6808 has no true dependence on PREV. */
6809
6810 bool
6811 mep_store_data_bypass_p (rtx prev, rtx insn)
6812 {
6813 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6814 }
6815
6816 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6817 is a register other than LO or HI and if PREV sets *X. */
6818
6819 static int
6820 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6821 {
6822 return (REG_P (*x)
6823 && REGNO (*x) != LO_REGNO
6824 && REGNO (*x) != HI_REGNO
6825 && reg_set_p (*x, (const_rtx) prev));
6826 }
6827
6828 /* Return true if, apart from HI/LO, there are no true dependencies
6829 between multiplication instructions PREV and INSN. */
6830
6831 bool
6832 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6833 {
6834 rtx pat;
6835
6836 pat = PATTERN (insn);
6837 if (GET_CODE (pat) == PARALLEL)
6838 pat = XVECEXP (pat, 0, 0);
6839 return (GET_CODE (pat) == SET
6840 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6841 }
6842
6843 /* Return true if INSN is an ldc instruction that issues to the
6844 MeP-h1 integer pipeline. This is true for instructions that
6845 read from PSW, LP, SAR, HI and LO. */
6846
6847 bool
6848 mep_ipipe_ldc_p (rtx insn)
6849 {
6850 rtx pat, src;
6851
6852 pat = PATTERN (insn);
6853
6854 /* Cope with instrinsics that set both a hard register and its shadow.
6855 The set of the hard register comes first. */
6856 if (GET_CODE (pat) == PARALLEL)
6857 pat = XVECEXP (pat, 0, 0);
6858
6859 if (GET_CODE (pat) == SET)
6860 {
6861 src = SET_SRC (pat);
6862
6863 /* Cope with intrinsics. The first operand to the unspec is
6864 the source register. */
6865 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6866 src = XVECEXP (src, 0, 0);
6867
6868 if (REG_P (src))
6869 switch (REGNO (src))
6870 {
6871 case PSW_REGNO:
6872 case LP_REGNO:
6873 case SAR_REGNO:
6874 case HI_REGNO:
6875 case LO_REGNO:
6876 return true;
6877 }
6878 }
6879 return false;
6880 }
6881
6882 /* Create a VLIW bundle from core instruction CORE and coprocessor
6883 instruction COP. COP always satisfies INSN_P, but CORE can be
6884 either a new pattern or an existing instruction.
6885
6886 Emit the bundle in place of COP and return it. */
6887
static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  CORE and COP
     were unlinked by remove_insn above; splice them back in so that
     CORE -> COP sit between the SEQUENCE's neighbours.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  The insn
     mode is (ab)used here as a bundling marker: BImode on COP is
     what later marks it as the second half of a VLIW pair.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's locator; fall back to COP's
     when CORE has none (e.g. a freshly generated nop).  */
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
  INSN_LOCATOR (core) = 0;
  INSN_LOCATOR (cop) = 0;

  return insn;
}
6924
/* A helper routine for mep_insn_dependent_p, called through note_stores.  */
6926
6927 static void
6928 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6929 {
6930 rtx * pinsn = (rtx *) data;
6931
6932 if (*pinsn && reg_mentioned_p (x, *pinsn))
6933 *pinsn = NULL_RTX;
6934 }
6935
6936 /* Return true if anything in insn X is (anti,output,true) dependent on
6937 anything in insn Y. */
6938
6939 static int
6940 mep_insn_dependent_p (rtx x, rtx y)
6941 {
6942 rtx tmp;
6943
6944 gcc_assert (INSN_P (x));
6945 gcc_assert (INSN_P (y));
6946
6947 tmp = PATTERN (y);
6948 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6949 if (tmp == NULL_RTX)
6950 return 1;
6951
6952 tmp = PATTERN (x);
6953 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6954 if (tmp == NULL_RTX)
6955 return 1;
6956
6957 return 0;
6958 }
6959
6960 static int
6961 core_insn_p (rtx insn)
6962 {
6963 if (GET_CODE (PATTERN (insn)) == USE)
6964 return 0;
6965 if (get_attr_slot (insn) == SLOT_CORE)
6966 return 1;
6967 return 0;
6968 }
6969
6970 /* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
6972 to emit the "+" that tells the assembler to create a VLIW insn.
6973
6974 For unbundled insns, the assembler will automatically add coprocessor
6975 nops, and 16-bit core nops. Due to an apparent oversight in the
6976 spec, the assembler will _not_ automatically add 32-bit core nops,
6977 so we have to emit those here.
6978
6979 Called from mep_insn_reorg. */
6980
static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks the start of a scheduled bundle.  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATOR (insn) = INSN_LOCATOR (first);

	  /* Walk backwards from INSN to FIRST, unlinking each NOTE and
	     relinking it just before FIRST.  PREV is saved up front
	     because the relinking rewrites NOTE's chain pointers.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Jumps, labels etc. terminate any open bundle.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan the rest of the bundle (VOIDmode non-jump insns)
		 looking for the last core insn, if any.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop body never tests the final insn; do so here.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* Transfer the bundle-start marker to the core insn.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      /* Non-IVC2: pair each COP insn with the preceding core insn, or
	 pad with an explicit nop when no pairing is possible.  */
      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* No usable partner: bundle the COP insn with a nop of
		 the right size instead (an 8-byte COP insn already
		 fills the whole bundle).  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7163
7164
7165 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7166 Return true on success. This function can fail if the intrinsic
7167 is unavailable or if the operands don't satisfy their predicates. */
7168
7169 bool
7170 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7171 {
7172 const struct cgen_insn *cgen_insn;
7173 const struct insn_data_d *idata;
7174 rtx newop[10];
7175 int i;
7176
7177 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7178 return false;
7179
7180 idata = &insn_data[cgen_insn->icode];
7181 for (i = 0; i < idata->n_operands; i++)
7182 {
7183 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7184 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7185 return false;
7186 }
7187
7188 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7189 newop[3], newop[4], newop[5],
7190 newop[6], newop[7], newop[8]));
7191
7192 return true;
7193 }
7194
7195
7196 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7197 OPERANDS[0]. Report an error if the instruction could not
7198 be synthesized. OPERANDS[1] is a register_operand. For sign
7199 and zero extensions, it may be smaller than SImode. */
7200
bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  /* Stub: no unary intrinsic expansions are implemented, so always
     report failure to the caller.  */
  return false;
}
7207
7208
7209 /* Likewise, but apply a binary operation to OPERANDS[1] and
7210 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7211 can be a general_operand.
7212
7213 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7214 third operand. REG and REG3 take register operands only. */
7215
bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  /* Stub: no binary intrinsic expansions are implemented, so always
     report failure to the caller.  */
  return false;
}
7225
7226 static bool
7227 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7228 {
7229 switch (code)
7230 {
7231 case CONST_INT:
7232 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7233 *total = 0;
7234 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7235 *total = 1;
7236 else
7237 *total = 3;
7238 return true;
7239
7240 case SYMBOL_REF:
7241 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7242 return true;
7243
7244 case MULT:
7245 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7246 ? COSTS_N_INSNS (3)
7247 : COSTS_N_INSNS (2));
7248 return true;
7249 }
7250 return false;
7251 }
7252
/* Implement TARGET_ADDRESS_COST.  All addresses are treated as
   equally cheap.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7258
7259 static bool
7260 mep_handle_option (size_t code,
7261 const char *arg ATTRIBUTE_UNUSED,
7262 int value ATTRIBUTE_UNUSED)
7263 {
7264 int i;
7265
7266 switch (code)
7267 {
7268 case OPT_mall_opts:
7269 target_flags |= MEP_ALL_OPTS;
7270 break;
7271
7272 case OPT_mno_opts:
7273 target_flags &= ~ MEP_ALL_OPTS;
7274 break;
7275
7276 case OPT_mcop64:
7277 target_flags |= MASK_COP;
7278 target_flags |= MASK_64BIT_CR_REGS;
7279 break;
7280
7281 case OPT_mtiny_:
7282 option_mtiny_specified = 1;
7283
7284 case OPT_mivc2:
7285 target_flags |= MASK_COP;
7286 target_flags |= MASK_64BIT_CR_REGS;
7287 target_flags |= MASK_VLIW;
7288 target_flags |= MASK_OPT_VL64;
7289 target_flags |= MASK_IVC2;
7290
7291 for (i=0; i<32; i++)
7292 fixed_regs[i+48] = 0;
7293 for (i=0; i<32; i++)
7294 call_used_regs[i+48] = 1;
7295 for (i=6; i<8; i++)
7296 call_used_regs[i+48] = 0;
7297
7298 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7299 RN (0, "$csar0");
7300 RN (1, "$cc");
7301 RN (4, "$cofr0");
7302 RN (5, "$cofr1");
7303 RN (6, "$cofa0");
7304 RN (7, "$cofa1");
7305 RN (15, "$csar1");
7306
7307 RN (16, "$acc0_0");
7308 RN (17, "$acc0_1");
7309 RN (18, "$acc0_2");
7310 RN (19, "$acc0_3");
7311 RN (20, "$acc0_4");
7312 RN (21, "$acc0_5");
7313 RN (22, "$acc0_6");
7314 RN (23, "$acc0_7");
7315
7316 RN (24, "$acc1_0");
7317 RN (25, "$acc1_1");
7318 RN (26, "$acc1_2");
7319 RN (27, "$acc1_3");
7320 RN (28, "$acc1_4");
7321 RN (29, "$acc1_5");
7322 RN (30, "$acc1_6");
7323 RN (31, "$acc1_7");
7324 #undef RN
7325
7326 break;
7327
7328 default:
7329 break;
7330 }
7331 return TRUE;
7332 }
7333
7334 static void
7335 mep_asm_init_sections (void)
7336 {
7337 based_section
7338 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7339 "\t.section .based,\"aw\"");
7340
7341 tinybss_section
7342 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7343 "\t.section .sbss,\"aw\"");
7344
7345 sdata_section
7346 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7347 "\t.section .sdata,\"aw\",@progbits");
7348
7349 far_section
7350 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7351 "\t.section .far,\"aw\"");
7352
7353 farbss_section
7354 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7355 "\t.section .farbss,\"aw\"");
7356
7357 frodata_section
7358 = get_unnamed_section (0, output_section_asm_op,
7359 "\t.section .frodata,\"a\"");
7360
7361 srodata_section
7362 = get_unnamed_section (0, output_section_asm_op,
7363 "\t.section .srodata,\"a\"");
7364
7365 vtext_section
7366 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7367 "\t.section .vtext,\"axv\"\n\t.vliw");
7368
7369 vftext_section
7370 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7371 "\t.section .vftext,\"axv\"\n\t.vliw");
7372
7373 ftext_section
7374 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7375 "\t.section .ftext,\"ax\"\n\t.core");
7376
7377 }
7378 \f
7379 /* Initialize the GCC target structure. */
7380
/* Assembly output and attribute hooks.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p
/* Section selection and naming.  */
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section
/* Built-in functions (intrinsics).  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Instruction scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder
/* Symbol and section encoding.  */
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
/* Calls, costs and reorg.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* Argument passing and varargs.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
/* Option handling.  */
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mep_handle_option
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
/* Varargs lowering.  */
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
/* Misc.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init

/* The target vector itself, built from the hooks defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
7465
7466 #include "gt-mep.h"