]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/mep/mep.c
target.def (handle_option): Take gcc_options and cl_decoded_option pointers and locat...
[thirdparty/gcc.git] / gcc / config / mep / mep.c
CommitLineData
7acf4da6 1/* Definitions for Toshiba Media Processor
96e45421
JM
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011
7acf4da6
DD
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "rtl.h"
28#include "tree.h"
29#include "regs.h"
30#include "hard-reg-set.h"
7acf4da6
DD
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "flags.h"
37#include "recog.h"
38#include "obstack.h"
39#include "tree.h"
40#include "expr.h"
41#include "except.h"
42#include "function.h"
43#include "optabs.h"
44#include "reload.h"
45#include "tm_p.h"
46#include "ggc.h"
718f9c0f 47#include "diagnostic-core.h"
7acf4da6
DD
48#include "integrate.h"
49#include "target.h"
50#include "target-def.h"
51#include "langhooks.h"
52#include "df.h"
12a54f54 53#include "gimple.h"
96e45421 54#include "opts.h"
7acf4da6
DD
55
56/* Structure of this file:
57
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
60 + Reload Support
61 + Costs
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
64 + Operand Printing
65 + Function args in registers
66 + Handle pipeline hazards
67 + Handle attributes
68 + Trampolines
69 + Machine-dependent Reorg
70 + Builtins. */
71
72/* Symbol encodings:
73
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
75
76 b - based
77 t - tiny
78 n - near
79 f - far
80 i - io, near
81 I - io, far
82 c - cb (control bus) */
83
/* Per-function machine-dependent state; allocated by
   mep_init_machine_status (declared above) and reached through
   cfun->machine.  */
struct GTY(()) machine_function
{
  /* Cached frame-pointer-needed decision for this function.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  int regsave_filler;
  int frame_filler;
  /* Nonzero once the frame layout is final -- NOTE(review): set
     elsewhere in this file; confirm against the prologue code.  */
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Total size of the register-save area and the per-register slot
     offsets within it.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
122
/* True if rtx X is a hard REG referring to one of the control
   registers.  */
#define MEP_CONTROL_REG(x) \
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125
7acf4da6
DD
/* Sections for the MeP-specific memory regions (based/tiny/far data
   and their rodata counterparts) -- presumably created in
   mep_asm_init_sections, declared above; confirm there.  */
static GTY(()) section * based_section;
static GTY(()) section * tinybss_section;
static GTY(()) section * far_section;
static GTY(()) section * farbss_section;
static GTY(()) section * frodata_section;
static GTY(()) section * srodata_section;

/* Text sections for VLIW-mode and far code.  */
static GTY(()) section * vtext_section;
static GTY(()) section * vftext_section;
static GTY(()) section * ftext_section;
7acf4da6
DD
137static void mep_set_leaf_registers (int);
138static bool symbol_p (rtx);
139static bool symbolref_p (rtx);
140static void encode_pattern_1 (rtx);
141static void encode_pattern (rtx);
142static bool const_in_range (rtx, int, int);
143static void mep_rewrite_mult (rtx, rtx);
144static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149static bool mep_nongeneral_reg (rtx);
150static bool mep_general_copro_reg (rtx);
151static bool mep_nonregister (rtx);
152static struct machine_function* mep_init_machine_status (void);
153static rtx mep_tp_rtx (void);
154static rtx mep_gp_rtx (void);
155static bool mep_interrupt_p (void);
156static bool mep_disinterrupt_p (void);
157static bool mep_reg_set_p (rtx, rtx);
158static bool mep_reg_set_in_function (int);
159static bool mep_interrupt_saved_reg (int);
160static bool mep_call_saves_register (int);
161static rtx F (rtx);
162static void add_constant (int, int, int, int);
7acf4da6
DD
163static rtx maybe_dead_move (rtx, rtx, bool);
164static void mep_reload_pointer (int, const char *);
165static void mep_start_function (FILE *, HOST_WIDE_INT);
166static bool mep_function_ok_for_sibcall (tree, tree);
167static int unique_bit_in (HOST_WIDE_INT);
168static int bit_size_for_clip (HOST_WIDE_INT);
169static int bytesize (const_tree, enum machine_mode);
170static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176static bool mep_function_attribute_inlinable_p (const_tree);
5cec9f59 177static bool mep_can_inline_p (tree, tree);
7acf4da6
DD
178static bool mep_lookup_pragma_disinterrupt (const char *);
179static int mep_multiple_address_regions (tree, bool);
180static int mep_attrlist_to_encoding (tree, tree);
181static void mep_insert_attributes (tree, tree *);
182static void mep_encode_section_info (tree, rtx, int);
183static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184static void mep_unique_section (tree, int);
185static unsigned int mep_section_type_flags (tree, const char *, int);
186static void mep_asm_named_section (const char *, unsigned int, tree);
187static bool mep_mentioned_p (rtx, rtx, int);
188static void mep_reorg_regmove (rtx);
189static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190static void mep_reorg_repeat (rtx);
191static bool mep_invertable_branch_p (rtx);
192static void mep_invert_branch (rtx, rtx);
193static void mep_reorg_erepeat (rtx);
194static void mep_jmp_return_reorg (rtx);
195static void mep_reorg_addcombine (rtx);
196static void mep_reorg (void);
197static void mep_init_intrinsics (void);
198static void mep_init_builtins (void);
199static void mep_intrinsic_unavailable (int);
200static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201static bool mep_get_move_insn (int, const struct cgen_insn **);
202static rtx mep_convert_arg (enum machine_mode, rtx);
203static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207static int mep_adjust_cost (rtx, rtx, rtx, int);
208static int mep_issue_rate (void);
209static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210static void mep_move_ready_insn (rtx *, int, rtx);
211static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212static rtx mep_make_bundle (rtx, rtx);
213static void mep_bundle_insns (rtx);
214static bool mep_rtx_cost (rtx, int, int, int *, bool);
215static int mep_address_cost (rtx, bool);
216static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
217 tree, int *, int);
218static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
219 const_tree, bool);
0851c6e3
NF
220static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
221 const_tree, bool);
222static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
223 const_tree, bool);
7acf4da6 224static bool mep_vector_mode_supported_p (enum machine_mode);
96e45421
JM
225static bool mep_handle_option (struct gcc_options *, struct gcc_options *,
226 const struct cl_decoded_option *, location_t);
7acf4da6
DD
227static rtx mep_allocate_initial_value (rtx);
228static void mep_asm_init_sections (void);
229static int mep_comp_type_attributes (const_tree, const_tree);
230static bool mep_narrow_volatile_bitfield (void);
231static rtx mep_expand_builtin_saveregs (void);
232static tree mep_build_builtin_va_list (void);
233static void mep_expand_va_start (tree, rtx);
12a54f54 234static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
7b5cbb57 235static bool mep_can_eliminate (const int, const int);
5efd84c5 236static void mep_conditional_register_usage (void);
87138d8d 237static void mep_trampoline_init (rtx, tree, rtx);
7acf4da6 238\f
7acf4da6
DD
239#define WANT_GCC_DEFINITIONS
240#include "mep-intrin.h"
241#undef WANT_GCC_DEFINITIONS
242
243\f
244/* Command Line Option Support. */
245
/* Per-register leaf flags; see mep_set_leaf_registers, which fills
   the whole table from entry 0's state.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};

/* Nonzero when -mtiny= was given explicitly; consulted by
   mep_option_override when resolving -ms/-mm/-ml conflicts.  */
static int option_mtiny_specified = 0;
272
273\f
274static void
275mep_set_leaf_registers (int enable)
276{
277 int i;
278
279 if (mep_leaf_registers[0] != enable)
280 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
281 mep_leaf_registers[i] = enable;
282}
283
/* Worker for the conditional-register-usage target hook: adjust
   register availability according to the selected -m options.  */
static void
mep_conditional_register_usage (void)
{
  int i;

  /* Without hardware multiply or divide nothing targets $hi/$lo, so
     take them out of allocation entirely.  */
  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
    {
      fixed_regs[HI_REGNO] = 1;
      fixed_regs[LO_REGNO] = 1;
      call_used_regs[HI_REGNO] = 1;
      call_used_regs[LO_REGNO] = 1;
    }

  /* The shadow registers are never available to the allocator.  */
  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
    global_regs[i] = 1;
}
300
7acf4da6 301
3020190e
JM
/* Per-optimization-level default overrides -- presumably installed
   via the TARGET_OPTION_OPTIMIZATION_TABLE hook; confirm where this
   table is referenced.  */
static const struct default_options mep_option_optimization_table[] =
  {
    /* The first scheduling pass often increases register pressure and
       tends to result in more spill code.  Only run it when
       specifically asked.  */
    { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },

    /* Using $fp doesn't gain us much, even when debugging is
       important.  */
    { OPT_LEVELS_ALL, OPT_fomit_frame_pointer, NULL, 1 },

    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
7acf4da6 315
c5387660
JM
/* Validate command-line option combinations and derive dependent
   settings once all options have been parsed.  */
static void
mep_option_override (void)
{
  /* PIC is not implemented for this target.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  /* -ms, -mm, -ml and -mtiny= are mutually exclusive memory models.  */
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && option_mtiny_specified)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && option_mtiny_specified)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Translate the memory model into the tiny-section size cutoff:
     -ms puts everything in tiny sections, -mm and -ml nothing
     (unless -mtiny= overrode -ml explicitly).  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! option_mtiny_specified)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
357
358/* Pattern Support - constraints, predicates, expanders. */
359
360/* MEP has very few instructions that can refer to the span of
361 addresses used by symbols, so it's common to check for them. */
362
363static bool
364symbol_p (rtx x)
365{
366 int c = GET_CODE (x);
367
368 return (c == CONST_INT
369 || c == CONST
370 || c == SYMBOL_REF);
371}
372
373static bool
374symbolref_p (rtx x)
375{
376 int c;
377
378 if (GET_CODE (x) != MEM)
379 return false;
380
381 c = GET_CODE (XEXP (x, 0));
382 return (c == CONST_INT
383 || c == CONST
384 || c == SYMBOL_REF);
385}
386
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */

/* True if hard register R may serve as a base register: any general
   register, plus -- when not in strict mode -- the argument pointer
   and any pseudo.  */
#define GEN_REG(R, STRICT) \
  (GR_REGNO_P (R) \
   || (!STRICT \
       && ((R) == ARG_POINTER_REGNUM \
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Scratch state for encode_pattern: the printable encoding string, a
   cursor into it, and the rtx recorded at each encoded position.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* True if the most recent encode_pattern produced exactly X.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
398
/* Append a one-character-per-node encoding of X to the global PATTERN
   buffer ('r' reg, 'm' mem, 's' symbol, '+' plus, ...), recording each
   visited rtx in PATTERNR so callers of RTX_IS can fish operands back
   out by position.  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Keep room for the terminating NUL; on overflow degrade the last
     slot to '?' so no RTX_IS pattern can match.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU -- a MEM also encodes the address it contains.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Unspecs encode as 'u' followed by the unspec number digit,
	 then the encodings of their operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
465
466static void
467encode_pattern (rtx x)
468{
469 patternp = pattern;
470 encode_pattern_1 (x);
471 *patternp = 0;
472}
473
/* Return the section tag encoded in the symbol underlying X (see the
   "Symbol encodings" table at the top of this file), or 0 if X does
   not resolve to an encoded symbol.  The io encodings are folded into
   their near/far equivalents: 'i' (io, near) -> 'n', 'I' (io, far)
   -> 'f'.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip MEM/CONST/UNSPEC/PLUS wrappers to reach the symbol.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only symbol-plus-constant offsets are transparent.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names look like "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* io, far */
	  return 'n'; /* io, near */
	}
      return name[1];
    }
  return 0;
}
515
/* Map hard register REGNO to its register class -- presumably the
   REGNO_REG_CLASS worker; confirm against mep.h.  For coprocessor
   registers this prefers the smallest user-defined subclass that
   contains the register.  */
int
mep_regno_reg_class (int regno)
{
  /* Singleton classes first.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are addressable tp-relative.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* No smaller subclass contains REGNO: class I wins.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers should remain; they belong to no class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
565
#if 0
/* Compiled out: old-style constraint handlers, apparently superseded
   by generated constraint support (note the constraint_satisfied_p /
   CONSTRAINT_* uses later in this file).  Kept for reference.  */
int
mep_reg_class_from_constraint (int c, const char *str)
{
  switch (c)
    {
    case 'a':
      return SP_REGS;
    case 'b':
      return TP_REGS;
    case 'c':
      return CONTROL_REGS;
    case 'd':
      return HILO_REGS;
    case 'e':
      {
	switch (str[1])
	  {
	  case 'm':
	    return LOADABLE_CR_REGS;
	  case 'x':
	    return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
	  case 'r':
	    return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
	  default:
	    return NO_REGS;
	  }
      }
    case 'h':
      return HI_REGS;
    case 'j':
      return RPC_REGS;
    case 'l':
      return LO_REGS;
    case 't':
      return TPREL_REGS;
    case 'v':
      return GP_REGS;
    case 'x':
      return CR_REGS;
    case 'y':
      return CCR_REGS;
    case 'z':
      return R0_REGS;

    case 'A':
    case 'B':
    case 'C':
    case 'D':
      {
	enum reg_class which = c - 'A' + USER0_REGS;
	return (reg_class_size[which] > 0 ? which : NO_REGS);
      }

    default:
      return NO_REGS;
    }
}

bool
mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
{
  switch (c)
    {
    case 'I': return value >= -32768 && value < 32768;
    case 'J': return value >= 0 && value < 65536;
    case 'K': return value >= 0 && value < 0x01000000;
    case 'L': return value >= -32 && value < 32;
    case 'M': return value >= 0 && value < 32;
    case 'N': return value >= 0 && value < 16;
    case 'O':
      /* 32-bit values with a zero low half.  */
      if (value & 0xffff)
	return false;
      return value >= -2147483647-1 && value <= 2147483647;
    default:
      gcc_unreachable ();
    }
}

bool
mep_extra_constraint (rtx value, int c)
{
  encode_pattern (value);

  switch (c)
    {
    case 'R':
      /* For near symbols, like what call uses.  */
      if (GET_CODE (value) == REG)
	return 0;
      return mep_call_address_operand (value, GET_MODE (value));

    case 'S':
      /* For signed 8-bit immediates.  */
      return (GET_CODE (value) == CONST_INT
	      && INTVAL (value) >= -128
	      && INTVAL (value) <= 127);

    case 'T':
      /* For tp/gp relative symbol values.  */
      return (RTX_IS ("u3s") || RTX_IS ("u2s")
	      || RTX_IS ("+u3si") || RTX_IS ("+u2si"));

    case 'U':
      /* Non-absolute memories.  */
      return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));

    case 'W':
      /* %hi(sym) */
      return RTX_IS ("Hs");

    case 'Y':
      /* Register indirect.  */
      return RTX_IS ("mr");

    case 'Z':
      return mep_section_tag (value) == 'c' && RTX_IS ("ms");
    }

  return false;
}
#endif
688
689#undef PASS
690#undef FAIL
691
692static bool
693const_in_range (rtx x, int minv, int maxv)
694{
695 return (GET_CODE (x) == CONST_INT
696 && INTVAL (x) >= minv
697 && INTVAL (x) <= maxv);
698}
699
700/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
701 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
702 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
703 at the end of the insn stream. */
704
705rtx
706mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
707{
708 if (rtx_equal_p (dest, src1))
709 return src2;
710 else if (rtx_equal_p (dest, src2))
711 return src1;
712 else
713 {
714 if (insn == 0)
715 emit_insn (gen_movsi (copy_rtx (dest), src1));
716 else
717 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
718 return src2;
719 }
720}
721
722/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
723 Change the last element of PATTERN from (clobber (scratch:SI))
724 to (clobber (reg:SI HI_REGNO)). */
725
726static void
727mep_rewrite_mult (rtx insn, rtx pattern)
728{
729 rtx hi_clobber;
730
731 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
732 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
733 PATTERN (insn) = pattern;
734 INSN_CODE (insn) = -1;
735}
736
737/* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
738 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
739 store the result in DEST if nonnull. */
740
741static void
742mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
743{
744 rtx lo, pattern;
745
746 lo = gen_rtx_REG (SImode, LO_REGNO);
747 if (dest)
748 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
749 mep_mulr_source (insn, dest, src1, src2));
750 else
751 pattern = gen_mulsi3_lo (lo, src1, src2);
752 mep_rewrite_mult (insn, pattern);
753}
754
755/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
756 SRC3 into $lo, then use either madd or maddr. The move into $lo will
757 be deleted by a peephole2 if SRC3 is already in $lo. */
758
759static void
760mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
761{
762 rtx lo, pattern;
763
764 lo = gen_rtx_REG (SImode, LO_REGNO);
765 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
766 if (dest)
767 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
768 mep_mulr_source (insn, dest, src1, src2),
769 copy_rtx (lo));
770 else
771 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
772 mep_rewrite_mult (insn, pattern);
773}
774
775/* Return true if $lo has the same value as integer register GPR when
776 instruction INSN is reached. If necessary, rewrite the instruction
777 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
778 rtx for (reg:SI LO_REGNO).
779
780 This function is intended to be used by the peephole2 pass. Since
781 that pass goes from the end of a basic block to the beginning, and
782 propagates liveness information on the way, there is no need to
783 update register notes here.
784
785 If GPR_DEAD_P is true on entry, and this function returns true,
786 then the caller will replace _every_ use of GPR in and after INSN
787 with LO. This means that if the instruction that sets $lo is a
788 mulr- or maddr-type instruction, we can rewrite it to use mul or
789 madd instead. In combination with the copy progagation pass,
790 this allows us to replace sequences like:
791
792 mov GPR,R1
793 mulr GPR,R2
794
795 with:
796
797 mul R1,R2
798
799 if GPR is no longer used. */
800
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the head of the basic block, looking
     for the instruction that last set $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR was set by a mulr-type insn; retarget it at $lo,
		   dropping the GPR result when GPR is dead.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Likewise for a maddr-type insn.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already write $lo; $lo equals GPR only if GPR was
	       the copied-out result operand.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR -- or a volatile insn we
	       may not reason across -- kills the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between the setter and INSN means GPR is
	       not dead at the setter after all.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
854
855/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
856
857bool
858mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
859{
860 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
861 extract_insn (insn);
862 return result;
863}
864
865/* Return true if SET can be turned into a post-modify load or store
866 that adds OFFSET to GPR. In other words, return true if SET can be
867 changed into:
868
869 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
870
871 It's OK to change SET to an equivalent operation in order to
872 make it match. */
873
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load at the MEM inside.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     GPR-indirect access with no existing offset.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on we commit: SET is modified in place.
     Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
935
936/* Return the effect of frame-related instruction INSN. */
937
938static rtx
939mep_frame_expr (rtx insn)
940{
941 rtx note, expr;
942
943 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
944 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
945 RTX_FRAME_RELATED_P (expr) = 1;
946 return expr;
947}
948
949/* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
950 new pattern in INSN1; INSN2 will be deleted by the caller. */
951
static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* Preserve frame-unwind information: fold both insns' frame effects
     into a single SEQUENCE note on the surviving insn.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  INSN_CODE (insn1) = -1;	/* Force re-recognition.  */
}
972
973/* SET_INSN is an instruction that adds OFFSET to REG. Go back through
974 the basic block to see if any previous load or store instruction can
975 be persuaded to do SET_INSN as a side-effect. Return true if so. */
976
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  insn = set_insn;
  /* Scan backwards to the head of the basic block for a load or store
     through REG that can absorb the addition.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Found one; merge SET_INSN into it as a side effect.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Stop at anything that sets or uses REG, or that we must
	     not move the addition across.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
1003
1004/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1005
1006bool
1007mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1008{
1009 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1010 extract_insn (insn);
1011 return result;
1012}
1013
1014bool
1015mep_allow_clip (rtx ux, rtx lx, int s)
1016{
1017 HOST_WIDE_INT u = INTVAL (ux);
1018 HOST_WIDE_INT l = INTVAL (lx);
1019 int i;
1020
1021 if (!TARGET_OPT_CLIP)
1022 return false;
1023
1024 if (s)
1025 {
1026 for (i = 0; i < 30; i ++)
1027 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1028 && (l == - ((HOST_WIDE_INT) 1 << i)))
1029 return true;
1030 }
1031 else
1032 {
1033 if (l != 0)
1034 return false;
1035
1036 for (i = 0; i < 30; i ++)
1037 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1038 return true;
1039 }
1040 return false;
1041}
1042
1043bool
1044mep_bit_position_p (rtx x, bool looking_for)
1045{
1046 if (GET_CODE (x) != CONST_INT)
1047 return false;
1048 switch ((int) INTVAL(x) & 0xff)
1049 {
1050 case 0x01: case 0x02: case 0x04: case 0x08:
1051 case 0x10: case 0x20: case 0x40: case 0x80:
1052 return looking_for;
1053 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1054 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1055 return !looking_for;
1056 }
1057 return false;
1058}
1059
/* True if moving symbolic SRC into DEST cannot be done in a single
   instruction: far-section symbols, symbol+offset sums whose offset
   lies outside [-65536, 0xffffff], and symbolic loads into hard
   registers above $7.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Strip CONST/MEM wrappers; bail out unless a symbolic core
     remains.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1088
/* Decide whether a mov of OPERANDS must be split into multiple
   instructions.  SYMBOLIC is nonzero when operand 1 is a symbolic
   value rather than an integer constant.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching the I, J or O constraints load in one insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants are single-insn too, but once reload has started only
     when the destination is a low ($0..$7) register -- NOTE(review):
     confirm against the movsi patterns.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1114
1115/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1116 it to one specific value. So the insn chosen depends on whether
1117 the source and destination modes match. */
1118
1119bool
1120mep_vliw_mode_match (rtx tgt)
1121{
1122 bool src_vliw = mep_vliw_function_p (cfun->decl);
1123 bool tgt_vliw = INTVAL (tgt);
1124
1125 return src_vliw == tgt_vliw;
1126}
1127
a9d1723f
DD
1128/* Like the above, but also test for near/far mismatches. */
1129
1130bool
1131mep_vliw_jmp_match (rtx tgt)
1132{
1133 bool src_vliw = mep_vliw_function_p (cfun->decl);
1134 bool tgt_vliw = INTVAL (tgt);
1135
1136 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1137 return false;
1138
1139 return src_vliw == tgt_vliw;
1140}
1141
7acf4da6
DD
1142bool
1143mep_multi_slot (rtx x)
1144{
1145 return get_attr_slot (x) == SLOT_MULTI;
1146}
1147
1148
5ba863d7
DD
1149bool
1150mep_legitimate_constant_p (rtx x)
1151{
1152 /* We can't convert symbol values to gp- or tp-rel values after
1153 reload, as reload might have used $gp or $tp for other
1154 purposes. */
1155 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1156 {
1157 char e = mep_section_tag (x);
1158 return (e != 't' && e != 'b');
1159 }
1160 return 1;
1161}
1162
7acf4da6
DD
/* Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

/* Return true if X is a legitimate address for MODE.  STRICT is
   nonzero after reload, when only valid hard registers may serve as
   bases.  MODE is VOIDmode when validating a call address.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- but only for accesses that fit in one word.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base register plus a 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base register plus a TP-/GP-relative unspec, optionally with a
     constant offset folded in.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses are validated with VOIDmode; a bare symbol is fine.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Absolute constants for word-sized accesses, excluding tiny/based
     section tags; integer constants must also be small, nonnegative
     and word-aligned.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && LEGITIMATE_CONSTANT_P (x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1273
/* LEGITIMIZE_RELOAD_ADDRESS worker: try to fix up the invalid address
   *X for operand OPNUM with reload type TYPE_I (an int-encoded
   enum reload_type).  Returns 1 if a reload was pushed, 0 to let the
   generic reload machinery handle the address.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1315
/* Return the encoded length in bytes (2 or 4) of the core memory
   access in INSN whose memory operand is SET operand OPN, judged by
   its addressing form.  INSN must be a single_set.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The non-memory side of the set.  */
  rtx addr = XEXP (mem, 0);

  /* Plain register indirect always has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* $sp-relative word accesses with a 7-bit word-scaled offset.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short $tp forms only reach registers $0..$7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* TP-relative unspec addends use the short form.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* So do small nonnegative offsets aligned to the access size.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1362
1363int
1364mep_cop_address_length (rtx insn, int opn)
1365{
1366 rtx set = single_set (insn);
1367 rtx mem = XEXP (set, opn);
1368 rtx addr = XEXP (mem, 0);
1369
1370 if (GET_CODE (mem) != MEM)
1371 return 2;
1372 if (register_operand (addr, Pmode))
1373 return 2;
1374 if (GET_CODE (addr) == POST_INC)
1375 return 2;
1376
1377 return 4;
1378}
1379
#define DEBUG_EXPAND_MOV 0

/* Expander for the mov patterns.  Rewrites tiny ('t'), based ('b')
   and far ('f') section references into legitimate forms and emits
   any multi-insn sequences required.  Returns true when the move has
   been fully emitted here; false when the caller should fall through
   to the ordinary move insn.  */

bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];			/* Section tags of dest and source.  */
  rtx tpsym, tpoffs;
  int post_reload = 0;		/* Nonzero when $gp/$tp rewrites are unsafe.  */

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Mem-to-mem moves need an intermediate register; we may only
     create one before reload.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* If the expected $gp/$tp initial values are no longer intact,
	 tiny/based rewrites in terms of them are not safe.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Rewrite tiny/based symbolic sources as $gp-/$tp-relative sums.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  /* Bare symbol: (const (unspec [sym] UNS_xPREL)).  */
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* Symbol plus offset: keep the offset outside the unspec.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already rewritten; nothing to do.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite tiny/based MEM addresses on either side.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Moves involving a control register on one side and a non-register
     on the other must bounce the source through a fresh register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to a far or non-word-sized symbolic address need the
     address in a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Symbolic sources not reachable directly are built with a
     top/bottom symbol pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Likewise for loads through such a symbolic address.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1572
/* Cases where the pattern can't be made to use at all.  Returns true
   only when the move is acceptable as a single insn pattern.  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these. */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1639
#define DEBUG_SPLIT_WIDE_MOVE 0

/* Split a doubleword move into two word moves.  On entry operands
   0/1 are the destination and source; on exit operands 2/3 hold the
   first (high) pair and operands 4/5 the second (low) pair, swapped
   when needed so the first move does not clobber the second's source.  */

void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A 64-bit coprocessor register: the low word is the
		   register itself, the high word is bits 32..63.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A register pair; which member is the high word
		   depends on endianness.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1721
1722/* Emit a setcc instruction in its entirity. */
1723
1724static bool
1725mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1726{
1727 rtx tmp;
1728
1729 switch (code)
1730 {
1731 case GT:
1732 case GTU:
1733 tmp = op1, op1 = op2, op2 = tmp;
1734 code = swap_condition (code);
1735 /* FALLTHRU */
1736
1737 case LT:
1738 case LTU:
1739 op1 = force_reg (SImode, op1);
1740 emit_insn (gen_rtx_SET (VOIDmode, dest,
1741 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1742 return true;
1743
1744 case EQ:
1745 if (op2 != const0_rtx)
1746 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1747 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1748 return true;
1749
1750 case NE:
1751 /* Branchful sequence:
1752 mov dest, 0 16-bit
1753 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1754 mov dest, 1 16-bit
1755
1756 Branchless sequence:
1757 add3 tmp, op1, -op2 32-bit (or mov + sub)
1758 sltu3 tmp, tmp, 1 16-bit
1759 xor3 dest, tmp, 1 32-bit
1760 */
1761 if (optimize_size && op2 != const0_rtx)
1762 return false;
1763
1764 if (op2 != const0_rtx)
1765 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1766
1767 op2 = gen_reg_rtx (SImode);
1768 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1769
1770 emit_insn (gen_rtx_SET (VOIDmode, dest,
1771 gen_rtx_XOR (SImode, op2, const1_rtx)));
1772 return true;
1773
1774 case LE:
1775 if (GET_CODE (op2) != CONST_INT
1776 || INTVAL (op2) == 0x7ffffff)
1777 return false;
1778 op2 = GEN_INT (INTVAL (op2) + 1);
1779 return mep_expand_setcc_1 (LT, dest, op1, op2);
1780
1781 case LEU:
1782 if (GET_CODE (op2) != CONST_INT
1783 || INTVAL (op2) == -1)
1784 return false;
1785 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1786 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1787
1788 case GE:
1789 if (GET_CODE (op2) != CONST_INT
1790 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1791 return false;
1792 op2 = GEN_INT (INTVAL (op2) - 1);
1793 return mep_expand_setcc_1 (GT, dest, op1, op2);
1794
1795 case GEU:
1796 if (GET_CODE (op2) != CONST_INT
1797 || op2 == const0_rtx)
1798 return false;
1799 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1800 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1801
1802 default:
1803 gcc_unreachable ();
1804 }
1805}
1806
1807bool
1808mep_expand_setcc (rtx *operands)
1809{
1810 rtx dest = operands[0];
1811 enum rtx_code code = GET_CODE (operands[1]);
1812 rtx op0 = operands[2];
1813 rtx op1 = operands[3];
1814
1815 return mep_expand_setcc_1 (code, dest, op0, op1);
1816}
1817
/* Expand the comparison {operands[0]: code, operands[1]: op0,
   operands[2]: op1} for a conditional branch, reducing it to an
   EQ/NE (or small-immediate LT/GE) test and returning the comparison
   rtx the branch should use.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Small immediates can be branched on directly.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      /* Otherwise materialize the setcc and branch on nonzero.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      if (mep_imm4_operand (op1, SImode))
	break;

      /* a >= b is !(a < b).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= N is a < N+1; a > N is a >= N+1 (except at INT_MAX).  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise test the swapped comparison and branch on its
	 zero/nonzero result.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* Unsigned a < 1 is a == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try a <= b directly, else test b < a and invert.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try a >= b directly, else test a < b and invert.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1930
/* Return the assembler template for an equality branch comparing
   operand 0 with operand 1 and jumping to label operand 2; NE nonzero
   selects the not-equal flavor.  */

const char *
mep_emit_cbranch (rtx *operands, int ne)
{
  if (GET_CODE (operands[1]) == REG)
    return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
  /* Short compare-with-zero forms — avoided in VLIW functions
     (reason not visible here; presumably not VLIW-schedulable).  */
  else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
    return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
  else
    return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
}
1941
/* Expand a call (RETURNS_VALUE == 0) or call_value (== 1) pattern.
   For call_value, operand 0 is the result and the remaining operands
   shift up by one; the emitted call insn also uses $tp and $gp.  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  /* Force illegitimate call targets into a register.  */
  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
1966\f
1967/* Aliasing Support. */
1968
1969/* If X is a machine specific address (i.e. a symbol or label being
1970 referenced as a displacement from the GOT implemented using an
1971 UNSPEC), then return the base term. Otherwise return X. */
1972
1973rtx
1974mep_find_base_term (rtx x)
1975{
1976 rtx base, term;
1977 int unspec;
1978
1979 if (GET_CODE (x) != PLUS)
1980 return x;
1981 base = XEXP (x, 0);
1982 term = XEXP (x, 1);
1983
1984 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1985 && base == mep_tp_rtx ())
1986 unspec = UNS_TPREL;
1987 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1988 && base == mep_gp_rtx ())
1989 unspec = UNS_GPREL;
1990 else
1991 return x;
1992
1993 if (GET_CODE (term) != CONST)
1994 return x;
1995 term = XEXP (term, 0);
1996
1997 if (GET_CODE (term) != UNSPEC
1998 || XINT (term, 1) != unspec)
1999 return x;
2000
2001 return XVECEXP (term, 0, 0);
2002}
2003\f
2004/* Reload Support. */
2005
2006/* Return true if the registers in CLASS cannot represent the change from
2007 modes FROM to TO. */
2008
2009bool
2010mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2011 enum reg_class regclass)
2012{
2013 if (from == to)
2014 return false;
2015
2016 /* 64-bit COP regs must remain 64-bit COP regs. */
2017 if (TARGET_64BIT_CR_REGS
2018 && (regclass == CR_REGS
2019 || regclass == LOADABLE_CR_REGS)
2020 && (GET_MODE_SIZE (to) < 8
2021 || GET_MODE_SIZE (from) < 8))
2022 return true;
2023
2024 return false;
2025}
2026
/* True if register class C contains registers outside GENERAL_REGS.  */
#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2028
2029static bool
2030mep_general_reg (rtx x)
2031{
2032 while (GET_CODE (x) == SUBREG)
2033 x = XEXP (x, 0);
2034 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2035}
2036
2037static bool
2038mep_nongeneral_reg (rtx x)
2039{
2040 while (GET_CODE (x) == SUBREG)
2041 x = XEXP (x, 0);
2042 return (GET_CODE (x) == REG
2043 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2044}
2045
2046static bool
2047mep_general_copro_reg (rtx x)
2048{
2049 while (GET_CODE (x) == SUBREG)
2050 x = XEXP (x, 0);
2051 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2052}
2053
2054static bool
2055mep_nonregister (rtx x)
2056{
2057 while (GET_CODE (x) == SUBREG)
2058 x = XEXP (x, 0);
2059 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2060}
2061
2062#define DEBUG_RELOAD 0
2063
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class RCLASS; NO_REGS means
   no intermediate register is required.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
2102
/* Copying X to a register in RCLASS.  Return the class of an extra
   intermediate register needed for the copy, or NO_REGS if the move
   can be done without one.  */

enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor destinations have their own helper; any other
     non-general destination needs a GPR go-between unless the source
     is already a general register.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
2128
/* Copying a register in RCLASS to X.  Return the class of an extra
   intermediate register needed for the copy, or NO_REGS if the move
   can be done without one.  */

enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Mirror of mep_secondary_input_reload_class, for the store
     direction.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
2155
2156/* Implement SECONDARY_MEMORY_NEEDED. */
2157
2158bool
2159mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2160 enum machine_mode mode ATTRIBUTE_UNUSED)
2161{
2162 if (!mep_have_core_copro_moves_p)
2163 {
2164 if (reg_classes_intersect_p (rclass1, CR_REGS)
2165 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2166 return true;
2167 if (reg_classes_intersect_p (rclass2, CR_REGS)
2168 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2169 return true;
2170 if (!mep_have_copro_copro_moves_p
2171 && reg_classes_intersect_p (rclass1, CR_REGS)
2172 && reg_classes_intersect_p (rclass2, CR_REGS))
2173 return true;
2174 }
2175 return false;
2176}
2177
/* Expand a secondary reload of operands[1] into operands[0] using the
   scratch register operands[2].  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  /* Encode the operand kinds as a two-digit selector: tens digit for
     the destination, units for the source; 2 = far symbol, 1 = control
     register, 0 = anything else.  */
  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here. */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Bounce the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Load the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2228
2229/* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2230 can be moved directly into registers 0 to 7, but not into the rest.
2231 If so, and if the required class includes registers 0 to 7, restrict
2232 it to those registers. */
2233
2234enum reg_class
2235mep_preferred_reload_class (rtx x, enum reg_class rclass)
2236{
2237 switch (GET_CODE (x))
2238 {
2239 case CONST_INT:
2240 if (INTVAL (x) >= 0x10000
2241 && INTVAL (x) < 0x01000000
2242 && (INTVAL (x) & 0xffff) != 0
2243 && reg_class_subset_p (TPREL_REGS, rclass))
2244 rclass = TPREL_REGS;
2245 break;
2246
2247 case CONST:
2248 case SYMBOL_REF:
2249 case LABEL_REF:
2250 if (mep_section_tag (x) != 'f'
2251 && reg_class_subset_p (TPREL_REGS, rclass))
2252 rclass = TPREL_REGS;
2253 break;
2254
2255 default:
2256 break;
2257 }
2258 return rclass;
2259}
2260\f
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Direct coprocessor-to-coprocessor moves, when the hardware has them.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Coprocessor-to-coprocessor without a direct move insn.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Moves between the core and coprocessor register files.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Anything that has to bounce through memory is prohibitively dear.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2300
2301\f
2302/* Functions to save and restore machine-specific function data. */
2303
/* Allocate a fresh, zero-initialized machine_function for the current
   function (installed as the init_machine_status hook).  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2309
/* Return a stack slot (addressed relative to the argument pointer) in
   which the entry value of hard register REG may be saved, allocating
   a new 4-byte slot on first request.  Returns NULL_RTX when REG gets
   no slot.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a slot; reg_save_size grows by 4 per register.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
}
2337
2338rtx
2339mep_return_addr_rtx (int count)
2340{
2341 if (count != 0)
2342 return const0_rtx;
2343
2344 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2345}
2346
2347static rtx
2348mep_tp_rtx (void)
2349{
2350 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2351}
2352
2353static rtx
2354mep_gp_rtx (void)
2355{
2356 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2357}
2358
2359static bool
2360mep_interrupt_p (void)
2361{
2362 if (cfun->machine->interrupt_handler == 0)
2363 {
2364 int interrupt_handler
2365 = (lookup_attribute ("interrupt",
2366 DECL_ATTRIBUTES (current_function_decl))
2367 != NULL_TREE);
2368 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2369 }
2370 return cfun->machine->interrupt_handler == 2;
2371}
2372
2373static bool
2374mep_disinterrupt_p (void)
2375{
2376 if (cfun->machine->disable_interrupts == 0)
2377 {
2378 int disable_interrupts
2379 = (lookup_attribute ("disinterrupt",
2380 DECL_ATTRIBUTES (current_function_decl))
2381 != NULL_TREE);
2382 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2383 }
2384 return cfun->machine->disable_interrupts == 2;
2385}
2386
2387\f
2388/* Frame/Epilog/Prolog Related. */
2389
2390static bool
2391mep_reg_set_p (rtx reg, rtx insn)
2392{
2393 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2394 if (INSN_P (insn))
2395 {
2396 if (FIND_REG_INC_NOTE (insn, reg))
2397 return true;
2398 insn = PATTERN (insn);
2399 }
2400
2401 if (GET_CODE (insn) == SET
2402 && GET_CODE (XEXP (insn, 0)) == REG
2403 && GET_CODE (XEXP (insn, 1)) == REG
2404 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2405 return false;
2406
2407 return set_of (reg, insn) != NULL_RTX;
2408}
2409
2410
/* Cache states for cfun->machine->reg_saved[], recording whether each
   hard register must be saved by the current function.  UNKNOWN means
   the answer has not been computed yet.  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2415
2416static bool
2417mep_reg_set_in_function (int regno)
2418{
2419 rtx reg, insn;
2420
2421 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2422 return true;
2423
2424 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2425 return true;
2426
2427 push_topmost_sequence ();
2428 insn = get_insns ();
2429 pop_topmost_sequence ();
2430
2431 if (!insn)
2432 return false;
2433
2434 reg = gen_rtx_REG (SImode, regno);
2435
2436 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2437 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2438 return true;
2439 return false;
2440}
2441
2442static bool
2443mep_asm_without_operands_p (void)
2444{
2445 if (cfun->machine->asms_without_operands == 0)
2446 {
2447 rtx insn;
2448
2449 push_topmost_sequence ();
2450 insn = get_insns ();
2451 pop_topmost_sequence ();
2452
2453 cfun->machine->asms_without_operands = 1;
2454 while (insn)
2455 {
2456 if (INSN_P (insn)
2457 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2458 {
2459 cfun->machine->asms_without_operands = 2;
2460 break;
2461 }
2462 insn = NEXT_INSN (insn);
2463 }
2464
2465 }
2466 return cfun->machine->asms_without_operands == 2;
2467}
2468
/* Interrupt functions save/restore every call-preserved register, and
   any call-used register it uses (or all if it calls any function,
   since they may get clobbered there too).  Here we check to see
   which call-used registers need saving.  */

/* On IVC2 targets, these coprocessor condition-code registers must
   also be preserved by interrupt handlers.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2478
7acf4da6
DD
2479static bool
2480mep_interrupt_saved_reg (int r)
2481{
2482 if (!mep_interrupt_p ())
2483 return false;
2484 if (r == REGSAVE_CONTROL_TEMP
2485 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2486 return true;
2487 if (mep_asm_without_operands_p ()
2488 && (!fixed_regs[r]
d1b5afd5
DD
2489 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2490 || IVC2_ISAVED_REG (r)))
7acf4da6
DD
2491 return true;
2492 if (!current_function_is_leaf)
2493 /* Function calls mean we need to save $lp. */
d1b5afd5 2494 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
7acf4da6
DD
2495 return true;
2496 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2497 /* The interrupt handler might use these registers for repeat blocks,
2498 or it might call a function that does so. */
2499 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2500 return true;
2501 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2502 return false;
2503 /* Functions we call might clobber these. */
2504 if (call_used_regs[r] && !fixed_regs[r])
2505 return true;
f2082f90 2506 /* Additional registers that need to be saved for IVC2. */
d1b5afd5 2507 if (IVC2_ISAVED_REG (r))
f2082f90
DD
2508 return true;
2509
7acf4da6
DD
2510 return false;
2511}
2512
2513static bool
2514mep_call_saves_register (int r)
2515{
e756464b 2516 if (! cfun->machine->frame_locked)
7acf4da6
DD
2517 {
2518 int rv = MEP_SAVES_NO;
2519
2520 if (cfun->machine->reg_save_slot[r])
2521 rv = MEP_SAVES_YES;
2522 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2523 rv = MEP_SAVES_YES;
2524 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2525 rv = MEP_SAVES_YES;
2526 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2527 rv = MEP_SAVES_YES;
2528 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2529 /* We need these to have stack slots so that they can be set during
2530 unwinding. */
2531 rv = MEP_SAVES_YES;
2532 else if (mep_interrupt_saved_reg (r))
2533 rv = MEP_SAVES_YES;
2534 cfun->machine->reg_saved[r] = rv;
2535 }
2536 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2537}
2538
2539/* Return true if epilogue uses register REGNO. */
2540
2541bool
2542mep_epilogue_uses (int regno)
2543{
2544 /* Since $lp is a call-saved register, the generic code will normally
2545 mark it used in the epilogue if it needs to be saved and restored.
2546 However, when profiling is enabled, the profiling code will implicitly
2547 clobber $11. This case has to be handled specially both here and in
2548 mep_call_saves_register. */
2549 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2550 return true;
2551 /* Interrupt functions save/restore pretty much everything. */
2552 return (reload_completed && mep_interrupt_saved_reg (regno));
2553}
2554
2555static int
2556mep_reg_size (int regno)
2557{
2558 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2559 return 8;
2560 return 4;
2561}
2562
7b5cbb57
AS
2563/* Worker function for TARGET_CAN_ELIMINATE. */
2564
2565bool
2566mep_can_eliminate (const int from, const int to)
2567{
2568 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2569 ? ! frame_pointer_needed
2570 : true);
2571}
2572
7acf4da6
DD
2573int
2574mep_elimination_offset (int from, int to)
2575{
2576 int reg_save_size;
2577 int i;
2578 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2579 int total_size;
2580
e756464b
DD
2581 if (!cfun->machine->frame_locked)
2582 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
7acf4da6
DD
2583
2584 /* We don't count arg_regs_to_save in the arg pointer offset, because
2585 gcc thinks the arg pointer has moved along with the saved regs.
2586 However, we do count it when we adjust $sp in the prologue. */
2587 reg_save_size = 0;
2588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2589 if (mep_call_saves_register (i))
2590 reg_save_size += mep_reg_size (i);
2591
2592 if (reg_save_size % 8)
2593 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2594 else
2595 cfun->machine->regsave_filler = 0;
2596
2597 /* This is what our total stack adjustment looks like. */
2598 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2599
2600 if (total_size % 8)
2601 cfun->machine->frame_filler = 8 - (total_size % 8);
2602 else
2603 cfun->machine->frame_filler = 0;
2604
2605
2606 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2607 return reg_save_size + cfun->machine->regsave_filler;
2608
2609 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2610 return cfun->machine->frame_filler + frame_size;
2611
2612 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2613 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2614
2615 gcc_unreachable ();
2616}
2617
2618static rtx
2619F (rtx x)
2620{
2621 RTX_FRAME_RELATED_P (x) = 1;
2622 return x;
2623}
2624
2625/* Since the prologue/epilogue code is generated after optimization,
2626 we can't rely on gcc to split constants for us. So, this code
2627 captures all the ways to add a constant to a register in one logic
2628 chunk, including optimizing away insns we just don't need. This
2629 makes the prolog/epilog code easier to follow. */
2630static void
2631add_constant (int dest, int src, int value, int mark_frame)
2632{
2633 rtx insn;
2634 int hi, lo;
2635
2636 if (src == dest && value == 0)
2637 return;
2638
2639 if (value == 0)
2640 {
2641 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2642 gen_rtx_REG (SImode, src));
2643 if (mark_frame)
2644 RTX_FRAME_RELATED_P(insn) = 1;
2645 return;
2646 }
2647
2648 if (value >= -32768 && value <= 32767)
2649 {
2650 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2651 gen_rtx_REG (SImode, src),
2652 GEN_INT (value)));
2653 if (mark_frame)
2654 RTX_FRAME_RELATED_P(insn) = 1;
2655 return;
2656 }
2657
2658 /* Big constant, need to use a temp register. We use
2659 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2660 area is always small enough to directly add to). */
2661
2662 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2663 lo = value & 0xffff;
2664
2665 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2666 GEN_INT (hi));
2667
2668 if (lo)
2669 {
2670 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2671 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2672 GEN_INT (lo)));
2673 }
2674
2675 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2676 gen_rtx_REG (SImode, src),
2677 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2678 if (mark_frame)
2679 {
2680 RTX_FRAME_RELATED_P(insn) = 1;
2681 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2682 gen_rtx_SET (SImode,
2683 gen_rtx_REG (SImode, dest),
2684 gen_rtx_PLUS (SImode,
2685 gen_rtx_REG (SImode, dest),
2686 GEN_INT (value))));
2687 }
2688}
2689
7acf4da6
DD
2690/* Move SRC to DEST. Mark the move as being potentially dead if
2691 MAYBE_DEAD_P. */
2692
2693static rtx
2694maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2695{
2696 rtx insn = emit_move_insn (dest, src);
2697#if 0
2698 if (maybe_dead_p)
2699 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2700#endif
2701 return insn;
2702}
2703
2704/* Used for interrupt functions, which can't assume that $tp and $gp
2705 contain the correct pointers. */
2706
2707static void
2708mep_reload_pointer (int regno, const char *symbol)
2709{
2710 rtx reg, sym;
2711
2712 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2713 return;
2714
2715 reg = gen_rtx_REG (SImode, regno);
2716 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2717 emit_insn (gen_movsi_topsym_s (reg, sym));
2718 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2719}
2720
e756464b
DD
2721/* Assign save slots for any register not already saved. DImode
2722 registers go at the end of the reg save area; the rest go at the
2723 beginning. This is for alignment purposes. Returns true if a frame
2724 is really needed. */
2725static bool
2726mep_assign_save_slots (int reg_save_size)
7acf4da6 2727{
e756464b 2728 bool really_need_stack_frame = false;
7acf4da6 2729 int di_ofs = 0;
e756464b 2730 int i;
7acf4da6 2731
7acf4da6
DD
2732 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2733 if (mep_call_saves_register(i))
2734 {
2735 int regsize = mep_reg_size (i);
2736
2737 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2738 || mep_reg_set_in_function (i))
e756464b 2739 really_need_stack_frame = true;
7acf4da6
DD
2740
2741 if (cfun->machine->reg_save_slot[i])
2742 continue;
2743
2744 if (regsize < 8)
2745 {
2746 cfun->machine->reg_save_size += regsize;
2747 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2748 }
2749 else
2750 {
2751 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2752 di_ofs += 8;
2753 }
2754 }
e756464b
DD
2755 cfun->machine->frame_locked = 1;
2756 return really_need_stack_frame;
2757}
2758
/* Expand the function prologue: disable interrupts when requested,
   adjust $sp, store every register that mep_call_saves_register
   reports (using REGSAVE_CONTROL_TEMP as a staging register for
   control/coprocessor registers), set up the frame pointer, and for
   interrupt handlers reload $gp/$tp from their well-known symbols.  */
2759void
2760mep_expand_prologue (void)
2761{
2762  int i, rss, sp_offset = 0;
2763  int reg_save_size;
2764  int frame_size;
d2e1a4c2 2765  int really_need_stack_frame;
e756464b
DD
2766
2767  /* We must not allow register renaming in interrupt functions,
2768     because that invalidates the correctness of the set of call-used
2769     registers we're going to save/restore.  */
2770  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2771
2772  if (mep_disinterrupt_p ())
2773    emit_insn (gen_mep_disable_int ());
2774
2775  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2776
2777  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2778  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
d2e1a4c2 2779  really_need_stack_frame = frame_size;
e756464b
DD
2780
2781  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
7acf4da6
DD
2782
  /* If the whole frame fits a short offset (< 128 bytes), do the entire
     $sp adjustment up front so the register stores use small offsets;
     otherwise adjust by just the save area now and the rest later.  */
2783  sp_offset = reg_save_size;
2784  if (sp_offset + frame_size < 128)
2785    sp_offset += frame_size ;
2786
2787  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2788
  /* Store each register that needs saving into its assigned slot.  */
2789  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2790    if (mep_call_saves_register(i))
2791      {
2792	rtx mem;
2793	bool maybe_dead_p;
2794	enum machine_mode rmode;
2795
2796	rss = cfun->machine->reg_save_slot[i];
2797
2798	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2799	    && (!mep_reg_set_in_function (i)
2800		&& !mep_interrupt_p ()))
2801	  continue;
2802
2803	if (mep_reg_size (i) == 8)
2804	  rmode = DImode;
2805	else
2806	  rmode = SImode;
2807
2808	/* If there is a pseudo associated with this register's initial value,
2809	   reload might have already spilt it to the stack slot suggested by
2810	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
2811	   deleted as dead.  */
2812	mem = gen_rtx_MEM (rmode,
2813			   plus_constant (stack_pointer_rtx, sp_offset - rss));
2814	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2815
2816	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2817	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2818	else if (rmode == DImode)
2819	  {
	    /* 64-bit control register: split into two 32-bit stores via
	       a pair of staging registers, big-endian aware.  */
2820	    rtx insn;
2821	    int be = TARGET_BIG_ENDIAN ? 4 : 0;
2822
2823	    mem = gen_rtx_MEM (SImode,
2824			       plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2825
2826	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2827			     gen_rtx_REG (SImode, i),
2828			     maybe_dead_p);
2829	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2830			     gen_rtx_ZERO_EXTRACT (SImode,
2831						   gen_rtx_REG (DImode, i),
2832						   GEN_INT (32),
2833						   GEN_INT (32)),
2834			     maybe_dead_p);
2835	    insn = maybe_dead_move (mem,
2836				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2837				    maybe_dead_p);
2838	    RTX_FRAME_RELATED_P (insn) = 1;
2839
2840	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2841			  gen_rtx_SET (VOIDmode,
2842				       copy_rtx (mem),
2843				       gen_rtx_REG (rmode, i)));
2844	    mem = gen_rtx_MEM (SImode,
2845			       plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2846	    insn = maybe_dead_move (mem,
2847				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2848				    maybe_dead_p);
2849	  }
2850	else
2851	  {
	    /* Other special registers can't be stored directly; bounce
	       through the staging register.  */
2852	    rtx insn;
2853	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2854			     gen_rtx_REG (rmode, i),
2855			     maybe_dead_p);
2856	    insn = maybe_dead_move (mem,
2857				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2858				    maybe_dead_p);
2859	    RTX_FRAME_RELATED_P (insn) = 1;
2860
2861	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2862			  gen_rtx_SET (VOIDmode,
2863				       copy_rtx (mem),
2864				       gen_rtx_REG (rmode, i)));
2865	  }
2866      }
2867
2868  if (frame_pointer_needed)
a46f0964
DD
2869    {
2870      /* We've already adjusted down by sp_offset. Total $sp change
2871	 is reg_save_size + frame_size.  We want a net change here of
2872	 just reg_save_size.  */
2873      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2874    }
7acf4da6
DD
2875
  /* Emit any remaining stack adjustment (zero when the whole frame was
     small enough to allocate up front).  */
2876  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2877
2878  if (mep_interrupt_p ())
2879    {
      /* Interrupt handlers can't trust the caller's $gp/$tp.  */
2880      mep_reload_pointer(GP_REGNO, "__sdabase");
2881      mep_reload_pointer(TP_REGNO, "__tpbase");
2882    }
2883}
2884
/* Emit a human-readable frame layout description as assembler comments
   at the start of each function: either a one-line summary (when no
   debug info is wanted) or a slot-by-slot picture of the save area,
   locals, alignment fillers and outgoing argument space.  Also picks
   the textual name for the frame-pointer register.  */
2885static void
2886mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2887{
2888  int local = hwi_local;
2889  int frame_size = local + crtl->outgoing_args_size;
2890  int reg_save_size;
2891  int ffill;
2892  int i, sp, skip;
2893  int sp_offset;
2894  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2895
2896  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2897  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2898  sp_offset = reg_save_size + frame_size;
2899
2900  ffill = cfun->machine->frame_filler;
2901
  /* $8 doubles as the frame pointer; name it accordingly.  */
2902  if (cfun->machine->mep_frame_pointer_needed)
2903    reg_names[FP_REGNO] = "$fp";
2904  else
2905    reg_names[FP_REGNO] = "$8";
2906
2907  if (sp_offset == 0)
2908    return;
2909
  /* Without debug info, emit only a compact one-line summary.  */
2910  if (debug_info_level == DINFO_LEVEL_NONE)
2911    {
2912      fprintf (file, "\t# frame: %d", sp_offset);
2913      if (reg_save_size)
2914	fprintf (file, " %d regs", reg_save_size);
2915      if (local)
2916	fprintf (file, " %d locals", local);
2917      if (crtl->outgoing_args_size)
2918	fprintf (file, " %d args", crtl->outgoing_args_size);
2919      fprintf (file, "\n");
2920      return;
2921    }
2922
2923  fprintf (file, "\t#\n");
2924  fprintf (file, "\t# Initial Frame Information:\n");
2925  if (sp_offset || !frame_pointer_needed)
2926    fprintf (file, "\t# Entry   ---------- 0\n");
2927
2928  /* Sort registers by save slots, so they're printed in the order
2929     they appear in memory, not the order they're saved in.  */
2930  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2931    slot_map[si] = si;
2932  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2933    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2934      if (cfun->machine->reg_save_slot[slot_map[si]]
2935	  > cfun->machine->reg_save_slot[slot_map[sj]])
2936	{
2937	  int t = slot_map[si];
2938	  slot_map[si] = slot_map[sj];
2939	  slot_map[sj] = t;
2940	}
2941
  /* Walk the sorted slots, reporting each saved register and any
     alignment gaps between slots.  */
2942  sp = 0;
2943  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2944    {
2945      int rsize;
2946      int r = slot_map[i];
2947      int rss = cfun->machine->reg_save_slot[r];
2948
e756464b
DD
2949      if (!mep_call_saves_register (r))
2950	continue;
2951
2952      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2953	  && (!mep_reg_set_in_function (r)
2954	      && !mep_interrupt_p ()))
7acf4da6
DD
2955	continue;
2956
2957      rsize = mep_reg_size(r);
2958      skip = rss - (sp+rsize);
2959      if (skip)
2960	fprintf (file, "\t# %3d bytes for alignment\n", skip);
2961      fprintf (file, "\t# %3d bytes for saved %-3s   %3d($sp)\n",
2962	       rsize, reg_names[r], sp_offset - rss);
2963      sp = rss;
2964    }
2965
2966  skip = reg_save_size - sp;
2967  if (skip)
2968    fprintf (file, "\t# %3d bytes for alignment\n", skip);
2969
2970  if (frame_pointer_needed)
2971    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2972  if (local)
2973    fprintf (file, "\t# %3d bytes for local vars\n", local);
2974  if (ffill)
2975    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2976  if (crtl->outgoing_args_size)
2977    fprintf (file, "\t# %3d bytes for outgoing args\n",
2978	     crtl->outgoing_args_size);
2979  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2980  fprintf (file, "\t#\n");
2981}
2982
2983
/* Non-zero while expanding an EH-return epilogue: suppresses the
   restore of $lp so the unwinder-supplied handler address is kept.  */
2984static int mep_prevent_lp_restore = 0;
/* Non-zero while expanding a sibcall epilogue: the frame is torn down
   but no return instruction is emitted.  */
2985static int mep_sibcall_epilogue = 0;
2986
/* Expand the function epilogue: rebase $sp (from $fp when a frame
   pointer exists), restore the saved registers in reverse order,
   release the frame, and emit the appropriate return (EH return,
   interrupt return, or a jump through the restored $lp).  Behavior is
   modulated by mep_prevent_lp_restore and mep_sibcall_epilogue.  */
2987void
2988mep_expand_epilogue (void)
2989{
2990  int i, sp_offset = 0;
2991  int reg_save_size = 0;
2992  int frame_size;
2993  int lp_temp = LP_REGNO, lp_slot = -1;
2994  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
2995  int interrupt_handler = mep_interrupt_p ();
2996
2997  if (profile_arc_flag == 2)
2998    emit_insn (gen_mep_bb_trace_ret ());
2999
3000  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3001  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3002
e756464b 3003  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
7acf4da6
DD
3004
3005  if (frame_pointer_needed)
3006    {
3007      /* If we have a frame pointer, we won't have a reliable stack
3008	 pointer (alloca, you know), so rebase SP from FP */
3009      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3010		      gen_rtx_REG (SImode, FP_REGNO));
3011      sp_offset = reg_save_size;
3012    }
3013  else
3014    {
3015      /* SP is right under our local variable space.  Adjust it if
3016	 needed.  */
3017      sp_offset = reg_save_size + frame_size;
3018      if (sp_offset >= 128)
3019	{
	  /* Release the locals first so restores use short offsets.  */
3020	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3021	  sp_offset -= frame_size;
3022	}
3023    }
3024
3025  /* This is backwards so that we restore the control and coprocessor
3026     registers before the temporary registers we use to restore
3027     them.  */
3028  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3029    if (mep_call_saves_register (i))
3030      {
3031	enum machine_mode rmode;
3032	int rss = cfun->machine->reg_save_slot[i];
3033
3034	if (mep_reg_size (i) == 8)
3035	  rmode = DImode;
3036	else
3037	  rmode = SImode;
3038
3039	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3040	    && !(mep_reg_set_in_function (i) || interrupt_handler))
3041	  continue;
3042	if (mep_prevent_lp_restore && i == LP_REGNO)
3043	  continue;
	/* r10/r11 carry the EH stack adjustment and handler; leave
	   them alone in ordinary epilogues of EH-return functions.  */
3044	if (!mep_prevent_lp_restore
3045	    && !interrupt_handler
3046	    && (i == 10 || i == 11))
3047	  continue;
3048
3049	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3050	  emit_move_insn (gen_rtx_REG (rmode, i),
3051			  gen_rtx_MEM (rmode,
3052				       plus_constant (stack_pointer_rtx,
3053						      sp_offset-rss)));
3054	else
3055	  {
3056	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3057	      /* Defer this one so we can jump indirect rather than
3058		 copying the RA to $lp and "ret".  EH epilogues
3059		 automatically skip this anyway.  */
3060	      lp_slot = sp_offset-rss;
3061	    else
3062	      {
3063		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3064				gen_rtx_MEM (rmode,
3065					     plus_constant (stack_pointer_rtx,
3066							    sp_offset-rss)));
3067		emit_move_insn (gen_rtx_REG (rmode, i),
3068				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3069	      }
3070	  }
3071      }
3072  if (lp_slot != -1)
3073    {
3074      /* Restore this one last so we know it will be in the temp
3075	 register when we return by jumping indirectly via the temp.  */
3076      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3077		      gen_rtx_MEM (SImode,
3078				   plus_constant (stack_pointer_rtx,
3079						  lp_slot)));
3080      lp_temp = REGSAVE_CONTROL_TEMP;
3081    }
3082
3083
3084  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3085
  /* Apply the unwinder-supplied stack adjustment for EH returns.  */
3086  if (crtl->calls_eh_return && mep_prevent_lp_restore)
3087    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3088			   gen_rtx_REG (SImode, SP_REGNO),
3089			   cfun->machine->eh_stack_adjust));
3090
3091  if (mep_sibcall_epilogue)
3092    return;
3093
3094  if (mep_disinterrupt_p ())
3095    emit_insn (gen_mep_enable_int ());
3096
3097  if (mep_prevent_lp_restore)
3098    {
3099      emit_jump_insn (gen_eh_return_internal ());
3100      emit_barrier ();
3101    }
3102  else if (interrupt_handler)
3103    emit_jump_insn (gen_mep_reti ());
3104  else
3105    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3106}
3107
3108void
3109mep_expand_eh_return (rtx *operands)
3110{
3111 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3112 {
3113 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3114 emit_move_insn (ra, operands[0]);
3115 operands[0] = ra;
3116 }
3117
3118 emit_insn (gen_eh_epilogue (operands[0]));
3119}
3120
3121void
3122mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3123{
3124 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3125 mep_prevent_lp_restore = 1;
3126 mep_expand_epilogue ();
3127 mep_prevent_lp_restore = 0;
3128}
3129
3130void
3131mep_expand_sibcall_epilogue (void)
3132{
3133 mep_sibcall_epilogue = 1;
3134 mep_expand_epilogue ();
3135 mep_sibcall_epilogue = 0;
3136}
3137
3138static bool
3139mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3140{
3141 if (decl == NULL)
3142 return false;
3143
3144 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3145 return false;
3146
3147 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3148 if (mep_interrupt_p () || mep_disinterrupt_p ())
3149 return false;
3150
3151 return true;
3152}
3153
3154rtx
3155mep_return_stackadj_rtx (void)
3156{
3157 return gen_rtx_REG (SImode, 10);
3158}
3159
3160rtx
3161mep_return_handler_rtx (void)
3162{
3163 return gen_rtx_REG (SImode, LP_REGNO);
3164}
3165
/* Emit the profiling sequence: preserve $0 and $lp on the stack around
   a call to __mep_mcount.  Always emitted right at the beginning of
   the function.  */

void
mep_function_profiler (FILE *file)
{
  fputs ("\t# mep function profiler\n"
	 "\tadd\t$sp, -8\n"
	 "\tsw\t$0, ($sp)\n"
	 "\tldc\t$0, $lp\n"
	 "\tsw\t$0, 4($sp)\n"
	 "\tbsr\t__mep_mcount\n"
	 "\tlw\t$0, 4($sp)\n"
	 "\tstc\t$0, $lp\n"
	 "\tlw\t$0, ($sp)\n"
	 "\tadd\t$sp, 8\n\n",
	 file);
}
3181
3182const char *
3183mep_emit_bb_trace_ret (void)
3184{
3185 fprintf (asm_out_file, "\t# end of block profiling\n");
3186 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3187 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3188 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3189 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3190 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3191 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3192 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3193 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3194 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3195 return "";
3196}
3197
3198#undef SAVE
3199#undef RESTORE
3200\f
3201/* Operand Printing. */
3202
3203void
3204mep_print_operand_address (FILE *stream, rtx address)
3205{
3206 if (GET_CODE (address) == MEM)
3207 address = XEXP (address, 0);
3208 else
3209 /* cf: gcc.dg/asm-4.c. */
3210 gcc_assert (GET_CODE (address) == REG);
3211
3212 mep_print_operand (stream, address, 0);
3213}
3214
/* Table driving mep_print_operand.  CODE is the operand-modifier
   letter the entry applies to (0 = no modifier); PATTERN is the shape
   string produced by encode_pattern for the operand; FORMAT is the
   output template, in which a digit N substitutes the RTX captured in
   patternr[N], '\\' escapes the next character literally, and other
   characters are copied through.  The list is terminated by an
   all-zero entry.  */
3215static struct
3216{
3217  char code;
3218  const char *pattern;
3219  const char *format;
3220}
3221const conversions[] =
3222{
3223  { 0, "r", "0" },
3224  { 0, "m+ri", "3(2)" },
3225  { 0, "mr", "(1)" },
3226  { 0, "ms", "(1)" },
5fb455bc 3227  { 0, "ml", "(1)" },
7acf4da6
DD
3228  { 0, "mLrs", "%lo(3)(2)" },
3229  { 0, "mLr+si", "%lo(4+5)(2)" },
3230  { 0, "m+ru2s", "%tpoff(5)(2)" },
3231  { 0, "m+ru3s", "%sdaoff(5)(2)" },
3232  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3233  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3234  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3235  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3236  { 0, "mi", "(1)" },
3237  { 0, "m+si", "(2+3)" },
3238  { 0, "m+li", "(2+3)" },
3239  { 0, "i", "0" },
3240  { 0, "s", "0" },
3241  { 0, "+si", "1+2" },
3242  { 0, "+u2si", "%tpoff(3+4)" },
3243  { 0, "+u3si", "%sdaoff(3+4)" },
3244  { 0, "l", "0" },
3245  { 'b', "i", "0" },
3246  { 'B', "i", "0" },
3247  { 'U', "i", "0" },
3248  { 'h', "i", "0" },
3249  { 'h', "Hs", "%hi(1)" },
3250  { 'I', "i", "0" },
3251  { 'I', "u2s", "%tpoff(2)" },
3252  { 'I', "u3s", "%sdaoff(2)" },
3253  { 'I', "+u2si", "%tpoff(3+4)" },
3254  { 'I', "+u3si", "%sdaoff(3+4)" },
3255  { 'J', "i", "0" },
3256  { 'P', "mr", "(1\\+),\\0" },
3257  { 'x', "i", "0" },
3258  { 0, 0, 0 }
3259};
3260
3261static int
3262unique_bit_in (HOST_WIDE_INT i)
3263{
3264 switch (i & 0xff)
3265 {
3266 case 0x01: case 0xfe: return 0;
3267 case 0x02: case 0xfd: return 1;
3268 case 0x04: case 0xfb: return 2;
3269 case 0x08: case 0xf7: return 3;
3270 case 0x10: case 0x7f: return 4;
3271 case 0x20: case 0xbf: return 5;
3272 case 0x40: case 0xdf: return 6;
3273 case 0x80: case 0xef: return 7;
3274 default:
3275 gcc_unreachable ();
3276 }
3277}
3278
3279static int
3280bit_size_for_clip (HOST_WIDE_INT i)
3281{
3282 int rv;
3283
3284 for (rv = 0; rv < 31; rv ++)
3285 if (((HOST_WIDE_INT) 1 << rv) > i)
3286 return rv + 1;
3287 gcc_unreachable ();
3288}
3289
/* Print an operand to an assembler instruction.  CODE is the operand
   modifier letter (0 for none).  '<', 'L' and 'M' are handled ad hoc;
   everything else is driven by the conversions[] table: the operand is
   classified by encode_pattern and the matching FORMAT template is
   interpreted, with special CONST_INT renderings per modifier.  */
3292void
3293mep_print_operand (FILE *file, rtx x, int code)
3294{
3295  int i, j;
3296  const char *real_name;
3297
3298  if (code == '<')
3299    {
3300      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
3301	 we're using, then skip over the "mep_" part of its name.  */
3302      const struct cgen_insn *insn;
3303
3304      if (mep_get_move_insn (mep_cmov, &insn))
3305	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3306      else
3307	mep_intrinsic_unavailable (mep_cmov);
3308      return;
3309    }
  /* '%L': print the bit-op suffix implied by the logic opcode.  */
3310  if (code == 'L')
3311    {
3312      switch (GET_CODE (x))
3313	{
3314	case AND:
3315	  fputs ("clr", file);
3316	  return;
3317	case IOR:
3318	  fputs ("set", file);
3319	  return;
3320	case XOR:
3321	  fputs ("not", file);
3322	  return;
3323	default:
3324	  output_operand_lossage ("invalid %%L code");
3325	}
3326    }
3327  if (code == 'M')
3328    {
3329      /* Print the second operand of a CR <- CR move.  If we're using
3330	 a two-operand instruction (i.e., a real cmov), then just print
3331	 the operand normally.  If we're using a "reg, reg, immediate"
3332	 instruction such as caddi3, print the operand followed by a
3333	 zero field.  If we're using a three-register instruction,
3334	 print the operand twice.  */
3335      const struct cgen_insn *insn;
3336
3337      mep_print_operand (file, x, 0);
3338      if (mep_get_move_insn (mep_cmov, &insn)
3339	  && insn_data[insn->icode].n_operands == 3)
3340	{
3341	  fputs (", ", file);
3342	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3343	    mep_print_operand (file, x, 0);
3344	  else
3345	    mep_print_operand (file, const0_rtx, 0);
3346	}
3347      return;
3348    }
3349
  /* Table-driven path: classify X, then interpret the first matching
     format template.  */
3350  encode_pattern (x);
3351  for (i = 0; conversions[i].pattern; i++)
3352    if (conversions[i].code == code
3353	&& strcmp(conversions[i].pattern, pattern) == 0)
3354      {
3355	for (j = 0; conversions[i].format[j]; j++)
3356	  if (conversions[i].format[j] == '\\')
3357	    {
	      /* Escaped character: emit the next byte literally.  */
3358	      fputc (conversions[i].format[j+1], file);
3359	      j++;
3360	    }
3361	  else if (ISDIGIT(conversions[i].format[j]))
3362	    {
	      /* Digit N: print the RTX captured in patternr[N].  */
3363	      rtx r = patternr[conversions[i].format[j] - '0'];
3364	      switch (GET_CODE (r))
3365		{
3366		case REG:
3367		  fprintf (file, "%s", reg_names [REGNO (r)]);
3368		  break;
3369		case CONST_INT:
3370		  switch (code)
3371		    {
3372		    case 'b':
3373		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3374		      break;
3375		    case 'B':
3376		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3377		      break;
3378		    case 'h':
3379		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3380		      break;
3381		    case 'U':
3382		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3383		      break;
3384		    case 'J':
3385		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3386		      break;
3387		    case 'x':
3388		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3389			  && !(INTVAL (r) & 0xff))
3390			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3391		      else
3392			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3393		      break;
3394		    case 'I':
3395		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3396			  && conversions[i].format[j+1] == 0)
3397			{
3398			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3399			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3400			}
3401		      else
3402			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3403		      break;
3404		    default:
3405		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3406		      break;
3407		    }
3408		  break;
3409		case CONST_DOUBLE:
3410		  fprintf(file, "[const_double 0x%lx]",
3411			  (unsigned long) CONST_DOUBLE_HIGH(r));
3412		  break;
3413		case SYMBOL_REF:
9018b19c 3414		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
7acf4da6
DD
3415		  assemble_name (file, real_name);
3416		  break;
3417		case LABEL_REF:
3418		  output_asm_label (r);
3419		  break;
3420		default:
3421		  fprintf (stderr, "don't know how to print this operand:");
3422		  debug_rtx (r);
3423		  gcc_unreachable ();
3424		}
3425	    }
3426	  else
3427	    {
	      /* Literal character; but suppress a '+' that would precede
		 a negative constant, to avoid printing "+-N".  */
3428	      if (conversions[i].format[j] == '+'
3429		  && (!code || code == 'I')
3430		  && ISDIGIT (conversions[i].format[j+1])
3431		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3432		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3433		continue;
3434	      fputc(conversions[i].format[j], file);
3435	    }
3436	break;
3437      }
3438  if (!conversions[i].pattern)
3439    {
3440      error ("unconvertible operand %c %qs", code?code:'-', pattern);
3441      debug_rtx(x);
3442    }
3443
3444  return;
3445}
3446
/* Implement FINAL_PRESCAN_INSN.  Emits a '+' prefix before INSN when
   it is bundled into a VLIW slot other than the core slot, as
   indicated by its mode and slots attribute.  */

void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3459
/* Function args in registers.  */

/* Worker for TARGET_SETUP_INCOMING_VARARGS.  Record how many of the
   four argument registers ($1..$4) past the named arguments must be
   dumped to the stack for a varargs function, and report the pretend
   size to the caller.  */

static void
mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  /* cum->nregs counts named args already assigned; registers after
     $(nregs + 1) carry the anonymous arguments.  */
  int nsave = 4 - (cum->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  /* NOTE(review): when all four registers are consumed by named args,
     nsave is negative and so is *pretend_size -- presumably harmless
     downstream, but confirm against the callers of this hook.  */
  *pretend_size = nsave * 4;
}
3474
3475static int
3476bytesize (const_tree type, enum machine_mode mode)
3477{
3478 if (mode == BLKmode)
3479 return int_size_in_bytes (type);
3480 return GET_MODE_SIZE (mode);
3481}
3482
/* Worker for TARGET_EXPAND_BUILTIN_SAVEREGS.  Allocates a stack slot
   and copies the unnamed-argument registers into it; returns the
   address of the buffer.  For IVC2 the buffer also holds the
   coprocessor argument registers, stored after the core registers.  */

static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Room for the core registers (rounded up to an 8-byte
	 boundary) plus 8 bytes per coprocessor register.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Core argument registers are $1..$4; the last NS of them are
     unnamed, starting at $(5-ns).  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* Coprocessor argument registers start at hard reg 49.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3520
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)

/* Worker for TARGET_BUILD_BUILTIN_VA_LIST.  The MeP va_list is a
   four-pointer record: the next core-register save slot, the limit of
   the register save area, the next coprocessor-register save slot
   (IVC2), and the next stack argument.  */

static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order; mep_expand_va_start and
     mep_gimplify_va_arg_expr walk this chain in the same order.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3556
/* Worker for TARGET_EXPAND_BUILTIN_VA_START.  Fills in the four
   va_list fields built by mep_build_builtin_va_list: the register
   save area (created by expand_builtin_saveregs), its limit, the
   coprocessor save area, and the first stack argument NEXTARG.  */

static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (4 * ns));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* The coprocessor save area begins after the core save area,
     rounded up to an 8-byte boundary.  */
  u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
		   size_int (8 * ((ns+1)/2)));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3608
/* Worker for TARGET_GIMPLIFY_VA_ARG_EXPR.  Emits gimple that fetches
   the next variadic argument of TYPE from VALIST: from the core
   register save area while slots remain (or the coprocessor area for
   IVC2 vectors), otherwise from the stack.  Values larger than one
   slot (4 bytes, or 8 for IVC2 vectors) are passed by reference and
   dereferenced here.  */

static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      /* Fetch a pointer to the value instead of the value itself.  */
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are word-aligned.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
   */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Registers exhausted?  Jump to the stack path.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both save-area cursors advance in lock step, whichever one the
     value was read from.  */
  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_gp), size_int (4));
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_cop), size_int (8));
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		unshare_expr (next_stack), size_int (rsize));
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference values, one extra dereference fetches the
     pointer that was actually passed.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3719
3720void
3721mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3722 rtx libname ATTRIBUTE_UNUSED,
3723 tree fndecl ATTRIBUTE_UNUSED)
3724{
3725 pcum->nregs = 0;
3726
3727 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3728 pcum->vliw = 1;
3729 else
3730 pcum->vliw = 0;
3731}
3732
0851c6e3
NF
/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack.  Arguments
   larger than 4 bytes are passed indirectly.  Return value in 0,
   unless bigger than 4 bytes, then the caller passes a pointer as the
   first arg.  For varargs, we copy $1..$4 to the stack.  */

static rtx
mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  /* VOIDmode is a signal for the backend to pass data to the call
     expander via the second operand to the call pattern.  We use
     this to determine whether to use "jsr" or "jsrv".  */
  if (mode == VOIDmode)
    return GEN_INT (cum->vliw);

  /* If we haven't run out of argument registers, return the next.  */
  if (cum->nregs < 4)
    {
      /* IVC2 vector arguments go in coprocessor registers (hard reg
	 49 onward); everything else in core registers $1..$4.  */
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
	return gen_rtx_REG (mode, cum->nregs + 49);
      else
	return gen_rtx_REG (mode, cum->nregs + 1);
    }

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
3761
3762static bool
3763mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3764 enum machine_mode mode,
3765 const_tree type,
3766 bool named ATTRIBUTE_UNUSED)
3767{
3768 int size = bytesize (type, mode);
e756464b
DD
3769
3770 /* This is non-obvious, but yes, large values passed after we've run
3771 out of registers are *still* passed by reference - we put the
3772 address of the parameter on the stack, as well as putting the
3773 parameter itself elsewhere on the stack. */
3774
3775 if (size <= 0 || size > 8)
3776 return true;
3777 if (size <= 4)
3778 return false;
3779 if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
3780 return false;
3781 return true;
7acf4da6
DD
3782}
3783
0851c6e3
NF
3784static void
3785mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
3786 enum machine_mode mode ATTRIBUTE_UNUSED,
3787 const_tree type ATTRIBUTE_UNUSED,
3788 bool named ATTRIBUTE_UNUSED)
7acf4da6
DD
3789{
3790 pcum->nregs += 1;
3791}
3792
3793bool
3794mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3795{
3796 int size = bytesize (type, BLKmode);
3797 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
e756464b
DD
3798 return size > 0 && size <= 8 ? 0 : 1;
3799 return size > 0 && size <= 4 ? 0 : 1;
7acf4da6
DD
3800}
3801
/* Worker for TARGET_NARROW_VOLATILE_BITFIELD: MeP always narrows
   volatile bit-field accesses.  The original body had an unreachable
   "return false;" after the "return true;"; the dead statement is
   removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3808
/* Implement FUNCTION_VALUE.  All values are returned in $0, except
   IVC2 vectors, which come back in coprocessor register 48.  */

rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3818
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls never return vectors, so the IVC2 case does not apply.  */

rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3826
/* Handle pipeline hazards.  */

/* Opcodes that participate in hazards, plus their printable names
   (indexed by op_num) for the explanatory comment emitted with the
   workaround nop.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* The opcode classified by the previous call; persists across insns
   within one translation unit's output.  */
static int prev_opcode = 0;

/* This isn't as optimal as it could be, because we don't know what
   control register the STC opcode is storing in.  We only need to add
   the nop if it's the relevant register, but we add it for irrelevant
   registers also.  */

/* Called for each opcode about to be output (ASM_OUTPUT_OPCODE).
   Inserts a nop between an stc and a following fsft or ret.  */

void
mep_asm_output_opcode (FILE *file, const char *ptr)
{
  int this_opcode = op_none;
  const char *hazard = 0;

  /* Classify the mnemonic at PTR; !ISGRAPH guards against matching a
     longer mnemonic that merely starts with these letters.  */
  switch (*ptr)
    {
    case 'f':
      if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
	this_opcode = op_fsft;
      break;
    case 'r':
      if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_ret;
      break;
    case 's':
      if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
	this_opcode = op_stc;
      break;
    }

  if (prev_opcode == op_stc && this_opcode == op_fsft)
    hazard = "nop";
  if (prev_opcode == op_stc && this_opcode == op_ret)
    hazard = "nop";

  if (hazard)
    fprintf(file, "%s\t# %s-%s hazard\n\t",
	    hazard, opnames[prev_opcode], opnames[this_opcode]);

  prev_opcode = this_opcode;
}
3872
/* Handle attributes.  */

/* Attribute handler for "based" and "tiny".  These apply only to
   variables (or variable/pointer types); auto variables and
   pointed-to types are rejected with a warning and *NO_ADD set.  */

static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3903
3904static int
3905mep_multiple_address_regions (tree list, bool check_section_attr)
3906{
3907 tree a;
3908 int count_sections = 0;
3909 int section_attr_count = 0;
3910
3911 for (a = list; a; a = TREE_CHAIN (a))
3912 {
3913 if (is_attribute_p ("based", TREE_PURPOSE (a))
3914 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3915 || is_attribute_p ("near", TREE_PURPOSE (a))
3916 || is_attribute_p ("far", TREE_PURPOSE (a))
3917 || is_attribute_p ("io", TREE_PURPOSE (a)))
3918 count_sections ++;
3919 if (check_section_attr)
3920 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3921 }
3922
3923 if (check_section_attr)
3924 return section_attr_count;
3925 else
3926 return count_sections;
3927}
3928
/* Fetch the attribute list for DECL: type attributes for a type, else
   the decl's own attributes, falling back to its type's attributes
   when the decl has none.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
		: DECL_ATTRIBUTES (decl) \
		  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3934
/* Attribute handler for "near" and "far".  Valid on variables and
   functions; rejects auto variables and pointed-to types, and strips
   all attributes if a conflicting address-region attribute is already
   present.  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3971
3972static tree
3973mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3974 int flags ATTRIBUTE_UNUSED, bool *no_add)
3975{
3976 if (TREE_CODE (*node) != FUNCTION_DECL
3977 && TREE_CODE (*node) != METHOD_TYPE)
3978 {
3979 warning (0, "%qE attribute only applies to functions", name);
3980 *no_add = true;
3981 }
3982 return NULL_TREE;
3983}
3984
/* Attribute handler for "interrupt".  Valid only on function decls;
   marks the function uninlinable and enforces the interrupt contract:
   void return type, no arguments.  */

static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototyped parameter list must be exactly "(void)".  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
4014
/* Attribute handler for "io" and "cb".  Valid only on variables; the
   optional argument must be an integer constant address.  Unless
   -mio-no-volatile is given, the variable is made volatile.  */

static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper to expose the constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
4042
/* Attribute handler for "vliw".  Valid only on functions and function
   types; emits guidance notes (once each) for the common mistakes of
   attaching it to a pointer or array, and rejects the attribute
   outright when the configuration has no VLIW support.  */

static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
	given_type = "arrays";

      if (given_type)
	warning (0, "%qE attribute only applies to functions, not %s",
		 name, given_type);
      else
	warning (0, "%qE attribute only applies to functions",
		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
	  && !gave_pointer_note)
	{
	  inform (input_location, "to describe a pointer to a VLIW function, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr) ();");
	  gave_pointer_note = 1;
	}

      if (TREE_CODE (*node) == ARRAY_TYPE
	  && !gave_array_note)
	{
	  inform (input_location, "to describe an array of VLIW function pointers, use syntax like this:");
	  inform (input_location, "  typedef int (__vliw *vfuncptr[]) ();");
	  gave_array_note = 1;
	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4091
/* MeP's attribute table (TARGET_ATTRIBUTE_TABLE); terminated by the
   NULL sentinel entry.  */

static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",   0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",    0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",    0, 0, false, false, false, mep_validate_near_far, false },
  { "far",     0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",      0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",      0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",    0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,      0, 0, false, false, false, NULL, false }
};
4108
4109static bool
4110mep_function_attribute_inlinable_p (const_tree callee)
4111{
4112 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4113 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4114 return (lookup_attribute ("disinterrupt", attrs) == 0
4115 && lookup_attribute ("interrupt", attrs) == 0);
4116}
4117
ae30c1fa 4118static bool
5cec9f59 4119mep_can_inline_p (tree caller, tree callee)
ae30c1fa
DD
4120{
4121 if (TREE_CODE (callee) == ADDR_EXPR)
4122 callee = TREE_OPERAND (callee, 0);
4123
82e45095 4124 if (!mep_vliw_function_p (caller)
ae30c1fa
DD
4125 && mep_vliw_function_p (callee))
4126 {
82e45095 4127 return false;
ae30c1fa 4128 }
82e45095 4129 return true;
ae30c1fa
DD
4130}
4131
7acf4da6
DD
/* Flag bits recorded per function name for MeP pragmas.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* One record per function name mentioned in a MeP pragma.  */
struct GTY(()) pragma_entry {
  int used;		/* flag bits that were actually looked up */
  int flag;		/* flag bits set by pragmas */
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* Hash table of pragma_entry records, keyed by function name.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4145
4146static int
4147pragma_entry_eq (const void *p1, const void *p2)
4148{
4149 const pragma_entry *old = (const pragma_entry *) p1;
4150 const char *new_name = (const char *) p2;
4151
4152 return strcmp (old->funcname, new_name) == 0;
4153}
4154
/* htab hash callback: hash a pragma_entry by its function name.  */

static hashval_t
pragma_entry_hash (const void *p)
{
  const pragma_entry *old = (const pragma_entry *) p;
  return htab_hash_string (old->funcname);
}
4161
/* Record FLAG (FUNC_CALL or FUNC_DISINTERRUPT) for FUNCNAME in the
   pragma hash table, creating the table and/or entry on first use.  */

static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				   pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      /* The name must outlive the caller's buffer, so GC-copy it.  */
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4184
/* Return true if FLAG was recorded for FUNCNAME via a pragma, and
   mark the flag as used so unused pragmas can be diagnosed later.  */

static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  /* Skip an "@X." section-encoding prefix (see
     mep_encode_section_info) to recover the bare name.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4206
/* Public wrappers over the pragma flag table, one lookup/note pair
   per supported pragma.  */

bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4230
/* htab_traverse callback: warn about a "#pragma disinterrupt" that
   was recorded but never matched a function.  Always returns 1 so the
   traversal continues.  */

static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4241
/* End-of-compilation hook: diagnose unused disinterrupt pragmas.  */

void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
c28883e6
DD
4248
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4253
/* Pragma bridge: snapshot the current register information.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Pragma bridge: recompute register classes after a pragma changed
   register availability.  */

void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Pragma bridge: full register-information initialization.  */

void
mep_init_regs (void)
{
  init_regs ();
}
4271
7acf4da6
DD
4272
4273
/* Map the attribute list LIST of DECL to a one-character section
   encoding: 'b' based, 't' tiny, 'n' near, 'f' far, 'i'/'I' io (with/
   without a valid constant address), 'c' cb, or 0 for none.  Warns
   about and truncates duplicate address-region attributes.  */

static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first region attribute.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* 'i' only when the io attribute carries an in-range
	     constant address; otherwise 'I'.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, functions without an explicit section default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4319
4320static int
4321mep_comp_type_attributes (const_tree t1, const_tree t2)
4322{
4323 int vliw1, vliw2;
4324
4325 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4326 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4327
4328 if (vliw1 != vliw2)
4329 return 0;
4330
4331 return 1;
4332}
4333
/* Worker for TARGET_INSERT_ATTRIBUTES.  Applies "#pragma
   disinterrupt" to matching function decls, then assigns a default
   section attribute (based/tiny/far) to eligible variables based on
   size cutoffs, -mrand-tpgp, and -mconst-section, unless the user
   already specified an address-region attribute.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only public/static/external variables get default sections.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      /* -mrand-tpgp: scatter small globals pseudo-randomly across
	 sections, keyed on a checksum of the name.  */
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Size-based defaults: based, then tiny, then (with -ml) far.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section overrides the default for read-only data.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4484
/* TARGET_ENCODE_SECTION_INFO worker.  On the first call for DECL,
   rewrite its symbol name to carry an "@<enc>." prefix recording the
   MeP address region chosen from the decl's attributes (based, tiny,
   near, far, io, cb), and warn when the object is too large for that
   region's addressing range.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  /* Only encode once, when the RTL is first created.  */
  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Fetch the current assembler name; RTL may be a bare SYMBOL_REF or
     a MEM wrapping one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@" + encoding character + "." + old name + NUL.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      /* Preserve flags the fresh SYMBOL_REF would otherwise lose.  */
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Maximum object size reachable with each region's addressing
	 mode; 0 means no limit to enforce.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4560
/* TARGET_STRIP_NAME_ENCODING worker.  Remove any leading assembler
   '*' markers and any "@<enc>." region prefixes added by
   mep_encode_section_info, returning the user-visible symbol name.

   Fix: the original tested sym[2] == '.' without first checking that
   sym[1] is non-NUL, so the one-character string "@" caused a read
   one byte past the terminator.  Behavior on all validly-encoded
   names is unchanged.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      else if (*sym == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4574
/* TARGET_ASM_SELECT_SECTION worker.  Pick the output section for DECL
   from the "@<enc>." prefix mep_encode_section_info gave its symbol
   and from whether the decl is effectively read-only.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL can go in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the region-encoding character, if present.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW-mode functions get the v(f)text variants so the
	     assembler is switched into VLIW mode for them.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io variables live at fixed bus addresses; an initializer
	       has nowhere meaningful to go.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4665
/* TARGET_ASM_UNIQUE_SECTION worker.  Build a per-decl section name by
   choosing a base prefix from the decl's kind and its "@<enc>."
   symbol encoding, using the .gnu.linkonce variant for one-only
   decls, and appending the stripped symbol name.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by the `sec' codes below; column 1 holds the linkonce
     (DECL_ONE_ONLY) spelling of each prefix.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly encoded) RTL symbol name when available.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  if (name[0] == '@' && name[2] == '.')
    {
      /* Refine the section choice using the region encoding.  */
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      /* Drop the "@X." encoding from the emitted section name.  */
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4737
4738/* Given a decl, a section name, and whether the decl initializer
4739 has relocs, choose attributes for the section. */
4740
4741#define SECTION_MEP_VLIW SECTION_MACH_DEP
4742
4743static unsigned int
4744mep_section_type_flags (tree decl, const char *name, int reloc)
4745{
4746 unsigned int flags = default_section_type_flags (decl, name, reloc);
4747
4748 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4749 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4750 flags |= SECTION_MEP_VLIW;
4751
4752 return flags;
4753}
4754
4755/* Switch to an arbitrary section NAME with attributes as specified
4756 by FLAGS. ALIGN specifies any known alignment requirements for
4757 the section; 0 if the default should be used.
4758
4759 Differs from the standard ELF version only in support of VLIW mode. */
4760
4761static void
4762mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4763{
4764 char flagchars[8], *f = flagchars;
4765 const char *type;
4766
4767 if (!(flags & SECTION_DEBUG))
4768 *f++ = 'a';
4769 if (flags & SECTION_WRITE)
4770 *f++ = 'w';
4771 if (flags & SECTION_CODE)
4772 *f++ = 'x';
4773 if (flags & SECTION_SMALL)
4774 *f++ = 's';
4775 if (flags & SECTION_MEP_VLIW)
4776 *f++ = 'v';
4777 *f = '\0';
4778
4779 if (flags & SECTION_BSS)
4780 type = "nobits";
4781 else
4782 type = "progbits";
4783
4784 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4785 name, flagchars, type);
4786
4787 if (flags & SECTION_CODE)
4788 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4789 asm_out_file);
4790}
4791
/* Output a common (GLOBAL nonzero) or local definition of NAME with
   SIZE bytes and ALIGN bits of alignment.  io/cb variables become
   absolute symbols assigned their fixed bus address; based/tiny/far
   variables are laid out explicitly in their bss-style sections;
   everything else falls back to .local/.comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      /* io/cb: emit "name = <address>" from the attribute argument;
	 if no address was given there is nothing to output.  */
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (bits) to a power-of-two byte count.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4869
/* Trampolines.  */

/* TARGET_TRAMPOLINE_INIT worker.  MeP trampolines are filled in at
   run time by the library routine __mep_trampoline_helper, so all we
   do here is emit the call passing the trampoline address, the target
   function's address, and the static chain value.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4884
/* Experimental Reorg.  */

/* Return true if IN mentions register REG (when REG is non-null) or
   any MEM (when REG is null).  With MODES_TOO nonzero, a register
   only matches when its machine mode equals REG's as well.  For a SET
   searched in mem-mode (REG null), only the destination is examined,
   since the source is merely read.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference counts as a "mem" mention only.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can mention neither a register nor a MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursion over the expression's operands and vectors.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4952
4953#define EXPERIMENTAL_REGMOVE_REORG 1
4954
4955#if EXPERIMENTAL_REGMOVE_REORG
4956
/* Return nonzero if hard registers R1 and R2 belong to the same MeP
   register bank (both general registers or both control registers),
   so that a copy between them is a candidate for deletion.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  return ((GR_REGNO_P (r1) && GR_REGNO_P (r2))
	  || (CR_REGNO_P (r1) && CR_REGNO_P (r2)));
}
4966
4967static void
4968mep_reorg_regmove (rtx insns)
4969{
4970 rtx insn, next, pat, follow, *where;
4971 int count = 0, done = 0, replace, before = 0;
4972
4973 if (dump_file)
4974 for (insn = insns; insn; insn = NEXT_INSN (insn))
4975 if (GET_CODE (insn) == INSN)
4976 before++;
4977
4978 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4979 set that uses the r2 and r2 dies there. We replace r2 with r1
4980 and see if it's still a valid insn. If so, delete the first set.
4981 Copied from reorg.c. */
4982
4983 while (!done)
4984 {
4985 done = 1;
4986 for (insn = insns; insn; insn = next)
4987 {
4988 next = NEXT_INSN (insn);
4989 if (GET_CODE (insn) != INSN)
4990 continue;
4991 pat = PATTERN (insn);
4992
4993 replace = 0;
4994
4995 if (GET_CODE (pat) == SET
4996 && GET_CODE (SET_SRC (pat)) == REG
4997 && GET_CODE (SET_DEST (pat)) == REG
4998 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4999 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5000 {
5001 follow = next_nonnote_insn (insn);
5002 if (dump_file)
5003 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5004
5005 while (follow && GET_CODE (follow) == INSN
5006 && GET_CODE (PATTERN (follow)) == SET
5007 && !dead_or_set_p (follow, SET_SRC (pat))
5008 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5009 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5010 {
5011 if (dump_file)
5012 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5013 follow = next_nonnote_insn (follow);
5014 }
5015
5016 if (dump_file)
5017 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5018 if (follow && GET_CODE (follow) == INSN
5019 && GET_CODE (PATTERN (follow)) == SET
5020 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5021 {
5022 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5023 {
5024 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5025 {
5026 replace = 1;
5027 where = & SET_SRC (PATTERN (follow));
5028 }
5029 }
5030 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5031 {
5032 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5033 {
5034 replace = 1;
5035 where = & PATTERN (follow);
5036 }
5037 }
5038 }
5039 }
5040
5041 /* If so, follow is the corresponding insn */
5042 if (replace)
5043 {
5044 if (dump_file)
5045 {
5046 rtx x;
5047
5048 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5049 for (x = insn; x ;x = NEXT_INSN (x))
5050 {
5051 print_rtl_single (dump_file, x);
5052 if (x == follow)
5053 break;
5054 fprintf (dump_file, "\n");
5055 }
5056 }
5057
5058 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5059 follow, where))
5060 {
5061 count ++;
5062 next = delete_insn (insn);
5063 if (dump_file)
5064 {
5065 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5066 print_rtl_single (dump_file, follow);
5067 }
5068 done = 0;
5069 }
5070 }
5071 }
5072 }
5073
5074 if (dump_file)
5075 {
5076 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5077 fprintf (dump_file, "=====\n");
5078 }
5079}
5080#endif
5081
5082
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   Return the last instruction in the adjusted loop.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Walk backwards trying to pull up to two insns below the label,
       into the hardware repeat epilogue.  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  The epilogue must contain exactly two insns,
     so pad with nops if we found fewer.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5180
5181
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0) for
   the loop described by OPERANDS.  Each loop gets a tag so that
   mep_reorg_repeat can later pair begins with their ends; a new tag
   is allocated whenever two insns of the same kind are emitted in a
   row (i.e. a begin/end pair shares one tag).  */
void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5200
5201
5202/* Code for converting doloop_begins and doloop_ends into valid
5203 MeP instructions. A doloop_begin is just a placeholder:
5204
5205 $count = unspec ($count)
5206
5207 where $count is initially the number of iterations - 1.
5208 doloop_end has the form:
5209
5210 if ($count-- == 0) goto label
5211
5212 The counter variable is private to the doloop insns, nothing else
5213 relies on its value.
5214
5215 There are three cases, in decreasing order of preference:
5216
5217 1. A loop has exactly one doloop_begin and one doloop_end.
5218 The doloop_end branches to the first instruction after
5219 the doloop_begin.
5220
5221 In this case we can replace the doloop_begin with a repeat
5222 instruction and remove the doloop_end. I.e.:
5223
5224 $count1 = unspec ($count1)
5225 label:
5226 ...
5227 insn1
5228 insn2
5229 if ($count2-- == 0) goto label
5230
5231 becomes:
5232
5233 repeat $count1,repeat_label
5234 label:
5235 ...
5236 repeat_label:
5237 insn1
5238 insn2
5239 # end repeat
5240
5241 2. As for (1), except there are several doloop_ends. One of them
5242 (call it X) falls through to a label L. All the others fall
5243 through to branches to L.
5244
5245 In this case, we remove X and replace the other doloop_ends
5246 with branches to the repeat label. For example:
5247
5248 $count1 = unspec ($count1)
5249 start:
5250 ...
5251 if ($count2-- == 0) goto label
5252 end:
5253 ...
5254 if ($count3-- == 0) goto label
5255 goto end
5256
5257 becomes:
5258
5259 repeat $count1,repeat_label
5260 start:
5261 ...
5262 repeat_label:
5263 nop
5264 nop
5265 # end repeat
5266 end:
5267 ...
5268 goto repeat_label
5269
5270 3. The fallback case. Replace doloop_begins with:
5271
5272 $count = $count + 1
5273
5274 Replace doloop_ends with the equivalent of:
5275
5276 $count = $count - 1
5277 if ($count == 0) goto label
5278
5279 Note that this might need a scratch register if $count
5280 is stored in memory. */
5281
/* A structure describing one doloop_begin.  Begins and ends that share
   a tag are grouped into one struct mep_doloop by mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5293
/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5318
5319
/* One do-while loop: all the doloop insns that carry one tag.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5329
5330
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above).  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5361
5362
/* The main repeat reorg function.  See comment above for details.
   Collects all doloop_begin/doloop_end insns by tag, then converts
   each loop either to a hardware repeat (cases 1/2) or to explicit
   decrement-and-branch code (case 3).  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.
     Operand 2 of both internal patterns is the tag (see mep_emit_doloop).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5510
5511
/* Return true if INSN is a conditional branch whose condition (one of
   EQ, NE, LT, GE) can be inverted and still be recognized.  The
   condition code is flipped in place, test-recognized, and then
   restored, so INSN is left unchanged on exit.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Temporarily install the inverted comparison code.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition with the new code, then undo everything.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5549
/* Invert the condition of branch INSN (which mep_invertable_branch_p
   must have accepted) and retarget it at a fresh label emitted just
   after AFTER.  The old target is deleted if this was its only use.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Retarget whichever arm of the IF_THEN_ELSE holds the label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* The modified insn must still be recognizable.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5595
/* Convert suitable backward-branch loops to use the erepeat insn.
   For each invertible (or simple) jump, scan backwards for its target
   label; if the region in between is free of calls, barriers and
   unsafe labels, insert an erepeat after the loop-top label and an
   erepeat label/end around the loop's final insns.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional jump itself counts as one loop insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* Calls and barriers make the region unusable.  */
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		/* Found the loop top.  Check that nothing else inside
		   the loop can reach this label.  */
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop branch: the erepeat replaces
		       it entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop branch: invert it so it exits
		       the loop, and mark the loop end after it.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5708
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, and drop the
		   label if this was its only remaining use.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		/* Force re-recognition of the rewritten insn.  */
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5746
5747
/* Merge adjacent "reg = reg + const" insns that target the same
   register into a single add, when the combined constant still fits
   the add immediate range.  */

static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* NOTE(review): the range check excludes exactly 32767
	       (uses "< 32767", not "<= 32767") — presumably a
	       deliberately conservative bound; confirm against the
	       addsi3 immediate constraint before changing.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I and unlink N from the insn
		   chain by direct pointer surgery.  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5783
5784/* If this insn adjusts the stack, return the adjustment, else return
5785 zero. */
5786static int
5787add_sp_insn_p (rtx insn)
5788{
5789 rtx pat;
5790
5791 if (! single_set (insn))
5792 return 0;
5793 pat = PATTERN (insn);
5794 if (GET_CODE (SET_DEST (pat)) != REG)
5795 return 0;
5796 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5797 return 0;
5798 if (GET_CODE (SET_SRC (pat)) != PLUS)
5799 return 0;
5800 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5801 return 0;
5802 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5803 return 0;
5804 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5805 return 0;
5806 return INTVAL (XEXP (SET_SRC (pat), 1));
5807}
5808
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function allocates a frame ($sp -= N) and frees it
   ($sp += N) without ever otherwise touching $sp or making a call in
   between, delete both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  while (insns)
    {
      rtx next = next_real_insn (insns);
      /* Skip the final insn (the return) — it may mention $sp.  */
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Bail on a second adjustment, or one that does not exactly
	     undo the prologue's.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5863
/* Machine-dependent reorg pass: run the MeP-specific insn-chain
   rewrites in their required order.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat clobbers the RPB/RPE/RPC registers, so it is unsafe in
     interrupt handlers unless those registers are saved, and it
     interferes with profiling counters.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5895
5896\f
5897
5898/*----------------------------------------------------------------------*/
5899/* Builtins */
5900/*----------------------------------------------------------------------*/
5901
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= name to the ISA mask it selects.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   real entries come from the generated COPROC_SELECTION_TABLE.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5929
5930/* Initialize the global intrinsics variables above. */
5931
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries chain
     to earlier ones via mep_intrinsic_chain.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value just
     computed above, making that computation dead.  It looks like a
     debugging/bring-up override that was left in — confirm whether
     the cmov1/cmov2 check should be restored before removing it.  */
  mep_have_core_copro_moves_p = 1;
}
5971
/* Declare all available intrinsic functions.  Called once only.
   The type nodes below cache the tree types used for the builtins'
   arguments and return values; they are filled in by
   mep_init_builtins.  */

static tree cp_data_bus_int_type_node;  /* width depends on TARGET_64BIT_CR_REGS */
static tree opaque_vector_type_node;    /* generic 8-byte __cop vector */
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5982
/* Map a cgen regnum-operand type code to the tree type used for the
   corresponding builtin argument or return value.  Unknown codes map
   to void.  */
static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
6009
/* Create the builtin type nodes, push the coprocessor typedefs into
   the language's scope, and register one builtin function per
   available non-GROUP_KNOWN_CODE intrinsic.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The coprocessor data bus is 64 bits wide when the CRs are.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  /* Make the cp_* type names visible to user code.  */
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
		 cp_data_bus_int_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
		 opaque_vector_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
		 v8qi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
		 v4hi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
		 v2si_type_node));

  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
		 v8uqi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
		 v4uhi_type_node));
  (*lang_hooks.decls.pushdecl)
    (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
		 v2usi_type_node));

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Register each intrinsic only once, even if it has several
	   table entries (adjacent entries share the intrinsic id).  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type (ret_type, 0);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6092
6093/* Report the unavailablity of the given intrinsic. */
6094
6095#if 1
6096static void
6097mep_intrinsic_unavailable (int intrinsic)
6098{
6099 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6100
6101 if (already_reported_p[intrinsic])
6102 return;
6103
6104 if (mep_intrinsic_insn[intrinsic] < 0)
6105 error ("coprocessor intrinsic %qs is not available in this configuration",
6106 cgen_intrinsics[intrinsic]);
6107 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6108 error ("%qs is not available in VLIW functions",
6109 cgen_intrinsics[intrinsic]);
6110 else
6111 error ("%qs is not available in non-VLIW functions",
6112 cgen_intrinsics[intrinsic]);
6113
6114 already_reported_p[intrinsic] = 1;
6115}
6116#endif
6117
6118
6119/* See if any implementation of INTRINSIC is available to the
6120 current function. If so, store the most general implementation
6121 in *INSN_PTR and return true. Return false otherwise. */
6122
6123static bool
6124mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6125{
6126 int i;
6127
6128 i = mep_intrinsic_insn[intrinsic];
6129 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6130 i = mep_intrinsic_chain[i];
6131
6132 if (i >= 0)
6133 {
6134 *insn_ptr = &cgen_insns[i];
6135 return true;
6136 }
6137 return false;
6138}
6139
6140
6141/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6142 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6143 try using a work-alike instead. In this case, the returned insn
6144 may have three operands rather than two. */
6145
6146static bool
6147mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6148{
6149 size_t i;
6150
6151 if (intrinsic == mep_cmov)
6152 {
6153 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6154 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6155 return true;
6156 return false;
6157 }
6158 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6159}
6160
6161
6162/* If ARG is a register operand that is the same size as MODE, convert it
6163 to MODE using a subreg. Otherwise return ARG as-is. */
6164
6165static rtx
6166mep_convert_arg (enum machine_mode mode, rtx arg)
6167{
6168 if (GET_MODE (arg) != mode
6169 && register_operand (arg, VOIDmode)
6170 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6171 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6172 return arg;
6173}
6174
6175
6176/* Apply regnum conversions to ARG using the description given by REGNUM.
6177 Return the new argument on success and null on failure. */
6178
6179static rtx
6180mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6181{
6182 if (regnum->count == 0)
6183 return arg;
6184
6185 if (GET_CODE (arg) != CONST_INT
6186 || INTVAL (arg) < 0
6187 || INTVAL (arg) >= regnum->count)
6188 return 0;
6189
6190 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6191}
6192
6193
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.  Returns
   the legitimized rtx, or 0 if ARG cannot be made to match.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6251
6252
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  If the operand uses a known immediate predicate,
   report the exact range/alignment requirement; otherwise fall back
   to a generic type-mismatch message.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  /* Out of range, or in range but misaligned.  */
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6283
/* Expand a call to a MeP coprocessor builtin.  EXP is the CALL_EXPR;
   TARGET is a suggested destination.  Returns the result rtx, or
   NULL_RTX after reporting an error.

   NOTE(review): TARGET is marked ATTRIBUTE_UNUSED but is in fact used
   below; the attribute is merely "possibly unused" so this is
   harmless, but misleading.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* When the insn produces a value, arg[0] holds the destination and
     the source-level arguments start at index 1.  */
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  if (cgen_insn->cret_p)
    {
      /* cret_p > 1 means the result operand is also an input.  */
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  /* mep_convert_regnum rejected an out-of-range register
	     number.  */
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the result operand(s): reuse TARGET when its mode matches,
     otherwise use a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  /* Conditional-branch patterns must be emitted as jump insns.  */
  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6463
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  MeP exposes its vector
   operations only through the coprocessor intrinsics, so no vector
   mode is supported generically.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6469\f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  Used as a for_each_rtx callback.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* Check the hard register the subreg actually resolves to.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* Leaf codes that can never mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6519
/* Returns nonzero if X mentions a global register.  X may be an insn
   or an arbitrary rtx; for insns, calls are handled specially via
   CALL_INSN_FUNCTION_USAGE.  */

static int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
	{
	  /* Any call that is not const/pure may touch global
	     registers; for const/pure calls, only inspect the
	     declared function usage.  */
	  if (! RTL_CONST_OR_PURE_CALL_P (x))
	    return 1;
	  x = CALL_INSN_FUNCTION_USAGE (x);
	  if (x == 0)
	    return 0;
	}
      else
	x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
6541/* Scheduling hooks for VLIW mode.
6542
6543 Conceptually this is very simple: we have a two-pack architecture
6544 that takes one core insn and one coprocessor insn to make up either
6545 a 32- or 64-bit instruction word (depending on the option bit set in
6546 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6547 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6548 and one 48-bit cop insn or two 32-bit core/cop insns.
6549
6550 In practice, instruction selection will be a bear. Consider in
6551 VL64 mode the following insns
6552
6553 add $1, 1
6554 cmov $cr0, $0
6555
6556 these cannot pack, since the add is a 16-bit core insn and cmov
6557 is a 32-bit cop insn. However,
6558
6559 add3 $1, $1, 1
6560 cmov $cr0, $0
6561
6562 packs just fine. For good VLIW code generation in VL64 mode, we
6563 will have to have 32-bit alternatives for many of the common core
6564 insns. Not implemented. */
6565
/* Implement TARGET_SCHED_ADJUST_COST.  Returns the scheduling cost of
   the dependence LINK between INSN and DEP_INSN, starting from COST.  */
static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6612
6613/* ??? We don't properly compute the length of a load/store insn,
6614 taking into account the addressing mode. */
6615
6616static int
6617mep_issue_rate (void)
6618{
6619 return TARGET_IVC2 ? 3 : 2;
6620}
6621
6622/* Return true if function DECL was declared with the vliw attribute. */
6623
6624bool
6625mep_vliw_function_p (tree decl)
6626{
6627 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6628}
6629
6630static rtx
6631mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6632{
6633 int i;
6634
6635 for (i = nready - 1; i >= 0; --i)
6636 {
6637 rtx insn = ready[i];
6638 if (recog_memoized (insn) >= 0
6639 && get_attr_slot (insn) == slot
6640 && get_attr_length (insn) == length)
6641 return insn;
6642 }
6643
6644 return NULL_RTX;
6645}
6646
6647static void
6648mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6649{
6650 int i;
6651
6652 for (i = 0; i < nready; ++i)
6653 if (ready[i] == insn)
6654 {
6655 for (; i < nready - 1; ++i)
6656 ready[i] = ready[i + 1];
6657 ready[i] = insn;
6658 return;
6659 }
6660
6661 gcc_unreachable ();
6662}
6663
/* Print a one-line scheduler-dump description of INSN to DUMP:
   its insn code, UID, pattern name and the VLIW slot(s) it can
   occupy.  */
static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unknown slot attribute: print its raw numeric value.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6707
6708static int
6709mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6710 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6711 int *pnready, int clock ATTRIBUTE_UNUSED)
6712{
6713 int nready = *pnready;
6714 rtx core_insn, cop_insn;
6715 int i;
6716
6717 if (dump && sched_verbose > 1)
6718 {
6719 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6720 for (i=0; i<nready; i++)
6721 mep_print_sched_insn (dump, ready[i]);
6722 fprintf (dump, "\n");
6723 }
6724
6725 if (!mep_vliw_function_p (cfun->decl))
6726 return 1;
6727 if (nready < 2)
6728 return 1;
6729
6730 /* IVC2 uses a DFA to determine what's ready and what's not. */
6731 if (TARGET_IVC2)
6732 return nready;
6733
6734 /* We can issue either a core or coprocessor instruction.
6735 Look for a matched pair of insns to reorder. If we don't
6736 find any, don't second-guess the scheduler's priorities. */
6737
6738 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6739 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6740 TARGET_OPT_VL64 ? 6 : 2)))
6741 ;
6742 else if (TARGET_OPT_VL64
6743 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6744 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6745 ;
6746 else
6747 /* We didn't find a pair. Issue the single insn at the head
6748 of the ready list. */
6749 return 1;
6750
6751 /* Reorder the two insns first. */
6752 mep_move_ready_insn (ready, nready, core_insn);
6753 mep_move_ready_insn (ready, nready - 1, cop_insn);
6754 return 2;
6755}
6756
6757/* A for_each_rtx callback. Return true if *X is a register that is
6758 set by insn PREV. */
6759
6760static int
6761mep_store_find_set (rtx *x, void *prev)
6762{
6763 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6764}
6765
6766/* Like mep_store_bypass_p, but takes a pattern as the second argument,
6767 not the containing insn. */
6768
6769static bool
6770mep_store_data_bypass_1 (rtx prev, rtx pat)
6771{
6772 /* Cope with intrinsics like swcpa. */
6773 if (GET_CODE (pat) == PARALLEL)
6774 {
6775 int i;
6776
6777 for (i = 0; i < XVECLEN (pat, 0); i++)
6778 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6779 return true;
6780
6781 return false;
6782 }
6783
6784 /* Check for some sort of store. */
6785 if (GET_CODE (pat) != SET
6786 || GET_CODE (SET_DEST (pat)) != MEM)
6787 return false;
6788
6789 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6790 The first operand to the unspec is the store data and the other operands
6791 are used to calculate the address. */
6792 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6793 {
6794 rtx src;
6795 int i;
6796
6797 src = SET_SRC (pat);
6798 for (i = 1; i < XVECLEN (src, 0); i++)
6799 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6800 return false;
6801
6802 return true;
6803 }
6804
6805 /* Otherwise just check that PREV doesn't modify any register mentioned
6806 in the memory destination. */
6807 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6808}
6809
6810/* Return true if INSN is a store instruction and if the store address
6811 has no true dependence on PREV. */
6812
6813bool
6814mep_store_data_bypass_p (rtx prev, rtx insn)
6815{
6816 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6817}
6818
6819/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6820 is a register other than LO or HI and if PREV sets *X. */
6821
6822static int
6823mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6824{
6825 return (REG_P (*x)
6826 && REGNO (*x) != LO_REGNO
6827 && REGNO (*x) != HI_REGNO
6828 && reg_set_p (*x, (const_rtx) prev));
6829}
6830
6831/* Return true if, apart from HI/LO, there are no true dependencies
6832 between multiplication instructions PREV and INSN. */
6833
6834bool
6835mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6836{
6837 rtx pat;
6838
6839 pat = PATTERN (insn);
6840 if (GET_CODE (pat) == PARALLEL)
6841 pat = XVECEXP (pat, 0, 0);
6842 return (GET_CODE (pat) == SET
6843 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6844}
6845
6846/* Return true if INSN is an ldc instruction that issues to the
6847 MeP-h1 integer pipeline. This is true for instructions that
6848 read from PSW, LP, SAR, HI and LO. */
6849
6850bool
6851mep_ipipe_ldc_p (rtx insn)
6852{
6853 rtx pat, src;
6854
6855 pat = PATTERN (insn);
6856
6857 /* Cope with instrinsics that set both a hard register and its shadow.
6858 The set of the hard register comes first. */
6859 if (GET_CODE (pat) == PARALLEL)
6860 pat = XVECEXP (pat, 0, 0);
6861
6862 if (GET_CODE (pat) == SET)
6863 {
6864 src = SET_SRC (pat);
6865
6866 /* Cope with intrinsics. The first operand to the unspec is
6867 the source register. */
6868 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6869 src = XVECEXP (src, 0, 0);
6870
6871 if (REG_P (src))
6872 switch (REGNO (src))
6873 {
6874 case PSW_REGNO:
6875 case LP_REGNO:
6876 case SAR_REGNO:
6877 case HI_REGNO:
6878 case LO_REGNO:
6879 return true;
6880 }
6881 }
6882 return false;
6883}
6884
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The SEQUENCE
     insn itself links to CORE's old predecessor and COP's old
     successor; CORE and COP are chained to each other inside it.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  (BImode on an
     insn is used elsewhere in this file as the "inside a bundle"
     marker; see mep_bundle_insns.)  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's locator when it has one.  */
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
  INSN_LOCATOR (core) = 0;
  INSN_LOCATOR (cop) = 0;

  return insn;
}
6927
6928/* A helper routine for ms1_insn_dependent_p called through note_stores. */
6929
6930static void
6931mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6932{
6933 rtx * pinsn = (rtx *) data;
6934
6935 if (*pinsn && reg_mentioned_p (x, *pinsn))
6936 *pinsn = NULL_RTX;
6937}
6938
6939/* Return true if anything in insn X is (anti,output,true) dependent on
6940 anything in insn Y. */
6941
6942static int
6943mep_insn_dependent_p (rtx x, rtx y)
6944{
6945 rtx tmp;
6946
6947 gcc_assert (INSN_P (x));
6948 gcc_assert (INSN_P (y));
6949
6950 tmp = PATTERN (y);
6951 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6952 if (tmp == NULL_RTX)
6953 return 1;
6954
6955 tmp = PATTERN (x);
6956 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6957 if (tmp == NULL_RTX)
6958 return 1;
6959
6960 return 0;
6961}
6962
6963static int
6964core_insn_p (rtx insn)
6965{
6966 if (GET_CODE (PATTERN (insn)) == USE)
6967 return 0;
6968 if (get_attr_slot (insn) == SLOT_CORE)
6969 return 1;
6970 return 0;
6971}
6972
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATOR (insn) = INSN_LOCATOR (first);

	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here (just before FIRST).  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Walk the whole bundle (TImode head plus following
		 VOIDmode insns), remembering the last core insn.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn just before the old bundle head.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* No pairable core insn precedes this cop insn: pad the
		 bundle with a nop of the required size.  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    /* Pair the preceding core insn with this cop insn.  */
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7166
7167
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  rtx newop[10];
  int i;

  /* Intrinsic not available in the current configuration.  */
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  for (i = 0; i < idata->n_operands; i++)
    {
      /* Convert each operand to the mode the insn pattern expects,
	 then check it against that operand's predicate.  */
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* genfun ignores any trailing arguments beyond n_operands, so it is
     safe to pass all nine slots unconditionally.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
7197
7198
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: no unary intrinsic expansions are implemented,
   so this always tells the caller to fall back.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7210
7211
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: no binary intrinsic expansions are implemented,
   so this always tells the caller to fall back.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7228
7229static bool
7230mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7231{
7232 switch (code)
7233 {
7234 case CONST_INT:
7235 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7236 *total = 0;
7237 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7238 *total = 1;
7239 else
7240 *total = 3;
7241 return true;
7242
7243 case SYMBOL_REF:
7244 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7245 return true;
7246
7247 case MULT:
7248 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7249 ? COSTS_N_INSNS (3)
7250 : COSTS_N_INSNS (2));
7251 return true;
7252 }
7253 return false;
7254}
7255
/* Implement TARGET_ADDRESS_COST.  All addressing modes are considered
   equally cheap.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7261
7262static bool
96e45421
JM
7263mep_handle_option (struct gcc_options *opts, struct gcc_options *opts_set,
7264 const struct cl_decoded_option *decoded,
7265 location_t loc ATTRIBUTE_UNUSED)
7acf4da6
DD
7266{
7267 int i;
96e45421
JM
7268 size_t code = decoded->opt_index;
7269
7270 gcc_assert (opts == &global_options);
7271 gcc_assert (opts_set == &global_options_set);
7acf4da6
DD
7272
7273 switch (code)
7274 {
7275 case OPT_mall_opts:
7276 target_flags |= MEP_ALL_OPTS;
7277 break;
7278
7279 case OPT_mno_opts:
7280 target_flags &= ~ MEP_ALL_OPTS;
7281 break;
7282
7283 case OPT_mcop64:
7284 target_flags |= MASK_COP;
7285 target_flags |= MASK_64BIT_CR_REGS;
7286 break;
7287
7288 case OPT_mtiny_:
7289 option_mtiny_specified = 1;
7290
7291 case OPT_mivc2:
7292 target_flags |= MASK_COP;
7293 target_flags |= MASK_64BIT_CR_REGS;
7294 target_flags |= MASK_VLIW;
7295 target_flags |= MASK_OPT_VL64;
7296 target_flags |= MASK_IVC2;
7297
7298 for (i=0; i<32; i++)
7299 fixed_regs[i+48] = 0;
7300 for (i=0; i<32; i++)
7301 call_used_regs[i+48] = 1;
7302 for (i=6; i<8; i++)
7303 call_used_regs[i+48] = 0;
7304
7acf4da6
DD
7305#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7306 RN (0, "$csar0");
7307 RN (1, "$cc");
7308 RN (4, "$cofr0");
7309 RN (5, "$cofr1");
7310 RN (6, "$cofa0");
7311 RN (7, "$cofa1");
7312 RN (15, "$csar1");
7313
7314 RN (16, "$acc0_0");
7315 RN (17, "$acc0_1");
7316 RN (18, "$acc0_2");
7317 RN (19, "$acc0_3");
7318 RN (20, "$acc0_4");
7319 RN (21, "$acc0_5");
7320 RN (22, "$acc0_6");
7321 RN (23, "$acc0_7");
7322
7323 RN (24, "$acc1_0");
7324 RN (25, "$acc1_1");
7325 RN (26, "$acc1_2");
7326 RN (27, "$acc1_3");
7327 RN (28, "$acc1_4");
7328 RN (29, "$acc1_5");
7329 RN (30, "$acc1_6");
7330 RN (31, "$acc1_7");
7331#undef RN
7332
7333 break;
7334
7335 default:
7336 break;
7337 }
7338 return TRUE;
7339}
7340
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections (based/tiny/far data and the core/VLIW text variants).  */

static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW text sections carry the "v" flag and switch the assembler
     into .vliw mode; .ftext switches back to .core.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
c28883e6
DD
/* Initialize the GCC target structure.  Each pair below overrides a
   default target hook with its MeP implementation; the hooks
   themselves are defined earlier in this file.  */

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mep_handle_option
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE mep_option_optimization_table
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init

/* The single target vector for this backend, built from the macros
   above plus the defaults in target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mep.h"