1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 /* Some output-actions in c4x.md need these. */
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
49 #include "c4x-protos.h"
51 #include "target-def.h"
/* rtx handles for the float <-> HImode conversion helper routines
   (fix/float, signed and unsigned, QFmode and HFmode variants).
   They are registered as GC roots in c4x_add_gc_roots below;
   presumably initialized elsewhere in this file — not visible in
   this extract.  */
55 rtx fix_truncqfhi2_libfunc
;
56 rtx fixuns_truncqfhi2_libfunc
;
57 rtx fix_trunchfhi2_libfunc
;
58 rtx fixuns_trunchfhi2_libfunc
;
59 rtx floathiqf2_libfunc
;
60 rtx floatunshiqf2_libfunc
;
61 rtx floathihf2_libfunc
;
62 rtx floatunshihf2_libfunc
;
/* Nonzero when the current interrupt function was determined to be a
   leaf function; set in c4x_expand_prologue via c4x_leaf_function_p
   and consulted by c4x_isr_reg_used_p to limit register saves.  */
64 static int c4x_leaf_function
;
/* Floating-point view of the register names, expanded from the
   FLOAT_REGISTER_NAMES target macro (defined in c4x.h).  */
66 static const char *const float_reg_names
[] = FLOAT_REGISTER_NAMES
;
68 /* Array of the smallest class containing reg number REGNO, indexed by
69 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
70 registers are available and set the class to NO_REGS for registers
71 that the target switches say are unavailable. */
/* One entry per hard register, in register-number order; the trailing
   comment columns give the register name, the modes it can hold, and
   whether it is call-saved.  */
73 enum reg_class c4x_regclass_map
[FIRST_PSEUDO_REGISTER
] =
75 /* Reg Modes Saved. */
76 R0R1_REGS
, /* R0 QI, QF, HF No. */
77 R0R1_REGS
, /* R1 QI, QF, HF No. */
78 R2R3_REGS
, /* R2 QI, QF, HF No. */
79 R2R3_REGS
, /* R3 QI, QF, HF No. */
80 EXT_LOW_REGS
, /* R4 QI, QF, HF QI. */
81 EXT_LOW_REGS
, /* R5 QI, QF, HF QI. */
82 EXT_LOW_REGS
, /* R6 QI, QF, HF QF. */
83 EXT_LOW_REGS
, /* R7 QI, QF, HF QF. */
84 ADDR_REGS
, /* AR0 QI No. */
85 ADDR_REGS
, /* AR1 QI No. */
86 ADDR_REGS
, /* AR2 QI No. */
87 ADDR_REGS
, /* AR3 QI QI. */
88 ADDR_REGS
, /* AR4 QI QI. */
89 ADDR_REGS
, /* AR5 QI QI. */
90 ADDR_REGS
, /* AR6 QI QI. */
91 ADDR_REGS
, /* AR7 QI QI. */
92 DP_REG
, /* DP QI No. */
93 INDEX_REGS
, /* IR0 QI No. */
94 INDEX_REGS
, /* IR1 QI No. */
95 BK_REG
, /* BK QI QI. */
96 SP_REG
, /* SP QI No. */
97 ST_REG
, /* ST CC No. */
98 NO_REGS
, /* DIE/IE No. */
99 NO_REGS
, /* IIE/IF No. */
100 NO_REGS
, /* IIF/IOF No. */
101 INT_REGS
, /* RS QI No. */
102 INT_REGS
, /* RE QI No. */
103 RC_REG
, /* RC QI No. */
104 EXT_REGS
, /* R8 QI, QF, HF QI. */
105 EXT_REGS
, /* R9 QI, QF, HF No. */
106 EXT_REGS
, /* R10 QI, QF, HF No. */
107 EXT_REGS
, /* R11 QI, QF, HF No. */
/* Mode in which each hard register should be saved across a call,
   indexed by register number (parallel to c4x_regclass_map).
   VOIDmode entries mark registers that are never caller-saved.
   NOTE(review): presumably consulted by the caller-save machinery via
   a macro in c4x.h — confirm against that header.  */
110 enum machine_mode c4x_caller_save_map
[FIRST_PSEUDO_REGISTER
] =
112 /* Reg Modes Saved. */
113 HFmode
, /* R0 QI, QF, HF No. */
114 HFmode
, /* R1 QI, QF, HF No. */
115 HFmode
, /* R2 QI, QF, HF No. */
116 HFmode
, /* R3 QI, QF, HF No. */
117 QFmode
, /* R4 QI, QF, HF QI. */
118 QFmode
, /* R5 QI, QF, HF QI. */
119 QImode
, /* R6 QI, QF, HF QF. */
120 QImode
, /* R7 QI, QF, HF QF. */
121 QImode
, /* AR0 QI No. */
122 QImode
, /* AR1 QI No. */
123 QImode
, /* AR2 QI No. */
124 QImode
, /* AR3 QI QI. */
125 QImode
, /* AR4 QI QI. */
126 QImode
, /* AR5 QI QI. */
127 QImode
, /* AR6 QI QI. */
128 QImode
, /* AR7 QI QI. */
129 VOIDmode
, /* DP QI No. */
130 QImode
, /* IR0 QI No. */
131 QImode
, /* IR1 QI No. */
132 QImode
, /* BK QI QI. */
133 VOIDmode
, /* SP QI No. */
134 VOIDmode
, /* ST CC No. */
135 VOIDmode
, /* DIE/IE No. */
136 VOIDmode
, /* IIE/IF No. */
137 VOIDmode
, /* IIF/IOF No. */
138 QImode
, /* RS QI No. */
139 QImode
, /* RE QI No. */
140 VOIDmode
, /* RC QI No. */
141 QFmode
, /* R8 QI, QF, HF QI. */
142 HFmode
, /* R9 QI, QF, HF No. */
143 HFmode
, /* R10 QI, QF, HF No. */
144 HFmode
, /* R11 QI, QF, HF No. */
148 /* Test and compare insns in c4x.md store the information needed to
149 generate branch and scc insns here. */
151 struct rtx_def
*c4x_compare_op0
= NULL_RTX
;
152 struct rtx_def
*c4x_compare_op1
= NULL_RTX
;
/* String value of the -mrpts= option; parsed with atoi in
   c4x_override_options into c4x_rpts_cycles.  */
154 const char *c4x_rpts_cycles_string
;
155 int c4x_rpts_cycles
= 0; /* Max. cycles for RPTS. */
/* String value of the -mcpu= option; may be e.g. "30" or "c30".
   Parsed in c4x_override_options into c4x_cpu_version.  */
156 const char *c4x_cpu_version_string
;
157 int c4x_cpu_version
= 40; /* CPU version C30/31/32/33/40/44. */
159 /* Pragma definitions. */
/* Lists of decls named by the CODE_SECTION/DATA_SECTION/FUNC_IS_PURE/
   FUNC_NEVER_RETURNS/INTERRUPT pragma handlers — registered as GC
   roots in c4x_add_gc_roots.  NOTE(review): exact pragma names not
   visible in this extract; confirm against c4x-c.c/c4x.h.  */
161 tree code_tree
= NULL_TREE
;
162 tree data_tree
= NULL_TREE
;
163 tree pure_tree
= NULL_TREE
;
164 tree noreturn_tree
= NULL_TREE
;
165 tree interrupt_tree
= NULL_TREE
;
167 /* Forward declarations */
/* Prototypes for the static helpers defined later in this file.
   PARAMS is GCC's traditional-C compatibility macro: it expands to the
   parenthesized argument list for ISO compilers and to () for K&R
   compilers.  */
168 static void c4x_add_gc_roots
PARAMS ((void));
169 static int c4x_isr_reg_used_p
PARAMS ((unsigned int));
170 static int c4x_leaf_function_p
PARAMS ((void));
171 static int c4x_assembler_function_p
PARAMS ((void));
172 static int c4x_immed_float_p
PARAMS ((rtx
));
173 static int c4x_a_register
PARAMS ((rtx
));
174 static int c4x_x_register
PARAMS ((rtx
));
175 static int c4x_immed_int_constant
PARAMS ((rtx
));
176 static int c4x_immed_float_constant
PARAMS ((rtx
));
177 static int c4x_K_constant
PARAMS ((rtx
));
178 static int c4x_N_constant
PARAMS ((rtx
));
179 static int c4x_O_constant
PARAMS ((rtx
));
180 static int c4x_R_indirect
PARAMS ((rtx
));
181 static int c4x_S_indirect
PARAMS ((rtx
));
182 static void c4x_S_address_parse
PARAMS ((rtx
, int *, int *, int *, int *));
183 static int c4x_valid_operands
PARAMS ((enum rtx_code
, rtx
*,
184 enum machine_mode
, int));
185 static int c4x_arn_reg_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
186 static int c4x_arn_mem_operand
PARAMS ((rtx
, enum machine_mode
, unsigned int));
187 static void c4x_check_attribute
PARAMS ((const char *, tree
, tree
, tree
*));
188 static int c4x_r11_set_p
PARAMS ((rtx
));
189 static int c4x_rptb_valid_p
PARAMS ((rtx
, rtx
));
190 static int c4x_label_ref_used_p
PARAMS ((rtx
, rtx
));
191 static tree c4x_handle_fntype_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
192 const struct attribute_spec c4x_attribute_table
[];
193 static void c4x_insert_attributes
PARAMS ((tree
, tree
*));
194 static void c4x_asm_named_section
PARAMS ((const char *, unsigned int));
195 static int c4x_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
196 static void c4x_encode_section_info
PARAMS ((tree
, int));
198 /* Initialize the GCC target structure. */
/* NOTE(review): bytes are emitted with the assembler's .word directive
   and the aligned HI/SI hooks are disabled, consistent with a target
   whose smallest addressable unit is a 32-bit word — confirm against
   c4x.h.  */
199 #undef TARGET_ASM_BYTE_OP
200 #define TARGET_ASM_BYTE_OP "\t.word\t"
201 #undef TARGET_ASM_ALIGNED_HI_OP
202 #define TARGET_ASM_ALIGNED_HI_OP NULL
203 #undef TARGET_ASM_ALIGNED_SI_OP
204 #define TARGET_ASM_ALIGNED_SI_OP NULL
206 #undef TARGET_ATTRIBUTE_TABLE
207 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
209 #undef TARGET_INSERT_ATTRIBUTES
210 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
212 #undef TARGET_INIT_BUILTINS
213 #define TARGET_INIT_BUILTINS c4x_init_builtins
215 #undef TARGET_EXPAND_BUILTIN
216 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
218 #undef TARGET_SCHED_ADJUST_COST
219 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
221 #undef TARGET_ENCODE_SECTION_INFO
222 #define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info
/* The single instance of the target hook vector, filled in from the
   TARGET_* macro overrides above via TARGET_INITIALIZER.  */
224 struct gcc_target targetm
= TARGET_INITIALIZER
;
226 /* Called to register all of our global variables with the garbage
232 ggc_add_rtx_root (&c4x_compare_op0
, 1);
233 ggc_add_rtx_root (&c4x_compare_op1
, 1);
234 ggc_add_tree_root (&code_tree
, 1);
235 ggc_add_tree_root (&data_tree
, 1);
236 ggc_add_tree_root (&pure_tree
, 1);
237 ggc_add_tree_root (&noreturn_tree
, 1);
238 ggc_add_tree_root (&interrupt_tree
, 1);
239 ggc_add_rtx_root (&smulhi3_libfunc
, 1);
240 ggc_add_rtx_root (&umulhi3_libfunc
, 1);
241 ggc_add_rtx_root (&fix_truncqfhi2_libfunc
, 1);
242 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc
, 1);
243 ggc_add_rtx_root (&fix_trunchfhi2_libfunc
, 1);
244 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc
, 1);
245 ggc_add_rtx_root (&floathiqf2_libfunc
, 1);
246 ggc_add_rtx_root (&floatunshiqf2_libfunc
, 1);
247 ggc_add_rtx_root (&floathihf2_libfunc
, 1);
248 ggc_add_rtx_root (&floatunshihf2_libfunc
, 1);
252 /* Override command line options.
253 Called once after all options have been parsed.
254 Mostly we process the processor
255 type and sometimes adjust other TARGET_ options. */
258 c4x_override_options ()
260 if (c4x_rpts_cycles_string
)
261 c4x_rpts_cycles
= atoi (c4x_rpts_cycles_string
);
266 c4x_cpu_version
= 30;
268 c4x_cpu_version
= 31;
270 c4x_cpu_version
= 32;
272 c4x_cpu_version
= 33;
274 c4x_cpu_version
= 40;
276 c4x_cpu_version
= 44;
278 c4x_cpu_version
= 40;
280 /* -mcpu=xx overrides -m40 etc. */
281 if (c4x_cpu_version_string
)
283 const char *p
= c4x_cpu_version_string
;
285 /* Also allow -mcpu=c30 etc. */
286 if (*p
== 'c' || *p
== 'C')
288 c4x_cpu_version
= atoi (p
);
291 target_flags
&= ~(C30_FLAG
| C31_FLAG
| C32_FLAG
| C33_FLAG
|
292 C40_FLAG
| C44_FLAG
);
294 switch (c4x_cpu_version
)
296 case 30: target_flags
|= C30_FLAG
; break;
297 case 31: target_flags
|= C31_FLAG
; break;
298 case 32: target_flags
|= C32_FLAG
; break;
299 case 33: target_flags
|= C33_FLAG
; break;
300 case 40: target_flags
|= C40_FLAG
; break;
301 case 44: target_flags
|= C44_FLAG
; break;
303 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version
);
304 c4x_cpu_version
= 40;
305 target_flags
|= C40_FLAG
;
308 if (TARGET_C30
|| TARGET_C31
|| TARGET_C32
|| TARGET_C33
)
309 target_flags
|= C3X_FLAG
;
311 target_flags
&= ~C3X_FLAG
;
313 /* Convert foo / 8.0 into foo * 0.125, etc. */
314 set_fast_math_flags (1);
316 /* We should phase out the following at some stage.
317 This provides compatibility with the old -mno-aliases option. */
318 if (! TARGET_ALIASES
&& ! flag_argument_noalias
)
319 flag_argument_noalias
= 1;
321 /* Register global variables with the garbage collector. */
326 /* This is called before c4x_override_options. */
329 c4x_optimization_options (level
, size
)
330 int level ATTRIBUTE_UNUSED
;
331 int size ATTRIBUTE_UNUSED
;
333 /* Scheduling before register allocation can screw up global
334 register allocation, especially for functions that use MPY||ADD
335 instructions. The benefit we gain we get by scheduling before
336 register allocation is probably marginal anyhow. */
337 flag_schedule_insns
= 0;
341 /* Write an ASCII string. */
343 #define C4X_ASCII_LIMIT 40
346 c4x_output_ascii (stream
, ptr
, len
)
351 char sbuf
[C4X_ASCII_LIMIT
+ 1];
352 int s
, l
, special
, first
= 1, onlys
;
355 fprintf (stream
, "\t.byte\t");
357 for (s
= l
= 0; len
> 0; --len
, ++ptr
)
361 /* Escape " and \ with a \". */
362 special
= *ptr
== '\"' || *ptr
== '\\';
364 /* If printable - add to buff. */
365 if ((! TARGET_TI
|| ! special
) && *ptr
>= 0x20 && *ptr
< 0x7f)
370 if (s
< C4X_ASCII_LIMIT
- 1)
385 fprintf (stream
, "\"%s\"", sbuf
);
387 if (TARGET_TI
&& l
>= 80 && len
> 1)
389 fprintf (stream
, "\n\t.byte\t");
407 fprintf (stream
, "%d", *ptr
);
409 if (TARGET_TI
&& l
>= 80 && len
> 1)
411 fprintf (stream
, "\n\t.byte\t");
422 fprintf (stream
, "\"%s\"", sbuf
);
425 fputc ('\n', stream
);
430 c4x_hard_regno_mode_ok (regno
, mode
)
432 enum machine_mode mode
;
437 case Pmode
: /* Pointer (24/32 bits). */
439 case QImode
: /* Integer (32 bits). */
440 return IS_INT_REGNO (regno
);
442 case QFmode
: /* Float, Double (32 bits). */
443 case HFmode
: /* Long Double (40 bits). */
444 return IS_EXT_REGNO (regno
);
446 case CCmode
: /* Condition Codes. */
447 case CC_NOOVmode
: /* Condition Codes. */
448 return IS_ST_REGNO (regno
);
450 case HImode
: /* Long Long (64 bits). */
451 /* We need two registers to store long longs. Note that
452 it is much easier to constrain the first register
453 to start on an even boundary. */
454 return IS_INT_REGNO (regno
)
455 && IS_INT_REGNO (regno
+ 1)
459 return 0; /* We don't support these modes. */
465 /* Return non-zero if REGNO1 can be renamed to REGNO2. */
467 c4x_hard_regno_rename_ok (regno1
, regno2
)
471 /* We can not copy call saved registers from mode QI into QF or from
473 if (IS_FLOAT_CALL_SAVED_REGNO (regno1
) && IS_INT_CALL_SAVED_REGNO (regno2
))
475 if (IS_INT_CALL_SAVED_REGNO (regno1
) && IS_FLOAT_CALL_SAVED_REGNO (regno2
))
477 /* We cannot copy from an extended (40 bit) register to a standard
478 (32 bit) register because we only set the condition codes for
479 extended registers. */
480 if (IS_EXT_REGNO (regno1
) && ! IS_EXT_REGNO (regno2
))
482 if (IS_EXT_REGNO (regno2
) && ! IS_EXT_REGNO (regno1
))
487 /* The TI C3x C compiler register argument runtime model uses 6 registers,
488 AR2, R2, R3, RC, RS, RE.
490 The first two floating point arguments (float, double, long double)
491 that are found scanning from left to right are assigned to R2 and R3.
493 The remaining integer (char, short, int, long) or pointer arguments
494 are assigned to the remaining registers in the order AR2, R2, R3,
495 RC, RS, RE when scanning left to right, except for the last named
496 argument prior to an ellipsis denoting variable number of
497 arguments. We don't have to worry about the latter condition since
498 function.c treats the last named argument as anonymous (unnamed).
500 All arguments that cannot be passed in registers are pushed onto
501 the stack in reverse order (right to left). GCC handles that for us.
503 c4x_init_cumulative_args() is called at the start, so we can parse
504 the args to see how many floating point arguments and how many
505 integer (or pointer) arguments there are. c4x_function_arg() is
506 then called (sometimes repeatedly) for each argument (parsed left
507 to right) to obtain the register to pass the argument in, or zero
508 if the argument is to be passed on the stack. Once the compiler is
509 happy, c4x_function_arg_advance() is called.
511 Don't use R0 to pass arguments in, we use 0 to indicate a stack
/* Integer/pointer argument registers.  The row index is the number of
   floating-point arguments already assigned to R2/R3 (0..2) — see the
   c4x_int_reglist[cum->maxfloats][cum->ints] lookup in
   c4x_function_arg — and a 0 entry means the argument goes on the
   stack (R0 is regno 0 and is deliberately never used for args).  */
514 static const int c4x_int_reglist
[3][6] =
516 {AR2_REGNO
, R2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
},
517 {AR2_REGNO
, R3_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0},
518 {AR2_REGNO
, RC_REGNO
, RS_REGNO
, RE_REGNO
, 0, 0}
/* Registers for the first two floating-point arguments.  */
521 static const int c4x_fp_reglist
[2] = {R2_REGNO
, R3_REGNO
};
524 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
525 function whose data type is FNTYPE.
526 For a library call, FNTYPE is 0. */
529 c4x_init_cumulative_args (cum
, fntype
, libname
)
530 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize. */
531 tree fntype
; /* Tree ptr for function decl. */
532 rtx libname
; /* SYMBOL_REF of library name or 0. */
534 tree param
, next_param
;
536 cum
->floats
= cum
->ints
= 0;
543 fprintf (stderr
, "\nc4x_init_cumulative_args (");
546 tree ret_type
= TREE_TYPE (fntype
);
548 fprintf (stderr
, "fntype code = %s, ret code = %s",
549 tree_code_name
[(int) TREE_CODE (fntype
)],
550 tree_code_name
[(int) TREE_CODE (ret_type
)]);
553 fprintf (stderr
, "no fntype");
556 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
559 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
561 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
562 param
; param
= next_param
)
566 next_param
= TREE_CHAIN (param
);
568 type
= TREE_VALUE (param
);
569 if (type
&& type
!= void_type_node
)
571 enum machine_mode mode
;
573 /* If the last arg doesn't have void type then we have
574 variable arguments. */
578 if ((mode
= TYPE_MODE (type
)))
580 if (! MUST_PASS_IN_STACK (mode
, type
))
582 /* Look for float, double, or long double argument. */
583 if (mode
== QFmode
|| mode
== HFmode
)
585 /* Look for integer, enumeral, boolean, char, or pointer
587 else if (mode
== QImode
|| mode
== Pmode
)
596 fprintf (stderr
, "%s%s, args = %d)\n",
597 cum
->prototype
? ", prototype" : "",
598 cum
->var
? ", variable args" : "",
603 /* Update the data in CUM to advance over an argument
604 of mode MODE and data type TYPE.
605 (TYPE is null for libcalls where that information may not be available.) */
608 c4x_function_arg_advance (cum
, mode
, type
, named
)
609 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
610 enum machine_mode mode
; /* Current arg mode. */
611 tree type
; /* Type of the arg or 0 if lib support. */
612 int named
; /* Whether or not the argument was named. */
615 fprintf (stderr
, "c4x_function_adv(mode=%s, named=%d)\n\n",
616 GET_MODE_NAME (mode
), named
);
620 && ! MUST_PASS_IN_STACK (mode
, type
))
622 /* Look for float, double, or long double argument. */
623 if (mode
== QFmode
|| mode
== HFmode
)
625 /* Look for integer, enumeral, boolean, char, or pointer argument. */
626 else if (mode
== QImode
|| mode
== Pmode
)
629 else if (! TARGET_MEMPARM
&& ! type
)
631 /* Handle libcall arguments. */
632 if (mode
== QFmode
|| mode
== HFmode
)
634 else if (mode
== QImode
|| mode
== Pmode
)
641 /* Define where to put the arguments to a function. Value is zero to
642 push the argument on the stack, or a hard register in which to
645 MODE is the argument's machine mode.
646 TYPE is the data type of the argument (as a tree).
647 This is null for libcalls where that information may
649 CUM is a variable of type CUMULATIVE_ARGS which gives info about
650 the preceding args and about the function being called.
651 NAMED is nonzero if this argument is a named parameter
652 (otherwise it is an extra parameter matching an ellipsis). */
655 c4x_function_arg (cum
, mode
, type
, named
)
656 CUMULATIVE_ARGS
*cum
; /* Current arg information. */
657 enum machine_mode mode
; /* Current arg mode. */
658 tree type
; /* Type of the arg or 0 if lib support. */
659 int named
; /* != 0 for normal args, == 0 for ... args. */
661 int reg
= 0; /* Default to passing argument on stack. */
665 /* We can handle at most 2 floats in R2, R3. */
666 cum
->maxfloats
= (cum
->floats
> 2) ? 2 : cum
->floats
;
668 /* We can handle at most 6 integers minus number of floats passed
670 cum
->maxints
= (cum
->ints
> 6 - cum
->maxfloats
) ?
671 6 - cum
->maxfloats
: cum
->ints
;
673 /* If there is no prototype, assume all the arguments are integers. */
674 if (! cum
->prototype
)
677 cum
->ints
= cum
->floats
= 0;
681 /* This marks the last argument. We don't need to pass this through
683 if (type
== void_type_node
)
689 && ! MUST_PASS_IN_STACK (mode
, type
))
691 /* Look for float, double, or long double argument. */
692 if (mode
== QFmode
|| mode
== HFmode
)
694 if (cum
->floats
< cum
->maxfloats
)
695 reg
= c4x_fp_reglist
[cum
->floats
];
697 /* Look for integer, enumeral, boolean, char, or pointer argument. */
698 else if (mode
== QImode
|| mode
== Pmode
)
700 if (cum
->ints
< cum
->maxints
)
701 reg
= c4x_int_reglist
[cum
->maxfloats
][cum
->ints
];
704 else if (! TARGET_MEMPARM
&& ! type
)
706 /* We could use a different argument calling model for libcalls,
707 since we're only calling functions in libgcc. Thus we could
708 pass arguments for long longs in registers rather than on the
709 stack. In the meantime, use the odd TI format. We make the
710 assumption that we won't have more than two floating point
711 args, six integer args, and that all the arguments are of the
713 if (mode
== QFmode
|| mode
== HFmode
)
714 reg
= c4x_fp_reglist
[cum
->floats
];
715 else if (mode
== QImode
|| mode
== Pmode
)
716 reg
= c4x_int_reglist
[0][cum
->ints
];
721 fprintf (stderr
, "c4x_function_arg(mode=%s, named=%d",
722 GET_MODE_NAME (mode
), named
);
724 fprintf (stderr
, ", reg=%s", reg_names
[reg
]);
726 fprintf (stderr
, ", stack");
727 fprintf (stderr
, ")\n");
730 return gen_rtx_REG (mode
, reg
);
737 c4x_va_start (stdarg_p
, valist
, nextarg
)
742 nextarg
= plus_constant (nextarg
, stdarg_p
? 0 : UNITS_PER_WORD
* 2);
744 std_expand_builtin_va_start (stdarg_p
, valist
, nextarg
);
748 /* C[34]x arguments grow in weird ways (downwards) that the standard
749 varargs stuff can't handle.. */
751 c4x_va_arg (valist
, type
)
756 t
= build (PREDECREMENT_EXPR
, TREE_TYPE (valist
), valist
,
757 build_int_2 (int_size_in_bytes (type
), 0));
758 TREE_SIDE_EFFECTS (t
) = 1;
760 return expand_expr (t
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
765 c4x_isr_reg_used_p (regno
)
768 /* Don't save/restore FP or ST, we handle them separately. */
769 if (regno
== FRAME_POINTER_REGNUM
770 || IS_ST_REGNO (regno
))
773 /* We could be a little smarter abut saving/restoring DP.
774 We'll only save if for the big memory model or if
775 we're paranoid. ;-) */
776 if (IS_DP_REGNO (regno
))
777 return ! TARGET_SMALL
|| TARGET_PARANOID
;
779 /* Only save/restore regs in leaf function that are used. */
780 if (c4x_leaf_function
)
781 return regs_ever_live
[regno
] && fixed_regs
[regno
] == 0;
783 /* Only save/restore regs that are used by the ISR and regs
784 that are likely to be used by functions the ISR calls
785 if they are not fixed. */
786 return IS_EXT_REGNO (regno
)
787 || ((regs_ever_live
[regno
] || call_used_regs
[regno
])
788 && fixed_regs
[regno
] == 0);
793 c4x_leaf_function_p ()
795 /* A leaf function makes no calls, so we only need
796 to save/restore the registers we actually use.
797 For the global variable leaf_function to be set, we need
798 to define LEAF_REGISTERS and all that it entails.
799 Let's check ourselves... */
801 if (lookup_attribute ("leaf_pretend",
802 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
805 /* Use the leaf_pretend attribute at your own risk. This is a hack
806 to speed up ISRs that call a function infrequently where the
807 overhead of saving and restoring the additional registers is not
808 warranted. You must save and restore the additional registers
809 required by the called function. Caveat emptor. Here's enough
812 if (leaf_function_p ())
820 c4x_assembler_function_p ()
824 type
= TREE_TYPE (current_function_decl
);
825 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type
)) != NULL
)
826 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type
)) != NULL
);
831 c4x_interrupt_function_p ()
833 if (lookup_attribute ("interrupt",
834 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
837 /* Look for TI style c_intnn. */
838 return current_function_name
[0] == 'c'
839 && current_function_name
[1] == '_'
840 && current_function_name
[2] == 'i'
841 && current_function_name
[3] == 'n'
842 && current_function_name
[4] == 't'
843 && ISDIGIT (current_function_name
[5])
844 && ISDIGIT (current_function_name
[6]);
848 c4x_expand_prologue ()
851 int size
= get_frame_size ();
854 /* In functions where ar3 is not used but frame pointers are still
855 specified, frame pointers are not adjusted (if >= -O2) and this
856 is used so it won't needlessly push the frame pointer. */
859 /* For __assembler__ function don't build a prologue. */
860 if (c4x_assembler_function_p ())
865 /* For __interrupt__ function build specific prologue. */
866 if (c4x_interrupt_function_p ())
868 c4x_leaf_function
= c4x_leaf_function_p ();
870 insn
= emit_insn (gen_push_st ());
871 RTX_FRAME_RELATED_P (insn
) = 1;
874 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
875 RTX_FRAME_RELATED_P (insn
) = 1;
876 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
877 gen_rtx_REG (QImode
, SP_REGNO
)));
878 RTX_FRAME_RELATED_P (insn
) = 1;
879 /* We require that an ISR uses fewer than 32768 words of
880 local variables, otherwise we have to go to lots of
881 effort to save a register, load it with the desired size,
882 adjust the stack pointer, and then restore the modified
883 register. Frankly, I think it is a poor ISR that
884 requires more than 32767 words of local temporary
887 error ("ISR %s requires %d words of local vars, max is 32767",
888 current_function_name
, size
);
890 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
891 gen_rtx_REG (QImode
, SP_REGNO
),
893 RTX_FRAME_RELATED_P (insn
) = 1;
895 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
897 if (c4x_isr_reg_used_p (regno
))
899 if (regno
== DP_REGNO
)
901 insn
= emit_insn (gen_push_dp ());
902 RTX_FRAME_RELATED_P (insn
) = 1;
906 insn
= emit_insn (gen_pushqi (gen_rtx_REG (QImode
, regno
)));
907 RTX_FRAME_RELATED_P (insn
) = 1;
908 if (IS_EXT_REGNO (regno
))
910 insn
= emit_insn (gen_pushqf
911 (gen_rtx_REG (QFmode
, regno
)));
912 RTX_FRAME_RELATED_P (insn
) = 1;
917 /* We need to clear the repeat mode flag if the ISR is
918 going to use a RPTB instruction or uses the RC, RS, or RE
920 if (regs_ever_live
[RC_REGNO
]
921 || regs_ever_live
[RS_REGNO
]
922 || regs_ever_live
[RE_REGNO
])
924 insn
= emit_insn (gen_andn_st (GEN_INT(~0x100)));
925 RTX_FRAME_RELATED_P (insn
) = 1;
928 /* Reload DP reg if we are paranoid about some turkey
929 violating small memory model rules. */
930 if (TARGET_SMALL
&& TARGET_PARANOID
)
932 insn
= emit_insn (gen_set_ldp_prologue
933 (gen_rtx_REG (QImode
, DP_REGNO
),
934 gen_rtx_SYMBOL_REF (QImode
, "data_sec")));
935 RTX_FRAME_RELATED_P (insn
) = 1;
940 if (frame_pointer_needed
)
943 || (current_function_args_size
!= 0)
946 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, AR3_REGNO
)));
947 RTX_FRAME_RELATED_P (insn
) = 1;
948 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, AR3_REGNO
),
949 gen_rtx_REG (QImode
, SP_REGNO
)));
950 RTX_FRAME_RELATED_P (insn
) = 1;
955 /* Since ar3 is not used, we don't need to push it. */
961 /* If we use ar3, we need to push it. */
963 if ((size
!= 0) || (current_function_args_size
!= 0))
965 /* If we are omitting the frame pointer, we still have
966 to make space for it so the offsets are correct
967 unless we don't use anything on the stack at all. */
974 /* Local vars are too big, it will take multiple operations
978 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
979 GEN_INT(size
>> 16)));
980 RTX_FRAME_RELATED_P (insn
) = 1;
981 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
982 gen_rtx_REG (QImode
, R1_REGNO
),
984 RTX_FRAME_RELATED_P (insn
) = 1;
988 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R1_REGNO
),
989 GEN_INT(size
& ~0xffff)));
990 RTX_FRAME_RELATED_P (insn
) = 1;
992 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R1_REGNO
),
993 gen_rtx_REG (QImode
, R1_REGNO
),
994 GEN_INT(size
& 0xffff)));
995 RTX_FRAME_RELATED_P (insn
) = 1;
996 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
997 gen_rtx_REG (QImode
, SP_REGNO
),
998 gen_rtx_REG (QImode
, R1_REGNO
)));
999 RTX_FRAME_RELATED_P (insn
) = 1;
1003 /* Local vars take up less than 32767 words, so we can directly
1005 insn
= emit_insn (gen_addqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1006 gen_rtx_REG (QImode
, SP_REGNO
),
1008 RTX_FRAME_RELATED_P (insn
) = 1;
1011 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1013 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1015 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1017 if (TARGET_PRESERVE_FLOAT
)
1019 insn
= emit_insn (gen_pushqi
1020 (gen_rtx_REG (QImode
, regno
)));
1021 RTX_FRAME_RELATED_P (insn
) = 1;
1023 insn
= emit_insn (gen_pushqf (gen_rtx_REG (QFmode
, regno
)));
1024 RTX_FRAME_RELATED_P (insn
) = 1;
1026 else if ((! dont_push_ar3
) || (regno
!= AR3_REGNO
))
1028 insn
= emit_insn (gen_pushqi ( gen_rtx_REG (QImode
, regno
)));
1029 RTX_FRAME_RELATED_P (insn
) = 1;
1038 c4x_expand_epilogue()
1044 int size
= get_frame_size ();
1046 /* For __assembler__ function build no epilogue. */
1047 if (c4x_assembler_function_p ())
1049 insn
= emit_jump_insn (gen_return_from_epilogue ());
1050 RTX_FRAME_RELATED_P (insn
) = 1;
1054 /* For __interrupt__ function build specific epilogue. */
1055 if (c4x_interrupt_function_p ())
1057 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; --regno
)
1059 if (! c4x_isr_reg_used_p (regno
))
1061 if (regno
== DP_REGNO
)
1063 insn
= emit_insn (gen_pop_dp ());
1064 RTX_FRAME_RELATED_P (insn
) = 1;
1068 /* We have to use unspec because the compiler will delete insns
1069 that are not call-saved. */
1070 if (IS_EXT_REGNO (regno
))
1072 insn
= emit_insn (gen_popqf_unspec
1073 (gen_rtx_REG (QFmode
, regno
)));
1074 RTX_FRAME_RELATED_P (insn
) = 1;
1076 insn
= emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode
, regno
)));
1077 RTX_FRAME_RELATED_P (insn
) = 1;
1082 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1083 gen_rtx_REG (QImode
, SP_REGNO
),
1085 RTX_FRAME_RELATED_P (insn
) = 1;
1086 insn
= emit_insn (gen_popqi
1087 (gen_rtx_REG (QImode
, AR3_REGNO
)));
1088 RTX_FRAME_RELATED_P (insn
) = 1;
1090 insn
= emit_insn (gen_pop_st ());
1091 RTX_FRAME_RELATED_P (insn
) = 1;
1092 insn
= emit_jump_insn (gen_return_from_interrupt_epilogue ());
1093 RTX_FRAME_RELATED_P (insn
) = 1;
1097 if (frame_pointer_needed
)
1100 || (current_function_args_size
!= 0)
1104 (gen_movqi (gen_rtx_REG (QImode
, R2_REGNO
),
1105 gen_rtx_MEM (QImode
,
1107 (QImode
, gen_rtx_REG (QImode
,
1110 RTX_FRAME_RELATED_P (insn
) = 1;
1112 /* We already have the return value and the fp,
1113 so we need to add those to the stack. */
1120 /* Since ar3 is not used for anything, we don't need to
1127 dont_pop_ar3
= 0; /* If we use ar3, we need to pop it. */
1128 if (size
|| current_function_args_size
)
1130 /* If we are ommitting the frame pointer, we still have
1131 to make space for it so the offsets are correct
1132 unless we don't use anything on the stack at all. */
1137 /* Now restore the saved registers, putting in the delayed branch
1139 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1141 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1143 if (regno
== AR3_REGNO
&& dont_pop_ar3
)
1146 if (IS_FLOAT_CALL_SAVED_REGNO (regno
))
1148 insn
= emit_insn (gen_popqf_unspec
1149 (gen_rtx_REG (QFmode
, regno
)));
1150 RTX_FRAME_RELATED_P (insn
) = 1;
1151 if (TARGET_PRESERVE_FLOAT
)
1153 insn
= emit_insn (gen_popqi_unspec
1154 (gen_rtx_REG (QImode
, regno
)));
1155 RTX_FRAME_RELATED_P (insn
) = 1;
1160 insn
= emit_insn (gen_popqi (gen_rtx_REG (QImode
, regno
)));
1161 RTX_FRAME_RELATED_P (insn
) = 1;
1166 if (frame_pointer_needed
)
1169 || (current_function_args_size
!= 0)
1172 /* Restore the old FP. */
1175 (gen_rtx_REG (QImode
, AR3_REGNO
),
1176 gen_rtx_MEM (QImode
, gen_rtx_REG (QImode
, AR3_REGNO
))));
1178 RTX_FRAME_RELATED_P (insn
) = 1;
1184 /* Local vars are too big, it will take multiple operations
1188 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1189 GEN_INT(size
>> 16)));
1190 RTX_FRAME_RELATED_P (insn
) = 1;
1191 insn
= emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1192 gen_rtx_REG (QImode
, R3_REGNO
),
1194 RTX_FRAME_RELATED_P (insn
) = 1;
1198 insn
= emit_insn (gen_movqi (gen_rtx_REG (QImode
, R3_REGNO
),
1199 GEN_INT(size
& ~0xffff)));
1200 RTX_FRAME_RELATED_P (insn
) = 1;
1202 insn
= emit_insn (gen_iorqi3 (gen_rtx_REG (QImode
, R3_REGNO
),
1203 gen_rtx_REG (QImode
, R3_REGNO
),
1204 GEN_INT(size
& 0xffff)));
1205 RTX_FRAME_RELATED_P (insn
) = 1;
1206 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1207 gen_rtx_REG (QImode
, SP_REGNO
),
1208 gen_rtx_REG (QImode
, R3_REGNO
)));
1209 RTX_FRAME_RELATED_P (insn
) = 1;
1213 /* Local vars take up less than 32768 words, so we can directly
1214 subtract the number. */
1215 insn
= emit_insn (gen_subqi3 (gen_rtx_REG (QImode
, SP_REGNO
),
1216 gen_rtx_REG (QImode
, SP_REGNO
),
1218 RTX_FRAME_RELATED_P (insn
) = 1;
1223 insn
= emit_jump_insn (gen_return_indirect_internal
1224 (gen_rtx_REG (QImode
, R2_REGNO
)));
1225 RTX_FRAME_RELATED_P (insn
) = 1;
1229 insn
= emit_jump_insn (gen_return_from_epilogue ());
1230 RTX_FRAME_RELATED_P (insn
) = 1;
1237 c4x_null_epilogue_p ()
1241 if (reload_completed
1242 && ! c4x_assembler_function_p ()
1243 && ! c4x_interrupt_function_p ()
1244 && ! current_function_calls_alloca
1245 && ! current_function_args_size
1247 && ! get_frame_size ())
1249 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1250 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
]
1251 && (regno
!= AR3_REGNO
))
1260 c4x_emit_move_sequence (operands
, mode
)
1262 enum machine_mode mode
;
1264 rtx op0
= operands
[0];
1265 rtx op1
= operands
[1];
1267 if (! reload_in_progress
1270 && ! (stik_const_operand (op1
, mode
) && ! push_operand (op0
, mode
)))
1271 op1
= force_reg (mode
, op1
);
1273 if (GET_CODE (op1
) == LO_SUM
1274 && GET_MODE (op1
) == Pmode
1275 && dp_reg_operand (XEXP (op1
, 0), mode
))
1277 /* expand_increment will sometimes create a LO_SUM immediate
1279 op1
= XEXP (op1
, 1);
1281 else if (symbolic_address_operand (op1
, mode
))
1283 if (TARGET_LOAD_ADDRESS
)
1285 /* Alias analysis seems to do a better job if we force
1286 constant addresses to memory after reload. */
1287 emit_insn (gen_load_immed_address (op0
, op1
));
1292 /* Stick symbol or label address into the constant pool. */
1293 op1
= force_const_mem (Pmode
, op1
);
1296 else if (mode
== HFmode
&& CONSTANT_P (op1
) && ! LEGITIMATE_CONSTANT_P (op1
))
1298 /* We could be a lot smarter about loading some of these
1300 op1
= force_const_mem (mode
, op1
);
1303 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1304 and emit associated (HIGH (SYMREF)) if large memory model.
1305 c4x_legitimize_address could be used to do this,
1306 perhaps by calling validize_address. */
1307 if (TARGET_EXPOSE_LDP
1308 && ! (reload_in_progress
|| reload_completed
)
1309 && GET_CODE (op1
) == MEM
1310 && symbolic_address_operand (XEXP (op1
, 0), Pmode
))
1312 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1314 emit_insn (gen_set_ldp (dp_reg
, XEXP (op1
, 0)));
1315 op1
= change_address (op1
, mode
,
1316 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op1
, 0)));
1319 if (TARGET_EXPOSE_LDP
1320 && ! (reload_in_progress
|| reload_completed
)
1321 && GET_CODE (op0
) == MEM
1322 && symbolic_address_operand (XEXP (op0
, 0), Pmode
))
1324 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1326 emit_insn (gen_set_ldp (dp_reg
, XEXP (op0
, 0)));
1327 op0
= change_address (op0
, mode
,
1328 gen_rtx_LO_SUM (Pmode
, dp_reg
, XEXP (op0
, 0)));
1331 if (GET_CODE (op0
) == SUBREG
1332 && mixed_subreg_operand (op0
, mode
))
1334 /* We should only generate these mixed mode patterns
1335 during RTL generation. If we need do it later on
1336 then we'll have to emit patterns that won't clobber CC. */
1337 if (reload_in_progress
|| reload_completed
)
1339 if (GET_MODE (SUBREG_REG (op0
)) == QImode
)
1340 op0
= SUBREG_REG (op0
);
1341 else if (GET_MODE (SUBREG_REG (op0
)) == HImode
)
1343 op0
= copy_rtx (op0
);
1344 PUT_MODE (op0
, QImode
);
1350 emit_insn (gen_storeqf_int_clobber (op0
, op1
));
1356 if (GET_CODE (op1
) == SUBREG
1357 && mixed_subreg_operand (op1
, mode
))
1359 /* We should only generate these mixed mode patterns
1360 during RTL generation. If we need do it later on
1361 then we'll have to emit patterns that won't clobber CC. */
1362 if (reload_in_progress
|| reload_completed
)
1364 if (GET_MODE (SUBREG_REG (op1
)) == QImode
)
1365 op1
= SUBREG_REG (op1
);
1366 else if (GET_MODE (SUBREG_REG (op1
)) == HImode
)
1368 op1
= copy_rtx (op1
);
1369 PUT_MODE (op1
, QImode
);
1375 emit_insn (gen_loadqf_int_clobber (op0
, op1
));
1382 && reg_operand (op0
, mode
)
1383 && const_int_operand (op1
, mode
)
1384 && ! IS_INT16_CONST (INTVAL (op1
))
1385 && ! IS_HIGH_CONST (INTVAL (op1
)))
1387 emit_insn (gen_loadqi_big_constant (op0
, op1
));
1392 && reg_operand (op0
, mode
)
1393 && const_int_operand (op1
, mode
))
1395 emit_insn (gen_loadhi_big_constant (op0
, op1
));
1399 /* Adjust operands in case we have modified them. */
1403 /* Emit normal pattern. */
1409 c4x_emit_libcall (libcall
, code
, dmode
, smode
, noperands
, operands
)
1412 enum machine_mode dmode
;
1413 enum machine_mode smode
;
1425 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 1,
1426 operands
[1], smode
);
1427 equiv
= gen_rtx (code
, dmode
, operands
[1]);
1431 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, dmode
, 2,
1432 operands
[1], smode
, operands
[2], smode
);
1433 equiv
= gen_rtx (code
, dmode
, operands
[1], operands
[2]);
1440 insns
= get_insns ();
1442 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1447 c4x_emit_libcall3 (libcall
, code
, mode
, operands
)
1450 enum machine_mode mode
;
1453 c4x_emit_libcall (libcall
, code
, mode
, mode
, 3, operands
);
1458 c4x_emit_libcall_mulhi (libcall
, code
, mode
, operands
)
1461 enum machine_mode mode
;
1469 ret
= emit_library_call_value (libcall
, NULL_RTX
, 1, mode
, 2,
1470 operands
[1], mode
, operands
[2], mode
);
1471 equiv
= gen_rtx_TRUNCATE (mode
,
1472 gen_rtx_LSHIFTRT (HImode
,
1473 gen_rtx_MULT (HImode
,
1474 gen_rtx (code
, HImode
, operands
[1]),
1475 gen_rtx (code
, HImode
, operands
[2])),
1477 insns
= get_insns ();
1479 emit_libcall_block (insns
, operands
[0], ret
, equiv
);
1483 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1484 yet use this info. */
1487 c4x_encode_section_info (decl
, first
)
1489 int first ATTRIBUTE_UNUSED
;
1491 if (TREE_CODE (decl
) == FUNCTION_DECL
)
1492 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
1497 c4x_check_legit_addr (mode
, addr
, strict
)
1498 enum machine_mode mode
;
1502 rtx base
= NULL_RTX
; /* Base register (AR0-AR7). */
1503 rtx indx
= NULL_RTX
; /* Index register (IR0,IR1). */
1504 rtx disp
= NULL_RTX
; /* Displacement. */
1507 code
= GET_CODE (addr
);
1510 /* Register indirect with auto increment/decrement. We don't
1511 allow SP here---push_operand should recognise an operand
1512 being pushed on the stack. */
1517 if (mode
!= QImode
&& mode
!= QFmode
)
1521 base
= XEXP (addr
, 0);
1529 rtx op0
= XEXP (addr
, 0);
1530 rtx op1
= XEXP (addr
, 1);
1532 if (mode
!= QImode
&& mode
!= QFmode
)
1536 || (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
))
1538 base
= XEXP (op1
, 0);
1541 if (REG_P (XEXP (op1
, 1)))
1542 indx
= XEXP (op1
, 1);
1544 disp
= XEXP (op1
, 1);
1548 /* Register indirect. */
1553 /* Register indirect with displacement or index. */
1556 rtx op0
= XEXP (addr
, 0);
1557 rtx op1
= XEXP (addr
, 1);
1558 enum rtx_code code0
= GET_CODE (op0
);
1565 base
= op0
; /* Base + index. */
1567 if (IS_INDEX_REG (base
) || IS_ADDR_REG (indx
))
1575 base
= op0
; /* Base + displacement. */
1586 /* Direct addressing with DP register. */
1589 rtx op0
= XEXP (addr
, 0);
1590 rtx op1
= XEXP (addr
, 1);
1592 /* HImode and HFmode direct memory references aren't truly
1593 offsettable (consider case at end of data page). We
1594 probably get better code by loading a pointer and using an
1595 indirect memory reference. */
1596 if (mode
== HImode
|| mode
== HFmode
)
1599 if (!REG_P (op0
) || REGNO (op0
) != DP_REGNO
)
1602 if ((GET_CODE (op1
) == SYMBOL_REF
|| GET_CODE (op1
) == LABEL_REF
))
1605 if (GET_CODE (op1
) == CONST
)
1611 /* Direct addressing with some work for the assembler... */
1613 /* Direct addressing. */
1616 if (! TARGET_EXPOSE_LDP
&& ! strict
&& mode
!= HFmode
&& mode
!= HImode
)
1618 /* These need to be converted to a LO_SUM (...).
1619 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1622 /* Do not allow direct memory access to absolute addresses.
1623 This is more pain than it's worth, especially for the
1624 small memory model where we can't guarantee that
1625 this address is within the data page---we don't want
1626 to modify the DP register in the small memory model,
1627 even temporarily, since an interrupt can sneak in.... */
1631 /* Indirect indirect addressing. */
1636 fatal_insn ("using CONST_DOUBLE for address", addr
);
1642 /* Validate the base register. */
1645 /* Check that the address is offsettable for HImode and HFmode. */
1646 if (indx
&& (mode
== HImode
|| mode
== HFmode
))
1649 /* Handle DP based stuff. */
1650 if (REGNO (base
) == DP_REGNO
)
1652 if (strict
&& ! REGNO_OK_FOR_BASE_P (REGNO (base
)))
1654 else if (! strict
&& ! IS_ADDR_OR_PSEUDO_REG (base
))
1658 /* Now validate the index register. */
1661 if (GET_CODE (indx
) != REG
)
1663 if (strict
&& ! REGNO_OK_FOR_INDEX_P (REGNO (indx
)))
1665 else if (! strict
&& ! IS_INDEX_OR_PSEUDO_REG (indx
))
1669 /* Validate displacement. */
1672 if (GET_CODE (disp
) != CONST_INT
)
1674 if (mode
== HImode
|| mode
== HFmode
)
1676 /* The offset displacement must be legitimate. */
1677 if (! IS_DISP8_OFF_CONST (INTVAL (disp
)))
1682 if (! IS_DISP8_CONST (INTVAL (disp
)))
1685 /* Can't add an index with a disp. */
1694 c4x_legitimize_address (orig
, mode
)
1695 rtx orig ATTRIBUTE_UNUSED
;
1696 enum machine_mode mode ATTRIBUTE_UNUSED
;
1698 if (GET_CODE (orig
) == SYMBOL_REF
1699 || GET_CODE (orig
) == LABEL_REF
)
1701 if (mode
== HImode
|| mode
== HFmode
)
1703 /* We need to force the address into
1704 a register so that it is offsettable. */
1705 rtx addr_reg
= gen_reg_rtx (Pmode
);
1706 emit_move_insn (addr_reg
, orig
);
1711 rtx dp_reg
= gen_rtx_REG (Pmode
, DP_REGNO
);
1714 emit_insn (gen_set_ldp (dp_reg
, orig
));
1716 return gen_rtx_LO_SUM (Pmode
, dp_reg
, orig
);
1724 /* Provide the costs of an addressing mode that contains ADDR.
1725 If ADDR is not a valid address, its cost is irrelevant.
1726 This is used in cse and loop optimisation to determine
1727 if it is worthwhile storing a common address into a register.
1728 Unfortunately, the C4x address cost depends on other operands. */
1731 c4x_address_cost (addr
)
1734 switch (GET_CODE (addr
))
1745 /* These shouldn't be directly generated. */
1753 rtx op1
= XEXP (addr
, 1);
1755 if (GET_CODE (op1
) == LABEL_REF
|| GET_CODE (op1
) == SYMBOL_REF
)
1756 return TARGET_SMALL
? 3 : 4;
1758 if (GET_CODE (op1
) == CONST
)
1760 rtx offset
= const0_rtx
;
1762 op1
= eliminate_constant_term (op1
, &offset
);
1764 /* ??? These costs need rethinking... */
1765 if (GET_CODE (op1
) == LABEL_REF
)
1768 if (GET_CODE (op1
) != SYMBOL_REF
)
1771 if (INTVAL (offset
) == 0)
1776 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr
);
1782 register rtx op0
= XEXP (addr
, 0);
1783 register rtx op1
= XEXP (addr
, 1);
1785 if (GET_CODE (op0
) != REG
)
1788 switch (GET_CODE (op1
))
1794 /* This cost for REG+REG must be greater than the cost
1795 for REG if we want autoincrement addressing modes. */
1799 /* The following tries to improve GIV combination
1800 in strength reduce but appears not to help. */
1801 if (TARGET_DEVEL
&& IS_UINT5_CONST (INTVAL (op1
)))
1804 if (IS_DISP1_CONST (INTVAL (op1
)))
1807 if (! TARGET_C3X
&& IS_UINT5_CONST (INTVAL (op1
)))
1822 c4x_gen_compare_reg (code
, x
, y
)
1826 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1829 if (mode
== CC_NOOVmode
1830 && (code
== LE
|| code
== GE
|| code
== LT
|| code
== GT
))
1833 cc_reg
= gen_rtx_REG (mode
, ST_REGNO
);
1834 emit_insn (gen_rtx_SET (VOIDmode
, cc_reg
,
1835 gen_rtx_COMPARE (mode
, x
, y
)));
1840 c4x_output_cbranch (form
, seq
)
1849 static char str
[100];
1853 delay
= XVECEXP (final_sequence
, 0, 1);
1854 delayed
= ! INSN_ANNULLED_BRANCH_P (seq
);
1855 annultrue
= INSN_ANNULLED_BRANCH_P (seq
) && ! INSN_FROM_TARGET_P (delay
);
1856 annulfalse
= INSN_ANNULLED_BRANCH_P (seq
) && INSN_FROM_TARGET_P (delay
);
1859 cp
= &str
[strlen (str
)];
1884 c4x_print_operand (file
, op
, letter
)
1885 FILE *file
; /* File to write to. */
1886 rtx op
; /* Operand to print. */
1887 int letter
; /* %<letter> or 0. */
1894 case '#': /* Delayed. */
1896 asm_fprintf (file
, "d");
1900 code
= GET_CODE (op
);
1903 case 'A': /* Direct address. */
1904 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== CONST
)
1905 asm_fprintf (file
, "@");
1908 case 'H': /* Sethi. */
1909 output_addr_const (file
, op
);
1912 case 'I': /* Reversed condition. */
1913 code
= reverse_condition (code
);
1916 case 'L': /* Log 2 of constant. */
1917 if (code
!= CONST_INT
)
1918 fatal_insn ("c4x_print_operand: %%L inconsistency", op
);
1919 fprintf (file
, "%d", exact_log2 (INTVAL (op
)));
1922 case 'N': /* Ones complement of small constant. */
1923 if (code
!= CONST_INT
)
1924 fatal_insn ("c4x_print_operand: %%N inconsistency", op
);
1925 fprintf (file
, "%d", ~INTVAL (op
));
1928 case 'K': /* Generate ldp(k) if direct address. */
1931 && GET_CODE (XEXP (op
, 0)) == LO_SUM
1932 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == REG
1933 && REGNO (XEXP (XEXP (op
, 0), 0)) == DP_REGNO
)
1935 op1
= XEXP (XEXP (op
, 0), 1);
1936 if (GET_CODE(op1
) == CONST_INT
|| GET_CODE(op1
) == SYMBOL_REF
)
1938 asm_fprintf (file
, "\t%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1939 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1940 asm_fprintf (file
, "\n");
1945 case 'M': /* Generate ldp(k) if direct address. */
1946 if (! TARGET_SMALL
/* Only used in asm statements. */
1948 && (GET_CODE (XEXP (op
, 0)) == CONST
1949 || GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
))
1951 asm_fprintf (file
, "%s\t@", TARGET_C3X
? "ldp" : "ldpk");
1952 output_address (XEXP (op
, 0));
1953 asm_fprintf (file
, "\n\t");
1957 case 'O': /* Offset address. */
1958 if (code
== MEM
&& c4x_autoinc_operand (op
, Pmode
))
1960 else if (code
== MEM
)
1961 output_address (XEXP (adjust_address (op
, VOIDmode
, 1), 0));
1962 else if (code
== REG
)
1963 fprintf (file
, "%s", reg_names
[REGNO (op
) + 1]);
1965 fatal_insn ("c4x_print_operand: %%O inconsistency", op
);
1968 case 'C': /* Call. */
1971 case 'U': /* Call/callu. */
1972 if (code
!= SYMBOL_REF
)
1973 asm_fprintf (file
, "u");
1983 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1985 fprintf (file
, "%s", float_reg_names
[REGNO (op
)]);
1987 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1991 output_address (XEXP (op
, 0));
1999 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2000 REAL_VALUE_TO_DECIMAL (r
, "%20f", str
);
2001 fprintf (file
, "%s", str
);
2006 fprintf (file
, "%d", INTVAL (op
));
2010 asm_fprintf (file
, "ne");
2014 asm_fprintf (file
, "eq");
2018 asm_fprintf (file
, "ge");
2022 asm_fprintf (file
, "gt");
2026 asm_fprintf (file
, "le");
2030 asm_fprintf (file
, "lt");
2034 asm_fprintf (file
, "hs");
2038 asm_fprintf (file
, "hi");
2042 asm_fprintf (file
, "ls");
2046 asm_fprintf (file
, "lo");
2050 output_addr_const (file
, op
);
2054 output_addr_const (file
, XEXP (op
, 0));
2061 fatal_insn ("c4x_print_operand: Bad operand case", op
);
2068 c4x_print_operand_address (file
, addr
)
2072 switch (GET_CODE (addr
))
2075 fprintf (file
, "*%s", reg_names
[REGNO (addr
)]);
2079 fprintf (file
, "*--%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2083 fprintf (file
, "*%s++", reg_names
[REGNO (XEXP (addr
, 0))]);
2088 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2089 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2091 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2092 fprintf (file
, "*%s++(%s)", reg_names
[REGNO (op0
)],
2093 reg_names
[REGNO (op1
)]);
2094 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2095 fprintf (file
, "*%s++(%d)", reg_names
[REGNO (op0
)],
2097 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2098 fprintf (file
, "*%s--(%d)", reg_names
[REGNO (op0
)],
2100 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2101 fprintf (file
, "*%s--(%s)", reg_names
[REGNO (op0
)],
2102 reg_names
[REGNO (op1
)]);
2104 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr
);
2110 rtx op0
= XEXP (XEXP (addr
, 1), 0);
2111 rtx op1
= XEXP (XEXP (addr
, 1), 1);
2113 if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& REG_P (op1
))
2114 fprintf (file
, "*++%s(%s)", reg_names
[REGNO (op0
)],
2115 reg_names
[REGNO (op1
)]);
2116 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) > 0)
2117 fprintf (file
, "*++%s(%d)", reg_names
[REGNO (op0
)],
2119 else if (GET_CODE (XEXP (addr
, 1)) == PLUS
&& INTVAL (op1
) < 0)
2120 fprintf (file
, "*--%s(%d)", reg_names
[REGNO (op0
)],
2122 else if (GET_CODE (XEXP (addr
, 1)) == MINUS
&& REG_P (op1
))
2123 fprintf (file
, "*--%s(%s)", reg_names
[REGNO (op0
)],
2124 reg_names
[REGNO (op1
)]);
2126 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr
);
2131 fprintf (file
, "*++%s", reg_names
[REGNO (XEXP (addr
, 0))]);
2135 fprintf (file
, "*%s--", reg_names
[REGNO (XEXP (addr
, 0))]);
2138 case PLUS
: /* Indirect with displacement. */
2140 rtx op0
= XEXP (addr
, 0);
2141 rtx op1
= XEXP (addr
, 1);
2147 if (IS_INDEX_REG (op0
))
2149 fprintf (file
, "*+%s(%s)",
2150 reg_names
[REGNO (op1
)],
2151 reg_names
[REGNO (op0
)]); /* Index + base. */
2155 fprintf (file
, "*+%s(%s)",
2156 reg_names
[REGNO (op0
)],
2157 reg_names
[REGNO (op1
)]); /* Base + index. */
2160 else if (INTVAL (op1
) < 0)
2162 fprintf (file
, "*-%s(%d)",
2163 reg_names
[REGNO (op0
)],
2164 -INTVAL (op1
)); /* Base - displacement. */
2168 fprintf (file
, "*+%s(%d)",
2169 reg_names
[REGNO (op0
)],
2170 INTVAL (op1
)); /* Base + displacement. */
2174 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2180 rtx op0
= XEXP (addr
, 0);
2181 rtx op1
= XEXP (addr
, 1);
2183 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
2184 c4x_print_operand_address (file
, op1
);
2186 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2193 fprintf (file
, "@");
2194 output_addr_const (file
, addr
);
2197 /* We shouldn't access CONST_INT addresses. */
2201 fatal_insn ("c4x_print_operand_address: Bad operand case", addr
);
2207 /* Return nonzero if the floating point operand will fit
2208 in the immediate field. */
2211 c4x_immed_float_p (op
)
2218 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
2219 if (GET_MODE (op
) == HFmode
)
2220 REAL_VALUE_TO_TARGET_DOUBLE (r
, convval
);
2223 REAL_VALUE_TO_TARGET_SINGLE (r
, convval
[0]);
2227 /* Sign extend exponent. */
2228 exponent
= (((convval
[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2229 if (exponent
== -128)
2231 if ((convval
[0] & 0x00000fff) != 0 || convval
[1] != 0)
2232 return 0; /* Precision doesn't fit. */
2233 return (exponent
<= 7) /* Positive exp. */
2234 && (exponent
>= -7); /* Negative exp. */
2238 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2239 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2241 None of the last four instructions from the bottom of the block can
2242 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2243 BcondAT or RETIcondD.
2245 This routine scans the four previous insns for a jump insn, and if
2246 one is found, returns 1 so that we bung in a nop instruction.
2247 This simple minded strategy will add a nop, when it may not
2248 be required. Say when there is a JUMP_INSN near the end of the
2249 block that doesn't get converted into a delayed branch.
2251 Note that we cannot have a call insn, since we don't generate
2252 repeat loops with calls in them (although I suppose we could, but
2253 there's no benefit.)
2255 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2258 c4x_rptb_nop_p (insn
)
2264 /* Extract the start label from the jump pattern (rptb_end). */
2265 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2267 /* If there is a label at the end of the loop we must insert
2270 insn
= previous_insn (insn
);
2271 } while (GET_CODE (insn
) == NOTE
2272 || GET_CODE (insn
) == USE
2273 || GET_CODE (insn
) == CLOBBER
);
2274 if (GET_CODE (insn
) == CODE_LABEL
)
2277 for (i
= 0; i
< 4; i
++)
2279 /* Search back for prev non-note and non-label insn. */
2280 while (GET_CODE (insn
) == NOTE
|| GET_CODE (insn
) == CODE_LABEL
2281 || GET_CODE (insn
) == USE
|| GET_CODE (insn
) == CLOBBER
)
2283 if (insn
== start_label
)
2286 insn
= previous_insn (insn
);
2289 /* If we have a jump instruction we should insert a NOP. If we
2290 hit repeat block top we should only insert a NOP if the loop
2292 if (GET_CODE (insn
) == JUMP_INSN
)
2294 insn
= previous_insn (insn
);
2300 /* The C4x looping instruction needs to be emitted at the top of the
2301 loop. Emitting the true RTL for a looping instruction at the top of
2302 the loop can cause problems with flow analysis. So instead, a dummy
2303 doloop insn is emitted at the end of the loop. This routine checks
2304 for the presence of this doloop insn and then searches back to the
2305 top of the loop, where it inserts the true looping insn (provided
2306 there are no instructions in the loop which would cause problems).
2307 Any additional labels can be emitted at this point. In addition, if
2308 the desired loop count register was not allocated, this routine does
2311 Before we can create a repeat block looping instruction we have to
2312 verify that there are no jumps outside the loop and no jumps outside
2313 the loop go into this loop. This can happen in the basic blocks reorder
2314 pass. The C4x cpu can not handle this. */
2317 c4x_label_ref_used_p (x
, code_label
)
2327 code
= GET_CODE (x
);
2328 if (code
== LABEL_REF
)
2329 return INSN_UID (XEXP (x
,0)) == INSN_UID (code_label
);
2331 fmt
= GET_RTX_FORMAT (code
);
2332 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2336 if (c4x_label_ref_used_p (XEXP (x
, i
), code_label
))
2339 else if (fmt
[i
] == 'E')
2340 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2341 if (c4x_label_ref_used_p (XVECEXP (x
, i
, j
), code_label
))
2349 c4x_rptb_valid_p (insn
, start_label
)
2350 rtx insn
, start_label
;
2356 /* Find the start label. */
2357 for (; insn
; insn
= PREV_INSN (insn
))
2358 if (insn
== start_label
)
2361 /* Note found then we can not use a rptb or rpts. The label was
2362 probably moved by the basic block reorder pass. */
2367 /* If any jump jumps inside this block then we must fail. */
2368 for (insn
= PREV_INSN (start
); insn
; insn
= PREV_INSN (insn
))
2370 if (GET_CODE (insn
) == CODE_LABEL
)
2372 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2373 if (GET_CODE (tmp
) == JUMP_INSN
2374 && c4x_label_ref_used_p (tmp
, insn
))
2378 for (insn
= NEXT_INSN (end
); insn
; insn
= NEXT_INSN (insn
))
2380 if (GET_CODE (insn
) == CODE_LABEL
)
2382 for (tmp
= NEXT_INSN (start
); tmp
!= end
; tmp
= NEXT_INSN(tmp
))
2383 if (GET_CODE (tmp
) == JUMP_INSN
2384 && c4x_label_ref_used_p (tmp
, insn
))
2388 /* If any jump jumps outside this block then we must fail. */
2389 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
2391 if (GET_CODE (insn
) == CODE_LABEL
)
2393 for (tmp
= NEXT_INSN (end
); tmp
; tmp
= NEXT_INSN(tmp
))
2394 if (GET_CODE (tmp
) == JUMP_INSN
2395 && c4x_label_ref_used_p (tmp
, insn
))
2397 for (tmp
= PREV_INSN (start
); tmp
; tmp
= PREV_INSN(tmp
))
2398 if (GET_CODE (tmp
) == JUMP_INSN
2399 && c4x_label_ref_used_p (tmp
, insn
))
2404 /* All checks OK. */
2410 c4x_rptb_insert (insn
)
2415 rtx new_start_label
;
2418 /* If the count register has not been allocated to RC, say if
2419 there is a movstr pattern in the loop, then do not insert a
2420 RPTB instruction. Instead we emit a decrement and branch
2421 at the end of the loop. */
2422 count_reg
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 0), 0);
2423 if (REGNO (count_reg
) != RC_REGNO
)
2426 /* Extract the start label from the jump pattern (rptb_end). */
2427 start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
), 0, 0)), 1), 0);
2429 if (! c4x_rptb_valid_p (insn
, start_label
))
2431 /* We can not use the rptb insn. Replace it so reorg can use
2432 the delay slots of the jump insn. */
2433 emit_insn_before (gen_addqi3 (count_reg
, count_reg
, GEN_INT (-1)), insn
);
2434 emit_insn_before (gen_cmpqi (count_reg
, GEN_INT (0)), insn
);
2435 emit_insn_before (gen_bge (start_label
), insn
);
2436 LABEL_NUSES (start_label
)++;
2441 end_label
= gen_label_rtx ();
2442 LABEL_NUSES (end_label
)++;
2443 emit_label_after (end_label
, insn
);
2445 new_start_label
= gen_label_rtx ();
2446 LABEL_NUSES (new_start_label
)++;
2448 for (; insn
; insn
= PREV_INSN (insn
))
2450 if (insn
== start_label
)
2452 if (GET_CODE (insn
) == JUMP_INSN
&&
2453 JUMP_LABEL (insn
) == start_label
)
2454 redirect_jump (insn
, new_start_label
, 0);
2457 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label
);
2459 emit_label_after (new_start_label
, insn
);
2461 if (TARGET_RPTS
&& c4x_rptb_rpts_p (PREV_INSN (insn
), 0))
2462 emit_insn_after (gen_rpts_top (new_start_label
, end_label
), insn
);
2464 emit_insn_after (gen_rptb_top (new_start_label
, end_label
), insn
);
2465 if (LABEL_NUSES (start_label
) == 0)
2466 delete_insn (start_label
);
2470 /* This function is a C4x special called immediately before delayed
2471 branch scheduling. We fix up RTPB style loops that didn't get RC
2472 allocated as the loop counter. */
2475 c4x_process_after_reload (first
)
2480 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2482 /* Look for insn. */
2485 int insn_code_number
;
2488 insn_code_number
= recog_memoized (insn
);
2490 if (insn_code_number
< 0)
2493 /* Insert the RTX for RPTB at the top of the loop
2494 and a label at the end of the loop. */
2495 if (insn_code_number
== CODE_FOR_rptb_end
)
2496 c4x_rptb_insert(insn
);
2498 /* We need to split the insn here. Otherwise the calls to
2499 force_const_mem will not work for load_immed_address. */
2502 /* Don't split the insn if it has been deleted. */
2503 if (! INSN_DELETED_P (old
))
2504 insn
= try_split (PATTERN(old
), old
, 1);
2506 /* When not optimizing, the old insn will be still left around
2507 with only the 'deleted' bit set. Transform it into a note
2508 to avoid confusion of subsequent processing. */
2509 if (INSN_DELETED_P (old
))
2511 PUT_CODE (old
, NOTE
);
2512 NOTE_LINE_NUMBER (old
) = NOTE_INSN_DELETED
;
2513 NOTE_SOURCE_FILE (old
) = 0;
2524 return REG_P (op
) && IS_ADDR_OR_PSEUDO_REG (op
);
2532 return REG_P (op
) && IS_INDEX_OR_PSEUDO_REG (op
);
2537 c4x_immed_int_constant (op
)
2540 if (GET_CODE (op
) != CONST_INT
)
2543 return GET_MODE (op
) == VOIDmode
2544 || GET_MODE_CLASS (op
) == MODE_INT
2545 || GET_MODE_CLASS (op
) == MODE_PARTIAL_INT
;
2550 c4x_immed_float_constant (op
)
2553 if (GET_CODE (op
) != CONST_DOUBLE
)
2556 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2557 present this only means that a MEM rtx has been generated. It does
2558 not mean the rtx is really in memory. */
2560 return GET_MODE (op
) == QFmode
|| GET_MODE (op
) == HFmode
;
2565 c4x_shiftable_constant (op
)
2570 int val
= INTVAL (op
);
2572 for (i
= 0; i
< 16; i
++)
2577 mask
= ((0xffff >> i
) << 16) | 0xffff;
2578 if (IS_INT16_CONST (val
& (1 << 31) ? (val
>> i
) | ~mask
2579 : (val
>> i
) & mask
))
2589 return c4x_immed_float_constant (op
) && c4x_immed_float_p (op
);
2597 return c4x_immed_int_constant (op
) && IS_INT16_CONST (INTVAL (op
));
2607 return c4x_immed_int_constant (op
) && IS_INT8_CONST (INTVAL (op
));
2615 if (TARGET_C3X
|| ! c4x_immed_int_constant (op
))
2617 return IS_INT5_CONST (INTVAL (op
));
2625 return c4x_immed_int_constant (op
) && IS_UINT16_CONST (INTVAL (op
));
2633 return c4x_immed_int_constant (op
) && IS_NOT_UINT16_CONST (INTVAL (op
));
2641 return c4x_immed_int_constant (op
) && IS_HIGH_CONST (INTVAL (op
));
2645 /* The constraints do not have to check the register class,
2646 except when needed to discriminate between the constraints.
2647 The operand has been checked by the predicates to be valid. */
2649 /* ARx + 9-bit signed const or IRn
2650 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2651 We don't include the pre/post inc/dec forms here since
2652 they are handled by the <> constraints. */
2655 c4x_Q_constraint (op
)
2658 enum machine_mode mode
= GET_MODE (op
);
2660 if (GET_CODE (op
) != MEM
)
2663 switch (GET_CODE (op
))
2670 rtx op0
= XEXP (op
, 0);
2671 rtx op1
= XEXP (op
, 1);
2679 if (GET_CODE (op1
) != CONST_INT
)
2682 /* HImode and HFmode must be offsettable. */
2683 if (mode
== HImode
|| mode
== HFmode
)
2684 return IS_DISP8_OFF_CONST (INTVAL (op1
));
2686 return IS_DISP8_CONST (INTVAL (op1
));
2697 /* ARx + 5-bit unsigned const
2698 *ARx, *+ARx(n) for n < 32. */
2701 c4x_R_constraint (op
)
2704 enum machine_mode mode
= GET_MODE (op
);
2708 if (GET_CODE (op
) != MEM
)
2711 switch (GET_CODE (op
))
2718 rtx op0
= XEXP (op
, 0);
2719 rtx op1
= XEXP (op
, 1);
2724 if (GET_CODE (op1
) != CONST_INT
)
2727 /* HImode and HFmode must be offsettable. */
2728 if (mode
== HImode
|| mode
== HFmode
)
2729 return IS_UINT5_CONST (INTVAL (op1
) + 1);
2731 return IS_UINT5_CONST (INTVAL (op1
));
2746 enum machine_mode mode
= GET_MODE (op
);
2748 if (TARGET_C3X
|| GET_CODE (op
) != MEM
)
2752 switch (GET_CODE (op
))
2755 return IS_ADDR_OR_PSEUDO_REG (op
);
2759 rtx op0
= XEXP (op
, 0);
2760 rtx op1
= XEXP (op
, 1);
2762 /* HImode and HFmode must be offsettable. */
2763 if (mode
== HImode
|| mode
== HFmode
)
2764 return IS_ADDR_OR_PSEUDO_REG (op0
)
2765 && GET_CODE (op1
) == CONST_INT
2766 && IS_UINT5_CONST (INTVAL (op1
) + 1);
2769 && IS_ADDR_OR_PSEUDO_REG (op0
)
2770 && GET_CODE (op1
) == CONST_INT
2771 && IS_UINT5_CONST (INTVAL (op1
));
2782 /* ARx + 1-bit unsigned const or IRn
2783 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2784 We don't include the pre/post inc/dec forms here since
2785 they are handled by the <> constraints. */
2788 c4x_S_constraint (op
)
2791 enum machine_mode mode
= GET_MODE (op
);
2792 if (GET_CODE (op
) != MEM
)
2795 switch (GET_CODE (op
))
2803 rtx op0
= XEXP (op
, 0);
2804 rtx op1
= XEXP (op
, 1);
2806 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2807 || (op0
!= XEXP (op1
, 0)))
2810 op0
= XEXP (op1
, 0);
2811 op1
= XEXP (op1
, 1);
2812 return REG_P (op0
) && REG_P (op1
);
2813 /* Pre or post_modify with a displacement of 0 or 1
2814 should not be generated. */
2820 rtx op0
= XEXP (op
, 0);
2821 rtx op1
= XEXP (op
, 1);
2829 if (GET_CODE (op1
) != CONST_INT
)
2832 /* HImode and HFmode must be offsettable. */
2833 if (mode
== HImode
|| mode
== HFmode
)
2834 return IS_DISP1_OFF_CONST (INTVAL (op1
));
2836 return IS_DISP1_CONST (INTVAL (op1
));
2851 enum machine_mode mode
= GET_MODE (op
);
2852 if (GET_CODE (op
) != MEM
)
2856 switch (GET_CODE (op
))
2860 if (mode
!= QImode
&& mode
!= QFmode
)
2867 return IS_ADDR_OR_PSEUDO_REG (op
);
2872 rtx op0
= XEXP (op
, 0);
2873 rtx op1
= XEXP (op
, 1);
2875 if (mode
!= QImode
&& mode
!= QFmode
)
2878 if ((GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
2879 || (op0
!= XEXP (op1
, 0)))
2882 op0
= XEXP (op1
, 0);
2883 op1
= XEXP (op1
, 1);
2884 return REG_P (op0
) && IS_ADDR_OR_PSEUDO_REG (op0
)
2885 && REG_P (op1
) && IS_INDEX_OR_PSEUDO_REG (op1
);
2886 /* Pre or post_modify with a displacement of 0 or 1
2887 should not be generated. */
2892 rtx op0
= XEXP (op
, 0);
2893 rtx op1
= XEXP (op
, 1);
2897 /* HImode and HFmode must be offsettable. */
2898 if (mode
== HImode
|| mode
== HFmode
)
2899 return IS_ADDR_OR_PSEUDO_REG (op0
)
2900 && GET_CODE (op1
) == CONST_INT
2901 && IS_DISP1_OFF_CONST (INTVAL (op1
));
2904 return (IS_INDEX_OR_PSEUDO_REG (op1
)
2905 && IS_ADDR_OR_PSEUDO_REG (op0
))
2906 || (IS_ADDR_OR_PSEUDO_REG (op1
)
2907 && IS_INDEX_OR_PSEUDO_REG (op0
));
2909 return IS_ADDR_OR_PSEUDO_REG (op0
)
2910 && GET_CODE (op1
) == CONST_INT
2911 && IS_DISP1_CONST (INTVAL (op1
));
2923 /* Direct memory operand. */
2926 c4x_T_constraint (op
)
2929 if (GET_CODE (op
) != MEM
)
2933 if (GET_CODE (op
) != LO_SUM
)
2935 /* Allow call operands. */
2936 return GET_CODE (op
) == SYMBOL_REF
2937 && GET_MODE (op
) == Pmode
2938 && SYMBOL_REF_FLAG (op
);
2941 /* HImode and HFmode are not offsettable. */
2942 if (GET_MODE (op
) == HImode
|| GET_CODE (op
) == HFmode
)
2945 if ((GET_CODE (XEXP (op
, 0)) == REG
)
2946 && (REGNO (XEXP (op
, 0)) == DP_REGNO
))
2947 return c4x_U_constraint (XEXP (op
, 1));
2953 /* Symbolic operand. */
2956 c4x_U_constraint (op
)
2959 /* Don't allow direct addressing to an arbitrary constant. */
2960 return GET_CODE (op
) == CONST
2961 || GET_CODE (op
) == SYMBOL_REF
2962 || GET_CODE (op
) == LABEL_REF
;
2967 c4x_autoinc_operand (op
, mode
)
2969 enum machine_mode mode ATTRIBUTE_UNUSED
;
2971 if (GET_CODE (op
) == MEM
)
2973 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
2979 || code
== PRE_MODIFY
2980 || code
== POST_MODIFY
2988 /* Match any operand. */
2991 any_operand (op
, mode
)
2992 register rtx op ATTRIBUTE_UNUSED
;
2993 enum machine_mode mode ATTRIBUTE_UNUSED
;
2999 /* Nonzero if OP is a floating point value with value 0.0. */
3002 fp_zero_operand (op
, mode
)
3004 enum machine_mode mode ATTRIBUTE_UNUSED
;
3008 if (GET_CODE (op
) != CONST_DOUBLE
)
3010 REAL_VALUE_FROM_CONST_DOUBLE (r
, op
);
3011 return REAL_VALUES_EQUAL (r
, dconst0
);
3016 const_operand (op
, mode
)
3018 register enum machine_mode mode
;
3024 if (GET_CODE (op
) != CONST_DOUBLE
3025 || GET_MODE (op
) != mode
3026 || GET_MODE_CLASS (mode
) != MODE_FLOAT
)
3029 return c4x_immed_float_p (op
);
3035 if (GET_CODE (op
) == CONSTANT_P_RTX
)
3038 if (GET_CODE (op
) != CONST_INT
3039 || (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != mode
)
3040 || GET_MODE_CLASS (mode
) != MODE_INT
)
3043 return IS_HIGH_CONST (INTVAL (op
)) || IS_INT16_CONST (INTVAL (op
));
3055 stik_const_operand (op
, mode
)
3057 enum machine_mode mode ATTRIBUTE_UNUSED
;
3059 return c4x_K_constant (op
);
3064 not_const_operand (op
, mode
)
3066 enum machine_mode mode ATTRIBUTE_UNUSED
;
3068 return c4x_N_constant (op
);
3073 reg_operand (op
, mode
)
3075 enum machine_mode mode
;
3077 if (GET_CODE (op
) == SUBREG
3078 && GET_MODE (op
) == QFmode
)
3080 return register_operand (op
, mode
);
3085 mixed_subreg_operand (op
, mode
)
3087 enum machine_mode mode ATTRIBUTE_UNUSED
;
3089 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3090 int and a long double. */
3091 if (GET_CODE (op
) == SUBREG
3092 && (GET_MODE (op
) == QFmode
)
3093 && (GET_MODE (SUBREG_REG (op
)) == QImode
3094 || GET_MODE (SUBREG_REG (op
)) == HImode
))
3101 reg_imm_operand (op
, mode
)
3103 enum machine_mode mode ATTRIBUTE_UNUSED
;
3105 if (REG_P (op
) || CONSTANT_P (op
))
3112 not_modify_reg (op
, mode
)
3114 enum machine_mode mode ATTRIBUTE_UNUSED
;
3116 if (REG_P (op
) || CONSTANT_P (op
))
3118 if (GET_CODE (op
) != MEM
)
3121 switch (GET_CODE (op
))
3128 rtx op0
= XEXP (op
, 0);
3129 rtx op1
= XEXP (op
, 1);
3134 if (REG_P (op1
) || GET_CODE (op1
) == CONST_INT
)
3140 rtx op0
= XEXP (op
, 0);
3142 if (REG_P (op0
) && REGNO (op0
) == DP_REGNO
)
3160 not_rc_reg (op
, mode
)
3162 enum machine_mode mode ATTRIBUTE_UNUSED
;
3164 if (REG_P (op
) && REGNO (op
) == RC_REGNO
)
3170 /* Extended precision register R0-R1. */
3173 r0r1_reg_operand (op
, mode
)
3175 enum machine_mode mode
;
3177 if (! reg_operand (op
, mode
))
3179 if (GET_CODE (op
) == SUBREG
)
3180 op
= SUBREG_REG (op
);
3181 return REG_P (op
) && IS_R0R1_OR_PSEUDO_REG (op
);
3185 /* Extended precision register R2-R3. */
3188 r2r3_reg_operand (op
, mode
)
3190 enum machine_mode mode
;
3192 if (! reg_operand (op
, mode
))
3194 if (GET_CODE (op
) == SUBREG
)
3195 op
= SUBREG_REG (op
);
3196 return REG_P (op
) && IS_R2R3_OR_PSEUDO_REG (op
);
3200 /* Low extended precision register R0-R7. */
3203 ext_low_reg_operand (op
, mode
)
3205 enum machine_mode mode
;
3207 if (! reg_operand (op
, mode
))
3209 if (GET_CODE (op
) == SUBREG
)
3210 op
= SUBREG_REG (op
);
3211 return REG_P (op
) && IS_EXT_LOW_OR_PSEUDO_REG (op
);
3215 /* Extended precision register. */
3218 ext_reg_operand (op
, mode
)
3220 enum machine_mode mode
;
3222 if (! reg_operand (op
, mode
))
3224 if (GET_CODE (op
) == SUBREG
)
3225 op
= SUBREG_REG (op
);
3228 return IS_EXT_OR_PSEUDO_REG (op
);
3232 /* Standard precision register. */
3235 std_reg_operand (op
, mode
)
3237 enum machine_mode mode
;
3239 if (! reg_operand (op
, mode
))
3241 if (GET_CODE (op
) == SUBREG
)
3242 op
= SUBREG_REG (op
);
3243 return REG_P (op
) && IS_STD_OR_PSEUDO_REG (op
);
3246 /* Standard precision or normal register. */
3249 std_or_reg_operand (op
, mode
)
3251 enum machine_mode mode
;
3253 if (reload_in_progress
)
3254 return std_reg_operand (op
, mode
);
3255 return reg_operand (op
, mode
);
3258 /* Address register. */
3261 addr_reg_operand (op
, mode
)
3263 enum machine_mode mode
;
3265 if (! reg_operand (op
, mode
))
3267 return c4x_a_register (op
);
3271 /* Index register. */
3274 index_reg_operand (op
, mode
)
3276 enum machine_mode mode
;
3278 if (! reg_operand (op
, mode
))
3280 if (GET_CODE (op
) == SUBREG
)
3281 op
= SUBREG_REG (op
);
3282 return c4x_x_register (op
);
3289 dp_reg_operand (op
, mode
)
3291 enum machine_mode mode ATTRIBUTE_UNUSED
;
3293 return REG_P (op
) && IS_DP_OR_PSEUDO_REG (op
);
3300 sp_reg_operand (op
, mode
)
3302 enum machine_mode mode ATTRIBUTE_UNUSED
;
3304 return REG_P (op
) && IS_SP_OR_PSEUDO_REG (op
);
3311 st_reg_operand (op
, mode
)
3313 enum machine_mode mode ATTRIBUTE_UNUSED
;
3315 return REG_P (op
) && IS_ST_OR_PSEUDO_REG (op
);
3322 rc_reg_operand (op
, mode
)
3324 enum machine_mode mode ATTRIBUTE_UNUSED
;
3326 return REG_P (op
) && IS_RC_OR_PSEUDO_REG (op
);
3331 call_address_operand (op
, mode
)
3333 enum machine_mode mode ATTRIBUTE_UNUSED
;
3335 return (REG_P (op
) || symbolic_address_operand (op
, mode
));
3339 /* Symbolic address operand. */
3342 symbolic_address_operand (op
, mode
)
3344 enum machine_mode mode ATTRIBUTE_UNUSED
;
3346 switch (GET_CODE (op
))
3358 /* Check dst operand of a move instruction. */
3361 dst_operand (op
, mode
)
3363 enum machine_mode mode
;
3365 if (GET_CODE (op
) == SUBREG
3366 && mixed_subreg_operand (op
, mode
))
3370 return reg_operand (op
, mode
);
3372 return nonimmediate_operand (op
, mode
);
3376 /* Check src operand of two operand arithmetic instructions. */
3379 src_operand (op
, mode
)
3381 enum machine_mode mode
;
3383 if (GET_CODE (op
) == SUBREG
3384 && mixed_subreg_operand (op
, mode
))
3388 return reg_operand (op
, mode
);
3390 if (mode
== VOIDmode
)
3391 mode
= GET_MODE (op
);
3393 if (GET_CODE (op
) == CONST_INT
)
3394 return (mode
== QImode
|| mode
== Pmode
|| mode
== HImode
)
3395 && c4x_I_constant (op
);
3397 /* We don't like CONST_DOUBLE integers. */
3398 if (GET_CODE (op
) == CONST_DOUBLE
)
3399 return c4x_H_constant (op
);
3401 /* Disallow symbolic addresses. Only the predicate
3402 symbolic_address_operand will match these. */
3403 if (GET_CODE (op
) == SYMBOL_REF
3404 || GET_CODE (op
) == LABEL_REF
3405 || GET_CODE (op
) == CONST
)
3408 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3409 access to symbolic addresses. These operands will get forced
3410 into a register and the movqi expander will generate a
3411 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
3412 if (GET_CODE (op
) == MEM
3413 && ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
3414 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
3415 || GET_CODE (XEXP (op
, 0)) == CONST
)))
3416 return ! TARGET_LOAD_DIRECT_MEMS
&& GET_MODE (op
) == mode
;
3418 return general_operand (op
, mode
);
3423 src_hi_operand (op
, mode
)
3425 enum machine_mode mode
;
3427 if (c4x_O_constant (op
))
3429 return src_operand (op
, mode
);
3433 /* Check src operand of two operand logical instructions. */
3436 lsrc_operand (op
, mode
)
3438 enum machine_mode mode
;
3440 if (mode
== VOIDmode
)
3441 mode
= GET_MODE (op
);
3443 if (mode
!= QImode
&& mode
!= Pmode
)
3444 fatal_insn ("mode not QImode", op
);
3446 if (GET_CODE (op
) == CONST_INT
)
3447 return c4x_L_constant (op
) || c4x_J_constant (op
);
3449 return src_operand (op
, mode
);
3453 /* Check src operand of two operand tricky instructions. */
3456 tsrc_operand (op
, mode
)
3458 enum machine_mode mode
;
3460 if (mode
== VOIDmode
)
3461 mode
= GET_MODE (op
);
3463 if (mode
!= QImode
&& mode
!= Pmode
)
3464 fatal_insn ("mode not QImode", op
);
3466 if (GET_CODE (op
) == CONST_INT
)
3467 return c4x_L_constant (op
) || c4x_N_constant (op
) || c4x_J_constant (op
);
3469 return src_operand (op
, mode
);
3473 /* Check src operand of two operand non immedidate instructions. */
3476 nonimmediate_src_operand (op
, mode
)
3478 enum machine_mode mode
;
3480 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3483 return src_operand (op
, mode
);
3487 /* Check logical src operand of two operand non immedidate instructions. */
3490 nonimmediate_lsrc_operand (op
, mode
)
3492 enum machine_mode mode
;
3494 if (GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
3497 return lsrc_operand (op
, mode
);
3502 reg_or_const_operand (op
, mode
)
3504 enum machine_mode mode
;
3506 return reg_operand (op
, mode
) || const_operand (op
, mode
);
3510 /* Check for indirect operands allowable in parallel instruction. */
3513 par_ind_operand (op
, mode
)
3515 enum machine_mode mode
;
3517 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3520 return c4x_S_indirect (op
);
3524 /* Check for operands allowable in parallel instruction. */
3527 parallel_operand (op
, mode
)
3529 enum machine_mode mode
;
3531 return ext_low_reg_operand (op
, mode
) || par_ind_operand (op
, mode
);
3536 c4x_S_address_parse (op
, base
, incdec
, index
, disp
)
3548 if (GET_CODE (op
) != MEM
)
3549 fatal_insn ("invalid indirect memory address", op
);
3552 switch (GET_CODE (op
))
3555 *base
= REGNO (XEXP (op
, 0));
3561 *base
= REGNO (XEXP (op
, 0));
3567 *base
= REGNO (XEXP (op
, 0));
3573 *base
= REGNO (XEXP (op
, 0));
3579 *base
= REGNO (XEXP (op
, 0));
3580 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3582 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3583 *disp
= 0; /* ??? */
3586 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3591 *base
= REGNO (XEXP (op
, 0));
3592 if (REG_P (XEXP (XEXP (op
, 1), 1)))
3594 *index
= REGNO (XEXP (XEXP (op
, 1), 1));
3595 *disp
= 1; /* ??? */
3598 *disp
= INTVAL (XEXP (XEXP (op
, 1), 1));
3609 rtx op0
= XEXP (op
, 0);
3610 rtx op1
= XEXP (op
, 1);
3612 if (c4x_a_register (op0
))
3614 if (c4x_x_register (op1
))
3616 *base
= REGNO (op0
);
3617 *index
= REGNO (op1
);
3620 else if ((GET_CODE (op1
) == CONST_INT
3621 && IS_DISP1_CONST (INTVAL (op1
))))
3623 *base
= REGNO (op0
);
3624 *disp
= INTVAL (op1
);
3628 else if (c4x_x_register (op0
) && c4x_a_register (op1
))
3630 *base
= REGNO (op1
);
3631 *index
= REGNO (op0
);
3638 fatal_insn ("invalid indirect (S) memory address", op
);
3644 c4x_address_conflict (op0
, op1
, store0
, store1
)
3659 if (MEM_VOLATILE_P (op0
) && MEM_VOLATILE_P (op1
))
3662 c4x_S_address_parse (op0
, &base0
, &incdec0
, &index0
, &disp0
);
3663 c4x_S_address_parse (op1
, &base1
, &incdec1
, &index1
, &disp1
);
3665 if (store0
&& store1
)
3667 /* If we have two stores in parallel to the same address, then
3668 the C4x only executes one of the stores. This is unlikely to
3669 cause problems except when writing to a hardware device such
3670 as a FIFO since the second write will be lost. The user
3671 should flag the hardware location as being volatile so that
3672 we don't do this optimisation. While it is unlikely that we
3673 have an aliased address if both locations are not marked
3674 volatile, it is probably safer to flag a potential conflict
3675 if either location is volatile. */
3676 if (! flag_argument_noalias
)
3678 if (MEM_VOLATILE_P (op0
) || MEM_VOLATILE_P (op1
))
3683 /* If have a parallel load and a store to the same address, the load
3684 is performed first, so there is no conflict. Similarly, there is
3685 no conflict if have parallel loads from the same address. */
3687 /* Cannot use auto increment or auto decrement twice for same
3689 if (base0
== base1
&& incdec0
&& incdec0
)
3692 /* It might be too confusing for GCC if we have use a base register
3693 with a side effect and a memory reference using the same register
3695 if (! TARGET_DEVEL
&& base0
== base1
&& (incdec0
|| incdec1
))
3698 /* We can not optimize the case where op1 and op2 refer to the same
3700 if (base0
== base1
&& disp0
== disp1
&& index0
== index1
)
3708 /* Check for while loop inside a decrement and branch loop. */
3711 c4x_label_conflict (insn
, jump
, db
)
3718 if (GET_CODE (insn
) == CODE_LABEL
)
3720 if (CODE_LABEL_NUMBER (jump
) == CODE_LABEL_NUMBER (insn
))
3722 if (CODE_LABEL_NUMBER (db
) == CODE_LABEL_NUMBER (insn
))
3725 insn
= PREV_INSN (insn
);
3731 /* Validate combination of operands for parallel load/store instructions. */
3734 valid_parallel_load_store (operands
, mode
)
3736 enum machine_mode mode ATTRIBUTE_UNUSED
;
3738 rtx op0
= operands
[0];
3739 rtx op1
= operands
[1];
3740 rtx op2
= operands
[2];
3741 rtx op3
= operands
[3];
3743 if (GET_CODE (op0
) == SUBREG
)
3744 op0
= SUBREG_REG (op0
);
3745 if (GET_CODE (op1
) == SUBREG
)
3746 op1
= SUBREG_REG (op1
);
3747 if (GET_CODE (op2
) == SUBREG
)
3748 op2
= SUBREG_REG (op2
);
3749 if (GET_CODE (op3
) == SUBREG
)
3750 op3
= SUBREG_REG (op3
);
3752 /* The patterns should only allow ext_low_reg_operand() or
3753 par_ind_operand() operands. Thus of the 4 operands, only 2
3754 should be REGs and the other 2 should be MEMs. */
3756 /* This test prevents the multipack pass from using this pattern if
3757 op0 is used as an index or base register in op2 or op3, since
3758 this combination will require reloading. */
3759 if (GET_CODE (op0
) == REG
3760 && ((GET_CODE (op2
) == MEM
&& reg_mentioned_p (op0
, XEXP (op2
, 0)))
3761 || (GET_CODE (op3
) == MEM
&& reg_mentioned_p (op0
, XEXP (op3
, 0)))))
3765 if (GET_CODE (op0
) == REG
&& GET_CODE (op2
) == REG
)
3766 return (REGNO (op0
) != REGNO (op2
))
3767 && GET_CODE (op1
) == MEM
&& GET_CODE (op3
) == MEM
3768 && ! c4x_address_conflict (op1
, op3
, 0, 0);
3771 if (GET_CODE (op1
) == REG
&& GET_CODE (op3
) == REG
)
3772 return GET_CODE (op0
) == MEM
&& GET_CODE (op2
) == MEM
3773 && ! c4x_address_conflict (op0
, op2
, 1, 1);
3776 if (GET_CODE (op0
) == REG
&& GET_CODE (op3
) == REG
)
3777 return GET_CODE (op1
) == MEM
&& GET_CODE (op2
) == MEM
3778 && ! c4x_address_conflict (op1
, op2
, 0, 1);
3781 if (GET_CODE (op1
) == REG
&& GET_CODE (op2
) == REG
)
3782 return GET_CODE (op0
) == MEM
&& GET_CODE (op3
) == MEM
3783 && ! c4x_address_conflict (op0
, op3
, 1, 0);
3790 valid_parallel_operands_4 (operands
, mode
)
3792 enum machine_mode mode ATTRIBUTE_UNUSED
;
3794 rtx op0
= operands
[0];
3795 rtx op2
= operands
[2];
3797 if (GET_CODE (op0
) == SUBREG
)
3798 op0
= SUBREG_REG (op0
);
3799 if (GET_CODE (op2
) == SUBREG
)
3800 op2
= SUBREG_REG (op2
);
3802 /* This test prevents the multipack pass from using this pattern if
3803 op0 is used as an index or base register in op2, since this combination
3804 will require reloading. */
3805 if (GET_CODE (op0
) == REG
3806 && GET_CODE (op2
) == MEM
3807 && reg_mentioned_p (op0
, XEXP (op2
, 0)))
3815 valid_parallel_operands_5 (operands
, mode
)
3817 enum machine_mode mode ATTRIBUTE_UNUSED
;
3820 rtx op0
= operands
[0];
3821 rtx op1
= operands
[1];
3822 rtx op2
= operands
[2];
3823 rtx op3
= operands
[3];
3825 if (GET_CODE (op0
) == SUBREG
)
3826 op0
= SUBREG_REG (op0
);
3827 if (GET_CODE (op1
) == SUBREG
)
3828 op1
= SUBREG_REG (op1
);
3829 if (GET_CODE (op2
) == SUBREG
)
3830 op2
= SUBREG_REG (op2
);
3832 /* The patterns should only allow ext_low_reg_operand() or
3833 par_ind_operand() operands. Operands 1 and 2 may be commutative
3834 but only one of them can be a register. */
3835 if (GET_CODE (op1
) == REG
)
3837 if (GET_CODE (op2
) == REG
)
3843 /* This test prevents the multipack pass from using this pattern if
3844 op0 is used as an index or base register in op3, since this combination
3845 will require reloading. */
3846 if (GET_CODE (op0
) == REG
3847 && GET_CODE (op3
) == MEM
3848 && reg_mentioned_p (op0
, XEXP (op3
, 0)))
3856 valid_parallel_operands_6 (operands
, mode
)
3858 enum machine_mode mode ATTRIBUTE_UNUSED
;
3861 rtx op0
= operands
[0];
3862 rtx op1
= operands
[1];
3863 rtx op2
= operands
[2];
3864 rtx op4
= operands
[4];
3865 rtx op5
= operands
[5];
3867 if (GET_CODE (op1
) == SUBREG
)
3868 op1
= SUBREG_REG (op1
);
3869 if (GET_CODE (op2
) == SUBREG
)
3870 op2
= SUBREG_REG (op2
);
3871 if (GET_CODE (op4
) == SUBREG
)
3872 op4
= SUBREG_REG (op4
);
3873 if (GET_CODE (op5
) == SUBREG
)
3874 op5
= SUBREG_REG (op5
);
3876 /* The patterns should only allow ext_low_reg_operand() or
3877 par_ind_operand() operands. Thus of the 4 input operands, only 2
3878 should be REGs and the other 2 should be MEMs. */
3880 if (GET_CODE (op1
) == REG
)
3882 if (GET_CODE (op2
) == REG
)
3884 if (GET_CODE (op4
) == REG
)
3886 if (GET_CODE (op5
) == REG
)
3889 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3890 Perhaps we should count the MEMs as well? */
3894 /* This test prevents the multipack pass from using this pattern if
3895 op0 is used as an index or base register in op4 or op5, since
3896 this combination will require reloading. */
3897 if (GET_CODE (op0
) == REG
3898 && ((GET_CODE (op4
) == MEM
&& reg_mentioned_p (op0
, XEXP (op4
, 0)))
3899 || (GET_CODE (op5
) == MEM
&& reg_mentioned_p (op0
, XEXP (op5
, 0)))))
3906 /* Validate combination of src operands. Note that the operands have
3907 been screened by the src_operand predicate. We just have to check
3908 that the combination of operands is valid. If FORCE is set, ensure
3909 that the destination regno is valid if we have a 2 operand insn. */
3912 c4x_valid_operands (code
, operands
, mode
, force
)
3915 enum machine_mode mode ATTRIBUTE_UNUSED
;
3920 enum rtx_code code1
;
3921 enum rtx_code code2
;
3923 if (code
== COMPARE
)
3934 if (GET_CODE (op1
) == SUBREG
)
3935 op1
= SUBREG_REG (op1
);
3936 if (GET_CODE (op2
) == SUBREG
)
3937 op2
= SUBREG_REG (op2
);
3939 code1
= GET_CODE (op1
);
3940 code2
= GET_CODE (op2
);
3942 if (code1
== REG
&& code2
== REG
)
3945 if (code1
== MEM
&& code2
== MEM
)
3947 if (c4x_S_indirect (op1
) && c4x_S_indirect (op2
))
3949 return c4x_R_indirect (op1
) && c4x_R_indirect (op2
);
3960 if (c4x_J_constant (op2
) && c4x_R_indirect (op1
))
3965 if (! c4x_H_constant (op2
))
3969 /* Any valid memory operand screened by src_operand is OK. */
3972 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3973 into a stack slot memory address comprising a PLUS and a
3979 fatal_insn ("c4x_valid_operands: Internal error", op2
);
3983 /* Check that we have a valid destination register for a two operand
3985 return ! force
|| code
== COMPARE
|| REGNO (op1
) == REGNO (operands
[0]);
3988 /* We assume MINUS is commutative since the subtract patterns
3989 also support the reverse subtract instructions. Since op1
3990 is not a register, and op2 is a register, op1 can only
3991 be a restricted memory operand for a shift instruction. */
3992 if (code
== ASHIFTRT
|| code
== LSHIFTRT
3993 || code
== ASHIFT
|| code
== COMPARE
)
3995 && (c4x_S_indirect (op1
) || c4x_R_indirect (op1
));
4000 if (c4x_J_constant (op1
) && c4x_R_indirect (op2
))
4005 if (! c4x_H_constant (op1
))
4009 /* Any valid memory operand screened by src_operand is OK. */
4017 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
4018 into a stack slot memory address comprising a PLUS and a
4028 /* Check that we have a valid destination register for a two operand
4030 return ! force
|| REGNO (op1
) == REGNO (operands
[0]);
4034 int valid_operands (code
, operands
, mode
)
4037 enum machine_mode mode
;
4040 /* If we are not optimizing then we have to let anything go and let
4041 reload fix things up. instantiate_decl in function.c can produce
4042 invalid insns by changing the offset of a memory operand from a
4043 valid one into an invalid one, when the second operand is also a
4044 memory operand. The alternative is not to allow two memory
4045 operands for an insn when not optimizing. The problem only rarely
4046 occurs, for example with the C-torture program DFcmp.c. */
4048 return ! optimize
|| c4x_valid_operands (code
, operands
, mode
, 0);
4053 legitimize_operands (code
, operands
, mode
)
4056 enum machine_mode mode
;
4058 /* Compare only has 2 operands. */
4059 if (code
== COMPARE
)
4061 /* During RTL generation, force constants into pseudos so that
4062 they can get hoisted out of loops. This will tie up an extra
4063 register but can save an extra cycle. Only do this if loop
4064 optimisation enabled. (We cannot pull this trick for add and
4065 sub instructions since the flow pass won't find
4066 autoincrements etc.) This allows us to generate compare
4067 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4068 of LDI *AR0++, R0; CMPI 42, R0.
4070 Note that expand_binops will try to load an expensive constant
4071 into a register if it is used within a loop. Unfortunately,
4072 the cost mechanism doesn't allow us to look at the other
4073 operand to decide whether the constant is expensive. */
4075 if (! reload_in_progress
4078 && GET_CODE (operands
[1]) == CONST_INT
4079 && preserve_subexpressions_p ()
4080 && rtx_cost (operands
[1], code
) > 1)
4081 operands
[1] = force_reg (mode
, operands
[1]);
4083 if (! reload_in_progress
4084 && ! c4x_valid_operands (code
, operands
, mode
, 0))
4085 operands
[0] = force_reg (mode
, operands
[0]);
4089 /* We cannot do this for ADDI/SUBI insns since we will
4090 defeat the flow pass from finding autoincrement addressing
4092 if (! reload_in_progress
4093 && ! ((code
== PLUS
|| code
== MINUS
) && mode
== Pmode
)
4096 && GET_CODE (operands
[2]) == CONST_INT
4097 && preserve_subexpressions_p ()
4098 && rtx_cost (operands
[2], code
) > 1)
4099 operands
[2] = force_reg (mode
, operands
[2]);
4101 /* We can get better code on a C30 if we force constant shift counts
4102 into a register. This way they can get hoisted out of loops,
4103 tying up a register, but saving an instruction. The downside is
4104 that they may get allocated to an address or index register, and
4105 thus we will get a pipeline conflict if there is a nearby
4106 indirect address using an address register.
4108 Note that expand_binops will not try to load an expensive constant
4109 into a register if it is used within a loop for a shift insn. */
4111 if (! reload_in_progress
4112 && ! c4x_valid_operands (code
, operands
, mode
, TARGET_FORCE
))
4114 /* If the operand combination is invalid, we force operand1 into a
4115 register, preventing reload from having doing to do this at a
4117 operands
[1] = force_reg (mode
, operands
[1]);
4120 emit_move_insn (operands
[0], operands
[1]);
4121 operands
[1] = copy_rtx (operands
[0]);
4125 /* Just in case... */
4126 if (! c4x_valid_operands (code
, operands
, mode
, 0))
4127 operands
[2] = force_reg (mode
, operands
[2]);
4131 /* Right shifts require a negative shift count, but GCC expects
4132 a positive count, so we emit a NEG. */
4133 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
4134 && (GET_CODE (operands
[2]) != CONST_INT
))
4135 operands
[2] = gen_rtx_NEG (mode
, negate_rtx (mode
, operands
[2]));
4141 /* The following predicates are used for instruction scheduling. */
4144 group1_reg_operand (op
, mode
)
4146 enum machine_mode mode
;
4148 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4150 if (GET_CODE (op
) == SUBREG
)
4151 op
= SUBREG_REG (op
);
4152 return REG_P (op
) && (! reload_completed
|| IS_GROUP1_REG (op
));
4157 group1_mem_operand (op
, mode
)
4159 enum machine_mode mode
;
4161 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4164 if (GET_CODE (op
) == MEM
)
4167 if (GET_CODE (op
) == PLUS
)
4169 rtx op0
= XEXP (op
, 0);
4170 rtx op1
= XEXP (op
, 1);
4172 if ((REG_P (op0
) && (! reload_completed
|| IS_GROUP1_REG (op0
)))
4173 || (REG_P (op1
) && (! reload_completed
|| IS_GROUP1_REG (op1
))))
4176 else if ((REG_P (op
)) && (! reload_completed
|| IS_GROUP1_REG (op
)))
4184 /* Return true if any one of the address registers. */
4187 arx_reg_operand (op
, mode
)
4189 enum machine_mode mode
;
4191 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4193 if (GET_CODE (op
) == SUBREG
)
4194 op
= SUBREG_REG (op
);
4195 return REG_P (op
) && (! reload_completed
|| IS_ADDR_REG (op
));
4200 c4x_arn_reg_operand (op
, mode
, regno
)
4202 enum machine_mode mode
;
4205 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4207 if (GET_CODE (op
) == SUBREG
)
4208 op
= SUBREG_REG (op
);
4209 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4214 c4x_arn_mem_operand (op
, mode
, regno
)
4216 enum machine_mode mode
;
4219 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
4222 if (GET_CODE (op
) == MEM
)
4225 switch (GET_CODE (op
))
4234 return REG_P (op
) && (! reload_completed
|| (REGNO (op
) == regno
));
4238 if (REG_P (XEXP (op
, 0)) && (! reload_completed
4239 || (REGNO (XEXP (op
, 0)) == regno
)))
4241 if (REG_P (XEXP (XEXP (op
, 1), 1))
4242 && (! reload_completed
4243 || (REGNO (XEXP (XEXP (op
, 1), 1)) == regno
)))
4249 rtx op0
= XEXP (op
, 0);
4250 rtx op1
= XEXP (op
, 1);
4252 if ((REG_P (op0
) && (! reload_completed
4253 || (REGNO (op0
) == regno
)))
4254 || (REG_P (op1
) && (! reload_completed
4255 || (REGNO (op1
) == regno
))))
4269 ar0_reg_operand (op
, mode
)
4271 enum machine_mode mode
;
4273 return c4x_arn_reg_operand (op
, mode
, AR0_REGNO
);
4278 ar0_mem_operand (op
, mode
)
4280 enum machine_mode mode
;
4282 return c4x_arn_mem_operand (op
, mode
, AR0_REGNO
);
4287 ar1_reg_operand (op
, mode
)
4289 enum machine_mode mode
;
4291 return c4x_arn_reg_operand (op
, mode
, AR1_REGNO
);
4296 ar1_mem_operand (op
, mode
)
4298 enum machine_mode mode
;
4300 return c4x_arn_mem_operand (op
, mode
, AR1_REGNO
);
4305 ar2_reg_operand (op
, mode
)
4307 enum machine_mode mode
;
4309 return c4x_arn_reg_operand (op
, mode
, AR2_REGNO
);
4314 ar2_mem_operand (op
, mode
)
4316 enum machine_mode mode
;
4318 return c4x_arn_mem_operand (op
, mode
, AR2_REGNO
);
4323 ar3_reg_operand (op
, mode
)
4325 enum machine_mode mode
;
4327 return c4x_arn_reg_operand (op
, mode
, AR3_REGNO
);
4332 ar3_mem_operand (op
, mode
)
4334 enum machine_mode mode
;
4336 return c4x_arn_mem_operand (op
, mode
, AR3_REGNO
);
4341 ar4_reg_operand (op
, mode
)
4343 enum machine_mode mode
;
4345 return c4x_arn_reg_operand (op
, mode
, AR4_REGNO
);
4350 ar4_mem_operand (op
, mode
)
4352 enum machine_mode mode
;
4354 return c4x_arn_mem_operand (op
, mode
, AR4_REGNO
);
4359 ar5_reg_operand (op
, mode
)
4361 enum machine_mode mode
;
4363 return c4x_arn_reg_operand (op
, mode
, AR5_REGNO
);
4368 ar5_mem_operand (op
, mode
)
4370 enum machine_mode mode
;
4372 return c4x_arn_mem_operand (op
, mode
, AR5_REGNO
);
4377 ar6_reg_operand (op
, mode
)
4379 enum machine_mode mode
;
4381 return c4x_arn_reg_operand (op
, mode
, AR6_REGNO
);
4386 ar6_mem_operand (op
, mode
)
4388 enum machine_mode mode
;
4390 return c4x_arn_mem_operand (op
, mode
, AR6_REGNO
);
4395 ar7_reg_operand (op
, mode
)
4397 enum machine_mode mode
;
4399 return c4x_arn_reg_operand (op
, mode
, AR7_REGNO
);
4404 ar7_mem_operand (op
, mode
)
4406 enum machine_mode mode
;
4408 return c4x_arn_mem_operand (op
, mode
, AR7_REGNO
);
4413 ir0_reg_operand (op
, mode
)
4415 enum machine_mode mode
;
4417 return c4x_arn_reg_operand (op
, mode
, IR0_REGNO
);
4422 ir0_mem_operand (op
, mode
)
4424 enum machine_mode mode
;
4426 return c4x_arn_mem_operand (op
, mode
, IR0_REGNO
);
4431 ir1_reg_operand (op
, mode
)
4433 enum machine_mode mode
;
4435 return c4x_arn_reg_operand (op
, mode
, IR1_REGNO
);
4440 ir1_mem_operand (op
, mode
)
4442 enum machine_mode mode
;
4444 return c4x_arn_mem_operand (op
, mode
, IR1_REGNO
);
4448 /* This is similar to operand_subword but allows autoincrement
4452 c4x_operand_subword (op
, i
, validate_address
, mode
)
4455 int validate_address
;
4456 enum machine_mode mode
;
4458 if (mode
!= HImode
&& mode
!= HFmode
)
4459 fatal_insn ("c4x_operand_subword: invalid mode", op
);
4461 if (mode
== HFmode
&& REG_P (op
))
4462 fatal_insn ("c4x_operand_subword: invalid operand", op
);
4464 if (GET_CODE (op
) == MEM
)
4466 enum rtx_code code
= GET_CODE (XEXP (op
, 0));
4467 enum machine_mode mode
= GET_MODE (XEXP (op
, 0));
4468 enum machine_mode submode
;
4473 else if (mode
== HFmode
)
4480 return gen_rtx_MEM (submode
, XEXP (op
, 0));
4486 /* We could handle these with some difficulty.
4487 e.g., *p-- => *(p-=2); *(p+1). */
4488 fatal_insn ("c4x_operand_subword: invalid autoincrement", op
);
4494 fatal_insn ("c4x_operand_subword: invalid address", op
);
4496 /* Even though offsettable_address_p considers (MEM
4497 (LO_SUM)) to be offsettable, it is not safe if the
4498 address is at the end of the data page since we also have
4499 to fix up the associated high PART. In this case where
4500 we are trying to split a HImode or HFmode memory
4501 reference, we would have to emit another insn to reload a
4502 new HIGH value. It's easier to disable LO_SUM memory references
4503 in HImode or HFmode and we probably get better code. */
4505 fatal_insn ("c4x_operand_subword: address not offsettable", op
);
4512 return operand_subword (op
, i
, validate_address
, mode
);
4517 struct name_list
*next
;
4521 static struct name_list
*global_head
;
4522 static struct name_list
*extern_head
;
4525 /* Add NAME to list of global symbols and remove from external list if
4526 present on external list. */
4529 c4x_global_label (name
)
4532 struct name_list
*p
, *last
;
4534 /* Do not insert duplicate names, so linearly search through list of
4539 if (strcmp (p
->name
, name
) == 0)
4543 p
= (struct name_list
*) permalloc (sizeof *p
);
4544 p
->next
= global_head
;
4548 /* Remove this name from ref list if present. */
4553 if (strcmp (p
->name
, name
) == 0)
4556 last
->next
= p
->next
;
4558 extern_head
= p
->next
;
4567 /* Add NAME to list of external symbols. */
4570 c4x_external_ref (name
)
4573 struct name_list
*p
;
4575 /* Do not insert duplicate names. */
4579 if (strcmp (p
->name
, name
) == 0)
4584 /* Do not insert ref if global found. */
4588 if (strcmp (p
->name
, name
) == 0)
4592 p
= (struct name_list
*) permalloc (sizeof *p
);
4593 p
->next
= extern_head
;
4603 struct name_list
*p
;
4605 /* Output all external names that are not global. */
4609 fprintf (fp
, "\t.ref\t");
4610 assemble_name (fp
, p
->name
);
4614 fprintf (fp
, "\t.end\n");
4619 c4x_check_attribute (attrib
, list
, decl
, attributes
)
4621 tree list
, decl
, *attributes
;
4623 while (list
!= NULL_TREE
4624 && IDENTIFIER_POINTER (TREE_PURPOSE (list
))
4625 != IDENTIFIER_POINTER (DECL_NAME (decl
)))
4626 list
= TREE_CHAIN (list
);
4628 *attributes
= tree_cons (get_identifier (attrib
), TREE_VALUE (list
),
4634 c4x_insert_attributes (decl
, attributes
)
4635 tree decl
, *attributes
;
4637 switch (TREE_CODE (decl
))
4640 c4x_check_attribute ("section", code_tree
, decl
, attributes
);
4641 c4x_check_attribute ("const", pure_tree
, decl
, attributes
);
4642 c4x_check_attribute ("noreturn", noreturn_tree
, decl
, attributes
);
4643 c4x_check_attribute ("interrupt", interrupt_tree
, decl
, attributes
);
4647 c4x_check_attribute ("section", data_tree
, decl
, attributes
);
4655 /* Table of valid machine attributes. */
4656 const struct attribute_spec c4x_attribute_table
[] =
4658 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4659 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4660 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4661 "interrupt"; should it be accepted here? */
4662 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4663 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute
},
4664 { NULL
, 0, 0, false, false, false, NULL
}
4667 /* Handle an attribute requiring a FUNCTION_TYPE;
4668 arguments as in struct attribute_spec.handler. */
4670 c4x_handle_fntype_attribute (node
, name
, args
, flags
, no_add_attrs
)
4673 tree args ATTRIBUTE_UNUSED
;
4674 int flags ATTRIBUTE_UNUSED
;
4677 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4679 warning ("`%s' attribute only applies to functions",
4680 IDENTIFIER_POINTER (name
));
4681 *no_add_attrs
= true;
4688 /* !!! FIXME to emit RPTS correctly. */
4691 c4x_rptb_rpts_p (insn
, op
)
4694 /* The next insn should be our label marking where the
4695 repeat block starts. */
4696 insn
= NEXT_INSN (insn
);
4697 if (GET_CODE (insn
) != CODE_LABEL
)
4699 /* Some insns may have been shifted between the RPTB insn
4700 and the top label... They were probably destined to
4701 be moved out of the loop. For now, let's leave them
4702 where they are and print a warning. We should
4703 probably move these insns before the repeat block insn. */
4705 fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
4710 /* Skip any notes. */
4711 insn
= next_nonnote_insn (insn
);
4713 /* This should be our first insn in the loop. */
4714 if (! INSN_P (insn
))
4717 /* Skip any notes. */
4718 insn
= next_nonnote_insn (insn
);
4720 if (! INSN_P (insn
))
4723 if (recog_memoized (insn
) != CODE_FOR_rptb_end
)
4729 return (GET_CODE (op
) == CONST_INT
) && TARGET_RPTS_CYCLES (INTVAL (op
));
4733 /* Check if register r11 is used as the destination of an insn. */
4746 if (INSN_P (x
) && GET_CODE (PATTERN (x
)) == SEQUENCE
)
4747 x
= XVECEXP (PATTERN (x
), 0, XVECLEN (PATTERN (x
), 0) - 1);
4749 if (INSN_P (x
) && (set
= single_set (x
)))
4752 if (GET_CODE (x
) == REG
&& REGNO (x
) == R11_REGNO
)
4755 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4756 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4760 if (c4x_r11_set_p (XEXP (x
, i
)))
4763 else if (fmt
[i
] == 'E')
4764 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4765 if (c4x_r11_set_p (XVECEXP (x
, i
, j
)))
4772 /* The c4x sometimes has a problem when the insn before the laj insn
4773 sets the r11 register. Check for this situation. */
4776 c4x_check_laj_p (insn
)
4779 insn
= prev_nonnote_insn (insn
);
4781 /* If this is the start of the function no nop is needed. */
4785 /* If the previous insn is a code label we have to insert a nop. This
4786 could be a jump or table jump. We can find the normal jumps by
4787 scanning the function but this will not find table jumps. */
4788 if (GET_CODE (insn
) == CODE_LABEL
)
4791 /* If the previous insn sets register r11 we have to insert a nop. */
4792 if (c4x_r11_set_p (insn
))
4795 /* No nop needed. */
4800 /* Adjust the cost of a scheduling dependency. Return the new cost of
4801 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4802 A set of an address register followed by a use occurs a 2 cycle
4803 stall (reduced to a single cycle on the c40 using LDA), while
4804 a read of an address register followed by a use occurs a single cycle. */
4806 #define SET_USE_COST 3
4807 #define SETLDA_USE_COST 2
4808 #define READ_USE_COST 2
4811 c4x_adjust_cost (insn
, link
, dep_insn
, cost
)
4817 /* Don't worry about this until we know what registers have been
4819 if (flag_schedule_insns
== 0 && ! reload_completed
)
4822 /* How do we handle dependencies where a read followed by another
4823 read causes a pipeline stall? For example, a read of ar0 followed
4824 by the use of ar0 for a memory reference. It looks like we
4825 need to extend the scheduler to handle this case. */
4827 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4828 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4829 so only deal with insns we know about. */
4830 if (recog_memoized (dep_insn
) < 0)
4833 if (REG_NOTE_KIND (link
) == 0)
4837 /* Data dependency; DEP_INSN writes a register that INSN reads some
4841 if (get_attr_setgroup1 (dep_insn
) && get_attr_usegroup1 (insn
))
4842 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4843 if (get_attr_readarx (dep_insn
) && get_attr_usegroup1 (insn
))
4844 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4848 /* This could be significantly optimized. We should look
4849 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4850 insn uses ar0-ar7. We then test if the same register
4851 is used. The tricky bit is that some operands will
4852 use several registers... */
4853 if (get_attr_setar0 (dep_insn
) && get_attr_usear0 (insn
))
4854 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4855 if (get_attr_setlda_ar0 (dep_insn
) && get_attr_usear0 (insn
))
4856 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4857 if (get_attr_readar0 (dep_insn
) && get_attr_usear0 (insn
))
4858 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4860 if (get_attr_setar1 (dep_insn
) && get_attr_usear1 (insn
))
4861 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4862 if (get_attr_setlda_ar1 (dep_insn
) && get_attr_usear1 (insn
))
4863 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4864 if (get_attr_readar1 (dep_insn
) && get_attr_usear1 (insn
))
4865 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4867 if (get_attr_setar2 (dep_insn
) && get_attr_usear2 (insn
))
4868 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4869 if (get_attr_setlda_ar2 (dep_insn
) && get_attr_usear2 (insn
))
4870 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4871 if (get_attr_readar2 (dep_insn
) && get_attr_usear2 (insn
))
4872 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4874 if (get_attr_setar3 (dep_insn
) && get_attr_usear3 (insn
))
4875 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4876 if (get_attr_setlda_ar3 (dep_insn
) && get_attr_usear3 (insn
))
4877 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4878 if (get_attr_readar3 (dep_insn
) && get_attr_usear3 (insn
))
4879 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4881 if (get_attr_setar4 (dep_insn
) && get_attr_usear4 (insn
))
4882 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4883 if (get_attr_setlda_ar4 (dep_insn
) && get_attr_usear4 (insn
))
4884 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4885 if (get_attr_readar4 (dep_insn
) && get_attr_usear4 (insn
))
4886 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4888 if (get_attr_setar5 (dep_insn
) && get_attr_usear5 (insn
))
4889 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4890 if (get_attr_setlda_ar5 (dep_insn
) && get_attr_usear5 (insn
))
4891 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4892 if (get_attr_readar5 (dep_insn
) && get_attr_usear5 (insn
))
4893 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4895 if (get_attr_setar6 (dep_insn
) && get_attr_usear6 (insn
))
4896 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4897 if (get_attr_setlda_ar6 (dep_insn
) && get_attr_usear6 (insn
))
4898 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4899 if (get_attr_readar6 (dep_insn
) && get_attr_usear6 (insn
))
4900 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4902 if (get_attr_setar7 (dep_insn
) && get_attr_usear7 (insn
))
4903 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4904 if (get_attr_setlda_ar7 (dep_insn
) && get_attr_usear7 (insn
))
4905 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4906 if (get_attr_readar7 (dep_insn
) && get_attr_usear7 (insn
))
4907 max
= READ_USE_COST
> max
? READ_USE_COST
: max
;
4909 if (get_attr_setir0 (dep_insn
) && get_attr_useir0 (insn
))
4910 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4911 if (get_attr_setlda_ir0 (dep_insn
) && get_attr_useir0 (insn
))
4912 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4914 if (get_attr_setir1 (dep_insn
) && get_attr_useir1 (insn
))
4915 max
= SET_USE_COST
> max
? SET_USE_COST
: max
;
4916 if (get_attr_setlda_ir1 (dep_insn
) && get_attr_useir1 (insn
))
4917 max
= SETLDA_USE_COST
> max
? SETLDA_USE_COST
: max
;
4923 /* For other data dependencies, the default cost specified in the
4927 else if (REG_NOTE_KIND (link
) == REG_DEP_ANTI
)
4929 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4932 /* For c4x anti dependencies, the cost is 0. */
4935 else if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
)
4937 /* Output dependency; DEP_INSN writes a register that INSN writes some
4940 /* For c4x output dependencies, the cost is 0. */
4948 c4x_init_builtins ()
4950 tree endlink
= void_list_node
;
4952 builtin_function ("fast_ftoi",
4955 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4956 C4X_BUILTIN_FIX
, BUILT_IN_MD
, NULL
);
4957 builtin_function ("ansi_ftoi",
4960 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4961 C4X_BUILTIN_FIX_ANSI
, BUILT_IN_MD
, NULL
);
4963 builtin_function ("fast_imult",
4966 tree_cons (NULL_TREE
, integer_type_node
,
4967 tree_cons (NULL_TREE
,
4968 integer_type_node
, endlink
))),
4969 C4X_BUILTIN_MPYI
, BUILT_IN_MD
, NULL
);
4972 builtin_function ("toieee",
4975 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4976 C4X_BUILTIN_TOIEEE
, BUILT_IN_MD
, NULL
);
4977 builtin_function ("frieee",
4980 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4981 C4X_BUILTIN_FRIEEE
, BUILT_IN_MD
, NULL
);
4982 builtin_function ("fast_invf",
4985 tree_cons (NULL_TREE
, double_type_node
, endlink
)),
4986 C4X_BUILTIN_RCPF
, BUILT_IN_MD
, NULL
);
4992 c4x_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4995 rtx subtarget ATTRIBUTE_UNUSED
;
4996 enum machine_mode mode ATTRIBUTE_UNUSED
;
4997 int ignore ATTRIBUTE_UNUSED
;
4999 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5000 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5001 tree arglist
= TREE_OPERAND (exp
, 1);
5007 case C4X_BUILTIN_FIX
:
5008 arg0
= TREE_VALUE (arglist
);
5009 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5010 r0
= protect_from_queue (r0
, 0);
5011 if (! target
|| ! register_operand (target
, QImode
))
5012 target
= gen_reg_rtx (QImode
);
5013 emit_insn (gen_fixqfqi_clobber (target
, r0
));
5016 case C4X_BUILTIN_FIX_ANSI
:
5017 arg0
= TREE_VALUE (arglist
);
5018 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5019 r0
= protect_from_queue (r0
, 0);
5020 if (! target
|| ! register_operand (target
, QImode
))
5021 target
= gen_reg_rtx (QImode
);
5022 emit_insn (gen_fix_truncqfqi2 (target
, r0
));
5025 case C4X_BUILTIN_MPYI
:
5028 arg0
= TREE_VALUE (arglist
);
5029 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5030 r0
= expand_expr (arg0
, NULL_RTX
, QImode
, 0);
5031 r1
= expand_expr (arg1
, NULL_RTX
, QImode
, 0);
5032 r0
= protect_from_queue (r0
, 0);
5033 r1
= protect_from_queue (r1
, 0);
5034 if (! target
|| ! register_operand (target
, QImode
))
5035 target
= gen_reg_rtx (QImode
);
5036 emit_insn (gen_mulqi3_24_clobber (target
, r0
, r1
));
5039 case C4X_BUILTIN_TOIEEE
:
5042 arg0
= TREE_VALUE (arglist
);
5043 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5044 r0
= protect_from_queue (r0
, 0);
5045 if (! target
|| ! register_operand (target
, QFmode
))
5046 target
= gen_reg_rtx (QFmode
);
5047 emit_insn (gen_toieee (target
, r0
));
5050 case C4X_BUILTIN_FRIEEE
:
5053 arg0
= TREE_VALUE (arglist
);
5054 if (TREE_CODE (arg0
) == VAR_DECL
|| TREE_CODE (arg0
) == PARM_DECL
)
5055 put_var_into_stack (arg0
);
5056 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5057 r0
= protect_from_queue (r0
, 0);
5058 if (register_operand (r0
, QFmode
))
5060 r1
= assign_stack_local (QFmode
, GET_MODE_SIZE (QFmode
), 0);
5061 emit_move_insn (r1
, r0
);
5064 if (! target
|| ! register_operand (target
, QFmode
))
5065 target
= gen_reg_rtx (QFmode
);
5066 emit_insn (gen_frieee (target
, r0
));
5069 case C4X_BUILTIN_RCPF
:
5072 arg0
= TREE_VALUE (arglist
);
5073 r0
= expand_expr (arg0
, NULL_RTX
, QFmode
, 0);
5074 r0
= protect_from_queue (r0
, 0);
5075 if (! target
|| ! register_operand (target
, QFmode
))
5076 target
= gen_reg_rtx (QFmode
);
5077 emit_insn (gen_rcpfqf_clobber (target
, r0
));
5084 c4x_asm_named_section (name
, flags
)
5086 unsigned int flags ATTRIBUTE_UNUSED
;
5088 fprintf (asm_out_file
, "\t.sect\t\"%s\"\n", name
);