/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "loop.h"
#include "recog.h"
#include "c-tree.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* Reg        Modes             Saved.  */
  R0R1_REGS,    /* R0   QI, QF, HF    No.  */
  R0R1_REGS,    /* R1   QI, QF, HF    No.  */
  R2R3_REGS,    /* R2   QI, QF, HF    No.  */
  R2R3_REGS,    /* R3   QI, QF, HF    No.  */
  EXT_LOW_REGS, /* R4   QI, QF, HF    QI.  */
  EXT_LOW_REGS, /* R5   QI, QF, HF    QI.  */
  EXT_LOW_REGS, /* R6   QI, QF, HF    QF.  */
  EXT_LOW_REGS, /* R7   QI, QF, HF    QF.  */
  ADDR_REGS,    /* AR0  QI            No.  */
  ADDR_REGS,    /* AR1  QI            No.  */
  ADDR_REGS,    /* AR2  QI            No.  */
  ADDR_REGS,    /* AR3  QI            QI.  */
  ADDR_REGS,    /* AR4  QI            QI.  */
  ADDR_REGS,    /* AR5  QI            QI.  */
  ADDR_REGS,    /* AR6  QI            QI.  */
  ADDR_REGS,    /* AR7  QI            QI.  */
  DP_REG,       /* DP   QI            No.  */
  INDEX_REGS,   /* IR0  QI            No.  */
  INDEX_REGS,   /* IR1  QI            No.  */
  BK_REG,       /* BK   QI            QI.  */
  SP_REG,       /* SP   QI            No.  */
  ST_REG,       /* ST   CC            No.  */
  NO_REGS,      /* DIE/IE             No.  */
  NO_REGS,      /* IIE/IF             No.  */
  NO_REGS,      /* IIF/IOF            No.  */
  INT_REGS,     /* RS   QI            No.  */
  INT_REGS,     /* RE   QI            No.  */
  RC_REG,       /* RC   QI            No.  */
  EXT_REGS,     /* R8   QI, QF, HF    QI.  */
  EXT_REGS,     /* R9   QI, QF, HF    No.  */
  EXT_REGS,     /* R10  QI, QF, HF    No.  */
  EXT_REGS,     /* R11  QI, QF, HF    No.  */
};

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
  /* Reg        Modes             Saved.  */
  HFmode,       /* R0   QI, QF, HF    No.  */
  HFmode,       /* R1   QI, QF, HF    No.  */
  HFmode,       /* R2   QI, QF, HF    No.  */
  HFmode,       /* R3   QI, QF, HF    No.  */
  QFmode,       /* R4   QI, QF, HF    QI.  */
  QFmode,       /* R5   QI, QF, HF    QI.  */
  QImode,       /* R6   QI, QF, HF    QF.  */
  QImode,       /* R7   QI, QF, HF    QF.  */
  QImode,       /* AR0  QI            No.  */
  QImode,       /* AR1  QI            No.  */
  QImode,       /* AR2  QI            No.  */
  QImode,       /* AR3  QI            QI.  */
  QImode,       /* AR4  QI            QI.  */
  QImode,       /* AR5  QI            QI.  */
  QImode,       /* AR6  QI            QI.  */
  QImode,       /* AR7  QI            QI.  */
  VOIDmode,     /* DP   QI            No.  */
  QImode,       /* IR0  QI            No.  */
  QImode,       /* IR1  QI            No.  */
  QImode,       /* BK   QI            QI.  */
  VOIDmode,     /* SP   QI            No.  */
  VOIDmode,     /* ST   CC            No.  */
  VOIDmode,     /* DIE/IE             No.  */
  VOIDmode,     /* IIE/IF             No.  */
  VOIDmode,     /* IIF/IOF            No.  */
  QImode,       /* RS   QI            No.  */
  QImode,       /* RE   QI            No.  */
  VOIDmode,     /* RC   QI            No.  */
  QFmode,       /* R8   QI, QF, HF    QI.  */
  HFmode,       /* R9   QI, QF, HF    No.  */
  HFmode,       /* R10  QI, QF, HF    No.  */
  HFmode,       /* R11  QI, QF, HF    No.  */
};


/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;

/* Forward declarations */
static int c4x_isr_reg_used_p PARAMS ((unsigned int));
static int c4x_leaf_function_p PARAMS ((void));
static int c4x_assembler_function_p PARAMS ((void));
static int c4x_immed_float_p PARAMS ((rtx));
static int c4x_a_register PARAMS ((rtx));
static int c4x_x_register PARAMS ((rtx));
static int c4x_immed_int_constant PARAMS ((rtx));
static int c4x_immed_float_constant PARAMS ((rtx));
static int c4x_K_constant PARAMS ((rtx));
static int c4x_N_constant PARAMS ((rtx));
static int c4x_O_constant PARAMS ((rtx));
static int c4x_R_indirect PARAMS ((rtx));
static int c4x_S_indirect PARAMS ((rtx));
static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
                                       enum machine_mode, int));
static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
static int c4x_r11_set_p PARAMS ((rtx));
static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes PARAMS ((tree, tree *));
static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static void c4x_encode_section_info PARAMS ((tree, int));
static void c4x_globalize_label PARAMS ((FILE *, const char *));
199\f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 230\f
cb0ca284
MH
231/* Override command line options.
232 Called once after all options have been parsed.
233 Mostly we process the processor
234 type and sometimes adjust other TARGET_ options. */
235
236void
237c4x_override_options ()
238{
cb0ca284
MH
239 if (c4x_rpts_cycles_string)
240 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
241 else
242 c4x_rpts_cycles = 0;
243
244 if (TARGET_C30)
245 c4x_cpu_version = 30;
246 else if (TARGET_C31)
247 c4x_cpu_version = 31;
248 else if (TARGET_C32)
249 c4x_cpu_version = 32;
eda45b64
MH
250 else if (TARGET_C33)
251 c4x_cpu_version = 33;
cb0ca284
MH
252 else if (TARGET_C40)
253 c4x_cpu_version = 40;
254 else if (TARGET_C44)
255 c4x_cpu_version = 44;
256 else
257 c4x_cpu_version = 40;
258
259 /* -mcpu=xx overrides -m40 etc. */
260 if (c4x_cpu_version_string)
798f6e6f
MH
261 {
262 const char *p = c4x_cpu_version_string;
263
264 /* Also allow -mcpu=c30 etc. */
265 if (*p == 'c' || *p == 'C')
266 p++;
267 c4x_cpu_version = atoi (p);
268 }
cb0ca284 269
eda45b64
MH
270 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
271 C40_FLAG | C44_FLAG);
cb0ca284
MH
272
273 switch (c4x_cpu_version)
274 {
275 case 30: target_flags |= C30_FLAG; break;
276 case 31: target_flags |= C31_FLAG; break;
277 case 32: target_flags |= C32_FLAG; break;
eda45b64 278 case 33: target_flags |= C33_FLAG; break;
cb0ca284
MH
279 case 40: target_flags |= C40_FLAG; break;
280 case 44: target_flags |= C44_FLAG; break;
281 default:
c725bd79 282 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
cb0ca284
MH
283 c4x_cpu_version = 40;
284 target_flags |= C40_FLAG;
285 }
286
eda45b64 287 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
cb0ca284
MH
288 target_flags |= C3X_FLAG;
289 else
290 target_flags &= ~C3X_FLAG;
291
4271f003 292 /* Convert foo / 8.0 into foo * 0.125, etc. */
748d29c1 293 set_fast_math_flags (1);
4271f003 294
4271f003
MH
295 /* We should phase out the following at some stage.
296 This provides compatibility with the old -mno-aliases option. */
4ddb3ea6 297 if (! TARGET_ALIASES && ! flag_argument_noalias)
4271f003 298 flag_argument_noalias = 1;
3dc85dfb
RH
299
300 /* We're C4X floating point, not IEEE floating point. */
301 memset (real_format_for_mode, 0, sizeof real_format_for_mode);
302 real_format_for_mode[QFmode - QFmode] = &c4x_single_format;
303 real_format_for_mode[HFmode - QFmode] = &c4x_extended_format;
cb0ca284
MH
304}
305
7eb3fb5f 306
4271f003 307/* This is called before c4x_override_options. */
975ab131 308
d5e4ff48
MH
309void
310c4x_optimization_options (level, size)
8b97c5f8 311 int level ATTRIBUTE_UNUSED;
d5e4ff48
MH
312 int size ATTRIBUTE_UNUSED;
313{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain by scheduling before register
     allocation is probably marginal anyhow.  */
318 flag_schedule_insns = 0;
d5e4ff48 319}
cb0ca284 320
975ab131 321
322/* Write an ASCII string. */
323
324#define C4X_ASCII_LIMIT 40
325
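/* For example (an illustration of the routine below, not a spec),

       c4x_output_ascii (asm_out_file, "Hi\n", 3);

   emits

       .byte   "Hi",10

   Printable characters are collected into quoted strings of at most
   C4X_ASCII_LIMIT characters, anything else (and, in TI mode, quote
   and backslash characters) is emitted as a decimal value, and for
   the TI assembler the line is re-wrapped once it nears column 80.  */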
326void
327c4x_output_ascii (stream, ptr, len)
328 FILE *stream;
798f6e6f 329 const char *ptr;
cb0ca284
MH
330 int len;
331{
332 char sbuf[C4X_ASCII_LIMIT + 1];
94eebed9 333 int s, l, special, first = 1, onlys;
cb0ca284
MH
334
335 if (len)
cb0ca284 336 fprintf (stream, "\t.byte\t");
cb0ca284 337
dfafcb4d 338 for (s = l = 0; len > 0; --len, ++ptr)
cb0ca284
MH
339 {
340 onlys = 0;
341
342 /* Escape " and \ with a \". */
dfafcb4d 343 special = *ptr == '\"' || *ptr == '\\';
cb0ca284
MH
344
345 /* If printable - add to buff. */
dfafcb4d 346 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
cb0ca284 347 {
dfafcb4d
HB
348 if (special)
349 sbuf[s++] = '\\';
cb0ca284
MH
350 sbuf[s++] = *ptr;
351 if (s < C4X_ASCII_LIMIT - 1)
352 continue;
353 onlys = 1;
354 }
355 if (s)
356 {
357 if (first)
358 first = 0;
359 else
dfafcb4d
HB
360 {
361 fputc (',', stream);
362 l++;
363 }
cb0ca284
MH
364
365 sbuf[s] = 0;
366 fprintf (stream, "\"%s\"", sbuf);
dfafcb4d
HB
367 l += s + 2;
368 if (TARGET_TI && l >= 80 && len > 1)
369 {
370 fprintf (stream, "\n\t.byte\t");
371 first = 1;
372 l = 0;
373 }
374
cb0ca284
MH
375 s = 0;
376 }
377 if (onlys)
378 continue;
379
380 if (first)
381 first = 0;
382 else
dfafcb4d
HB
383 {
384 fputc (',', stream);
385 l++;
386 }
cb0ca284
MH
387
388 fprintf (stream, "%d", *ptr);
dfafcb4d
HB
389 l += 3;
390 if (TARGET_TI && l >= 80 && len > 1)
391 {
392 fprintf (stream, "\n\t.byte\t");
393 first = 1;
394 l = 0;
395 }
cb0ca284
MH
396 }
397 if (s)
398 {
4ddb3ea6 399 if (! first)
cb0ca284
MH
400 fputc (',', stream);
401
402 sbuf[s] = 0;
403 fprintf (stream, "\"%s\"", sbuf);
404 s = 0;
405 }
406 fputc ('\n', stream);
407}
408
409
410int
411c4x_hard_regno_mode_ok (regno, mode)
8d485e2d 412 unsigned int regno;
cb0ca284
MH
413 enum machine_mode mode;
414{
415 switch (mode)
416 {
417#if Pmode != QImode
975ab131 418 case Pmode: /* Pointer (24/32 bits). */
cb0ca284 419#endif
975ab131 420 case QImode: /* Integer (32 bits). */
bc46716b 421 return IS_INT_REGNO (regno);
cb0ca284 422
975ab131
MH
423 case QFmode: /* Float, Double (32 bits). */
424 case HFmode: /* Long Double (40 bits). */
bc46716b 425 return IS_EXT_REGNO (regno);
cb0ca284 426
975ab131
MH
427 case CCmode: /* Condition Codes. */
428 case CC_NOOVmode: /* Condition Codes. */
bc46716b 429 return IS_ST_REGNO (regno);
cb0ca284 430
975ab131 431 case HImode: /* Long Long (64 bits). */
cb0ca284
MH
432 /* We need two registers to store long longs. Note that
433 it is much easier to constrain the first register
434 to start on an even boundary. */
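      /* For example, regno 0 (R0) is OK since R0/R1 form an even/odd
         pair, while regno 1 (R1) is rejected because the value would
         straddle R1/R2 (this assumes the usual numbering where R0 is
         hard register 0).  */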
bc46716b
MH
435 return IS_INT_REGNO (regno)
436 && IS_INT_REGNO (regno + 1)
cb0ca284
MH
437 && (regno & 1) == 0;
438
439 default:
975ab131 440 return 0; /* We don't support these modes. */
cb0ca284
MH
441 }
442
443 return 0;
444}
445
825dda42 446/* Return nonzero if REGNO1 can be renamed to REGNO2. */
40eef757
HB
447int
448c4x_hard_regno_rename_ok (regno1, regno2)
449 unsigned int regno1;
450 unsigned int regno2;
451{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
0b53f039 454 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
40eef757 455 return 0;
0b53f039 456 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
40eef757
HB
457 return 0;
458 /* We cannot copy from an extended (40 bit) register to a standard
459 (32 bit) register because we only set the condition codes for
460 extended registers. */
461 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
462 return 0;
463 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
464 return 0;
465 return 1;
466}
467
468/* The TI C3x C compiler register argument runtime model uses 6 registers,
469 AR2, R2, R3, RC, RS, RE.
470
471 The first two floating point arguments (float, double, long double)
472 that are found scanning from left to right are assigned to R2 and R3.
473
474 The remaining integer (char, short, int, long) or pointer arguments
475 are assigned to the remaining registers in the order AR2, R2, R3,
476 RC, RS, RE when scanning left to right, except for the last named
477 argument prior to an ellipsis denoting variable number of
478 arguments. We don't have to worry about the latter condition since
479 function.c treats the last named argument as anonymous (unnamed).
480
481 All arguments that cannot be passed in registers are pushed onto
482 the stack in reverse order (right to left). GCC handles that for us.
483
484 c4x_init_cumulative_args() is called at the start, so we can parse
485 the args to see how many floating point arguments and how many
486 integer (or pointer) arguments there are. c4x_function_arg() is
487 then called (sometimes repeatedly) for each argument (parsed left
488 to right) to obtain the register to pass the argument in, or zero
489 if the argument is to be passed on the stack. Once the compiler is
490 happy, c4x_function_arg_advance() is called.
491
492 Don't use R0 to pass arguments in, we use 0 to indicate a stack
493 argument. */
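/* A worked example of the rules above (a sketch, not checked against
   the TI ABI documentation): for

       void f (float a, int b, double c, char *d);

   the two floating point arguments A and C are passed in R2 and R3,
   and the remaining arguments B and D take the first free entries of
   c4x_int_reglist[2] below, i.e. AR2 and RC.  */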
494
8b60264b 495static const int c4x_int_reglist[3][6] =
496{
497 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
498 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
499 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
500};
501
0139adca 502static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
cb0ca284
MH
503
504
505/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
506 function whose data type is FNTYPE.
507 For a library call, FNTYPE is 0. */
508
509void
510c4x_init_cumulative_args (cum, fntype, libname)
975ab131
MH
511 CUMULATIVE_ARGS *cum; /* Argument info to initialize. */
512 tree fntype; /* Tree ptr for function decl. */
513 rtx libname; /* SYMBOL_REF of library name or 0. */
cb0ca284
MH
514{
515 tree param, next_param;
516
517 cum->floats = cum->ints = 0;
518 cum->init = 0;
519 cum->var = 0;
520 cum->args = 0;
521
522 if (TARGET_DEBUG)
523 {
524 fprintf (stderr, "\nc4x_init_cumulative_args (");
525 if (fntype)
526 {
527 tree ret_type = TREE_TYPE (fntype);
528
529 fprintf (stderr, "fntype code = %s, ret code = %s",
530 tree_code_name[(int) TREE_CODE (fntype)],
531 tree_code_name[(int) TREE_CODE (ret_type)]);
532 }
533 else
534 fprintf (stderr, "no fntype");
535
536 if (libname)
537 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
538 }
539
540 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
541
542 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
543 param; param = next_param)
544 {
545 tree type;
546
547 next_param = TREE_CHAIN (param);
548
549 type = TREE_VALUE (param);
550 if (type && type != void_type_node)
551 {
552 enum machine_mode mode;
553
554 /* If the last arg doesn't have void type then we have
555 variable arguments. */
4ddb3ea6 556 if (! next_param)
cb0ca284
MH
557 cum->var = 1;
558
559 if ((mode = TYPE_MODE (type)))
560 {
4ddb3ea6 561 if (! MUST_PASS_IN_STACK (mode, type))
cb0ca284
MH
562 {
563 /* Look for float, double, or long double argument. */
564 if (mode == QFmode || mode == HFmode)
565 cum->floats++;
566 /* Look for integer, enumeral, boolean, char, or pointer
567 argument. */
568 else if (mode == QImode || mode == Pmode)
569 cum->ints++;
570 }
571 }
572 cum->args++;
573 }
574 }
575
576 if (TARGET_DEBUG)
577 fprintf (stderr, "%s%s, args = %d)\n",
578 cum->prototype ? ", prototype" : "",
579 cum->var ? ", variable args" : "",
580 cum->args);
581}
582
583
584/* Update the data in CUM to advance over an argument
585 of mode MODE and data type TYPE.
586 (TYPE is null for libcalls where that information may not be available.) */
587
588void
589c4x_function_arg_advance (cum, mode, type, named)
975ab131
MH
590 CUMULATIVE_ARGS *cum; /* Current arg information. */
591 enum machine_mode mode; /* Current arg mode. */
592 tree type; /* Type of the arg or 0 if lib support. */
593 int named; /* Whether or not the argument was named. */
cb0ca284
MH
594{
595 if (TARGET_DEBUG)
596 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
597 GET_MODE_NAME (mode), named);
4ddb3ea6 598 if (! TARGET_MEMPARM
cb0ca284
MH
599 && named
600 && type
4ddb3ea6 601 && ! MUST_PASS_IN_STACK (mode, type))
cb0ca284
MH
602 {
603 /* Look for float, double, or long double argument. */
604 if (mode == QFmode || mode == HFmode)
605 cum->floats++;
606 /* Look for integer, enumeral, boolean, char, or pointer argument. */
607 else if (mode == QImode || mode == Pmode)
608 cum->ints++;
609 }
4ddb3ea6 610 else if (! TARGET_MEMPARM && ! type)
cb0ca284
MH
611 {
612 /* Handle libcall arguments. */
613 if (mode == QFmode || mode == HFmode)
614 cum->floats++;
615 else if (mode == QImode || mode == Pmode)
616 cum->ints++;
617 }
618 return;
619}
620
621
622/* Define where to put the arguments to a function. Value is zero to
623 push the argument on the stack, or a hard register in which to
624 store the argument.
625
626 MODE is the argument's machine mode.
627 TYPE is the data type of the argument (as a tree).
628 This is null for libcalls where that information may
629 not be available.
630 CUM is a variable of type CUMULATIVE_ARGS which gives info about
631 the preceding args and about the function being called.
632 NAMED is nonzero if this argument is a named parameter
633 (otherwise it is an extra parameter matching an ellipsis). */
634
635struct rtx_def *
636c4x_function_arg (cum, mode, type, named)
975ab131
MH
637 CUMULATIVE_ARGS *cum; /* Current arg information. */
638 enum machine_mode mode; /* Current arg mode. */
639 tree type; /* Type of the arg or 0 if lib support. */
640 int named; /* != 0 for normal args, == 0 for ... args. */
cb0ca284 641{
975ab131 642 int reg = 0; /* Default to passing argument on stack. */
cb0ca284 643
4ddb3ea6 644 if (! cum->init)
cb0ca284 645 {
975ab131 646 /* We can handle at most 2 floats in R2, R3. */
cb0ca284
MH
647 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
648
649 /* We can handle at most 6 integers minus number of floats passed
650 in registers. */
651 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
652 6 - cum->maxfloats : cum->ints;
653
1ac7a7f5 654 /* If there is no prototype, assume all the arguments are integers. */
4ddb3ea6 655 if (! cum->prototype)
cb0ca284
MH
656 cum->maxints = 6;
657
658 cum->ints = cum->floats = 0;
659 cum->init = 1;
660 }
661
49d1b871
MH
662 /* This marks the last argument. We don't need to pass this through
663 to the call insn. */
664 if (type == void_type_node)
665 return 0;
666
4ddb3ea6 667 if (! TARGET_MEMPARM
cb0ca284
MH
668 && named
669 && type
4ddb3ea6 670 && ! MUST_PASS_IN_STACK (mode, type))
cb0ca284
MH
671 {
672 /* Look for float, double, or long double argument. */
673 if (mode == QFmode || mode == HFmode)
674 {
675 if (cum->floats < cum->maxfloats)
676 reg = c4x_fp_reglist[cum->floats];
677 }
678 /* Look for integer, enumeral, boolean, char, or pointer argument. */
679 else if (mode == QImode || mode == Pmode)
680 {
681 if (cum->ints < cum->maxints)
682 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
683 }
684 }
4ddb3ea6 685 else if (! TARGET_MEMPARM && ! type)
cb0ca284
MH
686 {
687 /* We could use a different argument calling model for libcalls,
688 since we're only calling functions in libgcc. Thus we could
689 pass arguments for long longs in registers rather than on the
690 stack. In the meantime, use the odd TI format. We make the
691 assumption that we won't have more than two floating point
692 args, six integer args, and that all the arguments are of the
693 same mode. */
694 if (mode == QFmode || mode == HFmode)
695 reg = c4x_fp_reglist[cum->floats];
696 else if (mode == QImode || mode == Pmode)
697 reg = c4x_int_reglist[0][cum->ints];
698 }
699
700 if (TARGET_DEBUG)
701 {
702 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
703 GET_MODE_NAME (mode), named);
704 if (reg)
705 fprintf (stderr, ", reg=%s", reg_names[reg]);
706 else
707 fprintf (stderr, ", stack");
708 fprintf (stderr, ")\n");
709 }
710 if (reg)
d5e4ff48 711 return gen_rtx_REG (mode, reg);
cb0ca284
MH
712 else
713 return NULL_RTX;
714}
715
/* C[34]x arguments grow downwards, which the standard varargs
   machinery can't handle.  */
718rtx
719c4x_va_arg (valist, type)
720 tree valist, type;
721{
722 tree t;
723
724 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
725 build_int_2 (int_size_in_bytes (type), 0));
726 TREE_SIDE_EFFECTS (t) = 1;
727
728 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
729}
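/* In other words, each va_arg (ap, TYPE) first decrements AP by
   int_size_in_bytes (TYPE) and then uses the decremented AP as the
   address of the argument, since successive anonymous arguments live
   at decreasing addresses.  (A sketch of the intent, not a literal
   expansion of the tree built above.)  */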
cb0ca284 730
f959ff1a 731
cb0ca284
MH
732static int
733c4x_isr_reg_used_p (regno)
8d485e2d 734 unsigned int regno;
cb0ca284
MH
735{
736 /* Don't save/restore FP or ST, we handle them separately. */
737 if (regno == FRAME_POINTER_REGNUM
bc46716b 738 || IS_ST_REGNO (regno))
cb0ca284
MH
739 return 0;
740
  /* We could be a little smarter about saving/restoring DP.
     We only save it for the big memory model or if
     we're paranoid. ;-)  */
bc46716b 744 if (IS_DP_REGNO (regno))
4ddb3ea6 745 return ! TARGET_SMALL || TARGET_PARANOID;
cb0ca284
MH
746
747 /* Only save/restore regs in leaf function that are used. */
748 if (c4x_leaf_function)
749 return regs_ever_live[regno] && fixed_regs[regno] == 0;
750
751 /* Only save/restore regs that are used by the ISR and regs
752 that are likely to be used by functions the ISR calls
753 if they are not fixed. */
bc46716b 754 return IS_EXT_REGNO (regno)
cb0ca284
MH
755 || ((regs_ever_live[regno] || call_used_regs[regno])
756 && fixed_regs[regno] == 0);
757}
758
759
760static int
761c4x_leaf_function_p ()
762{
763 /* A leaf function makes no calls, so we only need
764 to save/restore the registers we actually use.
765 For the global variable leaf_function to be set, we need
766 to define LEAF_REGISTERS and all that it entails.
767 Let's check ourselves... */
768
769 if (lookup_attribute ("leaf_pretend",
770 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
771 return 1;
772
773 /* Use the leaf_pretend attribute at your own risk. This is a hack
774 to speed up ISRs that call a function infrequently where the
775 overhead of saving and restoring the additional registers is not
776 warranted. You must save and restore the additional registers
777 required by the called function. Caveat emptor. Here's enough
778 rope... */
779
780 if (leaf_function_p ())
781 return 1;
782
783 return 0;
784}
785
786
787static int
788c4x_assembler_function_p ()
789{
790 tree type;
791
792 type = TREE_TYPE (current_function_decl);
49d1b871
MH
793 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
794 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
cb0ca284
MH
795}
796
797
8d485e2d 798int
cb0ca284
MH
799c4x_interrupt_function_p ()
800{
801 if (lookup_attribute ("interrupt",
802 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
803 return 1;
804
975ab131 805 /* Look for TI style c_intnn. */
cb0ca284
MH
806 return current_function_name[0] == 'c'
807 && current_function_name[1] == '_'
808 && current_function_name[2] == 'i'
809 && current_function_name[3] == 'n'
810 && current_function_name[4] == 't'
92a438d1
KG
811 && ISDIGIT (current_function_name[5])
812 && ISDIGIT (current_function_name[6]);
cb0ca284
MH
813}
814
cb0ca284 815void
8d485e2d 816c4x_expand_prologue ()
cb0ca284 817{
8d485e2d
MH
818 unsigned int regno;
819 int size = get_frame_size ();
820 rtx insn;
cb0ca284 821
8d485e2d
MH
  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (at -O2 and above) and
     this flag stops us needlessly pushing the frame pointer.  */
cb0ca284
MH
825 int dont_push_ar3;
826
827 /* For __assembler__ function don't build a prologue. */
828 if (c4x_assembler_function_p ())
829 {
cb0ca284
MH
830 return;
831 }
8d485e2d 832
cb0ca284
MH
833 /* For __interrupt__ function build specific prologue. */
834 if (c4x_interrupt_function_p ())
835 {
836 c4x_leaf_function = c4x_leaf_function_p ();
8d485e2d
MH
837
838 insn = emit_insn (gen_push_st ());
839 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
840 if (size)
841 {
8d485e2d
MH
842 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
843 RTX_FRAME_RELATED_P (insn) = 1;
844 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
845 gen_rtx_REG (QImode, SP_REGNO)));
846 RTX_FRAME_RELATED_P (insn) = 1;
69f6e760
MH
847 /* We require that an ISR uses fewer than 32768 words of
848 local variables, otherwise we have to go to lots of
849 effort to save a register, load it with the desired size,
850 adjust the stack pointer, and then restore the modified
851 register. Frankly, I think it is a poor ISR that
852 requires more than 32767 words of local temporary
853 storage! */
cb0ca284 854 if (size > 32767)
c725bd79 855 error ("ISR %s requires %d words of local vars, max is 32767",
06dd70c6 856 current_function_name, size);
400500c4 857
8d485e2d
MH
858 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
859 gen_rtx_REG (QImode, SP_REGNO),
06dd70c6 860 GEN_INT (size)));
8d485e2d 861 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
862 }
863 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
864 {
865 if (c4x_isr_reg_used_p (regno))
866 {
8d485e2d
MH
867 if (regno == DP_REGNO)
868 {
869 insn = emit_insn (gen_push_dp ());
870 RTX_FRAME_RELATED_P (insn) = 1;
871 }
872 else
873 {
874 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
875 RTX_FRAME_RELATED_P (insn) = 1;
876 if (IS_EXT_REGNO (regno))
877 {
878 insn = emit_insn (gen_pushqf
879 (gen_rtx_REG (QFmode, regno)));
880 RTX_FRAME_RELATED_P (insn) = 1;
881 }
882 }
cb0ca284
MH
883 }
884 }
885 /* We need to clear the repeat mode flag if the ISR is
886 going to use a RPTB instruction or uses the RC, RS, or RE
887 registers. */
888 if (regs_ever_live[RC_REGNO]
889 || regs_ever_live[RS_REGNO]
890 || regs_ever_live[RE_REGNO])
8d485e2d
MH
891 {
892 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
893 RTX_FRAME_RELATED_P (insn) = 1;
894 }
cb0ca284
MH
895
896 /* Reload DP reg if we are paranoid about some turkey
897 violating small memory model rules. */
898 if (TARGET_SMALL && TARGET_PARANOID)
8d485e2d
MH
899 {
900 insn = emit_insn (gen_set_ldp_prologue
901 (gen_rtx_REG (QImode, DP_REGNO),
902 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
903 RTX_FRAME_RELATED_P (insn) = 1;
904 }
cb0ca284
MH
905 }
906 else
907 {
908 if (frame_pointer_needed)
909 {
910 if ((size != 0)
911 || (current_function_args_size != 0)
912 || (optimize < 2))
913 {
8d485e2d
MH
914 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
915 RTX_FRAME_RELATED_P (insn) = 1;
916 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
917 gen_rtx_REG (QImode, SP_REGNO)));
918 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
919 dont_push_ar3 = 1;
920 }
921 else
922 {
923 /* Since ar3 is not used, we don't need to push it. */
924 dont_push_ar3 = 1;
925 }
926 }
927 else
928 {
929 /* If we use ar3, we need to push it. */
930 dont_push_ar3 = 0;
931 if ((size != 0) || (current_function_args_size != 0))
932 {
933 /* If we are omitting the frame pointer, we still have
934 to make space for it so the offsets are correct
935 unless we don't use anything on the stack at all. */
936 size += 1;
937 }
938 }
8d485e2d 939
940 if (size > 32767)
941 {
942 /* Local vars are too big, it will take multiple operations
943 to increment SP. */
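          /* Roughly, for an illustrative SIZE of 0x12345 on the C3x this
             emits: load R1 with 0x1 (the high bits), shift it back up by
             16, OR in 0x2345, then add R1 to SP; the C4x case below loads
             the high part with a single move instead.  */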
944 if (TARGET_C3X)
945 {
8d485e2d
MH
946 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
947 GEN_INT(size >> 16)));
948 RTX_FRAME_RELATED_P (insn) = 1;
949 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
950 gen_rtx_REG (QImode, R1_REGNO),
951 GEN_INT(-16)));
952 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
953 }
954 else
8d485e2d
MH
955 {
956 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
957 GEN_INT(size & ~0xffff)));
958 RTX_FRAME_RELATED_P (insn) = 1;
959 }
960 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
961 gen_rtx_REG (QImode, R1_REGNO),
962 GEN_INT(size & 0xffff)));
963 RTX_FRAME_RELATED_P (insn) = 1;
964 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
965 gen_rtx_REG (QImode, SP_REGNO),
966 gen_rtx_REG (QImode, R1_REGNO)));
967 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
968 }
969 else if (size != 0)
970 {
          /* Local vars take up no more than 32767 words, so we can
             directly add the number.  */
8d485e2d
MH
973 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
974 gen_rtx_REG (QImode, SP_REGNO),
975 GEN_INT (size)));
976 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 977 }
8d485e2d 978
cb0ca284
MH
979 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
980 {
4ddb3ea6 981 if (regs_ever_live[regno] && ! call_used_regs[regno])
cb0ca284 982 {
0b53f039 983 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
cb0ca284 984 {
cb0ca284 985 if (TARGET_PRESERVE_FLOAT)
8d485e2d
MH
986 {
987 insn = emit_insn (gen_pushqi
988 (gen_rtx_REG (QImode, regno)));
989 RTX_FRAME_RELATED_P (insn) = 1;
990 }
991 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
992 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 993 }
4ddb3ea6 994 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
cb0ca284 995 {
8d485e2d
MH
996 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
997 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
998 }
999 }
1000 }
1001 }
1002}
1003
1004
cb0ca284 1005void
8d485e2d 1006c4x_expand_epilogue()
cb0ca284
MH
1007{
1008 int regno;
8d485e2d 1009 int jump = 0;
cb0ca284
MH
1010 int dont_pop_ar3;
1011 rtx insn;
8d485e2d
MH
1012 int size = get_frame_size ();
1013
cb0ca284
MH
1014 /* For __assembler__ function build no epilogue. */
1015 if (c4x_assembler_function_p ())
1016 {
8d485e2d
MH
1017 insn = emit_jump_insn (gen_return_from_epilogue ());
1018 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1019 return;
1020 }
1021
cb0ca284
MH
1022 /* For __interrupt__ function build specific epilogue. */
1023 if (c4x_interrupt_function_p ())
1024 {
1025 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1026 {
4ddb3ea6 1027 if (! c4x_isr_reg_used_p (regno))
cb0ca284 1028 continue;
8d485e2d
MH
1029 if (regno == DP_REGNO)
1030 {
1031 insn = emit_insn (gen_pop_dp ());
1032 RTX_FRAME_RELATED_P (insn) = 1;
1033 }
1034 else
1035 {
1036 /* We have to use unspec because the compiler will delete insns
1037 that are not call-saved. */
1038 if (IS_EXT_REGNO (regno))
1039 {
1040 insn = emit_insn (gen_popqf_unspec
1041 (gen_rtx_REG (QFmode, regno)));
1042 RTX_FRAME_RELATED_P (insn) = 1;
1043 }
1044 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1045 RTX_FRAME_RELATED_P (insn) = 1;
1046 }
cb0ca284
MH
1047 }
1048 if (size)
1049 {
8d485e2d
MH
1050 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1051 gen_rtx_REG (QImode, SP_REGNO),
1052 GEN_INT(size)));
1053 RTX_FRAME_RELATED_P (insn) = 1;
1054 insn = emit_insn (gen_popqi
1055 (gen_rtx_REG (QImode, AR3_REGNO)));
1056 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 1057 }
8d485e2d
MH
1058 insn = emit_insn (gen_pop_st ());
1059 RTX_FRAME_RELATED_P (insn) = 1;
1060 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1061 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1062 }
1063 else
1064 {
1065 if (frame_pointer_needed)
1066 {
1067 if ((size != 0)
1068 || (current_function_args_size != 0)
1069 || (optimize < 2))
1070 {
8d485e2d
MH
1071 insn = emit_insn
1072 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1073 gen_rtx_MEM (QImode,
1074 gen_rtx_PLUS
1075 (QImode, gen_rtx_REG (QImode,
1076 AR3_REGNO),
1077 GEN_INT(-1)))));
1078 RTX_FRAME_RELATED_P (insn) = 1;
1079
cb0ca284
MH
1080 /* We already have the return value and the fp,
1081 so we need to add those to the stack. */
1082 size += 2;
8d485e2d 1083 jump = 1;
cb0ca284
MH
1084 dont_pop_ar3 = 1;
1085 }
1086 else
1087 {
1088 /* Since ar3 is not used for anything, we don't need to
1089 pop it. */
1090 dont_pop_ar3 = 1;
1091 }
1092 }
1093 else
1094 {
975ab131 1095 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
cb0ca284
MH
1096 if (size || current_function_args_size)
1097 {
          /* If we are omitting the frame pointer, we still have
             to make space for it so the offsets are correct
             unless we don't use anything on the stack at all.  */
1101 size += 1;
1102 }
1103 }
8d485e2d 1104
cb0ca284
MH
1105 /* Now restore the saved registers, putting in the delayed branch
1106 where required. */
1107 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1108 {
4ddb3ea6 1109 if (regs_ever_live[regno] && ! call_used_regs[regno])
cb0ca284
MH
1110 {
1111 if (regno == AR3_REGNO && dont_pop_ar3)
1112 continue;
8d485e2d 1113
0b53f039 1114 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
cb0ca284 1115 {
8d485e2d
MH
1116 insn = emit_insn (gen_popqf_unspec
1117 (gen_rtx_REG (QFmode, regno)));
1118 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1119 if (TARGET_PRESERVE_FLOAT)
1120 {
8d485e2d
MH
1121 insn = emit_insn (gen_popqi_unspec
1122 (gen_rtx_REG (QImode, regno)));
1123 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1124 }
1125 }
1126 else
8d485e2d
MH
1127 {
1128 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1129 RTX_FRAME_RELATED_P (insn) = 1;
1130 }
cb0ca284
MH
1131 }
1132 }
8d485e2d 1133
cb0ca284
MH
1134 if (frame_pointer_needed)
1135 {
1136 if ((size != 0)
1137 || (current_function_args_size != 0)
1138 || (optimize < 2))
1139 {
1140 /* Restore the old FP. */
8d485e2d
MH
1141 insn = emit_insn
1142 (gen_movqi
1143 (gen_rtx_REG (QImode, AR3_REGNO),
1144 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1145
1146 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1147 }
1148 }
8d485e2d 1149
cb0ca284
MH
1150 if (size > 32767)
1151 {
1152 /* Local vars are too big, it will take multiple operations
1153 to decrement SP. */
1154 if (TARGET_C3X)
1155 {
8d485e2d
MH
1156 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1157 GEN_INT(size >> 16)));
1158 RTX_FRAME_RELATED_P (insn) = 1;
1159 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1160 gen_rtx_REG (QImode, R3_REGNO),
1161 GEN_INT(-16)));
1162 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1163 }
1164 else
8d485e2d
MH
1165 {
1166 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1167 GEN_INT(size & ~0xffff)));
1168 RTX_FRAME_RELATED_P (insn) = 1;
1169 }
1170 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1171 gen_rtx_REG (QImode, R3_REGNO),
1172 GEN_INT(size & 0xffff)));
1173 RTX_FRAME_RELATED_P (insn) = 1;
1174 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1175 gen_rtx_REG (QImode, SP_REGNO),
1176 gen_rtx_REG (QImode, R3_REGNO)));
1177 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1178 }
1179 else if (size != 0)
1180 {
1181 /* Local vars take up less than 32768 words, so we can directly
1182 subtract the number. */
8d485e2d
MH
1183 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1184 gen_rtx_REG (QImode, SP_REGNO),
1185 GEN_INT(size)));
1186 RTX_FRAME_RELATED_P (insn) = 1;
1187 }
1188
1189 if (jump)
1190 {
39c1728e
HB
1191 insn = emit_jump_insn (gen_return_indirect_internal
1192 (gen_rtx_REG (QImode, R2_REGNO)));
8d485e2d
MH
1193 RTX_FRAME_RELATED_P (insn) = 1;
1194 }
1195 else
1196 {
1197 insn = emit_jump_insn (gen_return_from_epilogue ());
1198 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 1199 }
cb0ca284
MH
1200 }
1201}
1202
8d485e2d 1203
cb0ca284
MH
1204int
1205c4x_null_epilogue_p ()
1206{
1207 int regno;
1208
1209 if (reload_completed
4ddb3ea6
MH
1210 && ! c4x_assembler_function_p ()
1211 && ! c4x_interrupt_function_p ()
1212 && ! current_function_calls_alloca
1213 && ! current_function_args_size
4ddb3ea6
MH
1214 && ! (optimize < 2)
1215 && ! get_frame_size ())
cb0ca284
MH
1216 {
1217 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
4ddb3ea6 1218 if (regs_ever_live[regno] && ! call_used_regs[regno]
cb0ca284
MH
1219 && (regno != AR3_REGNO))
1220 return 0;
1221 return 1;
1222 }
1223 return 0;
1224}
1225
8d485e2d 1226
50c33087
MH
1227int
1228c4x_emit_move_sequence (operands, mode)
1229 rtx *operands;
1230 enum machine_mode mode;
1231{
1232 rtx op0 = operands[0];
1233 rtx op1 = operands[1];
1234
1235 if (! reload_in_progress
1236 && ! REG_P (op0)
1237 && ! REG_P (op1)
1238 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1239 op1 = force_reg (mode, op1);
1240
2718204c
MH
1241 if (GET_CODE (op1) == LO_SUM
1242 && GET_MODE (op1) == Pmode
1243 && dp_reg_operand (XEXP (op1, 0), mode))
1244 {
1245 /* expand_increment will sometimes create a LO_SUM immediate
1246 address. */
1247 op1 = XEXP (op1, 1);
1248 }
55310df7 1249 else if (symbolic_address_operand (op1, mode))
50c33087
MH
1250 {
1251 if (TARGET_LOAD_ADDRESS)
1252 {
1253 /* Alias analysis seems to do a better job if we force
1254 constant addresses to memory after reload. */
1255 emit_insn (gen_load_immed_address (op0, op1));
1256 return 1;
1257 }
1258 else
1259 {
1260 /* Stick symbol or label address into the constant pool. */
1261 op1 = force_const_mem (Pmode, op1);
1262 }
1263 }
1264 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1265 {
1266 /* We could be a lot smarter about loading some of these
1267 constants... */
1268 op1 = force_const_mem (mode, op1);
1269 }
50c33087
MH
1270
1271 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1272 and emit associated (HIGH (SYMREF)) if large memory model.
1273 c4x_legitimize_address could be used to do this,
1274 perhaps by calling validize_address. */
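  /* For example (informally): in the big memory model a QImode access
     to a global "foo", i.e. (mem:QI (symbol_ref "foo")), is rewritten
     here as a set_ldp insn for "foo" followed by

         (mem:QI (lo_sum (reg DP) (symbol_ref "foo")))

     which the output routines later print as an LDP of foo's data page
     plus a direct @foo reference.  "foo" is just an illustrative name.  */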
1275 if (TARGET_EXPOSE_LDP
1276 && ! (reload_in_progress || reload_completed)
50c33087 1277 && GET_CODE (op1) == MEM
55310df7 1278 && symbolic_address_operand (XEXP (op1, 0), Pmode))
50c33087
MH
1279 {
1280 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1281 if (! TARGET_SMALL)
1282 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1283 op1 = change_address (op1, mode,
1284 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1285 }
1286
31445126
MH
1287 if (TARGET_EXPOSE_LDP
1288 && ! (reload_in_progress || reload_completed)
50c33087 1289 && GET_CODE (op0) == MEM
55310df7 1290 && symbolic_address_operand (XEXP (op0, 0), Pmode))
50c33087
MH
1291 {
1292 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1293 if (! TARGET_SMALL)
1294 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1295 op0 = change_address (op0, mode,
1296 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1297 }
1298
ebcc44f4
MH
1299 if (GET_CODE (op0) == SUBREG
1300 && mixed_subreg_operand (op0, mode))
1301 {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
1305 if (reload_in_progress || reload_completed)
1306 abort ();
1307 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1308 op0 = SUBREG_REG (op0);
1309 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1310 {
1311 op0 = copy_rtx (op0);
1312 PUT_MODE (op0, QImode);
1313 }
1314 else
1315 abort ();
1316
1317 if (mode == QFmode)
1318 emit_insn (gen_storeqf_int_clobber (op0, op1));
1319 else
1320 abort ();
1321 return 1;
1322 }
1323
1324 if (GET_CODE (op1) == SUBREG
1325 && mixed_subreg_operand (op1, mode))
1326 {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
1330 if (reload_in_progress || reload_completed)
1331 abort ();
1332 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1333 op1 = SUBREG_REG (op1);
1334 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1335 {
1336 op1 = copy_rtx (op1);
1337 PUT_MODE (op1, QImode);
1338 }
1339 else
1340 abort ();
1341
1342 if (mode == QFmode)
1343 emit_insn (gen_loadqf_int_clobber (op0, op1));
1344 else
1345 abort ();
1346 return 1;
1347 }
1348
8d485e2d
MH
1349 if (mode == QImode
1350 && reg_operand (op0, mode)
1351 && const_int_operand (op1, mode)
1352 && ! IS_INT16_CONST (INTVAL (op1))
1353 && ! IS_HIGH_CONST (INTVAL (op1)))
1354 {
1355 emit_insn (gen_loadqi_big_constant (op0, op1));
1356 return 1;
1357 }
1358
1359 if (mode == HImode
1360 && reg_operand (op0, mode)
1361 && const_int_operand (op1, mode))
1362 {
1363 emit_insn (gen_loadhi_big_constant (op0, op1));
1364 return 1;
1365 }
1366
50c33087
MH
1367 /* Adjust operands in case we have modified them. */
1368 operands[0] = op0;
1369 operands[1] = op1;
1370
1371 /* Emit normal pattern. */
1372 return 0;
1373}
1374
1375
cb0ca284 1376void
4fda2521
HB
1377c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1378 rtx libcall;
cb0ca284
MH
1379 enum rtx_code code;
1380 enum machine_mode dmode;
1381 enum machine_mode smode;
1382 int noperands;
1383 rtx *operands;
1384{
1385 rtx ret;
1386 rtx insns;
cb0ca284
MH
1387 rtx equiv;
1388
1389 start_sequence ();
cb0ca284
MH
1390 switch (noperands)
1391 {
1392 case 2:
1393 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1394 operands[1], smode);
1395 equiv = gen_rtx (code, dmode, operands[1]);
1396 break;
1397
1398 case 3:
1399 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1400 operands[1], smode, operands[2], smode);
1401 equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1402 break;
1403
1404 default:
400500c4 1405 abort ();
cb0ca284
MH
1406 }
1407
1408 insns = get_insns ();
1409 end_sequence ();
1410 emit_libcall_block (insns, operands[0], ret, equiv);
1411}
1412
1413
1414void
4fda2521
HB
1415c4x_emit_libcall3 (libcall, code, mode, operands)
1416 rtx libcall;
cb0ca284
MH
1417 enum rtx_code code;
1418 enum machine_mode mode;
1419 rtx *operands;
1420{
8d485e2d 1421 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
cb0ca284
MH
1422}
1423
50c33087 1424
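/* c4x_emit_libcall_mulhi emits a call to a library routine that returns
   the high part of a widening multiply.  The equivalence recorded below
   says, roughly, operands[0] = (CODE:HI op1 * CODE:HI op2) >> 32,
   truncated back to MODE, where CODE selects signed or unsigned
   extension.  (An informal reading of the RTL built here.)  */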
cb0ca284 1425void
4fda2521
HB
1426c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1427 rtx libcall;
cb0ca284
MH
1428 enum rtx_code code;
1429 enum machine_mode mode;
1430 rtx *operands;
1431{
1432 rtx ret;
1433 rtx insns;
cb0ca284
MH
1434 rtx equiv;
1435
1436 start_sequence ();
cb0ca284
MH
1437 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1438 operands[1], mode, operands[2], mode);
d5e4ff48
MH
1439 equiv = gen_rtx_TRUNCATE (mode,
1440 gen_rtx_LSHIFTRT (HImode,
1441 gen_rtx_MULT (HImode,
cb0ca284
MH
1442 gen_rtx (code, HImode, operands[1]),
1443 gen_rtx (code, HImode, operands[2])),
e27f8c8a 1444 GEN_INT (32)));
cb0ca284
MH
1445 insns = get_insns ();
1446 end_sequence ();
1447 emit_libcall_block (insns, operands[0], ret, equiv);
1448}
1449
1450
/* Set the SYMBOL_REF_FLAG for a function decl.  However, we do not
   yet use this info.  */
1453
1454static void
b2003250
RH
1455c4x_encode_section_info (decl, first)
1456 tree decl;
1457 int first ATTRIBUTE_UNUSED;
50c33087 1458{
50c33087
MH
1459 if (TREE_CODE (decl) == FUNCTION_DECL)
1460 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
50c33087
MH
1461}
1462
1463
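/* A rough summary of the address forms accepted by the switch below
   (a reading of the code, not an official list):

       (reg An)                                 register indirect
       (pre_inc/pre_dec/post_inc/post_dec reg)  auto-modify, QImode/QFmode only
       (pre_modify/post_modify reg (plus ...))  ditto
       (plus reg reg)                           base + index
       (plus reg (const_int d))                 base + small displacement
       (lo_sum (reg DP) symbol)                 direct addressing off the data page

   Absolute CONST_INT addresses and indirect MEMs are rejected.  */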
1464int
1465c4x_check_legit_addr (mode, addr, strict)
1466 enum machine_mode mode;
1467 rtx addr;
1468 int strict;
1469{
975ab131
MH
1470 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1471 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1472 rtx disp = NULL_RTX; /* Displacement. */
cb0ca284
MH
1473 enum rtx_code code;
1474
1475 code = GET_CODE (addr);
1476 switch (code)
1477 {
1478 /* Register indirect with auto increment/decrement. We don't
05713b80 1479 allow SP here---push_operand should recognize an operand
cb0ca284
MH
1480 being pushed on the stack. */
1481
1482 case PRE_DEC:
ebcc44f4 1483 case PRE_INC:
cb0ca284
MH
1484 case POST_DEC:
1485 if (mode != QImode && mode != QFmode)
1486 return 0;
ebcc44f4 1487
cb0ca284
MH
1488 case POST_INC:
1489 base = XEXP (addr, 0);
4ddb3ea6 1490 if (! REG_P (base))
cb0ca284
MH
1491 return 0;
1492 break;
1493
1494 case PRE_MODIFY:
1495 case POST_MODIFY:
1496 {
1497 rtx op0 = XEXP (addr, 0);
1498 rtx op1 = XEXP (addr, 1);
1499
1500 if (mode != QImode && mode != QFmode)
1501 return 0;
1502
4ddb3ea6 1503 if (! REG_P (op0)
cb0ca284
MH
1504 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1505 return 0;
1506 base = XEXP (op1, 0);
1507 if (base != op0)
1508 return 0;
1509 if (REG_P (XEXP (op1, 1)))
1510 indx = XEXP (op1, 1);
1511 else
1512 disp = XEXP (op1, 1);
1513 }
1514 break;
1515
1516 /* Register indirect. */
1517 case REG:
1518 base = addr;
1519 break;
1520
1521 /* Register indirect with displacement or index. */
1522 case PLUS:
1523 {
1524 rtx op0 = XEXP (addr, 0);
1525 rtx op1 = XEXP (addr, 1);
1526 enum rtx_code code0 = GET_CODE (op0);
1527
1528 switch (code0)
1529 {
cb0ca284
MH
1530 case REG:
1531 if (REG_P (op1))
1532 {
975ab131 1533 base = op0; /* Base + index. */
cb0ca284 1534 indx = op1;
bc46716b 1535 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
cb0ca284
MH
1536 {
1537 base = op1;
1538 indx = op0;
1539 }
1540 }
1541 else
1542 {
975ab131 1543 base = op0; /* Base + displacement. */
cb0ca284
MH
1544 disp = op1;
1545 }
1546 break;
1547
1548 default:
1549 return 0;
1550 }
1551 }
1552 break;
1553
50c33087
MH
1554 /* Direct addressing with DP register. */
1555 case LO_SUM:
1556 {
1557 rtx op0 = XEXP (addr, 0);
1558 rtx op1 = XEXP (addr, 1);
1559
1560 /* HImode and HFmode direct memory references aren't truly
1561 offsettable (consider case at end of data page). We
1562 probably get better code by loading a pointer and using an
1563 indirect memory reference. */
1564 if (mode == HImode || mode == HFmode)
1565 return 0;
1566
1567 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1568 return 0;
1569
1570 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1571 return 1;
1572
1573 if (GET_CODE (op1) == CONST)
5078f5eb 1574 return 1;
50c33087
MH
1575 return 0;
1576 }
1577 break;
1578
cb0ca284
MH
1579 /* Direct addressing with some work for the assembler... */
1580 case CONST:
cb0ca284 1581 /* Direct addressing. */
cb0ca284 1582 case LABEL_REF:
50c33087 1583 case SYMBOL_REF:
31445126
MH
1584 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1585 return 1;
50c33087 1586 /* These need to be converted to a LO_SUM (...).
31445126 1587 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
50c33087 1588 return 0;
cb0ca284
MH
1589
1590 /* Do not allow direct memory access to absolute addresses.
31445126 1591 This is more pain than it's worth, especially for the
cb0ca284
MH
1592 small memory model where we can't guarantee that
1593 this address is within the data page---we don't want
1594 to modify the DP register in the small memory model,
1595 even temporarily, since an interrupt can sneak in.... */
1596 case CONST_INT:
1597 return 0;
1598
1599 /* Indirect indirect addressing. */
1600 case MEM:
1601 return 0;
1602
1603 case CONST_DOUBLE:
c725bd79 1604 fatal_insn ("using CONST_DOUBLE for address", addr);
cb0ca284
MH
1605
1606 default:
1607 return 0;
1608 }
1609
1610 /* Validate the base register. */
1611 if (base)
1612 {
1613 /* Check that the address is offsettable for HImode and HFmode. */
1614 if (indx && (mode == HImode || mode == HFmode))
1615 return 0;
1616
1617 /* Handle DP based stuff. */
1618 if (REGNO (base) == DP_REGNO)
1619 return 1;
4ddb3ea6 1620 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
cb0ca284 1621 return 0;
bc46716b 1622 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
cb0ca284
MH
1623 return 0;
1624 }
1625
1626 /* Now validate the index register. */
1627 if (indx)
1628 {
1629 if (GET_CODE (indx) != REG)
1630 return 0;
4ddb3ea6 1631 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
cb0ca284 1632 return 0;
bc46716b 1633 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
cb0ca284
MH
1634 return 0;
1635 }
1636
1637 /* Validate displacement. */
1638 if (disp)
1639 {
1640 if (GET_CODE (disp) != CONST_INT)
1641 return 0;
1642 if (mode == HImode || mode == HFmode)
1643 {
1644 /* The offset displacement must be legitimate. */
4ddb3ea6 1645 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
cb0ca284
MH
1646 return 0;
1647 }
1648 else
1649 {
4ddb3ea6 1650 if (! IS_DISP8_CONST (INTVAL (disp)))
cb0ca284
MH
1651 return 0;
1652 }
1653 /* Can't add an index with a disp. */
1654 if (indx)
1655 return 0;
1656 }
1657 return 1;
1658}
1659
1660
1661rtx
1662c4x_legitimize_address (orig, mode)
d5e4ff48
MH
1663 rtx orig ATTRIBUTE_UNUSED;
1664 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 1665{
305902b0
MH
1666 if (GET_CODE (orig) == SYMBOL_REF
1667 || GET_CODE (orig) == LABEL_REF)
50c33087 1668 {
2718204c
MH
1669 if (mode == HImode || mode == HFmode)
1670 {
1671 /* We need to force the address into
1672 a register so that it is offsettable. */
1673 rtx addr_reg = gen_reg_rtx (Pmode);
1674 emit_move_insn (addr_reg, orig);
1675 return addr_reg;
1676 }
1677 else
1678 {
1679 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1680
1681 if (! TARGET_SMALL)
1682 emit_insn (gen_set_ldp (dp_reg, orig));
1683
1684 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1685 }
50c33087
MH
1686 }
1687
cb0ca284
MH
1688 return NULL_RTX;
1689}
1690
1691
1692/* Provide the costs of an addressing mode that contains ADDR.
1693 If ADDR is not a valid address, its cost is irrelevant.
1694 This is used in cse and loop optimisation to determine
1695 if it is worthwhile storing a common address into a register.
1696 Unfortunately, the C4x address cost depends on other operands. */
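/* For example (reading the cases below, and assuming IS_DISP1_CONST and
   IS_UINT5_CONST mean what their names suggest): (plus (reg AR0)
   (const_int 1)) costs 1, (plus (reg AR0) (const_int 20)) costs 2 on the
   C4x but 3 on the C3x, and bare SYMBOL_REFs are made expensive (10) so
   that they get pulled into registers.  */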
1697
1698int
1699c4x_address_cost (addr)
798f6e6f 1700 rtx addr;
cb0ca284
MH
1701{
1702 switch (GET_CODE (addr))
1703 {
1704 case REG:
1705 return 1;
1706
cb0ca284
MH
1707 case POST_INC:
1708 case POST_DEC:
1709 case PRE_INC:
1710 case PRE_DEC:
1711 return 1;
1712
50c33087 1713 /* These shouldn't be directly generated. */
cb0ca284
MH
1714 case SYMBOL_REF:
1715 case LABEL_REF:
50c33087
MH
1716 case CONST:
1717 return 10;
1718
1719 case LO_SUM:
1720 {
1721 rtx op1 = XEXP (addr, 1);
1722
1723 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1724 return TARGET_SMALL ? 3 : 4;
1725
1726 if (GET_CODE (op1) == CONST)
1727 {
1728 rtx offset = const0_rtx;
1729
1730 op1 = eliminate_constant_term (op1, &offset);
1731
1ac7a7f5 1732 /* ??? These costs need rethinking... */
50c33087
MH
1733 if (GET_CODE (op1) == LABEL_REF)
1734 return 3;
1735
1736 if (GET_CODE (op1) != SYMBOL_REF)
1737 return 4;
1738
1739 if (INTVAL (offset) == 0)
1740 return 3;
1741
1742 return 4;
1743 }
1744 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1745 }
1746 break;
cb0ca284
MH
1747
1748 case PLUS:
1749 {
1750 register rtx op0 = XEXP (addr, 0);
1751 register rtx op1 = XEXP (addr, 1);
1752
1753 if (GET_CODE (op0) != REG)
1754 break;
1755
1756 switch (GET_CODE (op1))
1757 {
1758 default:
1759 break;
1760
1761 case REG:
21034cc5
MH
1762 /* This cost for REG+REG must be greater than the cost
1763 for REG if we want autoincrement addressing modes. */
cb0ca284
MH
1764 return 2;
1765
1766 case CONST_INT:
798f6e6f
MH
1767 /* The following tries to improve GIV combination
1768 in strength reduce but appears not to help. */
1769 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1770 return 1;
1771
cb0ca284
MH
1772 if (IS_DISP1_CONST (INTVAL (op1)))
1773 return 1;
1774
4ddb3ea6 1775 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
cb0ca284
MH
1776 return 2;
1777
1778 return 3;
1779 }
1780 }
1781 default:
933cddd0 1782 break;
cb0ca284
MH
1783 }
1784
1785 return 4;
1786}
1787
1788
1789rtx
1790c4x_gen_compare_reg (code, x, y)
1791 enum rtx_code code;
1792 rtx x, y;
1793{
1794 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1795 rtx cc_reg;
1796
1797 if (mode == CC_NOOVmode
1798 && (code == LE || code == GE || code == LT || code == GT))
1799 return NULL_RTX;
1800
d5e4ff48
MH
1801 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1802 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1803 gen_rtx_COMPARE (mode, x, y)));
cb0ca284
MH
1804 return cc_reg;
1805}
1806
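/* For a hypothetical FORM of "b%0" (the real templates live in c4x.md),
   the string returned below is "b%0%#\t%l1" when the delay slot is
   filled, "b%0at\t%l1" for an annul-if-true slot and "b%0af\t%l1" for
   an annul-if-false slot.  */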
1807char *
4271f003 1808c4x_output_cbranch (form, seq)
8d485e2d 1809 const char *form;
4271f003 1810 rtx seq;
cb0ca284
MH
1811{
1812 int delayed = 0;
1813 int annultrue = 0;
1814 int annulfalse = 0;
1815 rtx delay;
1816 char *cp;
4271f003 1817 static char str[100];
cb0ca284
MH
1818
1819 if (final_sequence)
1820 {
1821 delay = XVECEXP (final_sequence, 0, 1);
4ddb3ea6
MH
1822 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1823 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
4271f003 1824 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
cb0ca284 1825 }
4271f003
MH
1826 strcpy (str, form);
1827 cp = &str [strlen (str)];
cb0ca284
MH
1828 if (delayed)
1829 {
1830 *cp++ = '%';
1831 *cp++ = '#';
1832 }
1833 if (annultrue)
1834 {
1835 *cp++ = 'a';
1836 *cp++ = 't';
1837 }
1838 if (annulfalse)
1839 {
1840 *cp++ = 'a';
1841 *cp++ = 'f';
1842 }
1843 *cp++ = '\t';
1844 *cp++ = '%';
1845 *cp++ = 'l';
1846 *cp++ = '1';
1847 *cp = 0;
1848 return str;
1849}
1850
cb0ca284
MH
1851void
1852c4x_print_operand (file, op, letter)
975ab131
MH
1853 FILE *file; /* File to write to. */
1854 rtx op; /* Operand to print. */
1855 int letter; /* %<letter> or 0. */
cb0ca284
MH
1856{
1857 rtx op1;
1858 enum rtx_code code;
1859
1860 switch (letter)
1861 {
975ab131 1862 case '#': /* Delayed. */
cb0ca284 1863 if (final_sequence)
761c70aa 1864 fprintf (file, "d");
cb0ca284
MH
1865 return;
1866 }
1867
1868 code = GET_CODE (op);
1869 switch (letter)
1870 {
975ab131 1871 case 'A': /* Direct address. */
dfafcb4d 1872 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
761c70aa 1873 fprintf (file, "@");
cb0ca284
MH
1874 break;
1875
975ab131 1876 case 'H': /* Sethi. */
50c33087
MH
1877 output_addr_const (file, op);
1878 return;
cb0ca284 1879
975ab131 1880 case 'I': /* Reversed condition. */
cb0ca284
MH
1881 code = reverse_condition (code);
1882 break;
1883
975ab131 1884 case 'L': /* Log 2 of constant. */
cb0ca284
MH
1885 if (code != CONST_INT)
1886 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1887 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1888 return;
1889
975ab131 1890 case 'N': /* Ones complement of small constant. */
cb0ca284
MH
1891 if (code != CONST_INT)
1892 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1893 fprintf (file, "%d", ~INTVAL (op));
1894 return;
1895
975ab131 1896 case 'K': /* Generate ldp(k) if direct address. */
4ddb3ea6 1897 if (! TARGET_SMALL
cb0ca284 1898 && code == MEM
1899 && GET_CODE (XEXP (op, 0)) == LO_SUM
1900 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1901 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1902 {
1903 op1 = XEXP (XEXP (op, 0), 1);
1904 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1905 {
761c70aa 1906 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
f6155fda 1907 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
761c70aa 1908 fprintf (file, "\n");
1909 }
1910 }
1911 return;
1912
975ab131
MH
1913 case 'M': /* Generate ldp(k) if direct address. */
1914 if (! TARGET_SMALL /* Only used in asm statements. */
cb0ca284
MH
1915 && code == MEM
1916 && (GET_CODE (XEXP (op, 0)) == CONST
1917 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1918 {
761c70aa 1919 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
cb0ca284 1920 output_address (XEXP (op, 0));
761c70aa 1921 fprintf (file, "\n\t");
cb0ca284
MH
1922 }
1923 return;
1924
975ab131 1925 case 'O': /* Offset address. */
cb0ca284
MH
1926 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1927 break;
1928 else if (code == MEM)
f6155fda 1929 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
cb0ca284
MH
1930 else if (code == REG)
1931 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1932 else
1933 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1934 return;
1935
975ab131 1936 case 'C': /* Call. */
50c33087
MH
1937 break;
1938
975ab131 1939 case 'U': /* Call/callu. */
b2e9a2fd 1940 if (code != SYMBOL_REF)
761c70aa 1941 fprintf (file, "u");
cb0ca284
MH
1942 return;
1943
1944 default:
1945 break;
1946 }
1947
1948 switch (code)
1949 {
1950 case REG:
1951 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1952 && ! TARGET_TI)
1953 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1954 else
1955 fprintf (file, "%s", reg_names[REGNO (op)]);
1956 break;
1957
1958 case MEM:
1959 output_address (XEXP (op, 0));
1960 break;
1961
1962 case CONST_DOUBLE:
1963 {
25009e02 1964 char str[64];
cb0ca284 1965
1966 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1967 sizeof (str), 0, 1);
1968 fprintf (file, "%s", str);
1969 }
1970 break;
1971
1972 case CONST_INT:
1973 fprintf (file, "%d", INTVAL (op));
1974 break;
1975
1976 case NE:
761c70aa 1977 fprintf (file, "ne");
cb0ca284
MH
1978 break;
1979
1980 case EQ:
761c70aa 1981 fprintf (file, "eq");
cb0ca284
MH
1982 break;
1983
1984 case GE:
761c70aa 1985 fprintf (file, "ge");
cb0ca284
MH
1986 break;
1987
1988 case GT:
761c70aa 1989 fprintf (file, "gt");
cb0ca284
MH
1990 break;
1991
1992 case LE:
761c70aa 1993 fprintf (file, "le");
cb0ca284
MH
1994 break;
1995
1996 case LT:
761c70aa 1997 fprintf (file, "lt");
cb0ca284
MH
1998 break;
1999
2000 case GEU:
761c70aa 2001 fprintf (file, "hs");
cb0ca284
MH
2002 break;
2003
2004 case GTU:
761c70aa 2005 fprintf (file, "hi");
cb0ca284
MH
2006 break;
2007
2008 case LEU:
761c70aa 2009 fprintf (file, "ls");
cb0ca284
MH
2010 break;
2011
2012 case LTU:
761c70aa 2013 fprintf (file, "lo");
cb0ca284
MH
2014 break;
2015
2016 case SYMBOL_REF:
2017 output_addr_const (file, op);
2018 break;
2019
2020 case CONST:
2021 output_addr_const (file, XEXP (op, 0));
2022 break;
2023
2024 case CODE_LABEL:
2025 break;
2026
2027 default:
2028 fatal_insn ("c4x_print_operand: Bad operand case", op);
2029 break;
2030 }
2031}
2032
2033
2034void
2035c4x_print_operand_address (file, addr)
2036 FILE *file;
2037 rtx addr;
2038{
2039 switch (GET_CODE (addr))
2040 {
2041 case REG:
2042 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2043 break;
2044
2045 case PRE_DEC:
2046 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2047 break;
2048
2049 case POST_INC:
2050 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2051 break;
2052
2053 case POST_MODIFY:
2054 {
2055 rtx op0 = XEXP (XEXP (addr, 1), 0);
2056 rtx op1 = XEXP (XEXP (addr, 1), 1);
2057
2058 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2059 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2060 reg_names[REGNO (op1)]);
2061 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2062 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2063 INTVAL (op1));
2064 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2065 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2066 -INTVAL (op1));
2067 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2068 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2069 reg_names[REGNO (op1)]);
2070 else
2071 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2072 }
2073 break;
2074
2075 case PRE_MODIFY:
2076 {
2077 rtx op0 = XEXP (XEXP (addr, 1), 0);
2078 rtx op1 = XEXP (XEXP (addr, 1), 1);
2079
2080 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2081 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2082 reg_names[REGNO (op1)]);
2083 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2084 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2085 INTVAL (op1));
2086 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2087 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2088 -INTVAL (op1));
2089 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2090 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2091 reg_names[REGNO (op1)]);
2092 else
2093 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2094 }
2095 break;
2096
2097 case PRE_INC:
2098 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2099 break;
2100
2101 case POST_DEC:
2102 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2103 break;
2104
2105 case PLUS: /* Indirect with displacement. */
2106 {
2107 rtx op0 = XEXP (addr, 0);
2108 rtx op1 = XEXP (addr, 1);
cb0ca284 2109
50c33087 2110 if (REG_P (op0))
cb0ca284 2111 {
50c33087 2112 if (REG_P (op1))
cb0ca284 2113 {
bc46716b 2114 if (IS_INDEX_REG (op0))
cb0ca284
MH
2115 {
2116 fprintf (file, "*+%s(%s)",
2117 reg_names[REGNO (op1)],
975ab131 2118 reg_names[REGNO (op0)]); /* Index + base. */
cb0ca284
MH
2119 }
2120 else
2121 {
2122 fprintf (file, "*+%s(%s)",
2123 reg_names[REGNO (op0)],
975ab131 2124 reg_names[REGNO (op1)]); /* Base + index. */
cb0ca284
MH
2125 }
2126 }
2127 else if (INTVAL (op1) < 0)
2128 {
2129 fprintf (file, "*-%s(%d)",
2130 reg_names[REGNO (op0)],
975ab131 2131 -INTVAL (op1)); /* Base - displacement. */
cb0ca284
MH
2132 }
2133 else
2134 {
2135 fprintf (file, "*+%s(%d)",
2136 reg_names[REGNO (op0)],
975ab131 2137 INTVAL (op1)); /* Base + displacement. */
cb0ca284
MH
2138 }
2139 }
2140 else
2141 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2142 }
2143 break;
2144
2145 case LO_SUM:
2146 {
2147 rtx op0 = XEXP (addr, 0);
2148 rtx op1 = XEXP (addr, 1);
2149
2150 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2151 c4x_print_operand_address (file, op1);
2152 else
2153 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
cb0ca284
MH
2154 }
2155 break;
2156
2157 case CONST:
2158 case SYMBOL_REF:
2159 case LABEL_REF:
50c33087 2160 fprintf (file, "@");
cb0ca284 2161 output_addr_const (file, addr);
cb0ca284
MH
2162 break;
2163
2164 /* We shouldn't access CONST_INT addresses. */
2165 case CONST_INT:
2166
2167 default:
2168 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2169 break;
2170 }
2171}
2172
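/* Summary of the assembler syntax emitted by c4x_print_operand_address
   above (illustrative):
     (reg AR0)                        ->  *ar0
     (pre_dec (reg AR0))              ->  *--ar0
     (post_inc (reg AR0))             ->  *ar0++
     (plus (reg AR0) (const_int 5))   ->  *+ar0(5)
     (plus (reg AR0) (reg IR0))       ->  *+ar0(ir0)
   while bare symbols and (lo_sum (reg DP) sym) print with a leading
   `@'.  */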
975ab131 2173
50c33087
MH
2174/* Return nonzero if the floating point operand will fit
2175 in the immediate field. */
975ab131 2176
cb0ca284 2177static int
50c33087
MH
2178c4x_immed_float_p (op)
2179 rtx op;
cb0ca284
MH
2180{
2181 long convval[2];
2182 int exponent;
2183 REAL_VALUE_TYPE r;
2184
50c33087
MH
2185 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2186 if (GET_MODE (op) == HFmode)
cb0ca284
MH
2187 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2188 else
2189 {
2190 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2191 convval[1] = 0;
2192 }
2193
975ab131 2194 /* Sign extend exponent. */
2195 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2196 if (exponent == -128)
975ab131 2197 return 1; /* 0.0 */
cb0ca284 2198 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
975ab131
MH
2199 return 0; /* Precision doesn't fit. */
2200 return (exponent <= 7) /* Positive exp. */
2201 && (exponent >= -7); /* Negative exp. */
cb0ca284
MH
2202}
2203
975ab131 2204
 2205	/* The last instruction in a repeat block cannot be a Bcond, DBcond,
 2206	   CALL, CALLcond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
 2207	
 2208	   None of the last four instructions from the bottom of the block can
 2209	   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
 2210	   BcondAT or RETIcondD.
 2211	
 2212	   This routine scans the four previous insns for a jump insn, and if
 2213	   one is found, returns 1 so that we insert a NOP instruction.  This
 2214	   simple-minded strategy may add a NOP when it is not required, say
 2215	   when there is a JUMP_INSN near the end of the block that doesn't
 2216	   get converted into a delayed branch.
 2217	
 2218	   Note that we cannot have a call insn, since we don't generate
 2219	   repeat loops with calls in them (although we could, there
 2220	   would be no benefit).
 2221	
 2222	   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */
2223
2224int
2225c4x_rptb_nop_p (insn)
2226 rtx insn;
2227{
d5e4ff48 2228 rtx start_label;
2229 int i;
2230
2231 /* Extract the start label from the jump pattern (rptb_end). */
2232 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2233
cb0ca284
MH
2234 /* If there is a label at the end of the loop we must insert
2235 a NOP. */
50c33087
MH
2236 do {
2237 insn = previous_insn (insn);
2238 } while (GET_CODE (insn) == NOTE
2239 || GET_CODE (insn) == USE
2240 || GET_CODE (insn) == CLOBBER);
cb0ca284
MH
2241 if (GET_CODE (insn) == CODE_LABEL)
2242 return 1;
2243
2244 for (i = 0; i < 4; i++)
2245 {
2246 /* Search back for prev non-note and non-label insn. */
2247 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2248 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2249 {
2250 if (insn == start_label)
2251 return i == 0;
2252
50c33087 2253 insn = previous_insn (insn);
d5e4ff48 2254 };
cb0ca284 2255
d5e4ff48 2256 /* If we have a jump instruction we should insert a NOP. If we
cb0ca284 2257	 hit the repeat block top we should only insert a NOP if the loop
1ac7a7f5 2258 is empty. */
cb0ca284
MH
2259 if (GET_CODE (insn) == JUMP_INSN)
2260 return 1;
50c33087 2261 insn = previous_insn (insn);
cb0ca284
MH
2262 }
2263 return 0;
2264}
2265
2266
933cddd0
MH
2267/* The C4x looping instruction needs to be emitted at the top of the
2268 loop. Emitting the true RTL for a looping instruction at the top of
2269 the loop can cause problems with flow analysis. So instead, a dummy
2270 doloop insn is emitted at the end of the loop. This routine checks
2271 for the presence of this doloop insn and then searches back to the
2272 top of the loop, where it inserts the true looping insn (provided
2273 there are no instructions in the loop which would cause problems).
2274 Any additional labels can be emitted at this point. In addition, if
2275 the desired loop count register was not allocated, this routine does
2276 nothing.
2277
 2278	   Before we can create a repeat block looping instruction we have to
 2279	   verify that there are no jumps out of the loop and no jumps from
 2280	   outside into the loop.  This can happen after the basic block
 2281	   reordering pass.  The C4x cpu cannot handle this. */
2282
2283static int
2284c4x_label_ref_used_p (x, code_label)
2285 rtx x, code_label;
2286{
2287 enum rtx_code code;
2288 int i, j;
2289 const char *fmt;
2290
2291 if (x == 0)
2292 return 0;
2293
2294 code = GET_CODE (x);
2295 if (code == LABEL_REF)
2296 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2297
2298 fmt = GET_RTX_FORMAT (code);
2299 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2300 {
2301 if (fmt[i] == 'e')
2302 {
2303 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2304 return 1;
2305 }
2306 else if (fmt[i] == 'E')
2307 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2308 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2309 return 1;
2310 }
2311 return 0;
2312}
2313
2314
2315static int
2316c4x_rptb_valid_p (insn, start_label)
2317 rtx insn, start_label;
2318{
2319 rtx end = insn;
2320 rtx start;
2321 rtx tmp;
2322
2323 /* Find the start label. */
2324 for (; insn; insn = PREV_INSN (insn))
2325 if (insn == start_label)
2326 break;
2327
 2328	  /* If the start label was not found then we cannot use a rptb or
 2329	     rpts.  The label was probably moved by the basic block reordering pass. */
2330 if (! insn)
2331 return 0;
2332
2333 start = insn;
2334 /* If any jump jumps inside this block then we must fail. */
2335 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2336 {
2337 if (GET_CODE (insn) == CODE_LABEL)
2338 {
2339 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2340 if (GET_CODE (tmp) == JUMP_INSN
2341 && c4x_label_ref_used_p (tmp, insn))
2342 return 0;
2343 }
2344 }
2345 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2346 {
2347 if (GET_CODE (insn) == CODE_LABEL)
2348 {
2349 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2350 if (GET_CODE (tmp) == JUMP_INSN
2351 && c4x_label_ref_used_p (tmp, insn))
2352 return 0;
2353 }
2354 }
2355 /* If any jump jumps outside this block then we must fail. */
2356 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2357 {
2358 if (GET_CODE (insn) == CODE_LABEL)
2359 {
2360 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2361 if (GET_CODE (tmp) == JUMP_INSN
2362 && c4x_label_ref_used_p (tmp, insn))
2363 return 0;
2364 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2365 if (GET_CODE (tmp) == JUMP_INSN
2366 && c4x_label_ref_used_p (tmp, insn))
2367 return 0;
2368 }
2369 }
2370
2371 /* All checks OK. */
2372 return 1;
2373}
2374
975ab131 2375
d5e4ff48
MH
2376void
2377c4x_rptb_insert (insn)
2378 rtx insn;
2379{
2380 rtx end_label;
2381 rtx start_label;
b864825e 2382 rtx new_start_label;
4271f003
MH
2383 rtx count_reg;
2384
2385 /* If the count register has not been allocated to RC, say if
2386 there is a movstr pattern in the loop, then do not insert a
2387 RPTB instruction. Instead we emit a decrement and branch
2388 at the end of the loop. */
2389 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2390 if (REGNO (count_reg) != RC_REGNO)
2391 return;
2392
d5e4ff48
MH
2393 /* Extract the start label from the jump pattern (rptb_end). */
2394 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
4271f003 2395
0bbcfbaf
HB
2396 if (! c4x_rptb_valid_p (insn, start_label))
2397 {
 2398	      /* We cannot use the rptb insn.  Replace it so reorg can use
2399 the delay slots of the jump insn. */
2400 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2401 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2402 emit_insn_before (gen_bge (start_label), insn);
2403 LABEL_NUSES (start_label)++;
2404 delete_insn (insn);
2405 return;
2406 }
2407
d5e4ff48 2408 end_label = gen_label_rtx ();
b864825e 2409 LABEL_NUSES (end_label)++;
d5e4ff48
MH
2410 emit_label_after (end_label, insn);
2411
b864825e
MH
2412 new_start_label = gen_label_rtx ();
2413 LABEL_NUSES (new_start_label)++;
2414
d5e4ff48 2415 for (; insn; insn = PREV_INSN (insn))
b864825e
MH
2416 {
2417 if (insn == start_label)
2418 break;
2419 if (GET_CODE (insn) == JUMP_INSN &&
2420 JUMP_LABEL (insn) == start_label)
2421 redirect_jump (insn, new_start_label, 0);
2422 }
4ddb3ea6 2423 if (! insn)
d5e4ff48
MH
2424 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2425
b864825e
MH
2426 emit_label_after (new_start_label, insn);
2427
3b5e8a16 2428 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
b864825e 2429 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
3b5e8a16 2430 else
b864825e
MH
2431 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2432 if (LABEL_NUSES (start_label) == 0)
2433 delete_insn (start_label);
2434}
2435
50c33087
MH
2436
 2437	/* This function is a C4x special, called immediately before delayed
 2438	   branch scheduling.  We fix up RPTB style loops that didn't get RC
 2439	   allocated as the loop counter. */
cb0ca284
MH
2440
2441void
2442c4x_process_after_reload (first)
2443 rtx first;
2444{
cb0ca284 2445 rtx insn;
cb0ca284
MH
2446
2447 for (insn = first; insn; insn = NEXT_INSN (insn))
2448 {
2449 /* Look for insn. */
2c3c49de 2450 if (INSN_P (insn))
cb0ca284 2451 {
cb0ca284 2452 int insn_code_number;
41387ffd 2453 rtx old;
cb0ca284
MH
2454
2455 insn_code_number = recog_memoized (insn);
2456
2457 if (insn_code_number < 0)
2458 continue;
2459
d5e4ff48 2460 /* Insert the RTX for RPTB at the top of the loop
1ac7a7f5 2461 and a label at the end of the loop. */
d5e4ff48
MH
2462 if (insn_code_number == CODE_FOR_rptb_end)
2463 c4x_rptb_insert(insn);
2464
41387ffd
MH
2465 /* We need to split the insn here. Otherwise the calls to
2466 force_const_mem will not work for load_immed_address. */
2467 old = insn;
34de028f 2468
2469 /* Don't split the insn if it has been deleted. */
2470 if (! INSN_DELETED_P (old))
2471 insn = try_split (PATTERN(old), old, 1);
cb0ca284 2472
 2473	  /* When not optimizing, the old insn will still be left around
2474 with only the 'deleted' bit set. Transform it into a note
2475 to avoid confusion of subsequent processing. */
2476 if (INSN_DELETED_P (old))
2477 {
2478 PUT_CODE (old, NOTE);
2479 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2480 NOTE_SOURCE_FILE (old) = 0;
cb0ca284 2481 }
cb0ca284
MH
2482 }
2483 }
2484}
2485
2486
2487static int
2488c4x_a_register (op)
2489 rtx op;
2490{
bc46716b 2491 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2492}
2493
2494
2495static int
2496c4x_x_register (op)
2497 rtx op;
2498{
bc46716b 2499 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
cb0ca284
MH
2500}
2501
2502
2503static int
50c33087 2504c4x_immed_int_constant (op)
cb0ca284
MH
2505 rtx op;
2506{
2507 if (GET_CODE (op) != CONST_INT)
2508 return 0;
50c33087 2509
cb0ca284
MH
2510 return GET_MODE (op) == VOIDmode
2511 || GET_MODE_CLASS (op) == MODE_INT
2512 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2513}
2514
2515
2516static int
50c33087 2517c4x_immed_float_constant (op)
cb0ca284
MH
2518 rtx op;
2519{
2520 if (GET_CODE (op) != CONST_DOUBLE)
2521 return 0;
50c33087 2522
2523 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2524 present this only means that a MEM rtx has been generated. It does
2525 not mean the rtx is really in memory. */
50c33087 2526
cb0ca284
MH
2527 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2528}
2529
2530
483dd5be
MH
2531int
2532c4x_shiftable_constant (op)
2533 rtx op;
2534{
2535 int i;
2536 int mask;
2537 int val = INTVAL (op);
2538
2539 for (i = 0; i < 16; i++)
2540 {
2541 if (val & (1 << i))
2542 break;
2543 }
2544 mask = ((0xffff >> i) << 16) | 0xffff;
2545 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2546 : (val >> i) & mask))
2547 return i;
2548 return -1;
2549}
2550
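/* Worked example for c4x_shiftable_constant (illustrative): for
   op = 0x120000 no bit below 16 is set, so i becomes 16 and
   (0x120000 >> 16) = 0x12 fits in 16 bits; the routine returns 16,
   meaning the constant can be built by loading 0x12 and shifting it
   left by 16.  For op = 0x12345 the lowest set bit is bit 0 and
   0x12345 does not fit in a signed 16-bit field, so -1 is returned.  */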
2551
cb0ca284
MH
2552int
2553c4x_H_constant (op)
2554 rtx op;
2555{
50c33087 2556 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
cb0ca284
MH
2557}
2558
2559
2560int
2561c4x_I_constant (op)
2562 rtx op;
2563{
50c33087 2564 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
cb0ca284
MH
2565}
2566
2567
2568int
2569c4x_J_constant (op)
2570 rtx op;
2571{
2572 if (TARGET_C3X)
2573 return 0;
50c33087 2574 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
cb0ca284
MH
2575}
2576
2577
2578static int
2579c4x_K_constant (op)
2580 rtx op;
2581{
305902b0 2582 if (TARGET_C3X || ! c4x_immed_int_constant (op))
cb0ca284 2583 return 0;
305902b0 2584 return IS_INT5_CONST (INTVAL (op));
cb0ca284
MH
2585}
2586
2587
2588int
2589c4x_L_constant (op)
2590 rtx op;
2591{
50c33087 2592 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2593}
2594
2595
2596static int
2597c4x_N_constant (op)
2598 rtx op;
2599{
50c33087 2600 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2601}
2602
2603
2604static int
2605c4x_O_constant (op)
2606 rtx op;
2607{
50c33087 2608 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
cb0ca284
MH
2609}
2610
2611
2612/* The constraints do not have to check the register class,
2613 except when needed to discriminate between the constraints.
2614 The operand has been checked by the predicates to be valid. */
2615
2616/* ARx + 9-bit signed const or IRn
 2617	   *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2618 We don't include the pre/post inc/dec forms here since
2619 they are handled by the <> constraints. */
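/* Illustrative examples of addresses accepted by this constraint:
     (mem:QI (reg:QI AR0))                              *ar0
     (mem:QI (plus:QI (reg:QI AR1) (const_int 200)))    *+ar1(200)
     (mem:QI (plus:QI (reg:QI AR2) (reg:QI IR0)))       *+ar2(ir0)
   A constant displacement outside -255..255 is rejected.  */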
2620
2621int
2622c4x_Q_constraint (op)
2623 rtx op;
2624{
2625 enum machine_mode mode = GET_MODE (op);
2626
2627 if (GET_CODE (op) != MEM)
2628 return 0;
2629 op = XEXP (op, 0);
2630 switch (GET_CODE (op))
2631 {
2632 case REG:
2633 return 1;
2634
2635 case PLUS:
2636 {
2637 rtx op0 = XEXP (op, 0);
2638 rtx op1 = XEXP (op, 1);
2639
4ddb3ea6 2640 if (! REG_P (op0))
cb0ca284
MH
2641 return 0;
2642
2643 if (REG_P (op1))
2644 return 1;
2645
2646 if (GET_CODE (op1) != CONST_INT)
2647 return 0;
2648
2649 /* HImode and HFmode must be offsettable. */
2650 if (mode == HImode || mode == HFmode)
2651 return IS_DISP8_OFF_CONST (INTVAL (op1));
2652
2653 return IS_DISP8_CONST (INTVAL (op1));
2654 }
2655 break;
50c33087 2656
cb0ca284
MH
2657 default:
2658 break;
2659 }
2660 return 0;
2661}
2662
2663
2664/* ARx + 5-bit unsigned const
975ab131 2665 *ARx, *+ARx(n) for n < 32. */
cb0ca284
MH
2666
2667int
2668c4x_R_constraint (op)
2669 rtx op;
2670{
2671 enum machine_mode mode = GET_MODE (op);
2672
2673 if (TARGET_C3X)
2674 return 0;
2675 if (GET_CODE (op) != MEM)
2676 return 0;
2677 op = XEXP (op, 0);
2678 switch (GET_CODE (op))
2679 {
2680 case REG:
2681 return 1;
2682
2683 case PLUS:
2684 {
2685 rtx op0 = XEXP (op, 0);
2686 rtx op1 = XEXP (op, 1);
2687
4ddb3ea6 2688 if (! REG_P (op0))
cb0ca284
MH
2689 return 0;
2690
2691 if (GET_CODE (op1) != CONST_INT)
2692 return 0;
2693
2694 /* HImode and HFmode must be offsettable. */
2695 if (mode == HImode || mode == HFmode)
2696 return IS_UINT5_CONST (INTVAL (op1) + 1);
2697
2698 return IS_UINT5_CONST (INTVAL (op1));
2699 }
2700 break;
933cddd0 2701
cb0ca284
MH
2702 default:
2703 break;
2704 }
2705 return 0;
2706}
2707
2708
2709static int
2710c4x_R_indirect (op)
2711 rtx op;
2712{
2713 enum machine_mode mode = GET_MODE (op);
2714
2715 if (TARGET_C3X || GET_CODE (op) != MEM)
2716 return 0;
2717
2718 op = XEXP (op, 0);
2719 switch (GET_CODE (op))
2720 {
2721 case REG:
bc46716b 2722 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2723
2724 case PLUS:
2725 {
2726 rtx op0 = XEXP (op, 0);
2727 rtx op1 = XEXP (op, 1);
2728
2729 /* HImode and HFmode must be offsettable. */
2730 if (mode == HImode || mode == HFmode)
bc46716b 2731 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2732 && GET_CODE (op1) == CONST_INT
2733 && IS_UINT5_CONST (INTVAL (op1) + 1);
2734
2735 return REG_P (op0)
bc46716b 2736 && IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2737 && GET_CODE (op1) == CONST_INT
2738 && IS_UINT5_CONST (INTVAL (op1));
2739 }
2740 break;
2741
2742 default:
2743 break;
2744 }
2745 return 0;
2746}
2747
2748
2749/* ARx + 1-bit unsigned const or IRn
 2750	   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2751 We don't include the pre/post inc/dec forms here since
2752 they are handled by the <> constraints. */
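/* For example (illustrative): *ar0, *+ar0(1) and *+ar0(ir0) satisfy
   this constraint, whereas *+ar0(5) does not (that form is only
   matched by the Q constraint above).  */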
2753
2754int
2755c4x_S_constraint (op)
2756 rtx op;
2757{
2758 enum machine_mode mode = GET_MODE (op);
2759 if (GET_CODE (op) != MEM)
2760 return 0;
2761 op = XEXP (op, 0);
2762 switch (GET_CODE (op))
2763 {
2764 case REG:
2765 return 1;
2766
2767 case PRE_MODIFY:
2768 case POST_MODIFY:
2769 {
2770 rtx op0 = XEXP (op, 0);
2771 rtx op1 = XEXP (op, 1);
2772
2773 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2774 || (op0 != XEXP (op1, 0)))
2775 return 0;
2776
2777 op0 = XEXP (op1, 0);
2778 op1 = XEXP (op1, 1);
2779 return REG_P (op0) && REG_P (op1);
975ab131 2780 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2781 should not be generated. */
2782 }
2783 break;
2784
2785 case PLUS:
2786 {
2787 rtx op0 = XEXP (op, 0);
2788 rtx op1 = XEXP (op, 1);
2789
2790 if (!REG_P (op0))
2791 return 0;
2792
2793 if (REG_P (op1))
2794 return 1;
2795
dfb31eec 2796 if (GET_CODE (op1) != CONST_INT)
cb0ca284
MH
2797 return 0;
2798
2799 /* HImode and HFmode must be offsettable. */
2800 if (mode == HImode || mode == HFmode)
2801 return IS_DISP1_OFF_CONST (INTVAL (op1));
2802
2803 return IS_DISP1_CONST (INTVAL (op1));
2804 }
2805 break;
933cddd0 2806
cb0ca284
MH
2807 default:
2808 break;
2809 }
2810 return 0;
2811}
2812
2813
2814static int
2815c4x_S_indirect (op)
2816 rtx op;
2817{
2818 enum machine_mode mode = GET_MODE (op);
2819 if (GET_CODE (op) != MEM)
2820 return 0;
2821
2822 op = XEXP (op, 0);
2823 switch (GET_CODE (op))
2824 {
2825 case PRE_DEC:
2826 case POST_DEC:
2827 if (mode != QImode && mode != QFmode)
2828 return 0;
2829 case PRE_INC:
2830 case POST_INC:
2831 op = XEXP (op, 0);
2832
2833 case REG:
bc46716b 2834 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2835
2836 case PRE_MODIFY:
2837 case POST_MODIFY:
2838 {
2839 rtx op0 = XEXP (op, 0);
2840 rtx op1 = XEXP (op, 1);
2841
2842 if (mode != QImode && mode != QFmode)
2843 return 0;
2844
2845 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2846 || (op0 != XEXP (op1, 0)))
2847 return 0;
2848
2849 op0 = XEXP (op1, 0);
2850 op1 = XEXP (op1, 1);
bc46716b
MH
2851 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2852 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
975ab131 2853 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2854 should not be generated. */
2855 }
2856
2857 case PLUS:
2858 {
2859 rtx op0 = XEXP (op, 0);
2860 rtx op1 = XEXP (op, 1);
2861
2862 if (REG_P (op0))
2863 {
2864 /* HImode and HFmode must be offsettable. */
2865 if (mode == HImode || mode == HFmode)
bc46716b 2866 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2867 && GET_CODE (op1) == CONST_INT
2868 && IS_DISP1_OFF_CONST (INTVAL (op1));
2869
2870 if (REG_P (op1))
bc46716b
MH
2871 return (IS_INDEX_OR_PSEUDO_REG (op1)
2872 && IS_ADDR_OR_PSEUDO_REG (op0))
2873 || (IS_ADDR_OR_PSEUDO_REG (op1)
2874 && IS_INDEX_OR_PSEUDO_REG (op0));
cb0ca284 2875
bc46716b 2876 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2877 && GET_CODE (op1) == CONST_INT
2878 && IS_DISP1_CONST (INTVAL (op1));
2879 }
2880 }
2881 break;
2882
2883 default:
2884 break;
2885 }
2886 return 0;
2887}
2888
2889
50c33087 2890/* Direct memory operand. */
cb0ca284
MH
2891
2892int
2893c4x_T_constraint (op)
2894 rtx op;
2895{
2896 if (GET_CODE (op) != MEM)
2897 return 0;
2898 op = XEXP (op, 0);
2899
50c33087 2900 if (GET_CODE (op) != LO_SUM)
cb0ca284 2901 {
50c33087
MH
2902 /* Allow call operands. */
2903 return GET_CODE (op) == SYMBOL_REF
2904 && GET_MODE (op) == Pmode
2905 && SYMBOL_REF_FLAG (op);
cb0ca284
MH
2906 }
2907
50c33087
MH
2908 /* HImode and HFmode are not offsettable. */
 2909	  if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2910 return 0;
2911
2912 if ((GET_CODE (XEXP (op, 0)) == REG)
2913 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2914 return c4x_U_constraint (XEXP (op, 1));
2915
2916 return 0;
2917}
2918
2919
2920/* Symbolic operand. */
2921
2922int
2923c4x_U_constraint (op)
2924 rtx op;
2925{
cb0ca284 2926 /* Don't allow direct addressing to an arbitrary constant. */
5078f5eb
HB
2927 return GET_CODE (op) == CONST
2928 || GET_CODE (op) == SYMBOL_REF
2929 || GET_CODE (op) == LABEL_REF;
cb0ca284
MH
2930}
2931
2932
2933int
2934c4x_autoinc_operand (op, mode)
2935 rtx op;
d5e4ff48 2936 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
2937{
2938 if (GET_CODE (op) == MEM)
2939 {
2940 enum rtx_code code = GET_CODE (XEXP (op, 0));
2941
2942 if (code == PRE_INC
2943 || code == PRE_DEC
2944 || code == POST_INC
2945 || code == POST_DEC
2946 || code == PRE_MODIFY
2947 || code == POST_MODIFY
2948 )
2949 return 1;
2950 }
2951 return 0;
2952}
2953
2954
2955/* Match any operand. */
2956
2957int
2958any_operand (op, mode)
d5e4ff48
MH
2959 register rtx op ATTRIBUTE_UNUSED;
2960 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
2961{
2962 return 1;
2963}
2964
2965
2966/* Nonzero if OP is a floating point value with value 0.0. */
2967
2968int
798f6e6f 2969fp_zero_operand (op, mode)
cb0ca284 2970 rtx op;
798f6e6f 2971 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
2972{
2973 REAL_VALUE_TYPE r;
2974
f9ef1f02
MH
2975 if (GET_CODE (op) != CONST_DOUBLE)
2976 return 0;
cb0ca284
MH
2977 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2978 return REAL_VALUES_EQUAL (r, dconst0);
2979}
2980
2981
2982int
2983const_operand (op, mode)
2984 register rtx op;
2985 register enum machine_mode mode;
2986{
2987 switch (mode)
2988 {
2989 case QFmode:
2990 case HFmode:
2991 if (GET_CODE (op) != CONST_DOUBLE
2992 || GET_MODE (op) != mode
2993 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2994 return 0;
2995
2996 return c4x_immed_float_p (op);
2997
2998#if Pmode != QImode
2999 case Pmode:
3000#endif
3001 case QImode:
ee5332b8
RH
3002 if (GET_CODE (op) == CONSTANT_P_RTX)
3003 return 1;
3004
cb0ca284
MH
3005 if (GET_CODE (op) != CONST_INT
3006 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3007 || GET_MODE_CLASS (mode) != MODE_INT)
3008 return 0;
3009
3010 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3011
3012 case HImode:
3013 return 0;
3014
3015 default:
3016 return 0;
3017 }
3018}
3019
3020
3021int
3022stik_const_operand (op, mode)
3023 rtx op;
d5e4ff48 3024 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3025{
3026 return c4x_K_constant (op);
3027}
3028
3029
3030int
3031not_const_operand (op, mode)
3032 rtx op;
d5e4ff48 3033 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3034{
3035 return c4x_N_constant (op);
3036}
3037
3038
3039int
3040reg_operand (op, mode)
3041 rtx op;
3042 enum machine_mode mode;
3043{
ebcc44f4
MH
3044 if (GET_CODE (op) == SUBREG
3045 && GET_MODE (op) == QFmode)
3046 return 0;
cb0ca284
MH
3047 return register_operand (op, mode);
3048}
3049
50c33087 3050
ebcc44f4
MH
3051int
3052mixed_subreg_operand (op, mode)
3053 rtx op;
483dd5be 3054 enum machine_mode mode ATTRIBUTE_UNUSED;
ebcc44f4
MH
3055{
 3056	  /* Allow (subreg:HF (reg:HI)) that can be generated for a union of an
3057 int and a long double. */
3058 if (GET_CODE (op) == SUBREG
3059 && (GET_MODE (op) == QFmode)
3060 && (GET_MODE (SUBREG_REG (op)) == QImode
3061 || GET_MODE (SUBREG_REG (op)) == HImode))
3062 return 1;
3063 return 0;
3064}
3065
3066
cb0ca284
MH
3067int
3068reg_imm_operand (op, mode)
3069 rtx op;
d5e4ff48 3070 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3071{
3072 if (REG_P (op) || CONSTANT_P (op))
3073 return 1;
3074 return 0;
3075}
3076
50c33087 3077
cb0ca284
MH
3078int
3079not_modify_reg (op, mode)
3080 rtx op;
d5e4ff48 3081 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3082{
3083 if (REG_P (op) || CONSTANT_P (op))
3084 return 1;
3085 if (GET_CODE (op) != MEM)
3086 return 0;
3087 op = XEXP (op, 0);
3088 switch (GET_CODE (op))
3089 {
3090 case REG:
3091 return 1;
3092
3093 case PLUS:
3094 {
3095 rtx op0 = XEXP (op, 0);
3096 rtx op1 = XEXP (op, 1);
3097
4ddb3ea6 3098 if (! REG_P (op0))
cb0ca284
MH
3099 return 0;
3100
3101 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3102 return 1;
3103 }
50c33087
MH
3104
3105 case LO_SUM:
3106 {
3107 rtx op0 = XEXP (op, 0);
3108
3109 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3110 return 1;
3111 }
3112 break;
3113
cb0ca284
MH
3114 case CONST:
3115 case SYMBOL_REF:
3116 case LABEL_REF:
3117 return 1;
933cddd0 3118
cb0ca284
MH
3119 default:
3120 break;
3121 }
3122 return 0;
3123}
3124
50c33087 3125
cb0ca284
MH
3126int
3127not_rc_reg (op, mode)
3128 rtx op;
d5e4ff48 3129 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3130{
3131 if (REG_P (op) && REGNO (op) == RC_REGNO)
3132 return 0;
3133 return 1;
3134}
3135
50c33087 3136
cb0ca284
MH
3137/* Extended precision register R0-R1. */
3138
3139int
3140r0r1_reg_operand (op, mode)
3141 rtx op;
3142 enum machine_mode mode;
3143{
ebcc44f4 3144 if (! reg_operand (op, mode))
cb0ca284
MH
3145 return 0;
3146 if (GET_CODE (op) == SUBREG)
3147 op = SUBREG_REG (op);
bc46716b 3148 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
cb0ca284
MH
3149}
3150
3151
3152/* Extended precision register R2-R3. */
3153
3154int
3155r2r3_reg_operand (op, mode)
3156 rtx op;
3157 enum machine_mode mode;
3158{
ebcc44f4 3159 if (! reg_operand (op, mode))
cb0ca284
MH
3160 return 0;
3161 if (GET_CODE (op) == SUBREG)
3162 op = SUBREG_REG (op);
bc46716b 3163 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
cb0ca284
MH
3164}
3165
3166
3167/* Low extended precision register R0-R7. */
3168
3169int
3170ext_low_reg_operand (op, mode)
3171 rtx op;
3172 enum machine_mode mode;
3173{
ebcc44f4 3174 if (! reg_operand (op, mode))
cb0ca284
MH
3175 return 0;
3176 if (GET_CODE (op) == SUBREG)
3177 op = SUBREG_REG (op);
bc46716b 3178 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
cb0ca284
MH
3179}
3180
3181
3182/* Extended precision register. */
3183
3184int
3185ext_reg_operand (op, mode)
3186 rtx op;
3187 enum machine_mode mode;
3188{
ebcc44f4 3189 if (! reg_operand (op, mode))
cb0ca284
MH
3190 return 0;
3191 if (GET_CODE (op) == SUBREG)
3192 op = SUBREG_REG (op);
4ddb3ea6 3193 if (! REG_P (op))
cb0ca284 3194 return 0;
bc46716b 3195 return IS_EXT_OR_PSEUDO_REG (op);
cb0ca284
MH
3196}
3197
3198
3199/* Standard precision register. */
3200
3201int
3202std_reg_operand (op, mode)
3203 rtx op;
3204 enum machine_mode mode;
3205{
ebcc44f4 3206 if (! reg_operand (op, mode))
cb0ca284
MH
3207 return 0;
3208 if (GET_CODE (op) == SUBREG)
3209 op = SUBREG_REG (op);
bc46716b 3210 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
cb0ca284
MH
3211}
3212
ed3614cd
HB
3213/* Standard precision or normal register. */
3214
3215int
3216std_or_reg_operand (op, mode)
3217 rtx op;
3218 enum machine_mode mode;
3219{
3220 if (reload_in_progress)
3221 return std_reg_operand (op, mode);
3222 return reg_operand (op, mode);
3223}
3224
cb0ca284
MH
3225/* Address register. */
3226
3227int
3228addr_reg_operand (op, mode)
3229 rtx op;
3230 enum machine_mode mode;
3231{
ebcc44f4 3232 if (! reg_operand (op, mode))
cb0ca284
MH
3233 return 0;
3234 return c4x_a_register (op);
3235}
3236
3237
3238/* Index register. */
3239
3240int
3241index_reg_operand (op, mode)
3242 rtx op;
3243 enum machine_mode mode;
3244{
ebcc44f4 3245 if (! reg_operand (op, mode))
cb0ca284
MH
3246 return 0;
3247 if (GET_CODE (op) == SUBREG)
3248 op = SUBREG_REG (op);
3249 return c4x_x_register (op);
3250}
3251
3252
3253/* DP register. */
3254
3255int
3256dp_reg_operand (op, mode)
3257 rtx op;
d5e4ff48 3258 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3259{
bc46716b 3260 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
cb0ca284
MH
3261}
3262
3263
3264/* SP register. */
3265
3266int
3267sp_reg_operand (op, mode)
3268 rtx op;
d5e4ff48 3269 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3270{
bc46716b 3271 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
cb0ca284
MH
3272}
3273
3274
3275/* ST register. */
3276
3277int
3278st_reg_operand (op, mode)
3279 register rtx op;
d5e4ff48 3280 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3281{
bc46716b 3282 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
cb0ca284
MH
3283}
3284
3285
d5e4ff48
MH
3286/* RC register. */
3287
3288int
3289rc_reg_operand (op, mode)
3290 register rtx op;
3291 enum machine_mode mode ATTRIBUTE_UNUSED;
3292{
bc46716b 3293 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
d5e4ff48
MH
3294}
3295
3296
cb0ca284 3297int
55310df7 3298call_address_operand (op, mode)
cb0ca284 3299 rtx op;
d5e4ff48 3300 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3301{
55310df7 3302 return (REG_P (op) || symbolic_address_operand (op, mode));
cb0ca284
MH
3303}
3304
3305
305902b0 3306/* Symbolic address operand. */
50c33087
MH
3307
3308int
55310df7 3309symbolic_address_operand (op, mode)
50c33087
MH
3310 register rtx op;
3311 enum machine_mode mode ATTRIBUTE_UNUSED;
3312{
3313 switch (GET_CODE (op))
3314 {
5078f5eb 3315 case CONST:
50c33087
MH
3316 case SYMBOL_REF:
3317 case LABEL_REF:
3318 return 1;
50c33087
MH
3319 default:
3320 return 0;
3321 }
3322}
3323
975ab131 3324
f416f18c 3325/* Check dst operand of a move instruction. */
975ab131 3326
f416f18c
MH
3327int
3328dst_operand (op, mode)
3329 rtx op;
3330 enum machine_mode mode;
3331{
3332 if (GET_CODE (op) == SUBREG
3333 && mixed_subreg_operand (op, mode))
3334 return 0;
cb0ca284 3335
f416f18c
MH
3336 if (REG_P (op))
3337 return reg_operand (op, mode);
3338
f959ff1a 3339 return nonimmediate_operand (op, mode);
f416f18c
MH
3340}
3341
3342
3343/* Check src operand of two operand arithmetic instructions. */
975ab131 3344
cb0ca284
MH
3345int
3346src_operand (op, mode)
3347 rtx op;
3348 enum machine_mode mode;
3349{
ebcc44f4
MH
3350 if (GET_CODE (op) == SUBREG
3351 && mixed_subreg_operand (op, mode))
3352 return 0;
3353
cb0ca284
MH
3354 if (REG_P (op))
3355 return reg_operand (op, mode);
3356
3357 if (mode == VOIDmode)
3358 mode = GET_MODE (op);
3359
cb0ca284 3360 if (GET_CODE (op) == CONST_INT)
50c33087
MH
3361 return (mode == QImode || mode == Pmode || mode == HImode)
3362 && c4x_I_constant (op);
cb0ca284
MH
3363
3364 /* We don't like CONST_DOUBLE integers. */
3365 if (GET_CODE (op) == CONST_DOUBLE)
3366 return c4x_H_constant (op);
3367
31445126
MH
3368 /* Disallow symbolic addresses. Only the predicate
3369 symbolic_address_operand will match these. */
50c33087
MH
3370 if (GET_CODE (op) == SYMBOL_REF
3371 || GET_CODE (op) == LABEL_REF
3372 || GET_CODE (op) == CONST)
3373 return 0;
3374
825dda42 3375 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
4a1f52a8
HB
3376 access to symbolic addresses. These operands will get forced
3377 into a register and the movqi expander will generate a
825dda42 3378 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
50c33087
MH
3379 if (GET_CODE (op) == MEM
3380 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3381 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3382 || GET_CODE (XEXP (op, 0)) == CONST)))
4a1f52a8 3383 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
50c33087 3384
cb0ca284
MH
3385 return general_operand (op, mode);
3386}
3387
3388
3389int
3390src_hi_operand (op, mode)
3391 rtx op;
3392 enum machine_mode mode;
3393{
3394 if (c4x_O_constant (op))
3395 return 1;
3396 return src_operand (op, mode);
3397}
3398
3399
3400/* Check src operand of two operand logical instructions. */
3401
3402int
3403lsrc_operand (op, mode)
3404 rtx op;
3405 enum machine_mode mode;
3406{
3407 if (mode == VOIDmode)
3408 mode = GET_MODE (op);
3409
3410 if (mode != QImode && mode != Pmode)
c725bd79 3411 fatal_insn ("mode not QImode", op);
cb0ca284 3412
cb0ca284
MH
3413 if (GET_CODE (op) == CONST_INT)
3414 return c4x_L_constant (op) || c4x_J_constant (op);
3415
50c33087 3416 return src_operand (op, mode);
cb0ca284
MH
3417}
3418
3419
3420/* Check src operand of two operand tricky instructions. */
3421
3422int
3423tsrc_operand (op, mode)
3424 rtx op;
3425 enum machine_mode mode;
3426{
3427 if (mode == VOIDmode)
3428 mode = GET_MODE (op);
3429
3430 if (mode != QImode && mode != Pmode)
c725bd79 3431 fatal_insn ("mode not QImode", op);
cb0ca284 3432
cb0ca284
MH
3433 if (GET_CODE (op) == CONST_INT)
3434 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3435
50c33087 3436 return src_operand (op, mode);
cb0ca284
MH
3437}
3438
3439
65f2f288
HB
 3440	/* Check src operand of two operand non-immediate instructions. */
3441
3442int
3443nonimmediate_src_operand (op, mode)
3444 rtx op;
3445 enum machine_mode mode;
3446{
3447 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3448 return 0;
3449
3450 return src_operand (op, mode);
3451}
3452
3453
 3454	/* Check logical src operand of two operand non-immediate instructions. */
3455
3456int
3457nonimmediate_lsrc_operand (op, mode)
3458 rtx op;
3459 enum machine_mode mode;
3460{
3461 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3462 return 0;
3463
3464 return lsrc_operand (op, mode);
3465}
3466
3467
cb0ca284
MH
3468int
3469reg_or_const_operand (op, mode)
3470 rtx op;
3471 enum machine_mode mode;
3472{
3473 return reg_operand (op, mode) || const_operand (op, mode);
3474}
3475
3476
3477/* Check for indirect operands allowable in parallel instruction. */
3478
3479int
3480par_ind_operand (op, mode)
3481 rtx op;
3482 enum machine_mode mode;
3483{
3484 if (mode != VOIDmode && mode != GET_MODE (op))
3485 return 0;
3486
3487 return c4x_S_indirect (op);
3488}
3489
3490
3491/* Check for operands allowable in parallel instruction. */
3492
3493int
3494parallel_operand (op, mode)
3495 rtx op;
3496 enum machine_mode mode;
3497{
3498 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3499}
3500
3501
3502static void
3503c4x_S_address_parse (op, base, incdec, index, disp)
3504 rtx op;
3505 int *base;
3506 int *incdec;
3507 int *index;
3508 int *disp;
3509{
3510 *base = 0;
3511 *incdec = 0;
3512 *index = 0;
3513 *disp = 0;
3514
3515 if (GET_CODE (op) != MEM)
c725bd79 3516 fatal_insn ("invalid indirect memory address", op);
cb0ca284
MH
3517
3518 op = XEXP (op, 0);
3519 switch (GET_CODE (op))
3520 {
3521 case PRE_DEC:
3522 *base = REGNO (XEXP (op, 0));
3523 *incdec = 1;
3524 *disp = -1;
3525 return;
3526
3527 case POST_DEC:
3528 *base = REGNO (XEXP (op, 0));
3529 *incdec = 1;
3530 *disp = 0;
3531 return;
3532
3533 case PRE_INC:
3534 *base = REGNO (XEXP (op, 0));
3535 *incdec = 1;
3536 *disp = 1;
3537 return;
3538
3539 case POST_INC:
3540 *base = REGNO (XEXP (op, 0));
3541 *incdec = 1;
3542 *disp = 0;
3543 return;
3544
3545 case POST_MODIFY:
3546 *base = REGNO (XEXP (op, 0));
3547 if (REG_P (XEXP (XEXP (op, 1), 1)))
3548 {
3549 *index = REGNO (XEXP (XEXP (op, 1), 1));
3550 *disp = 0; /* ??? */
3551 }
3552 else
3553 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3554 *incdec = 1;
3555 return;
3556
3557 case PRE_MODIFY:
3558 *base = REGNO (XEXP (op, 0));
3559 if (REG_P (XEXP (XEXP (op, 1), 1)))
3560 {
3561 *index = REGNO (XEXP (XEXP (op, 1), 1));
3562 *disp = 1; /* ??? */
3563 }
3564 else
3565 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3566 *incdec = 1;
3567
3568 return;
3569
3570 case REG:
3571 *base = REGNO (op);
3572 return;
3573
3574 case PLUS:
3575 {
3576 rtx op0 = XEXP (op, 0);
3577 rtx op1 = XEXP (op, 1);
3578
3579 if (c4x_a_register (op0))
3580 {
3581 if (c4x_x_register (op1))
3582 {
3583 *base = REGNO (op0);
3584 *index = REGNO (op1);
3585 return;
3586 }
3587 else if ((GET_CODE (op1) == CONST_INT
3588 && IS_DISP1_CONST (INTVAL (op1))))
3589 {
3590 *base = REGNO (op0);
3591 *disp = INTVAL (op1);
3592 return;
3593 }
3594 }
3595 else if (c4x_x_register (op0) && c4x_a_register (op1))
3596 {
3597 *base = REGNO (op1);
3598 *index = REGNO (op0);
3599 return;
3600 }
3601 }
975ab131 3602 /* Fallthrough. */
cb0ca284
MH
3603
3604 default:
c725bd79 3605 fatal_insn ("invalid indirect (S) memory address", op);
cb0ca284
MH
3606 }
3607}
3608
3609
3610int
3611c4x_address_conflict (op0, op1, store0, store1)
3612 rtx op0;
3613 rtx op1;
3614 int store0;
3615 int store1;
3616{
3617 int base0;
3618 int base1;
3619 int incdec0;
3620 int incdec1;
3621 int index0;
3622 int index1;
3623 int disp0;
3624 int disp1;
3625
4271f003
MH
3626 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3627 return 1;
3628
cb0ca284
MH
3629 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3630 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3631
3632 if (store0 && store1)
3633 {
3634 /* If we have two stores in parallel to the same address, then
3635 the C4x only executes one of the stores. This is unlikely to
3636 cause problems except when writing to a hardware device such
3637 as a FIFO since the second write will be lost. The user
3638 should flag the hardware location as being volatile so that
3639 we don't do this optimisation. While it is unlikely that we
3640 have an aliased address if both locations are not marked
3641 volatile, it is probably safer to flag a potential conflict
3642 if either location is volatile. */
4ddb3ea6 3643 if (! flag_argument_noalias)
3644 {
3645 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3646 return 1;
3647 }
3648 }
3649
 3650	  /* If we have a parallel load and a store to the same address, the load
 3651	     is performed first, so there is no conflict.  Similarly, there is
 3652	     no conflict if we have parallel loads from the same address. */
3653
3654 /* Cannot use auto increment or auto decrement twice for same
3655 base register. */
 3656	  if (base0 == base1 && incdec0 && incdec1)
3657 return 1;
3658
 3659	  /* It might be too confusing for GCC if we use a base register
3660 with a side effect and a memory reference using the same register
3661 in parallel. */
4ddb3ea6 3662 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3663 return 1;
3664
f1c374cb 3665	  /* We cannot optimize the case where op0 and op1 refer to the same
1ac7a7f5 3666 address. */
f1c374cb 3667 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3668 return 1;
3669
3670 /* No conflict. */
3671 return 0;
3672}
3673
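/* For example (illustrative): c4x_address_conflict normally reports
   *ar0++ in parallel with *+ar0(1) as a conflict (the same base
   register is both modified and used), whereas *+ar0(2) in parallel
   with *+ar1(2) is not a conflict.  */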
3674
3675/* Check for while loop inside a decrement and branch loop. */
3676
3677int
3678c4x_label_conflict (insn, jump, db)
3679 rtx insn;
3680 rtx jump;
3681 rtx db;
3682{
3683 while (insn)
3684 {
3685 if (GET_CODE (insn) == CODE_LABEL)
3686 {
3687 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3688 return 1;
3689 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3690 return 0;
3691 }
3692 insn = PREV_INSN (insn);
3693 }
3694 return 1;
3695}
3696
3697
3698/* Validate combination of operands for parallel load/store instructions. */
3699
5e6a42d9 3700int
e868a840 3701valid_parallel_load_store (operands, mode)
cb0ca284 3702 rtx *operands;
d5e4ff48 3703 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3704{
3705 rtx op0 = operands[0];
3706 rtx op1 = operands[1];
3707 rtx op2 = operands[2];
3708 rtx op3 = operands[3];
3709
3710 if (GET_CODE (op0) == SUBREG)
3711 op0 = SUBREG_REG (op0);
3712 if (GET_CODE (op1) == SUBREG)
3713 op1 = SUBREG_REG (op1);
3714 if (GET_CODE (op2) == SUBREG)
3715 op2 = SUBREG_REG (op2);
3716 if (GET_CODE (op3) == SUBREG)
3717 op3 = SUBREG_REG (op3);
3718
3719 /* The patterns should only allow ext_low_reg_operand() or
3720 par_ind_operand() operands. Thus of the 4 operands, only 2
3721 should be REGs and the other 2 should be MEMs. */
3722
4271f003 3723 /* This test prevents the multipack pass from using this pattern if
e868a840
MH
3724 op0 is used as an index or base register in op2 or op3, since
3725 this combination will require reloading. */
4271f003 3726 if (GET_CODE (op0) == REG
e868a840
MH
3727 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3728 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
4271f003
MH
3729 return 0;
3730
975ab131 3731 /* LDI||LDI. */
cb0ca284
MH
3732 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3733 return (REGNO (op0) != REGNO (op2))
3734 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
4ddb3ea6 3735 && ! c4x_address_conflict (op1, op3, 0, 0);
cb0ca284 3736
975ab131 3737 /* STI||STI. */
cb0ca284
MH
3738 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3739 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
4ddb3ea6 3740 && ! c4x_address_conflict (op0, op2, 1, 1);
cb0ca284 3741
975ab131 3742 /* LDI||STI. */
cb0ca284
MH
3743 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3744 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
4ddb3ea6 3745 && ! c4x_address_conflict (op1, op2, 0, 1);
cb0ca284 3746
975ab131 3747 /* STI||LDI. */
cb0ca284
MH
3748 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3749 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
4ddb3ea6 3750 && ! c4x_address_conflict (op0, op3, 1, 0);
cb0ca284
MH
3751
3752 return 0;
3753}
3754
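/* For example (illustrative): the LDI||STI case above accepts
   operands[0] = R0, operands[1] = *+ar0(1), operands[2] = *ar1 and
   operands[3] = R1 (load R0 from *+ar0(1) in parallel with storing R1
   to *ar1), provided the two memory references do not conflict.  */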
4271f003 3755
e868a840
MH
3756int
3757valid_parallel_operands_4 (operands, mode)
3758 rtx *operands;
3759 enum machine_mode mode ATTRIBUTE_UNUSED;
3760{
e868a840
MH
3761 rtx op0 = operands[0];
3762 rtx op2 = operands[2];
3763
3764 if (GET_CODE (op0) == SUBREG)
3765 op0 = SUBREG_REG (op0);
3766 if (GET_CODE (op2) == SUBREG)
3767 op2 = SUBREG_REG (op2);
3768
3769 /* This test prevents the multipack pass from using this pattern if
3770 op0 is used as an index or base register in op2, since this combination
3771 will require reloading. */
3772 if (GET_CODE (op0) == REG
3773 && GET_CODE (op2) == MEM
3774 && reg_mentioned_p (op0, XEXP (op2, 0)))
3775 return 0;
3776
3777 return 1;
3778}
3779
3780
cb0ca284
MH
3781int
3782valid_parallel_operands_5 (operands, mode)
3783 rtx *operands;
d5e4ff48 3784 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3785{
3786 int regs = 0;
4271f003 3787 rtx op0 = operands[0];
e868a840 3788 rtx op1 = operands[1];
4271f003
MH
3789 rtx op2 = operands[2];
3790 rtx op3 = operands[3];
cb0ca284
MH
3791
3792 if (GET_CODE (op0) == SUBREG)
3793 op0 = SUBREG_REG (op0);
e868a840
MH
3794 if (GET_CODE (op1) == SUBREG)
3795 op1 = SUBREG_REG (op1);
4271f003
MH
3796 if (GET_CODE (op2) == SUBREG)
3797 op2 = SUBREG_REG (op2);
cb0ca284
MH
3798
3799 /* The patterns should only allow ext_low_reg_operand() or
e868a840
MH
3800 par_ind_operand() operands. Operands 1 and 2 may be commutative
3801 but only one of them can be a register. */
3802 if (GET_CODE (op1) == REG)
cb0ca284 3803 regs++;
4271f003 3804 if (GET_CODE (op2) == REG)
cb0ca284
MH
3805 regs++;
3806
4271f003
MH
3807 if (regs != 1)
3808 return 0;
3809
3810 /* This test prevents the multipack pass from using this pattern if
3811 op0 is used as an index or base register in op3, since this combination
3812 will require reloading. */
3813 if (GET_CODE (op0) == REG
3814 && GET_CODE (op3) == MEM
3815 && reg_mentioned_p (op0, XEXP (op3, 0)))
3816 return 0;
3817
3818 return 1;
cb0ca284
MH
3819}
3820
3821
3822int
3823valid_parallel_operands_6 (operands, mode)
3824 rtx *operands;
d5e4ff48 3825 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3826{
3827 int regs = 0;
4271f003
MH
3828 rtx op0 = operands[0];
3829 rtx op1 = operands[1];
3830 rtx op2 = operands[2];
3831 rtx op4 = operands[4];
3832 rtx op5 = operands[5];
cb0ca284 3833
cb0ca284
MH
3834 if (GET_CODE (op1) == SUBREG)
3835 op1 = SUBREG_REG (op1);
3836 if (GET_CODE (op2) == SUBREG)
3837 op2 = SUBREG_REG (op2);
4271f003
MH
3838 if (GET_CODE (op4) == SUBREG)
3839 op4 = SUBREG_REG (op4);
3840 if (GET_CODE (op5) == SUBREG)
3841 op5 = SUBREG_REG (op5);
cb0ca284
MH
3842
3843 /* The patterns should only allow ext_low_reg_operand() or
3844 par_ind_operand() operands. Thus of the 4 input operands, only 2
3845 should be REGs and the other 2 should be MEMs. */
3846
cb0ca284
MH
3847 if (GET_CODE (op1) == REG)
3848 regs++;
3849 if (GET_CODE (op2) == REG)
3850 regs++;
4271f003
MH
3851 if (GET_CODE (op4) == REG)
3852 regs++;
3853 if (GET_CODE (op5) == REG)
cb0ca284
MH
3854 regs++;
3855
3856 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3857 Perhaps we should count the MEMs as well? */
4271f003
MH
3858 if (regs != 2)
3859 return 0;
cb0ca284 3860
4271f003
MH
3861 /* This test prevents the multipack pass from using this pattern if
3862 op0 is used as an index or base register in op4 or op5, since
3863 this combination will require reloading. */
3864 if (GET_CODE (op0) == REG
3865 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3866 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3867 return 0;
cb0ca284 3868
4271f003 3869 return 1;
cb0ca284
MH
3870}
3871
3872
3873/* Validate combination of src operands. Note that the operands have
3874 been screened by the src_operand predicate. We just have to check
3875 that the combination of operands is valid. If FORCE is set, ensure
3876 that the destination regno is valid if we have a 2 operand insn. */
3877
3878static int
3879c4x_valid_operands (code, operands, mode, force)
3880 enum rtx_code code;
3881 rtx *operands;
8d485e2d 3882 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3883 int force;
3884{
3885 rtx op1;
3886 rtx op2;
3887 enum rtx_code code1;
3888 enum rtx_code code2;
3889
3890 if (code == COMPARE)
3891 {
3892 op1 = operands[0];
3893 op2 = operands[1];
3894 }
3895 else
3896 {
3897 op1 = operands[1];
3898 op2 = operands[2];
3899 }
3900
3901 if (GET_CODE (op1) == SUBREG)
3902 op1 = SUBREG_REG (op1);
3903 if (GET_CODE (op2) == SUBREG)
3904 op2 = SUBREG_REG (op2);
3905
3906 code1 = GET_CODE (op1);
3907 code2 = GET_CODE (op2);
3908
3909 if (code1 == REG && code2 == REG)
3910 return 1;
3911
3912 if (code1 == MEM && code2 == MEM)
3913 {
8d485e2d 3914 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
cb0ca284 3915 return 1;
8d485e2d 3916 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
cb0ca284
MH
3917 }
3918
3919 if (code1 == code2)
3920 return 0;
3921
3922 if (code1 == REG)
3923 {
3924 switch (code2)
3925 {
3926 case CONST_INT:
3927 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3928 return 1;
3929 break;
3930
3931 case CONST_DOUBLE:
4ddb3ea6 3932 if (! c4x_H_constant (op2))
cb0ca284
MH
3933 return 0;
3934 break;
3935
3936 /* Any valid memory operand screened by src_operand is OK. */
3937 case MEM:
3938
3939 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3940 into a stack slot memory address comprising a PLUS and a
3941 constant. */
3942 case ADDRESSOF:
3943 break;
3944
3945 default:
50c33087 3946 fatal_insn ("c4x_valid_operands: Internal error", op2);
cb0ca284
MH
3947 break;
3948 }
3949
3950 /* Check that we have a valid destination register for a two operand
3951 instruction. */
4ddb3ea6 3952 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
cb0ca284
MH
3953 }
3954
3955 /* We assume MINUS is commutative since the subtract patterns
3956 also support the reverse subtract instructions. Since op1
3957 is not a register, and op2 is a register, op1 can only
3958 be a restricted memory operand for a shift instruction. */
3959 if (code == ASHIFTRT || code == LSHIFTRT
3960 || code == ASHIFT || code == COMPARE)
3961 return code2 == REG
3962 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3963
3964 switch (code1)
3965 {
3966 case CONST_INT:
3967 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3968 return 1;
3969 break;
3970
3971 case CONST_DOUBLE:
4ddb3ea6 3972 if (! c4x_H_constant (op1))
cb0ca284
MH
3973 return 0;
3974 break;
3975
1ac7a7f5 3976 /* Any valid memory operand screened by src_operand is OK. */
cb0ca284 3977 case MEM:
87ba6944
MH
3978#if 0
3979 if (code2 != REG)
3980 return 0;
3981#endif
3982 break;
cb0ca284
MH
3983
3984 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3985 into a stack slot memory address comprising a PLUS and a
3986 constant. */
3987 case ADDRESSOF:
3988 break;
3989
3990 default:
400500c4 3991 abort ();
cb0ca284
MH
3992 break;
3993 }
3994
3995 /* Check that we have a valid destination register for a two operand
3996 instruction. */
4ddb3ea6 3997 return ! force || REGNO (op1) == REGNO (operands[0]);
cb0ca284
MH
3998}
3999
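/* For example (illustrative): with code PLUS and FORCE nonzero,
   c4x_valid_operands accepts (R0, R0, *+ar0(1)) but rejects
   (R0, R1, *+ar0(1)), since a two operand instruction requires the
   destination to match the register source.  */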
4000
4001int valid_operands (code, operands, mode)
4002 enum rtx_code code;
4003 rtx *operands;
4004 enum machine_mode mode;
4005{
4006
4007 /* If we are not optimizing then we have to let anything go and let
4008 reload fix things up. instantiate_decl in function.c can produce
4009 invalid insns by changing the offset of a memory operand from a
4010 valid one into an invalid one, when the second operand is also a
4011 memory operand. The alternative is not to allow two memory
4012 operands for an insn when not optimizing. The problem only rarely
975ab131 4013 occurs, for example with the C-torture program DFcmp.c. */
cb0ca284 4014
4ddb3ea6 4015 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
cb0ca284
MH
4016}
4017
4018
4019int
4020legitimize_operands (code, operands, mode)
4021 enum rtx_code code;
4022 rtx *operands;
4023 enum machine_mode mode;
4024{
4025 /* Compare only has 2 operands. */
4026 if (code == COMPARE)
4027 {
4028 /* During RTL generation, force constants into pseudos so that
4029 they can get hoisted out of loops. This will tie up an extra
4030 register but can save an extra cycle. Only do this if loop
4031 optimisation is enabled. (We cannot pull this trick for add and
4032 sub instructions since the flow pass won't find
4033 autoincrements etc.) This allows us to generate compare
4034 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4035 of LDI *AR0++, R0; CMPI 42, R0.
4036
4037 Note that expand_binops will try to load an expensive constant
4038 into a register if it is used within a loop. Unfortunately,
4039 the cost mechanism doesn't allow us to look at the other
4040 operand to decide whether the constant is expensive. */
4041
4ddb3ea6 4042 if (! reload_in_progress
cb0ca284
MH
4043 && TARGET_HOIST
4044 && optimize > 0
87ba6944
MH
4045 && GET_CODE (operands[1]) == CONST_INT
4046 && preserve_subexpressions_p ()
4047 && rtx_cost (operands[1], code) > 1)
cb0ca284
MH
4048 operands[1] = force_reg (mode, operands[1]);
4049
4ddb3ea6
MH
4050 if (! reload_in_progress
4051 && ! c4x_valid_operands (code, operands, mode, 0))
cb0ca284
MH
4052 operands[0] = force_reg (mode, operands[0]);
4053 return 1;
4054 }
4055
4056 /* We cannot do this for ADDI/SUBI insns since we will
4057 prevent the flow pass from finding autoincrement addressing
4058 opportunities. */
4ddb3ea6
MH
4059 if (! reload_in_progress
4060 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
87ba6944
MH
4061 && TARGET_HOIST
4062 && optimize > 1
4063 && GET_CODE (operands[2]) == CONST_INT
4064 && preserve_subexpressions_p ()
4065 && rtx_cost (operands[2], code) > 1)
cb0ca284
MH
4066 operands[2] = force_reg (mode, operands[2]);
4067
4068 /* We can get better code on a C30 if we force constant shift counts
4069 into a register. This way they can get hoisted out of loops,
4070 tying up a register, but saving an instruction. The downside is
4071 that they may get allocated to an address or index register, and
4072 thus we will get a pipeline conflict if there is a nearby
4073 indirect address using an address register.
4074
4075 Note that expand_binops will not try to load an expensive constant
4076 into a register if it is used within a loop for a shift insn. */
4077
4ddb3ea6
MH
4078 if (! reload_in_progress
4079 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
cb0ca284
MH
4080 {
4081 /* If the operand combination is invalid, we force operand1 into a
4082 register, preventing reload from having to do this at a
4083 later stage. */
4084 operands[1] = force_reg (mode, operands[1]);
4085 if (TARGET_FORCE)
4086 {
4087 emit_move_insn (operands[0], operands[1]);
4088 operands[1] = copy_rtx (operands[0]);
4089 }
4090 else
4091 {
4092 /* Just in case... */
4ddb3ea6 4093 if (! c4x_valid_operands (code, operands, mode, 0))
cb0ca284
MH
4094 operands[2] = force_reg (mode, operands[2]);
4095 }
4096 }
4097
4098 /* Right shifts require a negative shift count, but GCC expects
4099 a positive count, so we emit a NEG. */
4100 if ((code == ASHIFTRT || code == LSHIFTRT)
4101 && (GET_CODE (operands[2]) != CONST_INT))
d5e4ff48 4102 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
cb0ca284
MH
4103
4104 return 1;
4105}
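/* Illustrative sketch (not part of the original file): the kind of user
   loop that the constant hoisting above is aimed at.  With TARGET_HOIST
   the constant 42 can be kept in a register, so each iteration needs only
   a CMPI against the autoincremented pointer, as described in the comment
   in legitimize_operands.  The function name and body are hypothetical.  */
#if 0
static int
count_42 (const int *p, int n)
{
  int hits = 0;

  while (n--)
    if (*p++ == 42)
      hits++;
  return hits;
}
#endif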
4106
4107
4108/* The following predicates are used for instruction scheduling. */
4109
4110int
4111group1_reg_operand (op, mode)
4112 rtx op;
4113 enum machine_mode mode;
4114{
4115 if (mode != VOIDmode && mode != GET_MODE (op))
4116 return 0;
4117 if (GET_CODE (op) == SUBREG)
4118 op = SUBREG_REG (op);
d001969e 4119 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
cb0ca284
MH
4120}
4121
4122
4123int
4124group1_mem_operand (op, mode)
4125 rtx op;
4126 enum machine_mode mode;
4127{
4128 if (mode != VOIDmode && mode != GET_MODE (op))
4129 return 0;
4130
4131 if (GET_CODE (op) == MEM)
4132 {
4133 op = XEXP (op, 0);
4134 if (GET_CODE (op) == PLUS)
4135 {
4136 rtx op0 = XEXP (op, 0);
4137 rtx op1 = XEXP (op, 1);
4138
d001969e
HB
4139 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4140 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
cb0ca284
MH
4141 return 1;
4142 }
d001969e 4143 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
cb0ca284
MH
4144 return 1;
4145 }
4146
4147 return 0;
4148}
4149
4150
4151 /* Return true if OP is any one of the address registers. */
4152
4153int
4154arx_reg_operand (op, mode)
4155 rtx op;
4156 enum machine_mode mode;
4157{
4158 if (mode != VOIDmode && mode != GET_MODE (op))
4159 return 0;
4160 if (GET_CODE (op) == SUBREG)
4161 op = SUBREG_REG (op);
d001969e 4162 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
cb0ca284
MH
4163}
4164
4165
4166static int
4167c4x_arn_reg_operand (op, mode, regno)
4168 rtx op;
4169 enum machine_mode mode;
8d485e2d 4170 unsigned int regno;
cb0ca284
MH
4171{
4172 if (mode != VOIDmode && mode != GET_MODE (op))
4173 return 0;
4174 if (GET_CODE (op) == SUBREG)
4175 op = SUBREG_REG (op);
d001969e 4176 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
cb0ca284
MH
4177}
4178
4179
4180static int
4181c4x_arn_mem_operand (op, mode, regno)
4182 rtx op;
4183 enum machine_mode mode;
8d485e2d 4184 unsigned int regno;
cb0ca284
MH
4185{
4186 if (mode != VOIDmode && mode != GET_MODE (op))
4187 return 0;
4188
4189 if (GET_CODE (op) == MEM)
4190 {
4191 op = XEXP (op, 0);
4192 switch (GET_CODE (op))
4193 {
4194 case PRE_DEC:
4195 case POST_DEC:
4196 case PRE_INC:
4197 case POST_INC:
4198 op = XEXP (op, 0);
4199
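 /* Fall through to check the register itself.  */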
4200 case REG:
d001969e 4201 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
cb0ca284
MH
4202
4203 case PRE_MODIFY:
4204 case POST_MODIFY:
d001969e
HB
4205 if (REG_P (XEXP (op, 0)) && (! reload_completed
4206 || (REGNO (XEXP (op, 0)) == regno)))
cb0ca284
MH
4207 return 1;
4208 if (REG_P (XEXP (XEXP (op, 1), 1))
d001969e
HB
4209 && (! reload_completed
4210 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
cb0ca284
MH
4211 return 1;
4212 break;
4213
4214 case PLUS:
4215 {
4216 rtx op0 = XEXP (op, 0);
4217 rtx op1 = XEXP (op, 1);
4218
d001969e
HB
4219 if ((REG_P (op0) && (! reload_completed
4220 || (REGNO (op0) == regno)))
4221 || (REG_P (op1) && (! reload_completed
4222 || (REGNO (op1) == regno))))
cb0ca284
MH
4223 return 1;
4224 }
4225 break;
933cddd0 4226
cb0ca284
MH
4227 default:
4228 break;
4229 }
4230 }
4231 return 0;
4232}
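/* Illustrative examples (based on the cases handled above): with
   REGNO == AR0_REGNO, memory operands such as (mem (reg AR0)),
   (mem (post_inc (reg AR0))), (mem (plus (reg AR0) (const_int 4))) and
   (mem (post_modify (reg AR0) (plus (reg AR0) (reg IR0)))) are accepted,
   while after reload any address that does not involve AR0 is rejected.  */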
4233
4234
4235int
4236ar0_reg_operand (op, mode)
4237 rtx op;
4238 enum machine_mode mode;
4239{
4240 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4241}
4242
4243
4244int
4245ar0_mem_operand (op, mode)
4246 rtx op;
4247 enum machine_mode mode;
4248{
4249 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4250}
4251
4252
4253int
4254ar1_reg_operand (op, mode)
4255 rtx op;
4256 enum machine_mode mode;
4257{
4258 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4259}
4260
4261
4262int
4263ar1_mem_operand (op, mode)
4264 rtx op;
4265 enum machine_mode mode;
4266{
4267 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4268}
4269
4270
4271int
4272ar2_reg_operand (op, mode)
4273 rtx op;
4274 enum machine_mode mode;
4275{
4276 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4277}
4278
4279
4280int
4281ar2_mem_operand (op, mode)
4282 rtx op;
4283 enum machine_mode mode;
4284{
4285 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4286}
4287
4288
4289int
4290ar3_reg_operand (op, mode)
4291 rtx op;
4292 enum machine_mode mode;
4293{
4294 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4295}
4296
4297
4298int
4299ar3_mem_operand (op, mode)
4300 rtx op;
4301 enum machine_mode mode;
4302{
4303 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4304}
4305
4306
4307int
4308ar4_reg_operand (op, mode)
4309 rtx op;
4310 enum machine_mode mode;
4311{
4312 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4313}
4314
4315
4316int
4317ar4_mem_operand (op, mode)
4318 rtx op;
4319 enum machine_mode mode;
4320{
4321 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4322}
4323
4324
4325int
4326ar5_reg_operand (op, mode)
4327 rtx op;
4328 enum machine_mode mode;
4329{
4330 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4331}
4332
4333
4334int
4335ar5_mem_operand (op, mode)
4336 rtx op;
4337 enum machine_mode mode;
4338{
4339 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4340}
4341
4342
4343int
4344ar6_reg_operand (op, mode)
4345 rtx op;
4346 enum machine_mode mode;
4347{
4348 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4349}
4350
4351
4352int
4353ar6_mem_operand (op, mode)
4354 rtx op;
4355 enum machine_mode mode;
4356{
4357 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4358}
4359
4360
4361int
4362ar7_reg_operand (op, mode)
4363 rtx op;
4364 enum machine_mode mode;
4365{
4366 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4367}
4368
4369
4370int
4371ar7_mem_operand (op, mode)
4372 rtx op;
4373 enum machine_mode mode;
4374{
4375 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4376}
4377
4378
4379int
4380ir0_reg_operand (op, mode)
4381 rtx op;
4382 enum machine_mode mode;
4383{
4384 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4385}
4386
4387
4388int
4389ir0_mem_operand (op, mode)
4390 rtx op;
4391 enum machine_mode mode;
4392{
4393 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4394}
4395
4396
4397int
4398ir1_reg_operand (op, mode)
4399 rtx op;
4400 enum machine_mode mode;
4401{
4402 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4403}
4404
4405
4406int
4407ir1_mem_operand (op, mode)
4408 rtx op;
4409 enum machine_mode mode;
4410{
4411 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4412}
4413
4414
975ab131
MH
4415/* This is similar to operand_subword but allows autoincrement
4416 addressing. */
cb0ca284
MH
4417
4418rtx
4419c4x_operand_subword (op, i, validate_address, mode)
4420 rtx op;
4421 int i;
4422 int validate_address;
4423 enum machine_mode mode;
4424{
4425 if (mode != HImode && mode != HFmode)
4426 fatal_insn ("c4x_operand_subword: invalid mode", op);
4427
4428 if (mode == HFmode && REG_P (op))
4429 fatal_insn ("c4x_operand_subword: invalid operand", op);
4430
4431 if (GET_CODE (op) == MEM)
4432 {
4433 enum rtx_code code = GET_CODE (XEXP (op, 0));
4434 enum machine_mode mode = GET_MODE (XEXP (op, 0));
50c33087
MH
4435 enum machine_mode submode;
4436
4437 submode = mode;
4438 if (mode == HImode)
4439 submode = QImode;
4440 else if (mode == HFmode)
4441 submode = QFmode;
cb0ca284
MH
4442
4443 switch (code)
4444 {
4445 case POST_INC:
4446 case PRE_INC:
50c33087 4447 return gen_rtx_MEM (submode, XEXP (op, 0));
cb0ca284
MH
4448
4449 case POST_DEC:
4450 case PRE_DEC:
4451 case PRE_MODIFY:
4452 case POST_MODIFY:
4453 /* We could handle these with some difficulty.
4454 e.g., *p-- => *(p-=2); *(p+1). */
4455 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4456
50c33087
MH
4457 case SYMBOL_REF:
4458 case LABEL_REF:
4459 case CONST:
4460 case CONST_INT:
4461 fatal_insn ("c4x_operand_subword: invalid address", op);
4462
4463 /* Even though offsettable_address_p considers (MEM
4464 (LO_SUM)) to be offsettable, it is not safe if the
4465 address is at the end of the data page since we also have
4466 to fix up the associated high PART. In this case where
4467 we are trying to split a HImode or HFmode memory
4468 reference, we would have to emit another insn to reload a
4469 new HIGH value. It's easier to disable LO_SUM memory references
4470 in HImode or HFmode and we probably get better code. */
4471 case LO_SUM:
4472 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4473
cb0ca284
MH
4474 default:
4475 break;
4476 }
4477 }
4478
4479 return operand_subword (op, i, validate_address, mode);
4480}
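/* For example (illustrative), splitting an HImode load from
   (mem:HI (post_inc (reg:QI AR0))) yields the QImode reference
   (mem:QI (post_inc (reg:QI AR0))) for each subword; the autoincrement
   itself steps the address on to the second word when the two QImode
   moves are emitted back to back.  */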
4481
eff784fe
MH
4482struct name_list
4483{
4484 struct name_list *next;
16219347 4485 const char *name;
eff784fe
MH
4486};
4487
4488static struct name_list *global_head;
4489static struct name_list *extern_head;
4490
4491
4492 /* Add NAME to the list of global symbols and remove it from the
4493 external list if it is present there. */
4494
4495void
4496c4x_global_label (name)
41387ffd 4497 const char *name;
eff784fe
MH
4498{
4499 struct name_list *p, *last;
4500
4501 /* Do not insert duplicate names, so linearly search through list of
4502 existing names. */
4503 p = global_head;
4504 while (p)
4505 {
4506 if (strcmp (p->name, name) == 0)
4507 return;
4508 p = p->next;
4509 }
6d9f628e 4510 p = (struct name_list *) xmalloc (sizeof *p);
eff784fe
MH
4511 p->next = global_head;
4512 p->name = name;
4513 global_head = p;
4514
4515 /* Remove this name from ref list if present. */
4516 last = NULL;
4517 p = extern_head;
4518 while (p)
4519 {
4520 if (strcmp (p->name, name) == 0)
4521 {
4522 if (last)
4523 last->next = p->next;
4524 else
4525 extern_head = p->next;
4526 break;
4527 }
4528 last = p;
4529 p = p->next;
4530 }
4531}
4532
4533
4534/* Add NAME to list of external symbols. */
4535
4536void
4537c4x_external_ref (name)
41387ffd 4538 const char *name;
eff784fe
MH
4539{
4540 struct name_list *p;
4541
4542 /* Do not insert duplicate names. */
4543 p = extern_head;
4544 while (p)
4545 {
4546 if (strcmp (p->name, name) == 0)
4547 return;
4548 p = p->next;
4549 }
4550
4551 /* Do not insert ref if global found. */
4552 p = global_head;
4553 while (p)
4554 {
4555 if (strcmp (p->name, name) == 0)
4556 return;
4557 p = p->next;
4558 }
6d9f628e 4559 p = (struct name_list *) xmalloc (sizeof *p);
eff784fe
MH
4560 p->next = extern_head;
4561 p->name = name;
4562 extern_head = p;
4563}
4564
4565
4566void
4567c4x_file_end (fp)
4568 FILE *fp;
4569{
4570 struct name_list *p;
4571
4572 /* Output all external names that are not global. */
4573 p = extern_head;
4574 while (p)
4575 {
4576 fprintf (fp, "\t.ref\t");
4577 assemble_name (fp, p->name);
4578 fprintf (fp, "\n");
4579 p = p->next;
4580 }
4581 fprintf (fp, "\t.end\n");
4582}
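/* Illustrative output: for each name still on the external list,
   c4x_file_end emits a line of the form

	.ref	name

   (the name itself printed via assemble_name), and the file is then
   terminated with

	.end
 */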
4583
4584
cb0ca284 4585static void
eff784fe 4586c4x_check_attribute (attrib, list, decl, attributes)
8d485e2d 4587 const char *attrib;
cb0ca284
MH
4588 tree list, decl, *attributes;
4589{
4590 while (list != NULL_TREE
4ddb3ea6
MH
4591 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4592 != IDENTIFIER_POINTER (DECL_NAME (decl)))
eff784fe 4593 list = TREE_CHAIN (list);
cb0ca284 4594 if (list)
12a68f1f
JM
4595 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4596 *attributes);
cb0ca284
MH
4597}
4598
4599
12a68f1f
JM
4600static void
4601c4x_insert_attributes (decl, attributes)
cb0ca284
MH
4602 tree decl, *attributes;
4603{
4604 switch (TREE_CODE (decl))
4605 {
4606 case FUNCTION_DECL:
4607 c4x_check_attribute ("section", code_tree, decl, attributes);
4608 c4x_check_attribute ("const", pure_tree, decl, attributes);
4609 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4610 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4611 break;
4612
4613 case VAR_DECL:
4614 c4x_check_attribute ("section", data_tree, decl, attributes);
4615 break;
4616
4617 default:
4618 break;
4619 }
4620}
4621
91d231cb
JM
4622/* Table of valid machine attributes. */
4623const struct attribute_spec c4x_attribute_table[] =
4624{
4625 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4626 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4627 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4628 "interrupt"; should it be accepted here? */
4629 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4630 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4631 { NULL, 0, 0, false, false, false, NULL }
4632};
cb0ca284 4633
91d231cb
JM
4634/* Handle an attribute requiring a FUNCTION_TYPE;
4635 arguments as in struct attribute_spec.handler. */
4636static tree
4637c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4638 tree *node;
4639 tree name;
d5e4ff48 4640 tree args ATTRIBUTE_UNUSED;
91d231cb
JM
4641 int flags ATTRIBUTE_UNUSED;
4642 bool *no_add_attrs;
cb0ca284 4643{
91d231cb
JM
4644 if (TREE_CODE (*node) != FUNCTION_TYPE)
4645 {
4646 warning ("`%s' attribute only applies to functions",
4647 IDENTIFIER_POINTER (name));
4648 *no_add_attrs = true;
4649 }
4650
4651 return NULL_TREE;
cb0ca284
MH
4652}
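/* Illustrative use of the machine attributes registered above (the
   declarations below are hypothetical and not part of this file); each
   attribute must be attached to a function type, as enforced by
   c4x_handle_fntype_attribute:

	void timer_isr (void) __attribute__ ((interrupt));
	void helper (void) __attribute__ ((leaf_pretend));
 */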
4653
4654
d5e4ff48 4655/* !!! FIXME to emit RPTS correctly. */
975ab131 4656
cb0ca284
MH
4657int
4658c4x_rptb_rpts_p (insn, op)
4659 rtx insn, op;
4660{
4661 /* The next insn should be our label marking where the
4662 repeat block starts. */
4663 insn = NEXT_INSN (insn);
4664 if (GET_CODE (insn) != CODE_LABEL)
4665 {
4666 /* Some insns may have been shifted between the RPTB insn
4667 and the top label... They were probably destined to
4668 be moved out of the loop. For now, let's leave them
4669 where they are and give a fatal error when debugging. We should
4670 probably move these insns before the repeat block insn. */
4671 if (TARGET_DEBUG)
4672 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved\n",
4673 insn);
4674 return 0;
4675 }
4676
4677 /* Skip any notes. */
4678 insn = next_nonnote_insn (insn);
4679
4680 /* This should be our first insn in the loop. */
2c3c49de 4681 if (! INSN_P (insn))
cb0ca284
MH
4682 return 0;
4683
4684 /* Skip any notes. */
4685 insn = next_nonnote_insn (insn);
4686
2c3c49de 4687 if (! INSN_P (insn))
cb0ca284
MH
4688 return 0;
4689
f1c374cb 4690 if (recog_memoized (insn) != CODE_FOR_rptb_end)
cb0ca284
MH
4691 return 0;
4692
4693 if (TARGET_RPTS)
4694 return 1;
4695
4696 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4697}
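/* RPTS repeats a single instruction with interrupts locked out for the
   duration (unlike RPTB), so the code above only allows it unconditionally
   when TARGET_RPTS is selected; otherwise the repeat count must be a
   constant that satisfies the TARGET_RPTS_CYCLES threshold.  */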
4698
cb0ca284 4699
5078f5eb
HB
4700/* Check if register r11 is used as the destination of an insn. */
4701
4702static int
4703 c4x_r11_set_p (x)
4704 rtx x;
4705{
5078f5eb
HB
4706 rtx set;
4707 int i, j;
4708 const char *fmt;
4709
4710 if (x == 0)
4711 return 0;
4712
4a1f52a8 4713 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
5078f5eb
HB
4714 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4715
4a1f52a8
HB
4716 if (INSN_P (x) && (set = single_set (x)))
4717 x = SET_DEST (set);
5078f5eb 4718
4a1f52a8 4719 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
5078f5eb
HB
4720 return 1;
4721
4722 fmt = GET_RTX_FORMAT (GET_CODE (x));
4a1f52a8 4723 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5078f5eb
HB
4724 {
4725 if (fmt[i] == 'e')
4726 {
4727 if (c4x_r11_set_p (XEXP (x, i)))
4728 return 1;
4729 }
4730 else if (fmt[i] == 'E')
4731 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4732 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4733 return 1;
4734 }
4735 return 0;
4736}
4737
4738
4739/* The c4x sometimes has a problem when the insn before the laj insn
4740 sets the r11 register. Check for this situation. */
4741
4742int
4743c4x_check_laj_p (insn)
4744 rtx insn;
4745{
4746 insn = prev_nonnote_insn (insn);
4747
4748 /* If this is the start of the function no nop is needed. */
4749 if (insn == 0)
4750 return 0;
4751
4752 /* If the previous insn is a code label we have to insert a nop. This
4753 could be a jump or table jump. We can find the normal jumps by
4754 scanning the function but this will not find table jumps. */
4755 if (GET_CODE (insn) == CODE_LABEL)
4756 return 1;
4757
4758 /* If the previous insn sets register r11 we have to insert a nop. */
4759 if (c4x_r11_set_p (insn))
4760 return 1;
4761
4762 /* No nop needed. */
4763 return 0;
4764}
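/* Hedged illustration of the situation checked above (the operands are
   hypothetical): when the instruction immediately before the LAJ writes
   R11, a NOP is inserted between the two, e.g.

	ldi	*ar0, r11
	nop			; emitted because c4x_check_laj_p returned 1
	laj	_func
 */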
4765
4766
cb0ca284
MH
4767/* Adjust the cost of a scheduling dependency. Return the new cost of
4768 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4769 A set of an address register followed by a use incurs a 2 cycle
4770 stall (reduced to a single cycle on the c40 using LDA), while
4771 a read of an address register followed by a use incurs a single cycle stall. */
975ab131 4772
cb0ca284
MH
4773#define SET_USE_COST 3
4774#define SETLDA_USE_COST 2
4775#define READ_USE_COST 2
4776
c237e94a 4777static int
cb0ca284
MH
4778c4x_adjust_cost (insn, link, dep_insn, cost)
4779 rtx insn;
4780 rtx link;
4781 rtx dep_insn;
4782 int cost;
4783{
4784 /* Don't worry about this until we know what registers have been
4785 assigned. */
d001969e 4786 if (flag_schedule_insns == 0 && ! reload_completed)
cb0ca284
MH
4787 return 0;
4788
4789 /* How do we handle dependencies where a read followed by another
4790 read causes a pipeline stall? For example, a read of ar0 followed
4791 by the use of ar0 for a memory reference. It looks like we
4792 need to extend the scheduler to handle this case. */
4793
4794 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4795 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4796 so only deal with insns we know about. */
4797 if (recog_memoized (dep_insn) < 0)
4798 return 0;
4799
4800 if (REG_NOTE_KIND (link) == 0)
4801 {
4802 int max = 0;
4803
4804 /* Data dependency; DEP_INSN writes a register that INSN reads some
4805 cycles later. */
cb0ca284
MH
4806 if (TARGET_C3X)
4807 {
4808 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4809 max = SET_USE_COST > max ? SET_USE_COST : max;
4810 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4811 max = READ_USE_COST > max ? READ_USE_COST : max;
4812 }
4813 else
4814 {
4815 /* This could be significantly optimized. We should look
4816 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4817 insn uses ar0-ar7. We then test if the same register
4818 is used. The tricky bit is that some operands will
4819 use several registers... */
cb0ca284
MH
4820 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4821 max = SET_USE_COST > max ? SET_USE_COST : max;
4822 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4823 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4824 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4825 max = READ_USE_COST > max ? READ_USE_COST : max;
4826
4827 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4828 max = SET_USE_COST > max ? SET_USE_COST : max;
4829 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4830 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4831 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4832 max = READ_USE_COST > max ? READ_USE_COST : max;
4833
4834 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4835 max = SET_USE_COST > max ? SET_USE_COST : max;
4836 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4837 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4838 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4839 max = READ_USE_COST > max ? READ_USE_COST : max;
4840
4841 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4842 max = SET_USE_COST > max ? SET_USE_COST : max;
4843 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4844 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4845 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4846 max = READ_USE_COST > max ? READ_USE_COST : max;
4847
4848 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4849 max = SET_USE_COST > max ? SET_USE_COST : max;
4850 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4851 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4852 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4853 max = READ_USE_COST > max ? READ_USE_COST : max;
4854
4855 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4856 max = SET_USE_COST > max ? SET_USE_COST : max;
4857 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4858 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4859 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4860 max = READ_USE_COST > max ? READ_USE_COST : max;
4861
4862 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4863 max = SET_USE_COST > max ? SET_USE_COST : max;
4864 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4865 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4866 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4867 max = READ_USE_COST > max ? READ_USE_COST : max;
4868
4869 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4870 max = SET_USE_COST > max ? SET_USE_COST : max;
4871 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4872 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4873 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4874 max = READ_USE_COST > max ? READ_USE_COST : max;
4875
4876 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4877 max = SET_USE_COST > max ? SET_USE_COST : max;
4878 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4879 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4880
4881 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4882 max = SET_USE_COST > max ? SET_USE_COST : max;
4883 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4884 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4885 }
4886
4887 if (max)
4888 cost = max;
4889
4890 /* For other data dependencies, the default cost specified in the
4891 md is correct. */
4892 return cost;
4893 }
4894 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4895 {
4896 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4897 cycles later. */
4898
4899 /* For c4x anti dependencies, the cost is 0. */
4900 return 0;
4901 }
4902 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4903 {
4904 /* Output dependency; DEP_INSN writes a register that INSN writes some
4905 cycles later. */
4906
4907 /* For c4x output dependencies, the cost is 0. */
4908 return 0;
4909 }
4910 else
4911 abort ();
4912}
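/* Worked example of the adjustment above: if DEP_INSN sets AR3 and INSN
   uses AR3 in an indirect address, the dependence cost is raised to
   SET_USE_COST (3); if AR3 was loaded with an LDA (c4x only) it is
   SETLDA_USE_COST (2); and if DEP_INSN merely read AR3 the cost is
   READ_USE_COST (2).  */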
8a119a7d
MH
4913
4914void
f6155fda 4915c4x_init_builtins ()
8a119a7d 4916{
f6155fda
SS
4917 tree endlink = void_list_node;
4918
8a119a7d
MH
4919 builtin_function ("fast_ftoi",
4920 build_function_type
4921 (integer_type_node,
4922 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4923 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4924 builtin_function ("ansi_ftoi",
4925 build_function_type
4926 (integer_type_node,
4927 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4928 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4929 if (TARGET_C3X)
4930 builtin_function ("fast_imult",
4931 build_function_type
4932 (integer_type_node,
4933 tree_cons (NULL_TREE, integer_type_node,
4934 tree_cons (NULL_TREE,
4935 integer_type_node, endlink))),
6a2dd09a 4936 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4937 else
4938 {
4939 builtin_function ("toieee",
4940 build_function_type
4941 (double_type_node,
4942 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4943 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4944 builtin_function ("frieee",
4945 build_function_type
4946 (double_type_node,
4947 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4948 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4949 builtin_function ("fast_invf",
4950 build_function_type
4951 (double_type_node,
4952 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4953 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4954 }
4955}
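/* Illustrative user-level usage of the builtins registered above (the
   function below is hypothetical and assumes a c4x-targeted compiler).
   Note that fast_imult is only registered for the c3x, while toieee,
   frieee and fast_invf are only registered for the c4x.  */
#if 0
int
example (double x)
{
  int fast = fast_ftoi (x);	/* Fast float-to-integer conversion (C4X_BUILTIN_FIX).  */
  int ansi = ansi_ftoi (x);	/* ANSI-conforming truncation (C4X_BUILTIN_FIX_ANSI).  */

  return fast - ansi;
}
#endif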
4956
4957
4958rtx
4959c4x_expand_builtin (exp, target, subtarget, mode, ignore)
4960 tree exp;
4961 rtx target;
4962 rtx subtarget ATTRIBUTE_UNUSED;
4963 enum machine_mode mode ATTRIBUTE_UNUSED;
4964 int ignore ATTRIBUTE_UNUSED;
4965{
4966 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4967 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4968 tree arglist = TREE_OPERAND (exp, 1);
4969 tree arg0, arg1;
4970 rtx r0, r1;
4971
4972 switch (fcode)
4973 {
8a119a7d
MH
4974 case C4X_BUILTIN_FIX:
4975 arg0 = TREE_VALUE (arglist);
4976 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4977 r0 = protect_from_queue (r0, 0);
4978 if (! target || ! register_operand (target, QImode))
4979 target = gen_reg_rtx (QImode);
4980 emit_insn (gen_fixqfqi_clobber (target, r0));
4981 return target;
4982
4983 case C4X_BUILTIN_FIX_ANSI:
4984 arg0 = TREE_VALUE (arglist);
4985 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4986 r0 = protect_from_queue (r0, 0);
4987 if (! target || ! register_operand (target, QImode))
4988 target = gen_reg_rtx (QImode);
4989 emit_insn (gen_fix_truncqfqi2 (target, r0));
4990 return target;
4991
4992 case C4X_BUILTIN_MPYI:
4993 if (! TARGET_C3X)
4994 break;
4995 arg0 = TREE_VALUE (arglist);
4996 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4997 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4998 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4999 r0 = protect_from_queue (r0, 0);
5000 r1 = protect_from_queue (r1, 0);
5001 if (! target || ! register_operand (target, QImode))
5002 target = gen_reg_rtx (QImode);
5003 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
5004 return target;
5005
5006 case C4X_BUILTIN_TOIEEE:
5007 if (TARGET_C3X)
5008 break;
5009 arg0 = TREE_VALUE (arglist);
5010 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5011 r0 = protect_from_queue (r0, 0);
5012 if (! target || ! register_operand (target, QFmode))
5013 target = gen_reg_rtx (QFmode);
5014 emit_insn (gen_toieee (target, r0));
5015 return target;
5016
5017 case C4X_BUILTIN_FRIEEE:
5018 if (TARGET_C3X)
5019 break;
5020 arg0 = TREE_VALUE (arglist);
5021 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5022 put_var_into_stack (arg0);
5023 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5024 r0 = protect_from_queue (r0, 0);
5025 if (register_operand (r0, QFmode))
5026 {
5027 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5028 emit_move_insn (r1, r0);
5029 r0 = r1;
5030 }
5031 if (! target || ! register_operand (target, QFmode))
5032 target = gen_reg_rtx (QFmode);
5033 emit_insn (gen_frieee (target, r0));
5034 return target;
5035
5036 case C4X_BUILTIN_RCPF:
5037 if (TARGET_C3X)
5038 break;
5039 arg0 = TREE_VALUE (arglist);
5040 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5041 r0 = protect_from_queue (r0, 0);
5042 if (! target || ! register_operand (target, QFmode))
5043 target = gen_reg_rtx (QFmode);
5044 emit_insn (gen_rcpfqf_clobber (target, r0));
5045 return target;
5046 }
5047 return NULL_RTX;
5048}
7c262518
RH
5049
5050static void
715bdd29 5051c4x_asm_named_section (name, flags)
7c262518
RH
5052 const char *name;
5053 unsigned int flags ATTRIBUTE_UNUSED;
7c262518
RH
5054{
5055 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
5056}
e2500fed 5057
5eb99654
KG
5058static void
5059c4x_globalize_label (stream, name)
5060 FILE *stream;
5061 const char *name;
5062{
5063 default_globalize_label (stream, name);
5064 c4x_global_label (name);
5065}