]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/c4x/c4x.c
builtins.c: Rename movstr*, except for movstrict*, to movmem* and clrstr* to clrmem*.
[thirdparty/gcc.git] / gcc / config / c4x / c4x.c
CommitLineData
cb0ca284 1/* Subroutines for assembler code output on the TMS320C[34]x
28b24176 2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
400500c4 3 Free Software Foundation, Inc.
cb0ca284
MH
4
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
7
4db9c756 8This file is part of GCC.
cb0ca284 9
4db9c756 10GCC is free software; you can redistribute it and/or modify
400500c4
RK
11it under the terms of the GNU General Public License as published by
12the Free Software Foundation; either version 2, or (at your option)
13any later version.
cb0ca284 14
4db9c756 15GCC is distributed in the hope that it will be useful,
400500c4
RK
16but WITHOUT ANY WARRANTY; without even the implied warranty of
17MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18GNU General Public License for more details.
cb0ca284 19
400500c4 20You should have received a copy of the GNU General Public License
4db9c756 21along with GCC; see the file COPYING. If not, write to
400500c4
RK
22the Free Software Foundation, 59 Temple Place - Suite 330,
23Boston, MA 02111-1307, USA. */
cb0ca284
MH
24
25/* Some output-actions in c4x.md need these. */
cb0ca284 26#include "config.h"
5e6a42d9 27#include "system.h"
4977bab6
ZW
28#include "coretypes.h"
29#include "tm.h"
cb0ca284 30#include "rtl.h"
2cc07db4 31#include "tree.h"
cb0ca284
MH
32#include "regs.h"
33#include "hard-reg-set.h"
34#include "basic-block.h"
35#include "real.h"
36#include "insn-config.h"
37#include "insn-attr.h"
cb0ca284 38#include "conditions.h"
cb0ca284 39#include "output.h"
49ad7cfa 40#include "function.h"
cb0ca284 41#include "expr.h"
e78d8e51
ZW
42#include "optabs.h"
43#include "libfuncs.h"
cb0ca284
MH
44#include "flags.h"
45#include "loop.h"
46#include "recog.h"
7eb3fb5f 47#include "ggc.h"
8b97c5f8 48#include "cpplib.h"
2cc07db4 49#include "toplev.h"
245ff137 50#include "tm_p.h"
672a6f42
NB
51#include "target.h"
52#include "target-def.h"
cb0ca284 53
4fda2521
HB
54rtx smulhi3_libfunc;
55rtx umulhi3_libfunc;
56rtx fix_truncqfhi2_libfunc;
57rtx fixuns_truncqfhi2_libfunc;
58rtx fix_trunchfhi2_libfunc;
59rtx fixuns_trunchfhi2_libfunc;
60rtx floathiqf2_libfunc;
61rtx floatunshiqf2_libfunc;
62rtx floathihf2_libfunc;
63rtx floatunshihf2_libfunc;
64
cb0ca284
MH
65static int c4x_leaf_function;
66
27c38fbe 67static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
cb0ca284
MH
68
69/* Array of the smallest class containing reg number REGNO, indexed by
70 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
71 registers are available and set the class to NO_REGS for registers
72 that the target switches say are unavailable. */
73
/* Smallest register class containing each hard register, indexed by
   register number; consumed by REGNO_REG_CLASS in c4x.h.  The "Saved"
   column notes which mode (if any) the register is callee-saved in.  */
enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,			/* R0           QI, QF, HF      No.  */
  R0R1_REGS,			/* R1           QI, QF, HF      No.  */
  R2R3_REGS,			/* R2           QI, QF, HF      No.  */
  R2R3_REGS,			/* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,			/* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,			/* R7           QI, QF, HF      QF.  */
  ADDR_REGS,			/* AR0          QI              No.  */
  ADDR_REGS,			/* AR1          QI              No.  */
  ADDR_REGS,			/* AR2          QI              No.  */
  ADDR_REGS,			/* AR3          QI              QI.  */
  ADDR_REGS,			/* AR4          QI              QI.  */
  ADDR_REGS,			/* AR5          QI              QI.  */
  ADDR_REGS,			/* AR6          QI              QI.  */
  ADDR_REGS,			/* AR7          QI              QI.  */
  DP_REG,			/* DP           QI              No.  */
  INDEX_REGS,			/* IR0          QI              No.  */
  INDEX_REGS,			/* IR1          QI              No.  */
  BK_REG,			/* BK           QI              QI.  */
  SP_REG,			/* SP           QI              No.  */
  ST_REG,			/* ST           CC              No.  */
  NO_REGS,			/* DIE/IE                       No.  */
  NO_REGS,			/* IIE/IF                       No.  */
  NO_REGS,			/* IIF/IOF                      No.  */
  INT_REGS,			/* RS           QI              No.  */
  INT_REGS,			/* RE           QI              No.  */
  RC_REG,			/* RC           QI              No.  */
  EXT_REGS,			/* R8           QI, QF, HF      QI.  */
  EXT_REGS,			/* R9           QI, QF, HF      No.  */
  EXT_REGS,			/* R10          QI, QF, HF      No.  */
  EXT_REGS,			/* R11          QI, QF, HF      No.  */
};
110
/* Mode used when caller-saving each hard register (VOIDmode means the
   register is never caller-saved), indexed by register number.  Keep
   this table in step with c4x_regclass_map above.  */
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,			/* R0           QI, QF, HF      No.  */
  HFmode,			/* R1           QI, QF, HF      No.  */
  HFmode,			/* R2           QI, QF, HF      No.  */
  HFmode,			/* R3           QI, QF, HF      No.  */
  QFmode,			/* R4           QI, QF, HF      QI.  */
  QFmode,			/* R5           QI, QF, HF      QI.  */
  QImode,			/* R6           QI, QF, HF      QF.  */
  QImode,			/* R7           QI, QF, HF      QF.  */
  QImode,			/* AR0          QI              No.  */
  QImode,			/* AR1          QI              No.  */
  QImode,			/* AR2          QI              No.  */
  QImode,			/* AR3          QI              QI.  */
  QImode,			/* AR4          QI              QI.  */
  QImode,			/* AR5          QI              QI.  */
  QImode,			/* AR6          QI              QI.  */
  QImode,			/* AR7          QI              QI.  */
  VOIDmode,			/* DP           QI              No.  */
  QImode,			/* IR0          QI              No.  */
  QImode,			/* IR1          QI              No.  */
  QImode,			/* BK           QI              QI.  */
  VOIDmode,			/* SP           QI              No.  */
  VOIDmode,			/* ST           CC              No.  */
  VOIDmode,			/* DIE/IE                       No.  */
  VOIDmode,			/* IIE/IF                       No.  */
  VOIDmode,			/* IIF/IOF                      No.  */
  QImode,			/* RS           QI              No.  */
  QImode,			/* RE           QI              No.  */
  VOIDmode,			/* RC           QI              No.  */
  QFmode,			/* R8           QI, QF, HF      QI.  */
  HFmode,			/* R9           QI, QF, HF      No.  */
  HFmode,			/* R10          QI, QF, HF      No.  */
  HFmode,			/* R11          QI, QF, HF      No.  */
};
147
148
cb0ca284
MH
149/* Test and compare insns in c4x.md store the information needed to
150 generate branch and scc insns here. */
151
e2500fed
GK
152rtx c4x_compare_op0;
153rtx c4x_compare_op1;
cb0ca284 154
ddf16f18 155const char *c4x_rpts_cycles_string;
7eb3fb5f 156int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
ddf16f18 157const char *c4x_cpu_version_string;
eda45b64 158int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
cb0ca284
MH
159
160/* Pragma definitions. */
161
51fabca5
NB
162tree code_tree = NULL_TREE;
163tree data_tree = NULL_TREE;
164tree pure_tree = NULL_TREE;
165tree noreturn_tree = NULL_TREE;
166tree interrupt_tree = NULL_TREE;
eb47a205 167tree naked_tree = NULL_TREE;
7eb3fb5f 168
8d485e2d 169/* Forward declarations */
f12b3fc8
SB
170static int c4x_isr_reg_used_p (unsigned int);
171static int c4x_leaf_function_p (void);
172static int c4x_naked_function_p (void);
173static int c4x_immed_float_p (rtx);
174static int c4x_a_register (rtx);
175static int c4x_x_register (rtx);
176static int c4x_immed_int_constant (rtx);
177static int c4x_immed_float_constant (rtx);
178static int c4x_K_constant (rtx);
179static int c4x_N_constant (rtx);
180static int c4x_O_constant (rtx);
181static int c4x_R_indirect (rtx);
182static int c4x_S_indirect (rtx);
183static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
184static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
185static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
186static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
187static void c4x_file_start (void);
188static void c4x_file_end (void);
189static void c4x_check_attribute (const char *, tree, tree, tree *);
190static int c4x_r11_set_p (rtx);
191static int c4x_rptb_valid_p (rtx, rtx);
192static void c4x_reorg (void);
193static int c4x_label_ref_used_p (rtx, rtx);
194static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
91d231cb 195const struct attribute_spec c4x_attribute_table[];
f12b3fc8
SB
196static void c4x_insert_attributes (tree, tree *);
197static void c4x_asm_named_section (const char *, unsigned int);
198static int c4x_adjust_cost (rtx, rtx, rtx, int);
199static void c4x_globalize_label (FILE *, const char *);
200static bool c4x_rtx_costs (rtx, int, int, int *);
201static int c4x_address_cost (rtx);
c15c90bb 202static void c4x_init_libfuncs (void);
28b24176
KH
203static void c4x_external_libcall (rtx);
204static rtx c4x_struct_value_rtx (tree, int);
672a6f42
NB
205\f
206/* Initialize the GCC target structure. */
301d03af
RS
207#undef TARGET_ASM_BYTE_OP
208#define TARGET_ASM_BYTE_OP "\t.word\t"
209#undef TARGET_ASM_ALIGNED_HI_OP
210#define TARGET_ASM_ALIGNED_HI_OP NULL
211#undef TARGET_ASM_ALIGNED_SI_OP
212#define TARGET_ASM_ALIGNED_SI_OP NULL
1bc7c5b6
ZW
213#undef TARGET_ASM_FILE_START
214#define TARGET_ASM_FILE_START c4x_file_start
215#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
216#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
a5fe455b
ZW
217#undef TARGET_ASM_FILE_END
218#define TARGET_ASM_FILE_END c4x_file_end
301d03af 219
28b24176
KH
220#undef TARGET_ASM_EXTERNAL_LIBCALL
221#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
222
91d231cb
JM
223#undef TARGET_ATTRIBUTE_TABLE
224#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
672a6f42 225
12a68f1f
JM
226#undef TARGET_INSERT_ATTRIBUTES
227#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
228
f6155fda
SS
229#undef TARGET_INIT_BUILTINS
230#define TARGET_INIT_BUILTINS c4x_init_builtins
231
232#undef TARGET_EXPAND_BUILTIN
233#define TARGET_EXPAND_BUILTIN c4x_expand_builtin
234
c237e94a
ZW
235#undef TARGET_SCHED_ADJUST_COST
236#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
237
e4638a72
SB
238#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
239#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
240
5eb99654
KG
241#undef TARGET_ASM_GLOBALIZE_LABEL
242#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
243
3c50106f
RH
244#undef TARGET_RTX_COSTS
245#define TARGET_RTX_COSTS c4x_rtx_costs
dcefdf67
RH
246#undef TARGET_ADDRESS_COST
247#define TARGET_ADDRESS_COST c4x_address_cost
3c50106f 248
18dbd950
RS
249#undef TARGET_MACHINE_DEPENDENT_REORG
250#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
251
c15c90bb
ZW
252#undef TARGET_INIT_LIBFUNCS
253#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
254
28b24176
KH
255#undef TARGET_STRUCT_VALUE_RTX
256#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
257
f6897b10 258struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 259\f
cb0ca284
MH
260/* Override command line options.
261 Called once after all options have been parsed.
262 Mostly we process the processor
263 type and sometimes adjust other TARGET_ options. */
264
265void
f12b3fc8 266c4x_override_options (void)
cb0ca284 267{
cb0ca284
MH
268 if (c4x_rpts_cycles_string)
269 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
270 else
271 c4x_rpts_cycles = 0;
272
273 if (TARGET_C30)
274 c4x_cpu_version = 30;
275 else if (TARGET_C31)
276 c4x_cpu_version = 31;
277 else if (TARGET_C32)
278 c4x_cpu_version = 32;
eda45b64
MH
279 else if (TARGET_C33)
280 c4x_cpu_version = 33;
cb0ca284
MH
281 else if (TARGET_C40)
282 c4x_cpu_version = 40;
283 else if (TARGET_C44)
284 c4x_cpu_version = 44;
285 else
286 c4x_cpu_version = 40;
287
288 /* -mcpu=xx overrides -m40 etc. */
289 if (c4x_cpu_version_string)
798f6e6f
MH
290 {
291 const char *p = c4x_cpu_version_string;
292
293 /* Also allow -mcpu=c30 etc. */
294 if (*p == 'c' || *p == 'C')
295 p++;
296 c4x_cpu_version = atoi (p);
297 }
cb0ca284 298
eda45b64
MH
299 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
300 C40_FLAG | C44_FLAG);
cb0ca284
MH
301
302 switch (c4x_cpu_version)
303 {
304 case 30: target_flags |= C30_FLAG; break;
305 case 31: target_flags |= C31_FLAG; break;
306 case 32: target_flags |= C32_FLAG; break;
eda45b64 307 case 33: target_flags |= C33_FLAG; break;
cb0ca284
MH
308 case 40: target_flags |= C40_FLAG; break;
309 case 44: target_flags |= C44_FLAG; break;
310 default:
c725bd79 311 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
cb0ca284
MH
312 c4x_cpu_version = 40;
313 target_flags |= C40_FLAG;
314 }
315
eda45b64 316 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
cb0ca284
MH
317 target_flags |= C3X_FLAG;
318 else
319 target_flags &= ~C3X_FLAG;
320
4271f003 321 /* Convert foo / 8.0 into foo * 0.125, etc. */
748d29c1 322 set_fast_math_flags (1);
4271f003 323
4271f003
MH
324 /* We should phase out the following at some stage.
325 This provides compatibility with the old -mno-aliases option. */
4ddb3ea6 326 if (! TARGET_ALIASES && ! flag_argument_noalias)
4271f003 327 flag_argument_noalias = 1;
cb0ca284
MH
328}
329
7eb3fb5f 330
4271f003 331/* This is called before c4x_override_options. */
975ab131 332
d5e4ff48 333void
f12b3fc8
SB
334c4x_optimization_options (int level ATTRIBUTE_UNUSED,
335 int size ATTRIBUTE_UNUSED)
d5e4ff48 336{
5e6a42d9
MH
337 /* Scheduling before register allocation can screw up global
338 register allocation, especially for functions that use MPY||ADD
339 instructions. The benefit we gain we get by scheduling before
340 register allocation is probably marginal anyhow. */
341 flag_schedule_insns = 0;
d5e4ff48 342}
cb0ca284 343
975ab131 344
cb0ca284
MH
345/* Write an ASCII string. */
346
347#define C4X_ASCII_LIMIT 40
348
349void
f12b3fc8 350c4x_output_ascii (FILE *stream, const char *ptr, int len)
cb0ca284
MH
351{
352 char sbuf[C4X_ASCII_LIMIT + 1];
94eebed9 353 int s, l, special, first = 1, onlys;
cb0ca284
MH
354
355 if (len)
cb0ca284 356 fprintf (stream, "\t.byte\t");
cb0ca284 357
dfafcb4d 358 for (s = l = 0; len > 0; --len, ++ptr)
cb0ca284
MH
359 {
360 onlys = 0;
361
362 /* Escape " and \ with a \". */
dfafcb4d 363 special = *ptr == '\"' || *ptr == '\\';
cb0ca284
MH
364
365 /* If printable - add to buff. */
dfafcb4d 366 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
cb0ca284 367 {
dfafcb4d
HB
368 if (special)
369 sbuf[s++] = '\\';
cb0ca284
MH
370 sbuf[s++] = *ptr;
371 if (s < C4X_ASCII_LIMIT - 1)
372 continue;
373 onlys = 1;
374 }
375 if (s)
376 {
377 if (first)
378 first = 0;
379 else
dfafcb4d
HB
380 {
381 fputc (',', stream);
382 l++;
383 }
cb0ca284
MH
384
385 sbuf[s] = 0;
386 fprintf (stream, "\"%s\"", sbuf);
dfafcb4d
HB
387 l += s + 2;
388 if (TARGET_TI && l >= 80 && len > 1)
389 {
390 fprintf (stream, "\n\t.byte\t");
391 first = 1;
392 l = 0;
393 }
394
cb0ca284
MH
395 s = 0;
396 }
397 if (onlys)
398 continue;
399
400 if (first)
401 first = 0;
402 else
dfafcb4d
HB
403 {
404 fputc (',', stream);
405 l++;
406 }
cb0ca284
MH
407
408 fprintf (stream, "%d", *ptr);
dfafcb4d
HB
409 l += 3;
410 if (TARGET_TI && l >= 80 && len > 1)
411 {
412 fprintf (stream, "\n\t.byte\t");
413 first = 1;
414 l = 0;
415 }
cb0ca284
MH
416 }
417 if (s)
418 {
4ddb3ea6 419 if (! first)
cb0ca284
MH
420 fputc (',', stream);
421
422 sbuf[s] = 0;
423 fprintf (stream, "\"%s\"", sbuf);
424 s = 0;
425 }
426 fputc ('\n', stream);
427}
428
429
430int
f12b3fc8 431c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
cb0ca284
MH
432{
433 switch (mode)
434 {
435#if Pmode != QImode
975ab131 436 case Pmode: /* Pointer (24/32 bits). */
cb0ca284 437#endif
975ab131 438 case QImode: /* Integer (32 bits). */
bc46716b 439 return IS_INT_REGNO (regno);
cb0ca284 440
975ab131
MH
441 case QFmode: /* Float, Double (32 bits). */
442 case HFmode: /* Long Double (40 bits). */
bc46716b 443 return IS_EXT_REGNO (regno);
cb0ca284 444
975ab131
MH
445 case CCmode: /* Condition Codes. */
446 case CC_NOOVmode: /* Condition Codes. */
bc46716b 447 return IS_ST_REGNO (regno);
cb0ca284 448
975ab131 449 case HImode: /* Long Long (64 bits). */
cb0ca284
MH
450 /* We need two registers to store long longs. Note that
451 it is much easier to constrain the first register
452 to start on an even boundary. */
bc46716b
MH
453 return IS_INT_REGNO (regno)
454 && IS_INT_REGNO (regno + 1)
cb0ca284
MH
455 && (regno & 1) == 0;
456
457 default:
975ab131 458 return 0; /* We don't support these modes. */
cb0ca284
MH
459 }
460
461 return 0;
462}
463
825dda42 464/* Return nonzero if REGNO1 can be renamed to REGNO2. */
40eef757 465int
f12b3fc8 466c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
40eef757
HB
467{
468 /* We can not copy call saved registers from mode QI into QF or from
469 mode QF into QI. */
0b53f039 470 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
40eef757 471 return 0;
0b53f039 472 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
40eef757
HB
473 return 0;
474 /* We cannot copy from an extended (40 bit) register to a standard
475 (32 bit) register because we only set the condition codes for
476 extended registers. */
477 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
478 return 0;
479 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
480 return 0;
481 return 1;
482}
cb0ca284
MH
483
484/* The TI C3x C compiler register argument runtime model uses 6 registers,
485 AR2, R2, R3, RC, RS, RE.
486
487 The first two floating point arguments (float, double, long double)
488 that are found scanning from left to right are assigned to R2 and R3.
489
490 The remaining integer (char, short, int, long) or pointer arguments
491 are assigned to the remaining registers in the order AR2, R2, R3,
492 RC, RS, RE when scanning left to right, except for the last named
493 argument prior to an ellipsis denoting variable number of
494 arguments. We don't have to worry about the latter condition since
495 function.c treats the last named argument as anonymous (unnamed).
496
497 All arguments that cannot be passed in registers are pushed onto
498 the stack in reverse order (right to left). GCC handles that for us.
499
500 c4x_init_cumulative_args() is called at the start, so we can parse
501 the args to see how many floating point arguments and how many
502 integer (or pointer) arguments there are. c4x_function_arg() is
503 then called (sometimes repeatedly) for each argument (parsed left
504 to right) to obtain the register to pass the argument in, or zero
505 if the argument is to be passed on the stack. Once the compiler is
506 happy, c4x_function_arg_advance() is called.
507
508 Don't use R0 to pass arguments in, we use 0 to indicate a stack
509 argument. */
510
8b60264b 511static const int c4x_int_reglist[3][6] =
cb0ca284
MH
512{
513 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
514 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
515 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
516};
517
0139adca 518static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
cb0ca284
MH
519
520
521/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
522 function whose data type is FNTYPE.
523 For a library call, FNTYPE is 0. */
524
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   Walks the prototype's argument list counting how many float and how
   many integer/pointer arguments there are, so that c4x_function_arg
   can later assign registers per the TI calling convention.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  /* Reset all accumulated state.  */
  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  /* A function is prototyped if it has an argument type list.  */
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  /* Scan the declared parameter types (left to right) and classify
     each register-eligible argument as float or integer/pointer.  */
  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! MUST_PASS_IN_STACK (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}
595
596
597/* Update the data in CUM to advance over an argument
598 of mode MODE and data type TYPE.
599 (TYPE is null for libcalls where that information may not be available.) */
600
601void
f12b3fc8
SB
602c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
603 tree type, int named)
cb0ca284
MH
604{
605 if (TARGET_DEBUG)
606 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
607 GET_MODE_NAME (mode), named);
4ddb3ea6 608 if (! TARGET_MEMPARM
cb0ca284
MH
609 && named
610 && type
4ddb3ea6 611 && ! MUST_PASS_IN_STACK (mode, type))
cb0ca284
MH
612 {
613 /* Look for float, double, or long double argument. */
614 if (mode == QFmode || mode == HFmode)
615 cum->floats++;
616 /* Look for integer, enumeral, boolean, char, or pointer argument. */
617 else if (mode == QImode || mode == Pmode)
618 cum->ints++;
619 }
4ddb3ea6 620 else if (! TARGET_MEMPARM && ! type)
cb0ca284
MH
621 {
622 /* Handle libcall arguments. */
623 if (mode == QFmode || mode == HFmode)
624 cum->floats++;
625 else if (mode == QImode || mode == Pmode)
626 cum->ints++;
627 }
628 return;
629}
630
631
632/* Define where to put the arguments to a function. Value is zero to
633 push the argument on the stack, or a hard register in which to
634 store the argument.
635
636 MODE is the argument's machine mode.
637 TYPE is the data type of the argument (as a tree).
638 This is null for libcalls where that information may
639 not be available.
640 CUM is a variable of type CUMULATIVE_ARGS which gives info about
641 the preceding args and about the function being called.
642 NAMED is nonzero if this argument is a named parameter
643 (otherwise it is an extra parameter matching an ellipsis). */
644
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
      This is null for libcalls where that information may
      not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
      the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
      (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		  tree type, int named)
{
  int reg = 0;			/* Default to passing argument on stack.  */

  /* On the first call, convert the raw float/int counts gathered by
     c4x_init_cumulative_args into register budgets, then reset the
     counts so they can track assignment progress.  */
  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
	 since we're only calling functions in libgcc.  Thus we could
	 pass arguments for long longs in registers rather than on the
	 stack.  In the meantime, use the odd TI format.  We make the
	 assumption that we won't have more than two floating point
	 args, six integer args, and that all the arguments are of the
	 same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
722
cc7fd398 723/* C[34]x arguments grow in weird ways (downwards) that the standard
975ab131 724 varargs stuff can't handle.. */
cc7fd398 725rtx
f12b3fc8 726c4x_va_arg (tree valist, tree type)
cc7fd398
RH
727{
728 tree t;
729
730 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
731 build_int_2 (int_size_in_bytes (type), 0));
732 TREE_SIDE_EFFECTS (t) = 1;
733
734 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
735}
cb0ca284 736
f959ff1a 737
cb0ca284 738static int
f12b3fc8 739c4x_isr_reg_used_p (unsigned int regno)
cb0ca284
MH
740{
741 /* Don't save/restore FP or ST, we handle them separately. */
742 if (regno == FRAME_POINTER_REGNUM
bc46716b 743 || IS_ST_REGNO (regno))
cb0ca284
MH
744 return 0;
745
746 /* We could be a little smarter abut saving/restoring DP.
747 We'll only save if for the big memory model or if
748 we're paranoid. ;-) */
bc46716b 749 if (IS_DP_REGNO (regno))
4ddb3ea6 750 return ! TARGET_SMALL || TARGET_PARANOID;
cb0ca284
MH
751
752 /* Only save/restore regs in leaf function that are used. */
753 if (c4x_leaf_function)
754 return regs_ever_live[regno] && fixed_regs[regno] == 0;
755
756 /* Only save/restore regs that are used by the ISR and regs
757 that are likely to be used by functions the ISR calls
758 if they are not fixed. */
bc46716b 759 return IS_EXT_REGNO (regno)
cb0ca284
MH
760 || ((regs_ever_live[regno] || call_used_regs[regno])
761 && fixed_regs[regno] == 0);
762}
763
764
765static int
f12b3fc8 766c4x_leaf_function_p (void)
cb0ca284
MH
767{
768 /* A leaf function makes no calls, so we only need
769 to save/restore the registers we actually use.
770 For the global variable leaf_function to be set, we need
771 to define LEAF_REGISTERS and all that it entails.
71cc389b 772 Let's check ourselves.... */
cb0ca284
MH
773
774 if (lookup_attribute ("leaf_pretend",
775 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
776 return 1;
777
778 /* Use the leaf_pretend attribute at your own risk. This is a hack
779 to speed up ISRs that call a function infrequently where the
780 overhead of saving and restoring the additional registers is not
781 warranted. You must save and restore the additional registers
782 required by the called function. Caveat emptor. Here's enough
783 rope... */
784
785 if (leaf_function_p ())
786 return 1;
787
788 return 0;
789}
790
791
792static int
f12b3fc8 793c4x_naked_function_p (void)
cb0ca284
MH
794{
795 tree type;
796
797 type = TREE_TYPE (current_function_decl);
eb47a205 798 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
cb0ca284
MH
799}
800
801
8d485e2d 802int
f12b3fc8 803c4x_interrupt_function_p (void)
cb0ca284 804{
faed5cc3 805 const char *cfun_name;
cb0ca284
MH
806 if (lookup_attribute ("interrupt",
807 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
808 return 1;
809
975ab131 810 /* Look for TI style c_intnn. */
faed5cc3
SB
811 cfun_name = current_function_name ();
812 return cfun_name[0] == 'c'
813 && cfun_name[1] == '_'
814 && cfun_name[2] == 'i'
815 && cfun_name[3] == 'n'
816 && cfun_name[4] == 't'
817 && ISDIGIT (cfun_name[5])
818 && ISDIGIT (cfun_name[6]);
cb0ca284
MH
819}
820
cb0ca284 821void
f12b3fc8 822c4x_expand_prologue (void)
cb0ca284 823{
8d485e2d
MH
824 unsigned int regno;
825 int size = get_frame_size ();
826 rtx insn;
cb0ca284 827
8d485e2d
MH
828 /* In functions where ar3 is not used but frame pointers are still
829 specified, frame pointers are not adjusted (if >= -O2) and this
830 is used so it won't needlessly push the frame pointer. */
cb0ca284
MH
831 int dont_push_ar3;
832
eb47a205
MH
833 /* For __naked__ function don't build a prologue. */
834 if (c4x_naked_function_p ())
cb0ca284 835 {
cb0ca284
MH
836 return;
837 }
8d485e2d 838
cb0ca284
MH
839 /* For __interrupt__ function build specific prologue. */
840 if (c4x_interrupt_function_p ())
841 {
842 c4x_leaf_function = c4x_leaf_function_p ();
8d485e2d
MH
843
844 insn = emit_insn (gen_push_st ());
845 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
846 if (size)
847 {
8d485e2d
MH
848 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
849 RTX_FRAME_RELATED_P (insn) = 1;
850 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
851 gen_rtx_REG (QImode, SP_REGNO)));
852 RTX_FRAME_RELATED_P (insn) = 1;
69f6e760
MH
853 /* We require that an ISR uses fewer than 32768 words of
854 local variables, otherwise we have to go to lots of
855 effort to save a register, load it with the desired size,
856 adjust the stack pointer, and then restore the modified
857 register. Frankly, I think it is a poor ISR that
858 requires more than 32767 words of local temporary
859 storage! */
cb0ca284 860 if (size > 32767)
c725bd79 861 error ("ISR %s requires %d words of local vars, max is 32767",
faed5cc3 862 current_function_name (), size);
400500c4 863
8d485e2d
MH
864 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
865 gen_rtx_REG (QImode, SP_REGNO),
06dd70c6 866 GEN_INT (size)));
8d485e2d 867 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
868 }
869 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
870 {
871 if (c4x_isr_reg_used_p (regno))
872 {
8d485e2d
MH
873 if (regno == DP_REGNO)
874 {
875 insn = emit_insn (gen_push_dp ());
876 RTX_FRAME_RELATED_P (insn) = 1;
877 }
878 else
879 {
880 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
881 RTX_FRAME_RELATED_P (insn) = 1;
882 if (IS_EXT_REGNO (regno))
883 {
884 insn = emit_insn (gen_pushqf
885 (gen_rtx_REG (QFmode, regno)));
886 RTX_FRAME_RELATED_P (insn) = 1;
887 }
888 }
cb0ca284
MH
889 }
890 }
891 /* We need to clear the repeat mode flag if the ISR is
892 going to use a RPTB instruction or uses the RC, RS, or RE
893 registers. */
894 if (regs_ever_live[RC_REGNO]
895 || regs_ever_live[RS_REGNO]
896 || regs_ever_live[RE_REGNO])
8d485e2d
MH
897 {
898 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
899 RTX_FRAME_RELATED_P (insn) = 1;
900 }
cb0ca284
MH
901
902 /* Reload DP reg if we are paranoid about some turkey
903 violating small memory model rules. */
904 if (TARGET_SMALL && TARGET_PARANOID)
8d485e2d
MH
905 {
906 insn = emit_insn (gen_set_ldp_prologue
907 (gen_rtx_REG (QImode, DP_REGNO),
908 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
909 RTX_FRAME_RELATED_P (insn) = 1;
910 }
cb0ca284
MH
911 }
912 else
913 {
914 if (frame_pointer_needed)
915 {
916 if ((size != 0)
917 || (current_function_args_size != 0)
918 || (optimize < 2))
919 {
8d485e2d
MH
920 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
921 RTX_FRAME_RELATED_P (insn) = 1;
922 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
923 gen_rtx_REG (QImode, SP_REGNO)));
924 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
925 dont_push_ar3 = 1;
926 }
927 else
928 {
929 /* Since ar3 is not used, we don't need to push it. */
930 dont_push_ar3 = 1;
931 }
932 }
933 else
934 {
71cc389b 935 /* If we use ar3, we need to push it. */
cb0ca284
MH
936 dont_push_ar3 = 0;
937 if ((size != 0) || (current_function_args_size != 0))
938 {
939 /* If we are omitting the frame pointer, we still have
940 to make space for it so the offsets are correct
941 unless we don't use anything on the stack at all. */
942 size += 1;
943 }
944 }
8d485e2d 945
cb0ca284
MH
946 if (size > 32767)
947 {
948 /* Local vars are too big, it will take multiple operations
949 to increment SP. */
950 if (TARGET_C3X)
951 {
8d485e2d
MH
952 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
953 GEN_INT(size >> 16)));
954 RTX_FRAME_RELATED_P (insn) = 1;
955 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
956 gen_rtx_REG (QImode, R1_REGNO),
957 GEN_INT(-16)));
958 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
959 }
960 else
8d485e2d
MH
961 {
962 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
963 GEN_INT(size & ~0xffff)));
964 RTX_FRAME_RELATED_P (insn) = 1;
965 }
966 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
967 gen_rtx_REG (QImode, R1_REGNO),
968 GEN_INT(size & 0xffff)));
969 RTX_FRAME_RELATED_P (insn) = 1;
970 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
971 gen_rtx_REG (QImode, SP_REGNO),
972 gen_rtx_REG (QImode, R1_REGNO)));
973 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
974 }
975 else if (size != 0)
976 {
977 /* Local vars take up less than 32767 words, so we can directly
978 add the number. */
8d485e2d
MH
979 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
980 gen_rtx_REG (QImode, SP_REGNO),
981 GEN_INT (size)));
982 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 983 }
8d485e2d 984
cb0ca284
MH
985 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
986 {
4ddb3ea6 987 if (regs_ever_live[regno] && ! call_used_regs[regno])
cb0ca284 988 {
0b53f039 989 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
cb0ca284 990 {
cb0ca284 991 if (TARGET_PRESERVE_FLOAT)
8d485e2d
MH
992 {
993 insn = emit_insn (gen_pushqi
994 (gen_rtx_REG (QImode, regno)));
995 RTX_FRAME_RELATED_P (insn) = 1;
996 }
997 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
998 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 999 }
4ddb3ea6 1000 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
cb0ca284 1001 {
8d485e2d
MH
1002 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1003 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1004 }
1005 }
1006 }
1007 }
1008}
1009
1010
cb0ca284 1011void
f12b3fc8 1012c4x_expand_epilogue(void)
cb0ca284
MH
1013{
1014 int regno;
8d485e2d 1015 int jump = 0;
cb0ca284
MH
1016 int dont_pop_ar3;
1017 rtx insn;
8d485e2d
MH
1018 int size = get_frame_size ();
1019
eb47a205
MH
1020 /* For __naked__ function build no epilogue. */
1021 if (c4x_naked_function_p ())
cb0ca284 1022 {
8d485e2d
MH
1023 insn = emit_jump_insn (gen_return_from_epilogue ());
1024 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1025 return;
1026 }
1027
cb0ca284
MH
1028 /* For __interrupt__ function build specific epilogue. */
1029 if (c4x_interrupt_function_p ())
1030 {
1031 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1032 {
4ddb3ea6 1033 if (! c4x_isr_reg_used_p (regno))
cb0ca284 1034 continue;
8d485e2d
MH
1035 if (regno == DP_REGNO)
1036 {
1037 insn = emit_insn (gen_pop_dp ());
1038 RTX_FRAME_RELATED_P (insn) = 1;
1039 }
1040 else
1041 {
1042 /* We have to use unspec because the compiler will delete insns
1043 that are not call-saved. */
1044 if (IS_EXT_REGNO (regno))
1045 {
1046 insn = emit_insn (gen_popqf_unspec
1047 (gen_rtx_REG (QFmode, regno)));
1048 RTX_FRAME_RELATED_P (insn) = 1;
1049 }
1050 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1051 RTX_FRAME_RELATED_P (insn) = 1;
1052 }
cb0ca284
MH
1053 }
1054 if (size)
1055 {
8d485e2d
MH
1056 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1057 gen_rtx_REG (QImode, SP_REGNO),
1058 GEN_INT(size)));
1059 RTX_FRAME_RELATED_P (insn) = 1;
1060 insn = emit_insn (gen_popqi
1061 (gen_rtx_REG (QImode, AR3_REGNO)));
1062 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 1063 }
8d485e2d
MH
1064 insn = emit_insn (gen_pop_st ());
1065 RTX_FRAME_RELATED_P (insn) = 1;
1066 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1067 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1068 }
1069 else
1070 {
1071 if (frame_pointer_needed)
1072 {
1073 if ((size != 0)
1074 || (current_function_args_size != 0)
1075 || (optimize < 2))
1076 {
8d485e2d
MH
1077 insn = emit_insn
1078 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1079 gen_rtx_MEM (QImode,
1080 gen_rtx_PLUS
1081 (QImode, gen_rtx_REG (QImode,
1082 AR3_REGNO),
a556fd39 1083 constm1_rtx))));
8d485e2d
MH
1084 RTX_FRAME_RELATED_P (insn) = 1;
1085
cb0ca284
MH
1086 /* We already have the return value and the fp,
1087 so we need to add those to the stack. */
1088 size += 2;
8d485e2d 1089 jump = 1;
cb0ca284
MH
1090 dont_pop_ar3 = 1;
1091 }
1092 else
1093 {
1094 /* Since ar3 is not used for anything, we don't need to
1095 pop it. */
1096 dont_pop_ar3 = 1;
1097 }
1098 }
1099 else
1100 {
975ab131 1101 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
cb0ca284
MH
1102 if (size || current_function_args_size)
1103 {
839a4992 1104 /* If we are omitting the frame pointer, we still have
cb0ca284
MH
1105 to make space for it so the offsets are correct
1106 unless we don't use anything on the stack at all. */
1107 size += 1;
1108 }
1109 }
8d485e2d 1110
cb0ca284
MH
1111 /* Now restore the saved registers, putting in the delayed branch
1112 where required. */
1113 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1114 {
4ddb3ea6 1115 if (regs_ever_live[regno] && ! call_used_regs[regno])
cb0ca284
MH
1116 {
1117 if (regno == AR3_REGNO && dont_pop_ar3)
1118 continue;
8d485e2d 1119
0b53f039 1120 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
cb0ca284 1121 {
8d485e2d
MH
1122 insn = emit_insn (gen_popqf_unspec
1123 (gen_rtx_REG (QFmode, regno)));
1124 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1125 if (TARGET_PRESERVE_FLOAT)
1126 {
8d485e2d
MH
1127 insn = emit_insn (gen_popqi_unspec
1128 (gen_rtx_REG (QImode, regno)));
1129 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1130 }
1131 }
1132 else
8d485e2d
MH
1133 {
1134 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1136 }
cb0ca284
MH
1137 }
1138 }
8d485e2d 1139
cb0ca284
MH
1140 if (frame_pointer_needed)
1141 {
1142 if ((size != 0)
1143 || (current_function_args_size != 0)
1144 || (optimize < 2))
1145 {
1146 /* Restore the old FP. */
8d485e2d
MH
1147 insn = emit_insn
1148 (gen_movqi
1149 (gen_rtx_REG (QImode, AR3_REGNO),
1150 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1151
1152 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1153 }
1154 }
8d485e2d 1155
cb0ca284
MH
1156 if (size > 32767)
1157 {
1158 /* Local vars are too big, it will take multiple operations
1159 to decrement SP. */
1160 if (TARGET_C3X)
1161 {
8d485e2d
MH
1162 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1163 GEN_INT(size >> 16)));
1164 RTX_FRAME_RELATED_P (insn) = 1;
1165 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1166 gen_rtx_REG (QImode, R3_REGNO),
1167 GEN_INT(-16)));
1168 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1169 }
1170 else
8d485e2d
MH
1171 {
1172 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1173 GEN_INT(size & ~0xffff)));
1174 RTX_FRAME_RELATED_P (insn) = 1;
1175 }
1176 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1177 gen_rtx_REG (QImode, R3_REGNO),
1178 GEN_INT(size & 0xffff)));
1179 RTX_FRAME_RELATED_P (insn) = 1;
1180 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1181 gen_rtx_REG (QImode, SP_REGNO),
1182 gen_rtx_REG (QImode, R3_REGNO)));
1183 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284
MH
1184 }
1185 else if (size != 0)
1186 {
1187 /* Local vars take up less than 32768 words, so we can directly
1188 subtract the number. */
8d485e2d
MH
1189 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1190 gen_rtx_REG (QImode, SP_REGNO),
1191 GEN_INT(size)));
1192 RTX_FRAME_RELATED_P (insn) = 1;
1193 }
1194
1195 if (jump)
1196 {
39c1728e
HB
1197 insn = emit_jump_insn (gen_return_indirect_internal
1198 (gen_rtx_REG (QImode, R2_REGNO)));
8d485e2d
MH
1199 RTX_FRAME_RELATED_P (insn) = 1;
1200 }
1201 else
1202 {
1203 insn = emit_jump_insn (gen_return_from_epilogue ());
1204 RTX_FRAME_RELATED_P (insn) = 1;
cb0ca284 1205 }
cb0ca284
MH
1206 }
1207}
1208
8d485e2d 1209
cb0ca284 1210int
f12b3fc8 1211c4x_null_epilogue_p (void)
cb0ca284
MH
1212{
1213 int regno;
1214
1215 if (reload_completed
eb47a205 1216 && ! c4x_naked_function_p ()
4ddb3ea6
MH
1217 && ! c4x_interrupt_function_p ()
1218 && ! current_function_calls_alloca
1219 && ! current_function_args_size
4ddb3ea6
MH
1220 && ! (optimize < 2)
1221 && ! get_frame_size ())
cb0ca284
MH
1222 {
1223 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
4ddb3ea6 1224 if (regs_ever_live[regno] && ! call_used_regs[regno]
cb0ca284 1225 && (regno != AR3_REGNO))
eb47a205
MH
1226 return 1;
1227 return 0;
cb0ca284 1228 }
eb47a205 1229 return 1;
cb0ca284
MH
1230}
1231
8d485e2d 1232
50c33087 1233int
f12b3fc8 1234c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
50c33087
MH
1235{
1236 rtx op0 = operands[0];
1237 rtx op1 = operands[1];
1238
1239 if (! reload_in_progress
1240 && ! REG_P (op0)
1241 && ! REG_P (op1)
1242 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1243 op1 = force_reg (mode, op1);
1244
2718204c
MH
1245 if (GET_CODE (op1) == LO_SUM
1246 && GET_MODE (op1) == Pmode
1247 && dp_reg_operand (XEXP (op1, 0), mode))
1248 {
1249 /* expand_increment will sometimes create a LO_SUM immediate
1ae58c30 1250 address. Undo this silliness. */
2718204c
MH
1251 op1 = XEXP (op1, 1);
1252 }
9c3602e4
MH
1253
1254 if (symbolic_address_operand (op1, mode))
50c33087
MH
1255 {
1256 if (TARGET_LOAD_ADDRESS)
1257 {
1258 /* Alias analysis seems to do a better job if we force
1259 constant addresses to memory after reload. */
1260 emit_insn (gen_load_immed_address (op0, op1));
1261 return 1;
1262 }
1263 else
1264 {
1265 /* Stick symbol or label address into the constant pool. */
1266 op1 = force_const_mem (Pmode, op1);
1267 }
1268 }
1269 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1270 {
1271 /* We could be a lot smarter about loading some of these
1272 constants... */
1273 op1 = force_const_mem (mode, op1);
1274 }
50c33087
MH
1275
1276 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1277 and emit associated (HIGH (SYMREF)) if large memory model.
1278 c4x_legitimize_address could be used to do this,
1279 perhaps by calling validize_address. */
31445126
MH
1280 if (TARGET_EXPOSE_LDP
1281 && ! (reload_in_progress || reload_completed)
50c33087 1282 && GET_CODE (op1) == MEM
55310df7 1283 && symbolic_address_operand (XEXP (op1, 0), Pmode))
50c33087
MH
1284 {
1285 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1286 if (! TARGET_SMALL)
1287 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1288 op1 = change_address (op1, mode,
1289 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1290 }
1291
31445126
MH
1292 if (TARGET_EXPOSE_LDP
1293 && ! (reload_in_progress || reload_completed)
50c33087 1294 && GET_CODE (op0) == MEM
55310df7 1295 && symbolic_address_operand (XEXP (op0, 0), Pmode))
50c33087
MH
1296 {
1297 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1298 if (! TARGET_SMALL)
1299 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1300 op0 = change_address (op0, mode,
1301 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1302 }
1303
ebcc44f4
MH
1304 if (GET_CODE (op0) == SUBREG
1305 && mixed_subreg_operand (op0, mode))
1306 {
1307 /* We should only generate these mixed mode patterns
1308 during RTL generation. If we need do it later on
1309 then we'll have to emit patterns that won't clobber CC. */
1310 if (reload_in_progress || reload_completed)
1311 abort ();
1312 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1313 op0 = SUBREG_REG (op0);
1314 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1315 {
1316 op0 = copy_rtx (op0);
1317 PUT_MODE (op0, QImode);
1318 }
1319 else
1320 abort ();
1321
1322 if (mode == QFmode)
1323 emit_insn (gen_storeqf_int_clobber (op0, op1));
1324 else
1325 abort ();
1326 return 1;
1327 }
1328
1329 if (GET_CODE (op1) == SUBREG
1330 && mixed_subreg_operand (op1, mode))
1331 {
1332 /* We should only generate these mixed mode patterns
1333 during RTL generation. If we need do it later on
1334 then we'll have to emit patterns that won't clobber CC. */
1335 if (reload_in_progress || reload_completed)
1336 abort ();
1337 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1338 op1 = SUBREG_REG (op1);
1339 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1340 {
1341 op1 = copy_rtx (op1);
1342 PUT_MODE (op1, QImode);
1343 }
1344 else
1345 abort ();
1346
1347 if (mode == QFmode)
1348 emit_insn (gen_loadqf_int_clobber (op0, op1));
1349 else
1350 abort ();
1351 return 1;
1352 }
1353
8d485e2d
MH
1354 if (mode == QImode
1355 && reg_operand (op0, mode)
1356 && const_int_operand (op1, mode)
1357 && ! IS_INT16_CONST (INTVAL (op1))
1358 && ! IS_HIGH_CONST (INTVAL (op1)))
1359 {
1360 emit_insn (gen_loadqi_big_constant (op0, op1));
1361 return 1;
1362 }
1363
1364 if (mode == HImode
1365 && reg_operand (op0, mode)
1366 && const_int_operand (op1, mode))
1367 {
1368 emit_insn (gen_loadhi_big_constant (op0, op1));
1369 return 1;
1370 }
1371
50c33087
MH
1372 /* Adjust operands in case we have modified them. */
1373 operands[0] = op0;
1374 operands[1] = op1;
1375
1376 /* Emit normal pattern. */
1377 return 0;
1378}
1379
1380
cb0ca284 1381void
f12b3fc8
SB
1382c4x_emit_libcall (rtx libcall, enum rtx_code code,
1383 enum machine_mode dmode, enum machine_mode smode,
1384 int noperands, rtx *operands)
cb0ca284
MH
1385{
1386 rtx ret;
1387 rtx insns;
cb0ca284
MH
1388 rtx equiv;
1389
1390 start_sequence ();
cb0ca284
MH
1391 switch (noperands)
1392 {
1393 case 2:
1394 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1395 operands[1], smode);
1c563bed 1396 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
cb0ca284
MH
1397 break;
1398
1399 case 3:
1400 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1401 operands[1], smode, operands[2], smode);
1c563bed 1402 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
cb0ca284
MH
1403 break;
1404
1405 default:
400500c4 1406 abort ();
cb0ca284
MH
1407 }
1408
1409 insns = get_insns ();
1410 end_sequence ();
1411 emit_libcall_block (insns, operands[0], ret, equiv);
1412}
1413
1414
1415void
f12b3fc8
SB
1416c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
1417 enum machine_mode mode, rtx *operands)
cb0ca284 1418{
8d485e2d 1419 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
cb0ca284
MH
1420}
1421
50c33087 1422
cb0ca284 1423void
f12b3fc8
SB
1424c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
1425 enum machine_mode mode, rtx *operands)
cb0ca284
MH
1426{
1427 rtx ret;
1428 rtx insns;
cb0ca284
MH
1429 rtx equiv;
1430
1431 start_sequence ();
cb0ca284
MH
1432 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1433 operands[1], mode, operands[2], mode);
d5e4ff48
MH
1434 equiv = gen_rtx_TRUNCATE (mode,
1435 gen_rtx_LSHIFTRT (HImode,
1436 gen_rtx_MULT (HImode,
1c563bed
KH
1437 gen_rtx_fmt_e (code, HImode, operands[1]),
1438 gen_rtx_fmt_e (code, HImode, operands[2])),
e27f8c8a 1439 GEN_INT (32)));
cb0ca284
MH
1440 insns = get_insns ();
1441 end_sequence ();
1442 emit_libcall_block (insns, operands[0], ret, equiv);
1443}
1444
1445
cb0ca284 1446int
1e903c61 1447c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
cb0ca284 1448{
975ab131
MH
1449 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1450 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1451 rtx disp = NULL_RTX; /* Displacement. */
cb0ca284
MH
1452 enum rtx_code code;
1453
1454 code = GET_CODE (addr);
1455 switch (code)
1456 {
1457 /* Register indirect with auto increment/decrement. We don't
05713b80 1458 allow SP here---push_operand should recognize an operand
cb0ca284
MH
1459 being pushed on the stack. */
1460
1461 case PRE_DEC:
ebcc44f4 1462 case PRE_INC:
cb0ca284
MH
1463 case POST_DEC:
1464 if (mode != QImode && mode != QFmode)
1465 return 0;
ebcc44f4 1466
cb0ca284
MH
1467 case POST_INC:
1468 base = XEXP (addr, 0);
4ddb3ea6 1469 if (! REG_P (base))
cb0ca284
MH
1470 return 0;
1471 break;
1472
1473 case PRE_MODIFY:
1474 case POST_MODIFY:
1475 {
1476 rtx op0 = XEXP (addr, 0);
1477 rtx op1 = XEXP (addr, 1);
1478
1479 if (mode != QImode && mode != QFmode)
1480 return 0;
1481
4ddb3ea6 1482 if (! REG_P (op0)
cb0ca284
MH
1483 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1484 return 0;
1485 base = XEXP (op1, 0);
1e903c61
MH
1486 if (! REG_P (base))
1487 return 0;
1488 if (REGNO (base) != REGNO (op0))
cb0ca284
MH
1489 return 0;
1490 if (REG_P (XEXP (op1, 1)))
1491 indx = XEXP (op1, 1);
1492 else
1493 disp = XEXP (op1, 1);
1494 }
1495 break;
1496
1497 /* Register indirect. */
1498 case REG:
1499 base = addr;
1500 break;
1501
1502 /* Register indirect with displacement or index. */
1503 case PLUS:
1504 {
1505 rtx op0 = XEXP (addr, 0);
1506 rtx op1 = XEXP (addr, 1);
1507 enum rtx_code code0 = GET_CODE (op0);
1508
1509 switch (code0)
1510 {
cb0ca284
MH
1511 case REG:
1512 if (REG_P (op1))
1513 {
975ab131 1514 base = op0; /* Base + index. */
cb0ca284 1515 indx = op1;
bc46716b 1516 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
cb0ca284
MH
1517 {
1518 base = op1;
1519 indx = op0;
1520 }
1521 }
1522 else
1523 {
975ab131 1524 base = op0; /* Base + displacement. */
cb0ca284
MH
1525 disp = op1;
1526 }
1527 break;
1528
1529 default:
1530 return 0;
1531 }
1532 }
1533 break;
1534
50c33087
MH
1535 /* Direct addressing with DP register. */
1536 case LO_SUM:
1537 {
1538 rtx op0 = XEXP (addr, 0);
1539 rtx op1 = XEXP (addr, 1);
1540
1541 /* HImode and HFmode direct memory references aren't truly
1542 offsettable (consider case at end of data page). We
1543 probably get better code by loading a pointer and using an
1544 indirect memory reference. */
1545 if (mode == HImode || mode == HFmode)
1546 return 0;
1547
1548 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1549 return 0;
1550
1551 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1552 return 1;
1553
1554 if (GET_CODE (op1) == CONST)
5078f5eb 1555 return 1;
50c33087
MH
1556 return 0;
1557 }
1558 break;
1559
cb0ca284
MH
1560 /* Direct addressing with some work for the assembler... */
1561 case CONST:
cb0ca284 1562 /* Direct addressing. */
cb0ca284 1563 case LABEL_REF:
50c33087 1564 case SYMBOL_REF:
31445126
MH
1565 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1566 return 1;
50c33087 1567 /* These need to be converted to a LO_SUM (...).
31445126 1568 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
50c33087 1569 return 0;
cb0ca284
MH
1570
1571 /* Do not allow direct memory access to absolute addresses.
31445126 1572 This is more pain than it's worth, especially for the
cb0ca284
MH
1573 small memory model where we can't guarantee that
1574 this address is within the data page---we don't want
1575 to modify the DP register in the small memory model,
1576 even temporarily, since an interrupt can sneak in.... */
1577 case CONST_INT:
1578 return 0;
1579
1580 /* Indirect indirect addressing. */
1581 case MEM:
1582 return 0;
1583
1584 case CONST_DOUBLE:
c725bd79 1585 fatal_insn ("using CONST_DOUBLE for address", addr);
cb0ca284
MH
1586
1587 default:
1588 return 0;
1589 }
1590
1591 /* Validate the base register. */
1592 if (base)
1593 {
1594 /* Check that the address is offsettable for HImode and HFmode. */
1595 if (indx && (mode == HImode || mode == HFmode))
1596 return 0;
1597
1598 /* Handle DP based stuff. */
1599 if (REGNO (base) == DP_REGNO)
1600 return 1;
4ddb3ea6 1601 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
cb0ca284 1602 return 0;
bc46716b 1603 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
cb0ca284
MH
1604 return 0;
1605 }
1606
1607 /* Now validate the index register. */
1608 if (indx)
1609 {
1610 if (GET_CODE (indx) != REG)
1611 return 0;
4ddb3ea6 1612 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
cb0ca284 1613 return 0;
bc46716b 1614 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
cb0ca284
MH
1615 return 0;
1616 }
1617
1618 /* Validate displacement. */
1619 if (disp)
1620 {
1621 if (GET_CODE (disp) != CONST_INT)
1622 return 0;
1623 if (mode == HImode || mode == HFmode)
1624 {
1625 /* The offset displacement must be legitimate. */
4ddb3ea6 1626 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
cb0ca284
MH
1627 return 0;
1628 }
1629 else
1630 {
4ddb3ea6 1631 if (! IS_DISP8_CONST (INTVAL (disp)))
cb0ca284
MH
1632 return 0;
1633 }
1634 /* Can't add an index with a disp. */
1635 if (indx)
1636 return 0;
1637 }
1638 return 1;
1639}
1640
1641
1642rtx
f12b3fc8
SB
1643c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1644 enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284 1645{
305902b0
MH
1646 if (GET_CODE (orig) == SYMBOL_REF
1647 || GET_CODE (orig) == LABEL_REF)
50c33087 1648 {
2718204c
MH
1649 if (mode == HImode || mode == HFmode)
1650 {
1651 /* We need to force the address into
1652 a register so that it is offsettable. */
1653 rtx addr_reg = gen_reg_rtx (Pmode);
1654 emit_move_insn (addr_reg, orig);
1655 return addr_reg;
1656 }
1657 else
1658 {
1659 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1660
1661 if (! TARGET_SMALL)
1662 emit_insn (gen_set_ldp (dp_reg, orig));
1663
1664 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1665 }
50c33087
MH
1666 }
1667
cb0ca284
MH
1668 return NULL_RTX;
1669}
1670
1671
1672/* Provide the costs of an addressing mode that contains ADDR.
1673 If ADDR is not a valid address, its cost is irrelevant.
f1ba665b 1674 This is used in cse and loop optimization to determine
cb0ca284
MH
1675 if it is worthwhile storing a common address into a register.
1676 Unfortunately, the C4x address cost depends on other operands. */
1677
dcefdf67 1678static int
f12b3fc8 1679c4x_address_cost (rtx addr)
cb0ca284
MH
1680{
1681 switch (GET_CODE (addr))
1682 {
1683 case REG:
1684 return 1;
1685
cb0ca284
MH
1686 case POST_INC:
1687 case POST_DEC:
1688 case PRE_INC:
1689 case PRE_DEC:
1690 return 1;
1691
50c33087 1692 /* These shouldn't be directly generated. */
cb0ca284
MH
1693 case SYMBOL_REF:
1694 case LABEL_REF:
50c33087
MH
1695 case CONST:
1696 return 10;
1697
1698 case LO_SUM:
1699 {
1700 rtx op1 = XEXP (addr, 1);
1701
1702 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1703 return TARGET_SMALL ? 3 : 4;
1704
1705 if (GET_CODE (op1) == CONST)
1706 {
1707 rtx offset = const0_rtx;
1708
1709 op1 = eliminate_constant_term (op1, &offset);
1710
1ac7a7f5 1711 /* ??? These costs need rethinking... */
50c33087
MH
1712 if (GET_CODE (op1) == LABEL_REF)
1713 return 3;
1714
1715 if (GET_CODE (op1) != SYMBOL_REF)
1716 return 4;
1717
1718 if (INTVAL (offset) == 0)
1719 return 3;
1720
1721 return 4;
1722 }
1723 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1724 }
1725 break;
cb0ca284
MH
1726
1727 case PLUS:
1728 {
1729 register rtx op0 = XEXP (addr, 0);
1730 register rtx op1 = XEXP (addr, 1);
1731
1732 if (GET_CODE (op0) != REG)
1733 break;
1734
1735 switch (GET_CODE (op1))
1736 {
1737 default:
1738 break;
1739
1740 case REG:
21034cc5
MH
1741 /* This cost for REG+REG must be greater than the cost
1742 for REG if we want autoincrement addressing modes. */
cb0ca284
MH
1743 return 2;
1744
1745 case CONST_INT:
798f6e6f
MH
1746 /* The following tries to improve GIV combination
1747 in strength reduce but appears not to help. */
1748 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1749 return 1;
1750
cb0ca284
MH
1751 if (IS_DISP1_CONST (INTVAL (op1)))
1752 return 1;
1753
4ddb3ea6 1754 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
cb0ca284
MH
1755 return 2;
1756
1757 return 3;
1758 }
1759 }
1760 default:
933cddd0 1761 break;
cb0ca284
MH
1762 }
1763
1764 return 4;
1765}
1766
1767
1768rtx
f12b3fc8 1769c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
cb0ca284
MH
1770{
1771 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1772 rtx cc_reg;
1773
1774 if (mode == CC_NOOVmode
1775 && (code == LE || code == GE || code == LT || code == GT))
1776 return NULL_RTX;
1777
d5e4ff48
MH
1778 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1779 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1780 gen_rtx_COMPARE (mode, x, y)));
cb0ca284
MH
1781 return cc_reg;
1782}
1783
1784char *
f12b3fc8 1785c4x_output_cbranch (const char *form, rtx seq)
cb0ca284
MH
1786{
1787 int delayed = 0;
1788 int annultrue = 0;
1789 int annulfalse = 0;
1790 rtx delay;
1791 char *cp;
4271f003 1792 static char str[100];
cb0ca284
MH
1793
1794 if (final_sequence)
1795 {
1796 delay = XVECEXP (final_sequence, 0, 1);
4ddb3ea6
MH
1797 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1798 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
4271f003 1799 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
cb0ca284 1800 }
4271f003
MH
1801 strcpy (str, form);
1802 cp = &str [strlen (str)];
cb0ca284
MH
1803 if (delayed)
1804 {
1805 *cp++ = '%';
1806 *cp++ = '#';
1807 }
1808 if (annultrue)
1809 {
1810 *cp++ = 'a';
1811 *cp++ = 't';
1812 }
1813 if (annulfalse)
1814 {
1815 *cp++ = 'a';
1816 *cp++ = 'f';
1817 }
1818 *cp++ = '\t';
1819 *cp++ = '%';
1820 *cp++ = 'l';
1821 *cp++ = '1';
1822 *cp = 0;
1823 return str;
1824}
1825
cb0ca284 1826void
f12b3fc8 1827c4x_print_operand (FILE *file, rtx op, int letter)
cb0ca284
MH
1828{
1829 rtx op1;
1830 enum rtx_code code;
1831
1832 switch (letter)
1833 {
975ab131 1834 case '#': /* Delayed. */
cb0ca284 1835 if (final_sequence)
761c70aa 1836 fprintf (file, "d");
cb0ca284
MH
1837 return;
1838 }
1839
1840 code = GET_CODE (op);
1841 switch (letter)
1842 {
975ab131 1843 case 'A': /* Direct address. */
dfafcb4d 1844 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
761c70aa 1845 fprintf (file, "@");
cb0ca284
MH
1846 break;
1847
975ab131 1848 case 'H': /* Sethi. */
50c33087
MH
1849 output_addr_const (file, op);
1850 return;
cb0ca284 1851
975ab131 1852 case 'I': /* Reversed condition. */
cb0ca284
MH
1853 code = reverse_condition (code);
1854 break;
1855
975ab131 1856 case 'L': /* Log 2 of constant. */
cb0ca284
MH
1857 if (code != CONST_INT)
1858 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1859 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1860 return;
1861
975ab131 1862 case 'N': /* Ones complement of small constant. */
cb0ca284
MH
1863 if (code != CONST_INT)
1864 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
13a8b496 1865 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
cb0ca284
MH
1866 return;
1867
975ab131 1868 case 'K': /* Generate ldp(k) if direct address. */
4ddb3ea6 1869 if (! TARGET_SMALL
cb0ca284 1870 && code == MEM
50c33087
MH
1871 && GET_CODE (XEXP (op, 0)) == LO_SUM
1872 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1873 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
cb0ca284
MH
1874 {
1875 op1 = XEXP (XEXP (op, 0), 1);
1876 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1877 {
761c70aa 1878 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
f6155fda 1879 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
761c70aa 1880 fprintf (file, "\n");
cb0ca284
MH
1881 }
1882 }
1883 return;
1884
975ab131
MH
1885 case 'M': /* Generate ldp(k) if direct address. */
1886 if (! TARGET_SMALL /* Only used in asm statements. */
cb0ca284
MH
1887 && code == MEM
1888 && (GET_CODE (XEXP (op, 0)) == CONST
1889 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1890 {
761c70aa 1891 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
cb0ca284 1892 output_address (XEXP (op, 0));
761c70aa 1893 fprintf (file, "\n\t");
cb0ca284
MH
1894 }
1895 return;
1896
975ab131 1897 case 'O': /* Offset address. */
cb0ca284
MH
1898 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1899 break;
1900 else if (code == MEM)
f6155fda 1901 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
cb0ca284
MH
1902 else if (code == REG)
1903 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1904 else
1905 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1906 return;
1907
975ab131 1908 case 'C': /* Call. */
50c33087
MH
1909 break;
1910
975ab131 1911 case 'U': /* Call/callu. */
b2e9a2fd 1912 if (code != SYMBOL_REF)
761c70aa 1913 fprintf (file, "u");
cb0ca284
MH
1914 return;
1915
1916 default:
1917 break;
1918 }
1919
1920 switch (code)
1921 {
1922 case REG:
dfafcb4d
HB
1923 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1924 && ! TARGET_TI)
cb0ca284
MH
1925 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1926 else
1927 fprintf (file, "%s", reg_names[REGNO (op)]);
1928 break;
1929
1930 case MEM:
1931 output_address (XEXP (op, 0));
1932 break;
1933
1934 case CONST_DOUBLE:
1935 {
25009e02 1936 char str[64];
cb0ca284 1937
da6eec72
RH
1938 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1939 sizeof (str), 0, 1);
cb0ca284
MH
1940 fprintf (file, "%s", str);
1941 }
1942 break;
1943
1944 case CONST_INT:
13a8b496 1945 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
cb0ca284
MH
1946 break;
1947
1948 case NE:
761c70aa 1949 fprintf (file, "ne");
cb0ca284
MH
1950 break;
1951
1952 case EQ:
761c70aa 1953 fprintf (file, "eq");
cb0ca284
MH
1954 break;
1955
1956 case GE:
761c70aa 1957 fprintf (file, "ge");
cb0ca284
MH
1958 break;
1959
1960 case GT:
761c70aa 1961 fprintf (file, "gt");
cb0ca284
MH
1962 break;
1963
1964 case LE:
761c70aa 1965 fprintf (file, "le");
cb0ca284
MH
1966 break;
1967
1968 case LT:
761c70aa 1969 fprintf (file, "lt");
cb0ca284
MH
1970 break;
1971
1972 case GEU:
761c70aa 1973 fprintf (file, "hs");
cb0ca284
MH
1974 break;
1975
1976 case GTU:
761c70aa 1977 fprintf (file, "hi");
cb0ca284
MH
1978 break;
1979
1980 case LEU:
761c70aa 1981 fprintf (file, "ls");
cb0ca284
MH
1982 break;
1983
1984 case LTU:
761c70aa 1985 fprintf (file, "lo");
cb0ca284
MH
1986 break;
1987
1988 case SYMBOL_REF:
1989 output_addr_const (file, op);
1990 break;
1991
1992 case CONST:
1993 output_addr_const (file, XEXP (op, 0));
1994 break;
1995
1996 case CODE_LABEL:
1997 break;
1998
1999 default:
2000 fatal_insn ("c4x_print_operand: Bad operand case", op);
2001 break;
2002 }
2003}
2004
2005
2006void
f12b3fc8 2007c4x_print_operand_address (FILE *file, rtx addr)
cb0ca284
MH
2008{
2009 switch (GET_CODE (addr))
2010 {
2011 case REG:
2012 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2013 break;
2014
2015 case PRE_DEC:
2016 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2017 break;
2018
2019 case POST_INC:
2020 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2021 break;
2022
2023 case POST_MODIFY:
2024 {
2025 rtx op0 = XEXP (XEXP (addr, 1), 0);
2026 rtx op1 = XEXP (XEXP (addr, 1), 1);
2027
2028 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2029 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2030 reg_names[REGNO (op1)]);
2031 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
13a8b496
KG
2032 fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2033 reg_names[REGNO (op0)], INTVAL (op1));
cb0ca284 2034 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
13a8b496
KG
2035 fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2036 reg_names[REGNO (op0)], -INTVAL (op1));
cb0ca284
MH
2037 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2038 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2039 reg_names[REGNO (op1)]);
2040 else
2041 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2042 }
2043 break;
2044
2045 case PRE_MODIFY:
2046 {
2047 rtx op0 = XEXP (XEXP (addr, 1), 0);
2048 rtx op1 = XEXP (XEXP (addr, 1), 1);
2049
2050 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2051 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2052 reg_names[REGNO (op1)]);
2053 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
13a8b496
KG
2054 fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2055 reg_names[REGNO (op0)], INTVAL (op1));
cb0ca284 2056 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
13a8b496
KG
2057 fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2058 reg_names[REGNO (op0)], -INTVAL (op1));
cb0ca284
MH
2059 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2060 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2061 reg_names[REGNO (op1)]);
2062 else
2063 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2064 }
2065 break;
2066
2067 case PRE_INC:
2068 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2069 break;
2070
2071 case POST_DEC:
2072 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2073 break;
2074
2075 case PLUS: /* Indirect with displacement. */
2076 {
2077 rtx op0 = XEXP (addr, 0);
2078 rtx op1 = XEXP (addr, 1);
cb0ca284 2079
50c33087 2080 if (REG_P (op0))
cb0ca284 2081 {
50c33087 2082 if (REG_P (op1))
cb0ca284 2083 {
bc46716b 2084 if (IS_INDEX_REG (op0))
cb0ca284
MH
2085 {
2086 fprintf (file, "*+%s(%s)",
2087 reg_names[REGNO (op1)],
975ab131 2088 reg_names[REGNO (op0)]); /* Index + base. */
cb0ca284
MH
2089 }
2090 else
2091 {
2092 fprintf (file, "*+%s(%s)",
2093 reg_names[REGNO (op0)],
975ab131 2094 reg_names[REGNO (op1)]); /* Base + index. */
cb0ca284
MH
2095 }
2096 }
2097 else if (INTVAL (op1) < 0)
2098 {
13a8b496 2099 fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
cb0ca284 2100 reg_names[REGNO (op0)],
975ab131 2101 -INTVAL (op1)); /* Base - displacement. */
cb0ca284
MH
2102 }
2103 else
2104 {
13a8b496 2105 fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
cb0ca284 2106 reg_names[REGNO (op0)],
975ab131 2107 INTVAL (op1)); /* Base + displacement. */
cb0ca284
MH
2108 }
2109 }
50c33087
MH
2110 else
2111 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2112 }
2113 break;
2114
2115 case LO_SUM:
2116 {
2117 rtx op0 = XEXP (addr, 0);
2118 rtx op1 = XEXP (addr, 1);
2119
2120 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2121 c4x_print_operand_address (file, op1);
2122 else
2123 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
cb0ca284
MH
2124 }
2125 break;
2126
2127 case CONST:
2128 case SYMBOL_REF:
2129 case LABEL_REF:
50c33087 2130 fprintf (file, "@");
cb0ca284 2131 output_addr_const (file, addr);
cb0ca284
MH
2132 break;
2133
2134 /* We shouldn't access CONST_INT addresses. */
2135 case CONST_INT:
2136
2137 default:
2138 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2139 break;
2140 }
2141}
2142
975ab131 2143
50c33087
MH
2144/* Return nonzero if the floating point operand will fit
2145 in the immediate field. */
975ab131 2146
cb0ca284 2147static int
f12b3fc8 2148c4x_immed_float_p (rtx op)
cb0ca284
MH
2149{
2150 long convval[2];
2151 int exponent;
2152 REAL_VALUE_TYPE r;
2153
50c33087
MH
2154 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2155 if (GET_MODE (op) == HFmode)
cb0ca284
MH
2156 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2157 else
2158 {
2159 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2160 convval[1] = 0;
2161 }
2162
975ab131 2163 /* Sign extend exponent. */
cb0ca284
MH
2164 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2165 if (exponent == -128)
975ab131 2166 return 1; /* 0.0 */
cb0ca284 2167 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
975ab131
MH
2168 return 0; /* Precision doesn't fit. */
2169 return (exponent <= 7) /* Positive exp. */
2170 && (exponent >= -7); /* Negative exp. */
cb0ca284
MH
2171}
2172
975ab131 2173
cb0ca284
MH
2174/* The last instruction in a repeat block cannot be a Bcond, DBcound,
2175 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2176
2177 None of the last four instructions from the bottom of the block can
2178 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2179 BcondAT or RETIcondD.
2180
2181 This routine scans the four previous insns for a jump insn, and if
2182 one is found, returns 1 so that we bung in a nop instruction.
2183 This simple minded strategy will add a nop, when it may not
2184 be required. Say when there is a JUMP_INSN near the end of the
2185 block that doesn't get converted into a delayed branch.
2186
2187 Note that we cannot have a call insn, since we don't generate
2188 repeat loops with calls in them (although I suppose we could, but
d5e4ff48
MH
2189 there's no benefit.)
2190
2191 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
cb0ca284
MH
2192
2193int
f12b3fc8 2194c4x_rptb_nop_p (rtx insn)
cb0ca284 2195{
d5e4ff48 2196 rtx start_label;
cb0ca284
MH
2197 int i;
2198
d5e4ff48
MH
2199 /* Extract the start label from the jump pattern (rptb_end). */
2200 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2201
cb0ca284
MH
2202 /* If there is a label at the end of the loop we must insert
2203 a NOP. */
50c33087
MH
2204 do {
2205 insn = previous_insn (insn);
2206 } while (GET_CODE (insn) == NOTE
2207 || GET_CODE (insn) == USE
2208 || GET_CODE (insn) == CLOBBER);
cb0ca284
MH
2209 if (GET_CODE (insn) == CODE_LABEL)
2210 return 1;
2211
2212 for (i = 0; i < 4; i++)
2213 {
2214 /* Search back for prev non-note and non-label insn. */
2215 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2216 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
d5e4ff48
MH
2217 {
2218 if (insn == start_label)
2219 return i == 0;
2220
50c33087 2221 insn = previous_insn (insn);
d5e4ff48 2222 };
cb0ca284 2223
d5e4ff48 2224 /* If we have a jump instruction we should insert a NOP. If we
cb0ca284 2225 hit repeat block top we should only insert a NOP if the loop
1ac7a7f5 2226 is empty. */
cb0ca284
MH
2227 if (GET_CODE (insn) == JUMP_INSN)
2228 return 1;
50c33087 2229 insn = previous_insn (insn);
cb0ca284
MH
2230 }
2231 return 0;
2232}
2233
2234
933cddd0
MH
2235/* The C4x looping instruction needs to be emitted at the top of the
2236 loop. Emitting the true RTL for a looping instruction at the top of
2237 the loop can cause problems with flow analysis. So instead, a dummy
2238 doloop insn is emitted at the end of the loop. This routine checks
2239 for the presence of this doloop insn and then searches back to the
2240 top of the loop, where it inserts the true looping insn (provided
2241 there are no instructions in the loop which would cause problems).
2242 Any additional labels can be emitted at this point. In addition, if
2243 the desired loop count register was not allocated, this routine does
0bbcfbaf
HB
2244 nothing.
2245
2246 Before we can create a repeat block looping instruction we have to
2247 verify that there are no jumps outside the loop and no jumps outside
2248 the loop go into this loop. This can happen in the basic blocks reorder
2249 pass. The C4x cpu can not handle this. */
2250
2251static int
f12b3fc8 2252c4x_label_ref_used_p (rtx x, rtx code_label)
0bbcfbaf
HB
2253{
2254 enum rtx_code code;
2255 int i, j;
2256 const char *fmt;
2257
2258 if (x == 0)
2259 return 0;
2260
2261 code = GET_CODE (x);
2262 if (code == LABEL_REF)
2263 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2264
2265 fmt = GET_RTX_FORMAT (code);
2266 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2267 {
2268 if (fmt[i] == 'e')
2269 {
2270 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2271 return 1;
2272 }
2273 else if (fmt[i] == 'E')
2274 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2275 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2276 return 1;
2277 }
2278 return 0;
2279}
2280
2281
2282static int
f12b3fc8 2283c4x_rptb_valid_p (rtx insn, rtx start_label)
0bbcfbaf
HB
2284{
2285 rtx end = insn;
2286 rtx start;
2287 rtx tmp;
2288
2289 /* Find the start label. */
2290 for (; insn; insn = PREV_INSN (insn))
2291 if (insn == start_label)
2292 break;
2293
2294 /* Note found then we can not use a rptb or rpts. The label was
2295 probably moved by the basic block reorder pass. */
2296 if (! insn)
2297 return 0;
2298
2299 start = insn;
2300 /* If any jump jumps inside this block then we must fail. */
2301 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2302 {
2303 if (GET_CODE (insn) == CODE_LABEL)
2304 {
2305 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2306 if (GET_CODE (tmp) == JUMP_INSN
2307 && c4x_label_ref_used_p (tmp, insn))
2308 return 0;
2309 }
2310 }
2311 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2312 {
2313 if (GET_CODE (insn) == CODE_LABEL)
2314 {
2315 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2316 if (GET_CODE (tmp) == JUMP_INSN
2317 && c4x_label_ref_used_p (tmp, insn))
2318 return 0;
2319 }
2320 }
2321 /* If any jump jumps outside this block then we must fail. */
2322 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2323 {
2324 if (GET_CODE (insn) == CODE_LABEL)
2325 {
2326 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2327 if (GET_CODE (tmp) == JUMP_INSN
2328 && c4x_label_ref_used_p (tmp, insn))
2329 return 0;
2330 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2331 if (GET_CODE (tmp) == JUMP_INSN
2332 && c4x_label_ref_used_p (tmp, insn))
2333 return 0;
2334 }
2335 }
2336
2337 /* All checks OK. */
2338 return 1;
2339}
2340
975ab131 2341
d5e4ff48 2342void
f12b3fc8 2343c4x_rptb_insert (rtx insn)
d5e4ff48
MH
2344{
2345 rtx end_label;
2346 rtx start_label;
b864825e 2347 rtx new_start_label;
4271f003
MH
2348 rtx count_reg;
2349
2350 /* If the count register has not been allocated to RC, say if
70128ad9 2351 there is a movmem pattern in the loop, then do not insert a
4271f003
MH
2352 RPTB instruction. Instead we emit a decrement and branch
2353 at the end of the loop. */
2354 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2355 if (REGNO (count_reg) != RC_REGNO)
2356 return;
2357
d5e4ff48
MH
2358 /* Extract the start label from the jump pattern (rptb_end). */
2359 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
4271f003 2360
0bbcfbaf
HB
2361 if (! c4x_rptb_valid_p (insn, start_label))
2362 {
2363 /* We can not use the rptb insn. Replace it so reorg can use
2364 the delay slots of the jump insn. */
a556fd39
KH
2365 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2366 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
0bbcfbaf
HB
2367 emit_insn_before (gen_bge (start_label), insn);
2368 LABEL_NUSES (start_label)++;
2369 delete_insn (insn);
2370 return;
2371 }
2372
d5e4ff48 2373 end_label = gen_label_rtx ();
b864825e 2374 LABEL_NUSES (end_label)++;
d5e4ff48
MH
2375 emit_label_after (end_label, insn);
2376
b864825e
MH
2377 new_start_label = gen_label_rtx ();
2378 LABEL_NUSES (new_start_label)++;
2379
d5e4ff48 2380 for (; insn; insn = PREV_INSN (insn))
b864825e
MH
2381 {
2382 if (insn == start_label)
2383 break;
2384 if (GET_CODE (insn) == JUMP_INSN &&
2385 JUMP_LABEL (insn) == start_label)
2386 redirect_jump (insn, new_start_label, 0);
2387 }
4ddb3ea6 2388 if (! insn)
d5e4ff48
MH
2389 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2390
b864825e
MH
2391 emit_label_after (new_start_label, insn);
2392
3b5e8a16 2393 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
b864825e 2394 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
3b5e8a16 2395 else
b864825e
MH
2396 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2397 if (LABEL_NUSES (start_label) == 0)
2398 delete_insn (start_label);
d5e4ff48
MH
2399}
2400
50c33087 2401
18dbd950
RS
2402/* We need to use direct addressing for large constants and addresses
2403 that cannot fit within an instruction. We must check for these
f1ba665b 2404 after after the final jump optimization pass, since this may
18dbd950
RS
2405 introduce a local_move insn for a SYMBOL_REF. This pass
2406 must come before delayed branch slot filling since it can generate
2407 additional instructions.
2408
2409 This function also fixes up RTPB style loops that didn't get RC
50c33087 2410 allocated as the loop counter. */
cb0ca284 2411
18dbd950 2412static void
f12b3fc8 2413c4x_reorg (void)
cb0ca284 2414{
cb0ca284 2415 rtx insn;
cb0ca284 2416
18dbd950 2417 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
cb0ca284
MH
2418 {
2419 /* Look for insn. */
2c3c49de 2420 if (INSN_P (insn))
cb0ca284 2421 {
cb0ca284 2422 int insn_code_number;
41387ffd 2423 rtx old;
cb0ca284
MH
2424
2425 insn_code_number = recog_memoized (insn);
2426
2427 if (insn_code_number < 0)
2428 continue;
2429
d5e4ff48 2430 /* Insert the RTX for RPTB at the top of the loop
1ac7a7f5 2431 and a label at the end of the loop. */
d5e4ff48
MH
2432 if (insn_code_number == CODE_FOR_rptb_end)
2433 c4x_rptb_insert(insn);
2434
41387ffd
MH
2435 /* We need to split the insn here. Otherwise the calls to
2436 force_const_mem will not work for load_immed_address. */
2437 old = insn;
34de028f 2438
41387ffd
MH
2439 /* Don't split the insn if it has been deleted. */
2440 if (! INSN_DELETED_P (old))
2441 insn = try_split (PATTERN(old), old, 1);
cb0ca284 2442
41387ffd
MH
2443 /* When not optimizing, the old insn will be still left around
2444 with only the 'deleted' bit set. Transform it into a note
2445 to avoid confusion of subsequent processing. */
2446 if (INSN_DELETED_P (old))
2447 {
2448 PUT_CODE (old, NOTE);
2449 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2450 NOTE_SOURCE_FILE (old) = 0;
cb0ca284 2451 }
cb0ca284
MH
2452 }
2453 }
2454}
2455
2456
2457static int
f12b3fc8 2458c4x_a_register (rtx op)
cb0ca284 2459{
bc46716b 2460 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2461}
2462
2463
2464static int
f12b3fc8 2465c4x_x_register (rtx op)
cb0ca284 2466{
bc46716b 2467 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
cb0ca284
MH
2468}
2469
2470
2471static int
f12b3fc8 2472c4x_immed_int_constant (rtx op)
cb0ca284
MH
2473{
2474 if (GET_CODE (op) != CONST_INT)
2475 return 0;
50c33087 2476
cb0ca284 2477 return GET_MODE (op) == VOIDmode
94134f42
ZW
2478 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2479 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
cb0ca284
MH
2480}
2481
2482
2483static int
f12b3fc8 2484c4x_immed_float_constant (rtx op)
cb0ca284
MH
2485{
2486 if (GET_CODE (op) != CONST_DOUBLE)
2487 return 0;
50c33087 2488
5078f5eb
HB
2489 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2490 present this only means that a MEM rtx has been generated. It does
2491 not mean the rtx is really in memory. */
50c33087 2492
cb0ca284
MH
2493 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2494}
2495
2496
483dd5be 2497int
f12b3fc8 2498c4x_shiftable_constant (rtx op)
483dd5be
MH
2499{
2500 int i;
2501 int mask;
2502 int val = INTVAL (op);
2503
2504 for (i = 0; i < 16; i++)
2505 {
2506 if (val & (1 << i))
2507 break;
2508 }
2509 mask = ((0xffff >> i) << 16) | 0xffff;
4fda2521
HB
2510 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2511 : (val >> i) & mask))
483dd5be
MH
2512 return i;
2513 return -1;
2514}
2515
2516
cb0ca284 2517int
f12b3fc8 2518c4x_H_constant (rtx op)
cb0ca284 2519{
50c33087 2520 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
cb0ca284
MH
2521}
2522
2523
2524int
f12b3fc8 2525c4x_I_constant (rtx op)
cb0ca284 2526{
50c33087 2527 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
cb0ca284
MH
2528}
2529
2530
2531int
f12b3fc8 2532c4x_J_constant (rtx op)
cb0ca284
MH
2533{
2534 if (TARGET_C3X)
2535 return 0;
50c33087 2536 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
cb0ca284
MH
2537}
2538
2539
2540static int
f12b3fc8 2541c4x_K_constant (rtx op)
cb0ca284 2542{
305902b0 2543 if (TARGET_C3X || ! c4x_immed_int_constant (op))
cb0ca284 2544 return 0;
305902b0 2545 return IS_INT5_CONST (INTVAL (op));
cb0ca284
MH
2546}
2547
2548
2549int
f12b3fc8 2550c4x_L_constant (rtx op)
cb0ca284 2551{
50c33087 2552 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2553}
2554
2555
2556static int
f12b3fc8 2557c4x_N_constant (rtx op)
cb0ca284 2558{
50c33087 2559 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2560}
2561
2562
2563static int
f12b3fc8 2564c4x_O_constant (rtx op)
cb0ca284 2565{
50c33087 2566 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
cb0ca284
MH
2567}
2568
2569
2570/* The constraints do not have to check the register class,
2571 except when needed to discriminate between the constraints.
2572 The operand has been checked by the predicates to be valid. */
2573
2574/* ARx + 9-bit signed const or IRn
2575 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2576 We don't include the pre/post inc/dec forms here since
2577 they are handled by the <> constraints. */
2578
2579int
f12b3fc8 2580c4x_Q_constraint (rtx op)
cb0ca284
MH
2581{
2582 enum machine_mode mode = GET_MODE (op);
2583
2584 if (GET_CODE (op) != MEM)
2585 return 0;
2586 op = XEXP (op, 0);
2587 switch (GET_CODE (op))
2588 {
2589 case REG:
2590 return 1;
2591
2592 case PLUS:
2593 {
2594 rtx op0 = XEXP (op, 0);
2595 rtx op1 = XEXP (op, 1);
2596
4ddb3ea6 2597 if (! REG_P (op0))
cb0ca284
MH
2598 return 0;
2599
2600 if (REG_P (op1))
2601 return 1;
2602
2603 if (GET_CODE (op1) != CONST_INT)
2604 return 0;
2605
2606 /* HImode and HFmode must be offsettable. */
2607 if (mode == HImode || mode == HFmode)
2608 return IS_DISP8_OFF_CONST (INTVAL (op1));
2609
2610 return IS_DISP8_CONST (INTVAL (op1));
2611 }
2612 break;
50c33087 2613
cb0ca284
MH
2614 default:
2615 break;
2616 }
2617 return 0;
2618}
2619
2620
2621/* ARx + 5-bit unsigned const
975ab131 2622 *ARx, *+ARx(n) for n < 32. */
cb0ca284
MH
2623
2624int
f12b3fc8 2625c4x_R_constraint (rtx op)
cb0ca284
MH
2626{
2627 enum machine_mode mode = GET_MODE (op);
2628
2629 if (TARGET_C3X)
2630 return 0;
2631 if (GET_CODE (op) != MEM)
2632 return 0;
2633 op = XEXP (op, 0);
2634 switch (GET_CODE (op))
2635 {
2636 case REG:
2637 return 1;
2638
2639 case PLUS:
2640 {
2641 rtx op0 = XEXP (op, 0);
2642 rtx op1 = XEXP (op, 1);
2643
4ddb3ea6 2644 if (! REG_P (op0))
cb0ca284
MH
2645 return 0;
2646
2647 if (GET_CODE (op1) != CONST_INT)
2648 return 0;
2649
2650 /* HImode and HFmode must be offsettable. */
2651 if (mode == HImode || mode == HFmode)
2652 return IS_UINT5_CONST (INTVAL (op1) + 1);
2653
2654 return IS_UINT5_CONST (INTVAL (op1));
2655 }
2656 break;
933cddd0 2657
cb0ca284
MH
2658 default:
2659 break;
2660 }
2661 return 0;
2662}
2663
2664
2665static int
f12b3fc8 2666c4x_R_indirect (rtx op)
cb0ca284
MH
2667{
2668 enum machine_mode mode = GET_MODE (op);
2669
2670 if (TARGET_C3X || GET_CODE (op) != MEM)
2671 return 0;
2672
2673 op = XEXP (op, 0);
2674 switch (GET_CODE (op))
2675 {
2676 case REG:
bc46716b 2677 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2678
2679 case PLUS:
2680 {
2681 rtx op0 = XEXP (op, 0);
2682 rtx op1 = XEXP (op, 1);
2683
2684 /* HImode and HFmode must be offsettable. */
2685 if (mode == HImode || mode == HFmode)
bc46716b 2686 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2687 && GET_CODE (op1) == CONST_INT
2688 && IS_UINT5_CONST (INTVAL (op1) + 1);
2689
2690 return REG_P (op0)
bc46716b 2691 && IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2692 && GET_CODE (op1) == CONST_INT
2693 && IS_UINT5_CONST (INTVAL (op1));
2694 }
2695 break;
2696
2697 default:
2698 break;
2699 }
2700 return 0;
2701}
2702
2703
2704/* ARx + 1-bit unsigned const or IRn
2705 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2706 We don't include the pre/post inc/dec forms here since
2707 they are handled by the <> constraints. */
2708
2709int
f12b3fc8 2710c4x_S_constraint (rtx op)
cb0ca284
MH
2711{
2712 enum machine_mode mode = GET_MODE (op);
2713 if (GET_CODE (op) != MEM)
2714 return 0;
2715 op = XEXP (op, 0);
2716 switch (GET_CODE (op))
2717 {
2718 case REG:
2719 return 1;
2720
2721 case PRE_MODIFY:
2722 case POST_MODIFY:
2723 {
2724 rtx op0 = XEXP (op, 0);
2725 rtx op1 = XEXP (op, 1);
2726
2727 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2728 || (op0 != XEXP (op1, 0)))
2729 return 0;
2730
2731 op0 = XEXP (op1, 0);
2732 op1 = XEXP (op1, 1);
2733 return REG_P (op0) && REG_P (op1);
975ab131 2734 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2735 should not be generated. */
2736 }
2737 break;
2738
2739 case PLUS:
2740 {
2741 rtx op0 = XEXP (op, 0);
2742 rtx op1 = XEXP (op, 1);
2743
2744 if (!REG_P (op0))
2745 return 0;
2746
2747 if (REG_P (op1))
2748 return 1;
2749
dfb31eec 2750 if (GET_CODE (op1) != CONST_INT)
cb0ca284
MH
2751 return 0;
2752
2753 /* HImode and HFmode must be offsettable. */
2754 if (mode == HImode || mode == HFmode)
2755 return IS_DISP1_OFF_CONST (INTVAL (op1));
2756
2757 return IS_DISP1_CONST (INTVAL (op1));
2758 }
2759 break;
933cddd0 2760
cb0ca284
MH
2761 default:
2762 break;
2763 }
2764 return 0;
2765}
2766
2767
2768static int
f12b3fc8 2769c4x_S_indirect (rtx op)
cb0ca284
MH
2770{
2771 enum machine_mode mode = GET_MODE (op);
2772 if (GET_CODE (op) != MEM)
2773 return 0;
2774
2775 op = XEXP (op, 0);
2776 switch (GET_CODE (op))
2777 {
2778 case PRE_DEC:
2779 case POST_DEC:
2780 if (mode != QImode && mode != QFmode)
2781 return 0;
2782 case PRE_INC:
2783 case POST_INC:
2784 op = XEXP (op, 0);
2785
2786 case REG:
bc46716b 2787 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2788
2789 case PRE_MODIFY:
2790 case POST_MODIFY:
2791 {
2792 rtx op0 = XEXP (op, 0);
2793 rtx op1 = XEXP (op, 1);
2794
2795 if (mode != QImode && mode != QFmode)
2796 return 0;
2797
2798 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2799 || (op0 != XEXP (op1, 0)))
2800 return 0;
2801
2802 op0 = XEXP (op1, 0);
2803 op1 = XEXP (op1, 1);
bc46716b
MH
2804 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2805 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
975ab131 2806 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2807 should not be generated. */
2808 }
2809
2810 case PLUS:
2811 {
2812 rtx op0 = XEXP (op, 0);
2813 rtx op1 = XEXP (op, 1);
2814
2815 if (REG_P (op0))
2816 {
2817 /* HImode and HFmode must be offsettable. */
2818 if (mode == HImode || mode == HFmode)
bc46716b 2819 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2820 && GET_CODE (op1) == CONST_INT
2821 && IS_DISP1_OFF_CONST (INTVAL (op1));
2822
2823 if (REG_P (op1))
bc46716b
MH
2824 return (IS_INDEX_OR_PSEUDO_REG (op1)
2825 && IS_ADDR_OR_PSEUDO_REG (op0))
2826 || (IS_ADDR_OR_PSEUDO_REG (op1)
2827 && IS_INDEX_OR_PSEUDO_REG (op0));
cb0ca284 2828
bc46716b 2829 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2830 && GET_CODE (op1) == CONST_INT
2831 && IS_DISP1_CONST (INTVAL (op1));
2832 }
2833 }
2834 break;
2835
2836 default:
2837 break;
2838 }
2839 return 0;
2840}
2841
2842
50c33087 2843/* Direct memory operand. */
cb0ca284
MH
2844
2845int
f12b3fc8 2846c4x_T_constraint (rtx op)
cb0ca284
MH
2847{
2848 if (GET_CODE (op) != MEM)
2849 return 0;
2850 op = XEXP (op, 0);
2851
50c33087 2852 if (GET_CODE (op) != LO_SUM)
cb0ca284 2853 {
50c33087
MH
2854 /* Allow call operands. */
2855 return GET_CODE (op) == SYMBOL_REF
2856 && GET_MODE (op) == Pmode
11467df2 2857 && SYMBOL_REF_FUNCTION_P (op);
cb0ca284
MH
2858 }
2859
50c33087
MH
2860 /* HImode and HFmode are not offsettable. */
2861 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2862 return 0;
2863
2864 if ((GET_CODE (XEXP (op, 0)) == REG)
2865 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2866 return c4x_U_constraint (XEXP (op, 1));
2867
2868 return 0;
2869}
2870
2871
2872/* Symbolic operand. */
2873
2874int
f12b3fc8 2875c4x_U_constraint (rtx op)
50c33087 2876{
cb0ca284 2877 /* Don't allow direct addressing to an arbitrary constant. */
5078f5eb
HB
2878 return GET_CODE (op) == CONST
2879 || GET_CODE (op) == SYMBOL_REF
2880 || GET_CODE (op) == LABEL_REF;
cb0ca284
MH
2881}
2882
2883
2884int
f12b3fc8 2885c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
2886{
2887 if (GET_CODE (op) == MEM)
2888 {
2889 enum rtx_code code = GET_CODE (XEXP (op, 0));
2890
2891 if (code == PRE_INC
2892 || code == PRE_DEC
2893 || code == POST_INC
2894 || code == POST_DEC
2895 || code == PRE_MODIFY
2896 || code == POST_MODIFY
2897 )
2898 return 1;
2899 }
2900 return 0;
2901}
2902
2903
2904/* Match any operand. */
2905
2906int
f12b3fc8
SB
2907any_operand (register rtx op ATTRIBUTE_UNUSED,
2908 enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
2909{
2910 return 1;
2911}
2912
2913
2914/* Nonzero if OP is a floating point value with value 0.0. */
2915
2916int
f12b3fc8 2917fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
2918{
2919 REAL_VALUE_TYPE r;
2920
f9ef1f02
MH
2921 if (GET_CODE (op) != CONST_DOUBLE)
2922 return 0;
cb0ca284
MH
2923 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2924 return REAL_VALUES_EQUAL (r, dconst0);
2925}
2926
2927
2928int
f12b3fc8 2929const_operand (register rtx op, register enum machine_mode mode)
cb0ca284
MH
2930{
2931 switch (mode)
2932 {
2933 case QFmode:
2934 case HFmode:
2935 if (GET_CODE (op) != CONST_DOUBLE
2936 || GET_MODE (op) != mode
2937 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2938 return 0;
2939
2940 return c4x_immed_float_p (op);
2941
2942#if Pmode != QImode
2943 case Pmode:
2944#endif
2945 case QImode:
2946 if (GET_CODE (op) != CONST_INT
2947 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2948 || GET_MODE_CLASS (mode) != MODE_INT)
2949 return 0;
2950
2951 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2952
2953 case HImode:
2954 return 0;
2955
2956 default:
2957 return 0;
2958 }
2959}
2960
2961
2962int
f12b3fc8 2963stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
2964{
2965 return c4x_K_constant (op);
2966}
2967
2968
2969int
f12b3fc8 2970not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
2971{
2972 return c4x_N_constant (op);
2973}
2974
2975
2976int
f12b3fc8 2977reg_operand (rtx op, enum machine_mode mode)
cb0ca284 2978{
ebcc44f4
MH
2979 if (GET_CODE (op) == SUBREG
2980 && GET_MODE (op) == QFmode)
2981 return 0;
cb0ca284
MH
2982 return register_operand (op, mode);
2983}
2984
50c33087 2985
ebcc44f4 2986int
f12b3fc8 2987mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ebcc44f4
MH
2988{
2989 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
2990 int and a long double. */
2991 if (GET_CODE (op) == SUBREG
2992 && (GET_MODE (op) == QFmode)
2993 && (GET_MODE (SUBREG_REG (op)) == QImode
2994 || GET_MODE (SUBREG_REG (op)) == HImode))
2995 return 1;
2996 return 0;
2997}
2998
2999
cb0ca284 3000int
f12b3fc8 3001reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
3002{
3003 if (REG_P (op) || CONSTANT_P (op))
3004 return 1;
3005 return 0;
3006}
3007
50c33087 3008
cb0ca284 3009int
f12b3fc8 3010not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
3011{
3012 if (REG_P (op) || CONSTANT_P (op))
3013 return 1;
3014 if (GET_CODE (op) != MEM)
3015 return 0;
3016 op = XEXP (op, 0);
3017 switch (GET_CODE (op))
3018 {
3019 case REG:
3020 return 1;
3021
3022 case PLUS:
3023 {
3024 rtx op0 = XEXP (op, 0);
3025 rtx op1 = XEXP (op, 1);
3026
4ddb3ea6 3027 if (! REG_P (op0))
cb0ca284
MH
3028 return 0;
3029
3030 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3031 return 1;
3032 }
50c33087
MH
3033
3034 case LO_SUM:
3035 {
3036 rtx op0 = XEXP (op, 0);
3037
3038 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3039 return 1;
3040 }
3041 break;
3042
cb0ca284
MH
3043 case CONST:
3044 case SYMBOL_REF:
3045 case LABEL_REF:
3046 return 1;
933cddd0 3047
cb0ca284
MH
3048 default:
3049 break;
3050 }
3051 return 0;
3052}
3053
50c33087 3054
cb0ca284 3055int
f12b3fc8 3056not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
3057{
3058 if (REG_P (op) && REGNO (op) == RC_REGNO)
3059 return 0;
3060 return 1;
3061}
3062
50c33087 3063
cb0ca284
MH
3064/* Extended precision register R0-R1. */
3065
3066int
f12b3fc8 3067r0r1_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3068{
ebcc44f4 3069 if (! reg_operand (op, mode))
cb0ca284
MH
3070 return 0;
3071 if (GET_CODE (op) == SUBREG)
3072 op = SUBREG_REG (op);
bc46716b 3073 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
cb0ca284
MH
3074}
3075
3076
3077/* Extended precision register R2-R3. */
3078
3079int
f12b3fc8 3080r2r3_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3081{
ebcc44f4 3082 if (! reg_operand (op, mode))
cb0ca284
MH
3083 return 0;
3084 if (GET_CODE (op) == SUBREG)
3085 op = SUBREG_REG (op);
bc46716b 3086 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
cb0ca284
MH
3087}
3088
3089
3090/* Low extended precision register R0-R7. */
3091
3092int
f12b3fc8 3093ext_low_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3094{
ebcc44f4 3095 if (! reg_operand (op, mode))
cb0ca284
MH
3096 return 0;
3097 if (GET_CODE (op) == SUBREG)
3098 op = SUBREG_REG (op);
bc46716b 3099 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
cb0ca284
MH
3100}
3101
3102
3103/* Extended precision register. */
3104
3105int
f12b3fc8 3106ext_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3107{
ebcc44f4 3108 if (! reg_operand (op, mode))
cb0ca284
MH
3109 return 0;
3110 if (GET_CODE (op) == SUBREG)
3111 op = SUBREG_REG (op);
4ddb3ea6 3112 if (! REG_P (op))
cb0ca284 3113 return 0;
bc46716b 3114 return IS_EXT_OR_PSEUDO_REG (op);
cb0ca284
MH
3115}
3116
3117
3118/* Standard precision register. */
3119
3120int
f12b3fc8 3121std_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3122{
ebcc44f4 3123 if (! reg_operand (op, mode))
cb0ca284
MH
3124 return 0;
3125 if (GET_CODE (op) == SUBREG)
3126 op = SUBREG_REG (op);
bc46716b 3127 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
cb0ca284
MH
3128}
3129
ed3614cd
HB
3130/* Standard precision or normal register. */
3131
3132int
f12b3fc8 3133std_or_reg_operand (rtx op, enum machine_mode mode)
ed3614cd
HB
3134{
3135 if (reload_in_progress)
3136 return std_reg_operand (op, mode);
3137 return reg_operand (op, mode);
3138}
3139
cb0ca284
MH
3140/* Address register. */
3141
3142int
f12b3fc8 3143addr_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3144{
ebcc44f4 3145 if (! reg_operand (op, mode))
cb0ca284
MH
3146 return 0;
3147 return c4x_a_register (op);
3148}
3149
3150
3151/* Index register. */
3152
3153int
f12b3fc8 3154index_reg_operand (rtx op, enum machine_mode mode)
cb0ca284 3155{
ebcc44f4 3156 if (! reg_operand (op, mode))
cb0ca284
MH
3157 return 0;
3158 if (GET_CODE (op) == SUBREG)
3159 op = SUBREG_REG (op);
3160 return c4x_x_register (op);
3161}
3162
3163
3164/* DP register. */
3165
3166int
f12b3fc8 3167dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284 3168{
bc46716b 3169 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
cb0ca284
MH
3170}
3171
3172
3173/* SP register. */
3174
3175int
f12b3fc8 3176sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284 3177{
bc46716b 3178 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
cb0ca284
MH
3179}
3180
3181
3182/* ST register. */
3183
3184int
f12b3fc8 3185st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284 3186{
bc46716b 3187 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
cb0ca284
MH
3188}
3189
3190
d5e4ff48
MH
3191/* RC register. */
3192
3193int
f12b3fc8 3194rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
d5e4ff48 3195{
bc46716b 3196 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
d5e4ff48
MH
3197}
3198
3199
cb0ca284 3200int
f12b3fc8 3201call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284 3202{
55310df7 3203 return (REG_P (op) || symbolic_address_operand (op, mode));
cb0ca284
MH
3204}
3205
3206
305902b0 3207/* Symbolic address operand. */
50c33087
MH
3208
3209int
f12b3fc8
SB
3210symbolic_address_operand (register rtx op,
3211 enum machine_mode mode ATTRIBUTE_UNUSED)
50c33087
MH
3212{
3213 switch (GET_CODE (op))
3214 {
5078f5eb 3215 case CONST:
50c33087
MH
3216 case SYMBOL_REF:
3217 case LABEL_REF:
3218 return 1;
50c33087
MH
3219 default:
3220 return 0;
3221 }
3222}
3223
975ab131 3224
f416f18c 3225/* Check dst operand of a move instruction. */
975ab131 3226
f416f18c 3227int
f12b3fc8 3228dst_operand (rtx op, enum machine_mode mode)
f416f18c
MH
3229{
3230 if (GET_CODE (op) == SUBREG
3231 && mixed_subreg_operand (op, mode))
3232 return 0;
cb0ca284 3233
f416f18c
MH
3234 if (REG_P (op))
3235 return reg_operand (op, mode);
3236
f959ff1a 3237 return nonimmediate_operand (op, mode);
f416f18c
MH
3238}
3239
3240
3241/* Check src operand of two operand arithmetic instructions. */
975ab131 3242
cb0ca284 3243int
f12b3fc8 3244src_operand (rtx op, enum machine_mode mode)
cb0ca284 3245{
ebcc44f4
MH
3246 if (GET_CODE (op) == SUBREG
3247 && mixed_subreg_operand (op, mode))
3248 return 0;
3249
cb0ca284
MH
3250 if (REG_P (op))
3251 return reg_operand (op, mode);
3252
3253 if (mode == VOIDmode)
3254 mode = GET_MODE (op);
3255
cb0ca284 3256 if (GET_CODE (op) == CONST_INT)
50c33087
MH
3257 return (mode == QImode || mode == Pmode || mode == HImode)
3258 && c4x_I_constant (op);
cb0ca284
MH
3259
3260 /* We don't like CONST_DOUBLE integers. */
3261 if (GET_CODE (op) == CONST_DOUBLE)
3262 return c4x_H_constant (op);
3263
31445126
MH
3264 /* Disallow symbolic addresses. Only the predicate
3265 symbolic_address_operand will match these. */
50c33087
MH
3266 if (GET_CODE (op) == SYMBOL_REF
3267 || GET_CODE (op) == LABEL_REF
3268 || GET_CODE (op) == CONST)
3269 return 0;
3270
825dda42 3271 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
4a1f52a8
HB
3272 access to symbolic addresses. These operands will get forced
3273 into a register and the movqi expander will generate a
825dda42 3274 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
50c33087
MH
3275 if (GET_CODE (op) == MEM
3276 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3277 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3278 || GET_CODE (XEXP (op, 0)) == CONST)))
9c3602e4
MH
3279 return !TARGET_EXPOSE_LDP &&
3280 ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
50c33087 3281
cb0ca284
MH
3282 return general_operand (op, mode);
3283}
3284
3285
3286int
f12b3fc8 3287src_hi_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3288{
3289 if (c4x_O_constant (op))
3290 return 1;
3291 return src_operand (op, mode);
3292}
3293
3294
3295/* Check src operand of two operand logical instructions. */
3296
3297int
f12b3fc8 3298lsrc_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3299{
3300 if (mode == VOIDmode)
3301 mode = GET_MODE (op);
3302
3303 if (mode != QImode && mode != Pmode)
c725bd79 3304 fatal_insn ("mode not QImode", op);
cb0ca284 3305
cb0ca284
MH
3306 if (GET_CODE (op) == CONST_INT)
3307 return c4x_L_constant (op) || c4x_J_constant (op);
3308
50c33087 3309 return src_operand (op, mode);
cb0ca284
MH
3310}
3311
3312
3313/* Check src operand of two operand tricky instructions. */
3314
3315int
f12b3fc8 3316tsrc_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3317{
3318 if (mode == VOIDmode)
3319 mode = GET_MODE (op);
3320
3321 if (mode != QImode && mode != Pmode)
c725bd79 3322 fatal_insn ("mode not QImode", op);
cb0ca284 3323
cb0ca284
MH
3324 if (GET_CODE (op) == CONST_INT)
3325 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3326
50c33087 3327 return src_operand (op, mode);
cb0ca284
MH
3328}
3329
3330
65f2f288
HB
3331/* Check src operand of two operand non immedidate instructions. */
3332
3333int
f12b3fc8 3334nonimmediate_src_operand (rtx op, enum machine_mode mode)
65f2f288
HB
3335{
3336 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3337 return 0;
3338
3339 return src_operand (op, mode);
3340}
3341
3342
3343/* Check logical src operand of two operand non immedidate instructions. */
3344
3345int
f12b3fc8 3346nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
65f2f288
HB
3347{
3348 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3349 return 0;
3350
3351 return lsrc_operand (op, mode);
3352}
3353
3354
cb0ca284 3355int
f12b3fc8 3356reg_or_const_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3357{
3358 return reg_operand (op, mode) || const_operand (op, mode);
3359}
3360
3361
3362/* Check for indirect operands allowable in parallel instruction. */
3363
3364int
f12b3fc8 3365par_ind_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3366{
3367 if (mode != VOIDmode && mode != GET_MODE (op))
3368 return 0;
3369
3370 return c4x_S_indirect (op);
3371}
3372
3373
3374/* Check for operands allowable in parallel instruction. */
3375
3376int
f12b3fc8 3377parallel_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3378{
3379 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3380}
3381
3382
/* Decompose the address of MEM rtx OP into its components: *BASE and
   *INDEX receive register numbers (0 when absent), *INCDEC is set to 1
   for any auto-modify addressing mode, and *DISP receives the constant
   displacement.  Calls fatal_insn on a non-MEM or an unrecognized
   address form.  */

static void
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* Either base + index, base + small displacement, or the
	   commuted index + base form is acceptable.  */
	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fall through.  */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
3484
3485
3486int
f12b3fc8 3487c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
cb0ca284
MH
3488{
3489 int base0;
3490 int base1;
3491 int incdec0;
3492 int incdec1;
3493 int index0;
3494 int index1;
3495 int disp0;
3496 int disp1;
3497
4271f003
MH
3498 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3499 return 1;
3500
cb0ca284
MH
3501 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3502 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3503
3504 if (store0 && store1)
3505 {
3506 /* If we have two stores in parallel to the same address, then
3507 the C4x only executes one of the stores. This is unlikely to
3508 cause problems except when writing to a hardware device such
3509 as a FIFO since the second write will be lost. The user
3510 should flag the hardware location as being volatile so that
f1ba665b 3511 we don't do this optimization. While it is unlikely that we
cb0ca284
MH
3512 have an aliased address if both locations are not marked
3513 volatile, it is probably safer to flag a potential conflict
3514 if either location is volatile. */
4ddb3ea6 3515 if (! flag_argument_noalias)
cb0ca284
MH
3516 {
3517 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3518 return 1;
3519 }
3520 }
3521
3522 /* If have a parallel load and a store to the same address, the load
3523 is performed first, so there is no conflict. Similarly, there is
3524 no conflict if have parallel loads from the same address. */
3525
3526 /* Cannot use auto increment or auto decrement twice for same
3527 base register. */
3528 if (base0 == base1 && incdec0 && incdec0)
3529 return 1;
3530
3531 /* It might be too confusing for GCC if we have use a base register
3532 with a side effect and a memory reference using the same register
3533 in parallel. */
4ddb3ea6 3534 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
cb0ca284
MH
3535 return 1;
3536
f1c374cb 3537 /* We can not optimize the case where op1 and op2 refer to the same
1ac7a7f5 3538 address. */
f1c374cb 3539 if (base0 == base1 && disp0 == disp1 && index0 == index1)
cb0ca284
MH
3540 return 1;
3541
3542 /* No conflict. */
3543 return 0;
3544}
3545
3546
3547/* Check for while loop inside a decrement and branch loop. */
3548
3549int
f12b3fc8 3550c4x_label_conflict (rtx insn, rtx jump, rtx db)
cb0ca284
MH
3551{
3552 while (insn)
3553 {
3554 if (GET_CODE (insn) == CODE_LABEL)
3555 {
3556 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3557 return 1;
3558 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3559 return 0;
3560 }
3561 insn = PREV_INSN (insn);
3562 }
3563 return 1;
3564}
3565
3566
/* Validate combination of operands for parallel load/store
   instructions.  Returns nonzero when the four operands form a legal
   LDI||LDI, STI||STI, LDI||STI, or STI||LDI pairing with no address
   conflict.  */

int
valid_parallel_load_store (rtx *operands,
			   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op3) == SUBREG)
    op3 = SUBREG_REG (op3);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2 or op3, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
	  || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
    return 0;

  /* LDI||LDI.  The two destinations must differ.  */
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
    return (REGNO (op0) != REGNO (op2))
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op1, op3, 0, 0);

  /* STI||STI.  */
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op0, op2, 1, 1);

  /* LDI||STI.  */
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op1, op2, 0, 1);

  /* STI||LDI.  */
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op0, op3, 1, 0);

  return 0;
}
3622
4271f003 3623
e868a840 3624int
f12b3fc8
SB
3625valid_parallel_operands_4 (rtx *operands,
3626 enum machine_mode mode ATTRIBUTE_UNUSED)
e868a840 3627{
e868a840
MH
3628 rtx op0 = operands[0];
3629 rtx op2 = operands[2];
3630
3631 if (GET_CODE (op0) == SUBREG)
3632 op0 = SUBREG_REG (op0);
3633 if (GET_CODE (op2) == SUBREG)
3634 op2 = SUBREG_REG (op2);
3635
3636 /* This test prevents the multipack pass from using this pattern if
3637 op0 is used as an index or base register in op2, since this combination
3638 will require reloading. */
3639 if (GET_CODE (op0) == REG
3640 && GET_CODE (op2) == MEM
3641 && reg_mentioned_p (op0, XEXP (op2, 0)))
3642 return 0;
3643
3644 return 1;
3645}
3646
3647
cb0ca284 3648int
f12b3fc8
SB
3649valid_parallel_operands_5 (rtx *operands,
3650 enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
3651{
3652 int regs = 0;
4271f003 3653 rtx op0 = operands[0];
e868a840 3654 rtx op1 = operands[1];
4271f003
MH
3655 rtx op2 = operands[2];
3656 rtx op3 = operands[3];
cb0ca284
MH
3657
3658 if (GET_CODE (op0) == SUBREG)
3659 op0 = SUBREG_REG (op0);
e868a840
MH
3660 if (GET_CODE (op1) == SUBREG)
3661 op1 = SUBREG_REG (op1);
4271f003
MH
3662 if (GET_CODE (op2) == SUBREG)
3663 op2 = SUBREG_REG (op2);
cb0ca284
MH
3664
3665 /* The patterns should only allow ext_low_reg_operand() or
e868a840
MH
3666 par_ind_operand() operands. Operands 1 and 2 may be commutative
3667 but only one of them can be a register. */
3668 if (GET_CODE (op1) == REG)
cb0ca284 3669 regs++;
4271f003 3670 if (GET_CODE (op2) == REG)
cb0ca284
MH
3671 regs++;
3672
4271f003
MH
3673 if (regs != 1)
3674 return 0;
3675
3676 /* This test prevents the multipack pass from using this pattern if
3677 op0 is used as an index or base register in op3, since this combination
3678 will require reloading. */
3679 if (GET_CODE (op0) == REG
3680 && GET_CODE (op3) == MEM
3681 && reg_mentioned_p (op0, XEXP (op3, 0)))
3682 return 0;
3683
3684 return 1;
cb0ca284
MH
3685}
3686
3687
3688int
f12b3fc8
SB
3689valid_parallel_operands_6 (rtx *operands,
3690 enum machine_mode mode ATTRIBUTE_UNUSED)
cb0ca284
MH
3691{
3692 int regs = 0;
4271f003
MH
3693 rtx op0 = operands[0];
3694 rtx op1 = operands[1];
3695 rtx op2 = operands[2];
3696 rtx op4 = operands[4];
3697 rtx op5 = operands[5];
cb0ca284 3698
cb0ca284
MH
3699 if (GET_CODE (op1) == SUBREG)
3700 op1 = SUBREG_REG (op1);
3701 if (GET_CODE (op2) == SUBREG)
3702 op2 = SUBREG_REG (op2);
4271f003
MH
3703 if (GET_CODE (op4) == SUBREG)
3704 op4 = SUBREG_REG (op4);
3705 if (GET_CODE (op5) == SUBREG)
3706 op5 = SUBREG_REG (op5);
cb0ca284
MH
3707
3708 /* The patterns should only allow ext_low_reg_operand() or
3709 par_ind_operand() operands. Thus of the 4 input operands, only 2
3710 should be REGs and the other 2 should be MEMs. */
3711
cb0ca284
MH
3712 if (GET_CODE (op1) == REG)
3713 regs++;
3714 if (GET_CODE (op2) == REG)
3715 regs++;
4271f003
MH
3716 if (GET_CODE (op4) == REG)
3717 regs++;
3718 if (GET_CODE (op5) == REG)
cb0ca284
MH
3719 regs++;
3720
3721 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3722 Perhaps we should count the MEMs as well? */
4271f003
MH
3723 if (regs != 2)
3724 return 0;
cb0ca284 3725
4271f003
MH
3726 /* This test prevents the multipack pass from using this pattern if
3727 op0 is used as an index or base register in op4 or op5, since
3728 this combination will require reloading. */
3729 if (GET_CODE (op0) == REG
3730 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3731 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3732 return 0;
cb0ca284 3733
4271f003 3734 return 1;
cb0ca284
MH
3735}
3736
3737
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int force)
{
  rtx op0;
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* FIXME, why can't we tighten the operands for IF_THEN_ELSE?  */
  if (code == IF_THEN_ELSE)
    return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);

  /* COMPARE has only two operands; the sources are at indices 0,1
     instead of 1,2.  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  op0 = operands[0];

  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory sources are only valid if both are restricted
     indirect addresses.  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* We cannot handle two MEMs or two CONSTS, etc.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      if (GET_CODE (op0) == SCRATCH)
	  return 1;

      if (!REG_P (op0))
	  return 0;

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
    }

  /* Check non-commutative operators.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  /* Assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code2 == REG)
    {
      switch (code1)
	{
	case CONST_INT:
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op1))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:
	  break;

	default:
	  abort ();
	  break;
	}

      if (GET_CODE (op0) == SCRATCH)
	return 1;

      if (!REG_P (op0))
	return 0;

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || REGNO (op1) == REGNO (op0);
    }

  if (c4x_J_constant (op1) && c4x_R_indirect (op2))
    return 1;

  return 0;
}
3880
3881
3882int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3883{
3884
3885 /* If we are not optimizing then we have to let anything go and let
3886 reload fix things up. instantiate_decl in function.c can produce
3887 invalid insns by changing the offset of a memory operand from a
3888 valid one into an invalid one, when the second operand is also a
3889 memory operand. The alternative is not to allow two memory
3890 operands for an insn when not optimizing. The problem only rarely
3891 occurs, for example with the C-torture program DFcmp.c. */
3892
3893 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
cb0ca284
MH
3894}
3895
3896
/* Massage OPERANDS in place so that the CODE operation in MODE has a
   valid operand combination, forcing values into registers where
   needed.  Always returns 1.  */

int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimization enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  /* When the shift count is greater than 32 then the result
     can be implementation dependent.  We truncate the result to
     fit in 5 bits so that we do not emit invalid code when
     optimizing---such as trying to generate lhu2 with 20021124-1.c.  */
  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
       && (GET_CODE (operands[2]) == CONST_INT))
      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
    operands[2]
      = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));

  return 1;
}
3992
3993
3994/* The following predicates are used for instruction scheduling. */
3995
3996int
f12b3fc8 3997group1_reg_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
3998{
3999 if (mode != VOIDmode && mode != GET_MODE (op))
4000 return 0;
4001 if (GET_CODE (op) == SUBREG)
4002 op = SUBREG_REG (op);
d001969e 4003 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
cb0ca284
MH
4004}
4005
4006
4007int
f12b3fc8 4008group1_mem_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
4009{
4010 if (mode != VOIDmode && mode != GET_MODE (op))
4011 return 0;
4012
4013 if (GET_CODE (op) == MEM)
4014 {
4015 op = XEXP (op, 0);
4016 if (GET_CODE (op) == PLUS)
4017 {
4018 rtx op0 = XEXP (op, 0);
4019 rtx op1 = XEXP (op, 1);
4020
d001969e
HB
4021 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4022 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
cb0ca284
MH
4023 return 1;
4024 }
d001969e 4025 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
cb0ca284
MH
4026 return 1;
4027 }
4028
4029 return 0;
4030}
4031
4032
4033/* Return true if any one of the address registers. */
4034
4035int
f12b3fc8 4036arx_reg_operand (rtx op, enum machine_mode mode)
cb0ca284
MH
4037{
4038 if (mode != VOIDmode && mode != GET_MODE (op))
4039 return 0;
4040 if (GET_CODE (op) == SUBREG)
4041 op = SUBREG_REG (op);
d001969e 4042 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
cb0ca284
MH
4043}
4044
4045
4046static int
f12b3fc8 4047c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
cb0ca284
MH
4048{
4049 if (mode != VOIDmode && mode != GET_MODE (op))
4050 return 0;
4051 if (GET_CODE (op) == SUBREG)
4052 op = SUBREG_REG (op);
d001969e 4053 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
cb0ca284
MH
4054}
4055
4056
/* Return nonzero if OP is a MEM whose address uses register number
   REGNO (before reload any register is accepted).  MODE must match
   when specified.  */

static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  /* Strip the auto-modify wrapper to expose the base register,
	     then share the REG check below.  */
	  op = XEXP (op, 0);
	  /* Fall through.  */

	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check both the base register and a possible index register
	     inside the modify expression.  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
4107
4108
/* Predicates matching one specific address register (AR0-AR7) or
   index register (IR0-IR1), either as a direct register operand
   (*_reg_operand) or as part of a memory address (*_mem_operand).
   All delegate to c4x_arn_reg_operand / c4x_arn_mem_operand.  */

int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}


int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}


int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}


int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}


int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}


int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}


int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}


int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}


int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}


int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}


int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}


int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}


int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}


int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}


int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}


int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}


int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}


int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}


int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}


int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4247
4248
/* This is similar to operand_subword but allows autoincrement
   addressing: return the rtx for subword I of OP in MODE (which must
   be HImode or HFmode).  Aborts via fatal_insn on address forms that
   cannot be safely split.  */

rtx
c4x_operand_subword (rtx op, int i, int validate_address,
		     enum machine_mode mode)
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE: this inner MODE deliberately shadows the parameter; it
	 is the mode of the address expression.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  /* The increment itself yields the other subword.  */
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high PART.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  return operand_subword (op, i, validate_address, mode);
}
4312
/* Singly-linked list node recording a symbol name for the assembly
   prologue/epilogue bookkeeping below.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Names declared global in this translation unit.  */
static struct name_list *global_head;
/* Names referenced but not defined here; emitted as .ref directives
   at end of file.  */
static struct name_list *extern_head;
4321
4322
4323/* Add NAME to list of global symbols and remove from external list if
4324 present on external list. */
4325
4326void
f12b3fc8 4327c4x_global_label (const char *name)
eff784fe
MH
4328{
4329 struct name_list *p, *last;
4330
4331 /* Do not insert duplicate names, so linearly search through list of
4332 existing names. */
4333 p = global_head;
4334 while (p)
4335 {
4336 if (strcmp (p->name, name) == 0)
4337 return;
4338 p = p->next;
4339 }
6d9f628e 4340 p = (struct name_list *) xmalloc (sizeof *p);
eff784fe
MH
4341 p->next = global_head;
4342 p->name = name;
4343 global_head = p;
4344
4345 /* Remove this name from ref list if present. */
4346 last = NULL;
4347 p = extern_head;
4348 while (p)
4349 {
4350 if (strcmp (p->name, name) == 0)
4351 {
4352 if (last)
4353 last->next = p->next;
4354 else
4355 extern_head = p->next;
4356 break;
4357 }
4358 last = p;
4359 p = p->next;
4360 }
4361}
4362
4363
4364/* Add NAME to list of external symbols. */
4365
4366void
f12b3fc8 4367c4x_external_ref (const char *name)
eff784fe
MH
4368{
4369 struct name_list *p;
4370
4371 /* Do not insert duplicate names. */
4372 p = extern_head;
4373 while (p)
4374 {
4375 if (strcmp (p->name, name) == 0)
4376 return;
4377 p = p->next;
4378 }
4379
4380 /* Do not insert ref if global found. */
4381 p = global_head;
4382 while (p)
4383 {
4384 if (strcmp (p->name, name) == 0)
4385 return;
4386 p = p->next;
4387 }
6d9f628e 4388 p = (struct name_list *) xmalloc (sizeof *p);
eff784fe
MH
4389 p->next = extern_head;
4390 p->name = name;
4391 extern_head = p;
4392}
4393
1bc7c5b6
ZW
4394/* We need to have a data section we can identify so that we can set
4395 the DP register back to a data pointer in the small memory model.
4396 This is only required for ISRs if we are paranoid that someone
4397 may have quietly changed this register on the sly. */
4398static void
f12b3fc8 4399c4x_file_start (void)
1bc7c5b6
ZW
4400{
4401 int dspversion = 0;
4402 if (TARGET_C30) dspversion = 30;
4403 if (TARGET_C31) dspversion = 31;
4404 if (TARGET_C32) dspversion = 32;
4405 if (TARGET_C33) dspversion = 33;
4406 if (TARGET_C40) dspversion = 40;
4407 if (TARGET_C44) dspversion = 44;
4408
4409 default_file_start ();
4410 fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
4411 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4412}
4413
eff784fe 4414
a5fe455b 4415static void
f12b3fc8 4416c4x_file_end (void)
eff784fe
MH
4417{
4418 struct name_list *p;
4419
4420 /* Output all external names that are not global. */
4421 p = extern_head;
4422 while (p)
4423 {
a5fe455b
ZW
4424 fprintf (asm_out_file, "\t.ref\t");
4425 assemble_name (asm_out_file, p->name);
4426 fprintf (asm_out_file, "\n");
eff784fe
MH
4427 p = p->next;
4428 }
a5fe455b 4429 fprintf (asm_out_file, "\t.end\n");
eff784fe
MH
4430}
4431
4432
cb0ca284 4433static void
f12b3fc8 4434c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
cb0ca284
MH
4435{
4436 while (list != NULL_TREE
4ddb3ea6
MH
4437 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4438 != IDENTIFIER_POINTER (DECL_NAME (decl)))
eff784fe 4439 list = TREE_CHAIN (list);
cb0ca284 4440 if (list)
12a68f1f
JM
4441 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4442 *attributes);
cb0ca284
MH
4443}
4444
4445
12a68f1f 4446static void
f12b3fc8 4447c4x_insert_attributes (tree decl, tree *attributes)
cb0ca284
MH
4448{
4449 switch (TREE_CODE (decl))
4450 {
4451 case FUNCTION_DECL:
4452 c4x_check_attribute ("section", code_tree, decl, attributes);
4453 c4x_check_attribute ("const", pure_tree, decl, attributes);
4454 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4455 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
eb47a205 4456 c4x_check_attribute ("naked", naked_tree, decl, attributes);
cb0ca284
MH
4457 break;
4458
4459 case VAR_DECL:
4460 c4x_check_attribute ("section", data_tree, decl, attributes);
4461 break;
4462
4463 default:
4464 break;
4465 }
4466}
4467
91d231cb
JM
/* Table of valid machine attributes.  All three attributes take no
   arguments (min_len == max_len == 0), apply to function types
   (type_req and fn_type_req set), and share the same handler, which
   only verifies that the target is indeed a FUNCTION_TYPE.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "naked",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  /* Sentinel terminating the table.  */
  { NULL,           0, 0, false, false, false, NULL }
};
cb0ca284 4477
91d231cb
JM
4478/* Handle an attribute requiring a FUNCTION_TYPE;
4479 arguments as in struct attribute_spec.handler. */
4480static tree
f12b3fc8
SB
4481c4x_handle_fntype_attribute (tree *node, tree name,
4482 tree args ATTRIBUTE_UNUSED,
4483 int flags ATTRIBUTE_UNUSED,
4484 bool *no_add_attrs)
cb0ca284 4485{
91d231cb
JM
4486 if (TREE_CODE (*node) != FUNCTION_TYPE)
4487 {
4488 warning ("`%s' attribute only applies to functions",
4489 IDENTIFIER_POINTER (name));
4490 *no_add_attrs = true;
4491 }
4492
4493 return NULL_TREE;
cb0ca284
MH
4494}
4495
4496
/* !!! FIXME to emit RPTS correctly. */

/* Return nonzero if the repeat block started by INSN may be emitted
   as a single-instruction RPTS loop.  The block must consist of the
   top label, exactly one body insn, and the rptb_end insn; in
   addition, either -mrpts must be enabled unconditionally, or the
   loop count OP must be a constant within the -mrpts= cycle limit.  */
int
c4x_rptb_rpts_p (rtx insn, rtx op)
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* The insn after the single body insn...  */
  if (! INSN_P (insn))
    return 0;

  /* ...must be the rptb_end, i.e. the loop body is one insn long.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  /* -mrpts: always use RPTS when the shape matches.  */
  if (TARGET_RPTS)
    return 1;

  /* -mrpts=N: only for constant trip counts within the threshold.  */
  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4539
cb0ca284 4540
5078f5eb
HB
4541/* Check if register r11 is used as the destination of an insn. */
4542
4543static int
f12b3fc8 4544c4x_r11_set_p(rtx x)
5078f5eb 4545{
5078f5eb
HB
4546 rtx set;
4547 int i, j;
4548 const char *fmt;
4549
4550 if (x == 0)
4551 return 0;
4552
4a1f52a8 4553 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
5078f5eb
HB
4554 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4555
4a1f52a8
HB
4556 if (INSN_P (x) && (set = single_set (x)))
4557 x = SET_DEST (set);
5078f5eb 4558
4a1f52a8 4559 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
5078f5eb
HB
4560 return 1;
4561
4562 fmt = GET_RTX_FORMAT (GET_CODE (x));
4a1f52a8 4563 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5078f5eb
HB
4564 {
4565 if (fmt[i] == 'e')
4566 {
4567 if (c4x_r11_set_p (XEXP (x, i)))
4568 return 1;
4569 }
4570 else if (fmt[i] == 'E')
4571 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4572 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4573 return 1;
4574 }
4575 return 0;
4576}
4577
4578
4579/* The c4x sometimes has a problem when the insn before the laj insn
4580 sets the r11 register. Check for this situation. */
4581
4582int
f12b3fc8 4583c4x_check_laj_p (rtx insn)
5078f5eb
HB
4584{
4585 insn = prev_nonnote_insn (insn);
4586
4587 /* If this is the start of the function no nop is needed. */
4588 if (insn == 0)
4589 return 0;
4590
4591 /* If the previous insn is a code label we have to insert a nop. This
4592 could be a jump or table jump. We can find the normal jumps by
4593 scanning the function but this will not find table jumps. */
4594 if (GET_CODE (insn) == CODE_LABEL)
4595 return 1;
4596
4597 /* If the previous insn sets register r11 we have to insert a nop. */
4598 if (c4x_r11_set_p (insn))
4599 return 1;
4600
4601 /* No nop needed. */
4602 return 0;
4603}
4604
4605
cb0ca284
MH
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use occurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use occurs a single cycle.  */

/* Stall costs (in cycles) for the three dependency patterns above:
   plain set then use, LDA set then use, read then use.  */
#define	SET_USE_COST	3
#define	SETLDA_USE_COST	2
#define	READ_USE_COST	2

/* Scheduler hook: adjust dependency cost.  The get_attr_* predicates
   used below are generated from insn attributes in c4x.md; each one
   reports whether the insn sets/reads/uses a particular address or
   index register.  */
static int
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  /* REG_NOTE_KIND of 0 means a true (read-after-write) dependence.  */
  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  */
      if (TARGET_C3X)
	{
	  /* The C3x attributes only distinguish one register group.  */
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  /* One identical set/setlda/read test per address register
	     AR0..AR7, then per index register IR0..IR1; MAX tracks the
	     worst stall seen.  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  /* IR0/IR1 have no read-then-use stall tests below -- only
	     set and setlda.  */
	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
8a119a7d
MH
4748
4749void
f12b3fc8 4750c4x_init_builtins (void)
8a119a7d 4751{
f6155fda
SS
4752 tree endlink = void_list_node;
4753
8a119a7d
MH
4754 builtin_function ("fast_ftoi",
4755 build_function_type
4756 (integer_type_node,
4757 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4758 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4759 builtin_function ("ansi_ftoi",
4760 build_function_type
4761 (integer_type_node,
4762 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4763 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4764 if (TARGET_C3X)
4765 builtin_function ("fast_imult",
4766 build_function_type
4767 (integer_type_node,
4768 tree_cons (NULL_TREE, integer_type_node,
4769 tree_cons (NULL_TREE,
4770 integer_type_node, endlink))),
6a2dd09a 4771 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4772 else
4773 {
4774 builtin_function ("toieee",
4775 build_function_type
4776 (double_type_node,
4777 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4778 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4779 builtin_function ("frieee",
4780 build_function_type
4781 (double_type_node,
4782 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4783 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4784 builtin_function ("fast_invf",
4785 build_function_type
4786 (double_type_node,
4787 tree_cons (NULL_TREE, double_type_node, endlink)),
6a2dd09a 4788 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
8a119a7d
MH
4789 }
4790}
4791
4792
/* Expand a call EXP to one of the builtins registered by
   c4x_init_builtins, emitting the corresponding RTL.  TARGET is a
   suggested result location (may be 0 or unsuitable); SUBTARGET,
   MODE and IGNORE are unused.  Returns the rtx holding the result,
   or NULL_RTX when the builtin is not valid for the selected CPU
   (the switch breaks out and falls through).  */
rtx
c4x_expand_builtin (tree exp, rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  /* The callee FUNCTION_DECL, its builtin code, and the argument list.  */
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      /* fast_ftoi: raw float->int conversion via the FIX insn.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      /* ansi_ftoi: ANSI-conforming truncation.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult: 24-bit multiply -- C3x only.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* toieee: convert native float to IEEE format -- C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* frieee: convert IEEE format to native float -- C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      /* A register operand is spilled to a stack slot first --
	 presumably the frieee pattern wants a memory operand;
	 NOTE(review): confirm against the frieee pattern in c4x.md.  */
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* fast_invf: fast reciprocal via RCPF -- C4x only.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  return NULL_RTX;
}
7c262518 4880
c15c90bb
ZW
4881static void
4882c4x_init_libfuncs (void)
4883{
4884 set_optab_libfunc (smul_optab, QImode, "__mulqi3");
4885 set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
4886 set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
4887 set_optab_libfunc (smod_optab, QImode, "__modqi3");
4888 set_optab_libfunc (umod_optab, QImode, "__umodqi3");
4889 set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
4890 set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
4891 set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
4892 set_optab_libfunc (smul_optab, HImode, "__mulhi3");
4893 set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
4894 set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
4895 set_optab_libfunc (smod_optab, HImode, "__modhi3");
4896 set_optab_libfunc (umod_optab, HImode, "__umodhi3");
4897 set_optab_libfunc (ffs_optab, QImode, "__ffs");
4898 smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
4899 umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
4900 fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
4901 fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
4902 fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
4903 fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
4904 floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
4905 floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
4906 floathihf2_libfunc = init_one_libfunc ("__floathihf2");
4907 floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
4908}
4909
7c262518 4910static void
f12b3fc8 4911c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED)
7c262518
RH
4912{
4913 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4914}
e2500fed 4915
/* Worker for TARGET_ASM_GLOBALIZE_LABEL: emit the standard directive
   for NAME via the default hook, then record NAME on the global list
   so no .ref is emitted for it at end of file (see c4x_file_end).  */
static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
3c50106f
RH
4922\f
/* True if C is one of the shift rtx codes.  */
#define SHIFT_CODE_P(C) \
  ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
/* True if C is one of the bitwise-logical rtx codes.  */
#define LOGICAL_CODE_P(C) \
  ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
	 also consider if we are using the small memory model.  With
	 the big memory model we require an extra insn for a constant
	 loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      /* Free: immediates satisfying the J constraint, and -- on the
	 C4x -- the byte/halfword masks for AND and the 16/24 bit
	 shift counts, which have dedicated instruction forms.  */
      if (c4x_J_constant (x))
	*total = 0;
      else if (! TARGET_C3X
	       && outer_code == AND
	       && (val == 255 || val == 65535))
	*total = 0;
      else if (! TARGET_C3X
	       && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
	       && (val == 16 || val == 24))
	*total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
	*total = 3;
      else if (LOGICAL_CODE_P (outer_code)
	       ? c4x_L_constant (x) : c4x_I_constant (x))
	*total = 2;
      else
	*total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Symbolic addresses must be loaded from memory.  */
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
	*total = 2;
      else if (GET_MODE (x) == QFmode)
	*total = 4;
      else
	*total = 8;
      return true;

      /* ??? Note that we return true, rather than false so that rtx_cost
	 doesn't include the constant costs.  Otherwise expand_mult will
	 think that it is cheaper to synthesize a multiply rather than to
	 use a multiply instruction.  I think this is because the algorithm
	 synth_mult doesn't take into account the loading of the operands,
	 whereas the calculation of mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Float multiplies, and integer multiplies with -mmpyi, are a
	 single insn; otherwise a 14-insn libcall sequence.  */
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* Division is always a libcall: ~15 insns for float, ~50 for
	 integer.  */
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      ? 15 : 50);
      return true;

    default:
      return false;
    }
}
28b24176
KH
5014\f
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  Records the
   libcall symbol FUN so that a .ref directive is emitted for it at
   end of file (see c4x_file_end) unless it turns out to be global.  */

static void
c4x_external_libcall (rtx fun)
{
  /* This is only needed to keep asm30 happy for ___divqf3 etc.  */
  c4x_external_ref (XSTR (fun, 0));
}
5023
/* Worker function for TARGET_STRUCT_VALUE_RTX.  Aggregates are
   returned via a pointer passed in register AR0; the same register
   is used on both the caller and callee side, so INCOMING and the
   function type are ignored.  */

static rtx
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, AR0_REGNO);
}