]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/c4x/c4x.c
126b132f5b5405a2d81a51d3f1ebbd0c9175b9ad
[thirdparty/gcc.git] / gcc / config / c4x / c4x.c
1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3 Free Software Foundation, Inc.
4
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
7
8 This file is part of GNU CC.
9
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
14
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
24
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "flags.h"
43 #include "loop.h"
44 #include "recog.h"
45 #include "c-tree.h"
46 #include "ggc.h"
47 #include "cpplib.h"
48 #include "toplev.h"
49 #include "c4x-protos.h"
50 #include "target.h"
51 #include "target-def.h"
52
/* RTL (SYMBOL_REF) handles for out-of-line library routines that the
   C[34]x cannot do inline: HImode (64-bit) multiply and the
   float<->HImode conversions.  They are presumably initialized by the
   init-libfuncs hook elsewhere in this file -- TODO confirm; all of
   them are registered as GC roots in c4x_add_gc_roots below.  */
rtx smulhi3_libfunc;		/* Signed HImode multiply.  */
rtx umulhi3_libfunc;		/* Unsigned HImode multiply.  */
rtx fix_truncqfhi2_libfunc;	/* QFmode -> signed HImode.  */
rtx fixuns_truncqfhi2_libfunc;	/* QFmode -> unsigned HImode.  */
rtx fix_trunchfhi2_libfunc;	/* HFmode -> signed HImode.  */
rtx fixuns_trunchfhi2_libfunc;	/* HFmode -> unsigned HImode.  */
rtx floathiqf2_libfunc;		/* Signed HImode -> QFmode.  */
rtx floatunshiqf2_libfunc;	/* Unsigned HImode -> QFmode.  */
rtx floathihf2_libfunc;		/* Signed HImode -> HFmode.  */
rtx floatunshihf2_libfunc;	/* Unsigned HImode -> HFmode.  */

/* Nonzero if the current function is treated as a leaf for ISR
   save/restore purposes; set by c4x_expand_prologue from
   c4x_leaf_function_p and read by c4x_isr_reg_used_p.  */
static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
67
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
				/* Reg	Modes		Saved.  */
  R0R1_REGS,			/* R0	QI, QF, HF	No.  */
  R0R1_REGS,			/* R1	QI, QF, HF	No.  */
  R2R3_REGS,			/* R2	QI, QF, HF	No.  */
  R2R3_REGS,			/* R3	QI, QF, HF	No.  */
  EXT_LOW_REGS,			/* R4	QI, QF, HF	QI.  */
  EXT_LOW_REGS,			/* R5	QI, QF, HF	QI.  */
  EXT_LOW_REGS,			/* R6	QI, QF, HF	QF.  */
  EXT_LOW_REGS,			/* R7	QI, QF, HF	QF.  */
  ADDR_REGS,			/* AR0	QI		No.  */
  ADDR_REGS,			/* AR1	QI		No.  */
  ADDR_REGS,			/* AR2	QI		No.  */
  ADDR_REGS,			/* AR3	QI		QI.  */
  ADDR_REGS,			/* AR4	QI		QI.  */
  ADDR_REGS,			/* AR5	QI		QI.  */
  ADDR_REGS,			/* AR6	QI		QI.  */
  ADDR_REGS,			/* AR7	QI		QI.  */
  DP_REG,			/* DP	QI		No.  */
  INDEX_REGS,			/* IR0	QI		No.  */
  INDEX_REGS,			/* IR1	QI		No.  */
  BK_REG,			/* BK	QI		QI.  */
  SP_REG,			/* SP	QI		No.  */
  ST_REG,			/* ST	CC		No.  */
  NO_REGS,			/* DIE/IE		No.  */
  NO_REGS,			/* IIE/IF		No.  */
  NO_REGS,			/* IIF/IOF		No.  */
  INT_REGS,			/* RS	QI		No.  */
  INT_REGS,			/* RE	QI		No.  */
  RC_REG,			/* RC	QI		No.  */
  EXT_REGS,			/* R8	QI, QF, HF	QI.  */
  EXT_REGS,			/* R9	QI, QF, HF	No.  */
  EXT_REGS,			/* R10	QI, QF, HF	No.  */
  EXT_REGS,			/* R11	QI, QF, HF	No.  */
};
109
/* Per-register mode table, indexed by hard regno, mirroring
   c4x_regclass_map above.  The name suggests it gives the mode in
   which each register should be saved by the caller-save machinery,
   with VOIDmode marking registers that are never saved that way --
   presumably consumed by a macro in c4x.h; TODO confirm against its
   use site.  */

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
				/* Reg	 Modes		Saved.  */
  HFmode,			/* R0	QI, QF, HF	No.  */
  HFmode,			/* R1	QI, QF, HF	No.  */
  HFmode,			/* R2	QI, QF, HF	No.  */
  HFmode,			/* R3	QI, QF, HF	No.  */
  QFmode,			/* R4	QI, QF, HF	QI.  */
  QFmode,			/* R5	QI, QF, HF	QI.  */
  QImode,			/* R6	QI, QF, HF	QF.  */
  QImode,			/* R7	QI, QF, HF	QF.  */
  QImode,			/* AR0	QI		No.  */
  QImode,			/* AR1	QI		No.  */
  QImode,			/* AR2	QI		No.  */
  QImode,			/* AR3	QI		QI.  */
  QImode,			/* AR4	QI		QI.  */
  QImode,			/* AR5	QI		QI.  */
  QImode,			/* AR6	QI		QI.  */
  QImode,			/* AR7	QI		QI.  */
  VOIDmode,			/* DP	QI		No.  */
  QImode,			/* IR0	QI		No.  */
  QImode,			/* IR1	QI		No.  */
  QImode,			/* BK	QI		QI.  */
  VOIDmode,			/* SP	QI		No.  */
  VOIDmode,			/* ST	CC		No.  */
  VOIDmode,			/* DIE/IE		No.  */
  VOIDmode,			/* IIE/IF		No.  */
  VOIDmode,			/* IIF/IOF		No.  */
  QImode,			/* RS	QI		No.  */
  QImode,			/* RE	QI		No.  */
  VOIDmode,			/* RC	QI		No.  */
  QFmode,			/* R8	QI, QF, HF	QI.  */
  HFmode,			/* R9	QI, QF, HF	No.  */
  HFmode,			/* R10	QI, QF, HF	No.  */
  HFmode,			/* R11	QI, QF, HF	No.  */
};
146
147
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *c4x_compare_op0 = NULL_RTX;	/* Operand 0 of last compare.  */
struct rtx_def *c4x_compare_op1 = NULL_RTX;	/* Operand 1 of last compare.  */

/* Raw option strings and the values parsed from them by
   c4x_override_options below.  */
const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;	/* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;	/* CPU version  C30/31/32/33/40/44.  */

/* Pragma definitions.  These tree lists are registered as GC roots in
   c4x_add_gc_roots; presumably they are built by the pragma handlers
   elsewhere -- TODO confirm (not visible in this chunk).  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
166
/* Forward declarations of file-static helpers.  */
static void c4x_add_gc_roots PARAMS ((void));
static int c4x_isr_reg_used_p PARAMS ((unsigned int));
static int c4x_leaf_function_p PARAMS ((void));
static int c4x_assembler_function_p PARAMS ((void));
static int c4x_immed_float_p PARAMS ((rtx));
static int c4x_a_register PARAMS ((rtx));
static int c4x_x_register PARAMS ((rtx));
static int c4x_immed_int_constant PARAMS ((rtx));
static int c4x_immed_float_constant PARAMS ((rtx));
static int c4x_K_constant PARAMS ((rtx));
static int c4x_N_constant PARAMS ((rtx));
static int c4x_O_constant PARAMS ((rtx));
static int c4x_R_indirect PARAMS ((rtx));
static int c4x_S_indirect PARAMS ((rtx));
static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
				       enum machine_mode, int));
static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
static int c4x_r11_set_p PARAMS ((rtx));
static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Tentative declaration; referenced by TARGET_ATTRIBUTE_TABLE below.  */
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes PARAMS ((tree, tree *));
static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static void c4x_encode_section_info PARAMS ((tree, int));
197 \f
/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides a default hook from target-def.h before TARGET_INITIALIZER
   collects them into the targetm vector below.  */

/* Assembler data directives: the TI assembler uses .word for bytes
   and has no separate 16/32-bit aligned ops.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL

/* Machine-specific attributes (interrupt, assembler, naked, ...).  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

/* Machine-specific builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

/* Scheduler cost adjustment.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info

struct gcc_target targetm = TARGET_INITIALIZER;
225 \f
226 /* Called to register all of our global variables with the garbage
227 collector. */
228
229 static void
230 c4x_add_gc_roots ()
231 {
232 ggc_add_rtx_root (&c4x_compare_op0, 1);
233 ggc_add_rtx_root (&c4x_compare_op1, 1);
234 ggc_add_tree_root (&code_tree, 1);
235 ggc_add_tree_root (&data_tree, 1);
236 ggc_add_tree_root (&pure_tree, 1);
237 ggc_add_tree_root (&noreturn_tree, 1);
238 ggc_add_tree_root (&interrupt_tree, 1);
239 ggc_add_rtx_root (&smulhi3_libfunc, 1);
240 ggc_add_rtx_root (&umulhi3_libfunc, 1);
241 ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
242 ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
243 ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
244 ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
245 ggc_add_rtx_root (&floathiqf2_libfunc, 1);
246 ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
247 ggc_add_rtx_root (&floathihf2_libfunc, 1);
248 ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
249 }
250
251
252 /* Override command line options.
253 Called once after all options have been parsed.
254 Mostly we process the processor
255 type and sometimes adjust other TARGET_ options. */
256
257 void
258 c4x_override_options ()
259 {
260 if (c4x_rpts_cycles_string)
261 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
262 else
263 c4x_rpts_cycles = 0;
264
265 if (TARGET_C30)
266 c4x_cpu_version = 30;
267 else if (TARGET_C31)
268 c4x_cpu_version = 31;
269 else if (TARGET_C32)
270 c4x_cpu_version = 32;
271 else if (TARGET_C33)
272 c4x_cpu_version = 33;
273 else if (TARGET_C40)
274 c4x_cpu_version = 40;
275 else if (TARGET_C44)
276 c4x_cpu_version = 44;
277 else
278 c4x_cpu_version = 40;
279
280 /* -mcpu=xx overrides -m40 etc. */
281 if (c4x_cpu_version_string)
282 {
283 const char *p = c4x_cpu_version_string;
284
285 /* Also allow -mcpu=c30 etc. */
286 if (*p == 'c' || *p == 'C')
287 p++;
288 c4x_cpu_version = atoi (p);
289 }
290
291 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
292 C40_FLAG | C44_FLAG);
293
294 switch (c4x_cpu_version)
295 {
296 case 30: target_flags |= C30_FLAG; break;
297 case 31: target_flags |= C31_FLAG; break;
298 case 32: target_flags |= C32_FLAG; break;
299 case 33: target_flags |= C33_FLAG; break;
300 case 40: target_flags |= C40_FLAG; break;
301 case 44: target_flags |= C44_FLAG; break;
302 default:
303 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
304 c4x_cpu_version = 40;
305 target_flags |= C40_FLAG;
306 }
307
308 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
309 target_flags |= C3X_FLAG;
310 else
311 target_flags &= ~C3X_FLAG;
312
313 /* Convert foo / 8.0 into foo * 0.125, etc. */
314 set_fast_math_flags (1);
315
316 /* We should phase out the following at some stage.
317 This provides compatibility with the old -mno-aliases option. */
318 if (! TARGET_ALIASES && ! flag_argument_noalias)
319 flag_argument_noalias = 1;
320
321 /* Register global variables with the garbage collector. */
322 c4x_add_gc_roots ();
323 }
324
325
326 /* This is called before c4x_override_options. */
327
328 void
329 c4x_optimization_options (level, size)
330 int level ATTRIBUTE_UNUSED;
331 int size ATTRIBUTE_UNUSED;
332 {
333 /* Scheduling before register allocation can screw up global
334 register allocation, especially for functions that use MPY||ADD
335 instructions. The benefit we gain we get by scheduling before
336 register allocation is probably marginal anyhow. */
337 flag_schedule_insns = 0;
338 }
339
340
/* Write an ASCII string as a sequence of .byte directives.  Printable
   runs are buffered into SBUF and emitted as quoted strings; all other
   bytes are emitted as decimal numbers (note: *PTR is plain char, so
   bytes >= 0x80 print as negative values when char is signed --
   presumably acceptable to the assemblers targeted; TODO confirm).
   For the TI assembler, quote/backslash are never placed inside quoted
   strings and output lines are broken near 80 columns.  */

#define C4X_ASCII_LIMIT 40	/* Max chars per quoted string run.  */

void
c4x_output_ascii (stream, ptr, len)
     FILE *stream;
     const char *ptr;
     int len;
{
  char sbuf[C4X_ASCII_LIMIT + 1];	/* Pending printable run.  */
  /* S = chars buffered, L = approx. output column, FIRST = no item
     emitted yet on this line, ONLYS = buffer was flushed only because
     it filled up (current char is already in it).  */
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
	{
	  if (special)
	    sbuf[s++] = '\\';
	  sbuf[s++] = *ptr;
	  if (s < C4X_ASCII_LIMIT - 1)
	    continue;
	  /* Buffer full: fall through to flush it, then skip the
	     numeric-output path for this char.  */
	  onlys = 1;
	}
      /* Flush the buffered quoted string, comma-separated from any
	 previous item on the line.  */
      if (s)
	{
	  if (first)
	    first = 0;
	  else
	    {
	      fputc (',', stream);
	      l++;
	    }

	  sbuf[s] = 0;
	  fprintf (stream, "\"%s\"", sbuf);
	  l += s + 2;
	  /* TI assembler line-length limit: start a new .byte line.  */
	  if (TARGET_TI && l >= 80 && len > 1)
	    {
	      fprintf (stream, "\n\t.byte\t");
	      first = 1;
	      l = 0;
	    }

	  s = 0;
	}
      if (onlys)
	continue;

      /* Non-printable (or TI special) char: emit as a decimal number.  */
      if (first)
	first = 0;
      else
	{
	  fputc (',', stream);
	  l++;
	}

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
	{
	  fprintf (stream, "\n\t.byte\t");
	  first = 1;
	  l = 0;
	}
    }
  /* Flush any printable run left in the buffer.  */
  if (s)
    {
      if (! first)
	fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
427
428
429 int
430 c4x_hard_regno_mode_ok (regno, mode)
431 unsigned int regno;
432 enum machine_mode mode;
433 {
434 switch (mode)
435 {
436 #if Pmode != QImode
437 case Pmode: /* Pointer (24/32 bits). */
438 #endif
439 case QImode: /* Integer (32 bits). */
440 return IS_INT_REGNO (regno);
441
442 case QFmode: /* Float, Double (32 bits). */
443 case HFmode: /* Long Double (40 bits). */
444 return IS_EXT_REGNO (regno);
445
446 case CCmode: /* Condition Codes. */
447 case CC_NOOVmode: /* Condition Codes. */
448 return IS_ST_REGNO (regno);
449
450 case HImode: /* Long Long (64 bits). */
451 /* We need two registers to store long longs. Note that
452 it is much easier to constrain the first register
453 to start on an even boundary. */
454 return IS_INT_REGNO (regno)
455 && IS_INT_REGNO (regno + 1)
456 && (regno & 1) == 0;
457
458 default:
459 return 0; /* We don't support these modes. */
460 }
461
462 return 0;
463 }
464
/* Return non-zero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* We can not copy call saved registers from mode QI into QF or from
     mode QF into QI, in either direction.  */
  if ((IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
      || (IS_INT_CALL_SAVED_REGNO (regno1)
	  && IS_FLOAT_CALL_SAVED_REGNO (regno2)))
    return 0;

  /* We cannot copy between an extended (40 bit) register and a
     standard (32 bit) one in either direction, because we only set
     the condition codes for extended registers.  */
  if ((IS_EXT_REGNO (regno1) != 0) != (IS_EXT_REGNO (regno2) != 0))
    return 0;

  return 1;
}
486
487 /* The TI C3x C compiler register argument runtime model uses 6 registers,
488 AR2, R2, R3, RC, RS, RE.
489
490 The first two floating point arguments (float, double, long double)
491 that are found scanning from left to right are assigned to R2 and R3.
492
493 The remaining integer (char, short, int, long) or pointer arguments
494 are assigned to the remaining registers in the order AR2, R2, R3,
495 RC, RS, RE when scanning left to right, except for the last named
496 argument prior to an ellipsis denoting variable number of
497 arguments. We don't have to worry about the latter condition since
498 function.c treats the last named argument as anonymous (unnamed).
499
500 All arguments that cannot be passed in registers are pushed onto
501 the stack in reverse order (right to left). GCC handles that for us.
502
503 c4x_init_cumulative_args() is called at the start, so we can parse
504 the args to see how many floating point arguments and how many
505 integer (or pointer) arguments there are. c4x_function_arg() is
506 then called (sometimes repeatedly) for each argument (parsed left
507 to right) to obtain the register to pass the argument in, or zero
508 if the argument is to be passed on the stack. Once the compiler is
509 happy, c4x_function_arg_advance() is called.
510
511 Don't use R0 to pass arguments in, we use 0 to indicate a stack
512 argument. */
513
/* Integer argument registers, indexed by [number of float args already
   assigned to R2/R3][integer arg index].  A zero entry means no
   register is left and the argument goes on the stack (which is why R0
   is never used for arguments).  */
static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

/* The first two float/double/long double arguments go in R2 and R3.  */
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
522
523
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   Walks the prototype's argument list to pre-count how many float
   (QF/HF) and integer/pointer (QI/Pmode) arguments there are; these
   counts are later used by c4x_function_arg to decide the register
   assignment (see the TI calling-convention comment above).  */

void
c4x_init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize.  */
     tree fntype;		/* Tree ptr for function decl.  */
     rtx libname;		/* SYMBOL_REF of library name or 0.  */
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;		/* c4x_function_arg finalizes maxima lazily.  */
  cum->var = 0;			/* Set if the function is varargs.  */
  cum->args = 0;		/* Total argument count (debug only).  */

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  /* Count register-eligible float and integer arguments.  */
  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! MUST_PASS_IN_STACK (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}
601
602
603 /* Update the data in CUM to advance over an argument
604 of mode MODE and data type TYPE.
605 (TYPE is null for libcalls where that information may not be available.) */
606
607 void
608 c4x_function_arg_advance (cum, mode, type, named)
609 CUMULATIVE_ARGS *cum; /* Current arg information. */
610 enum machine_mode mode; /* Current arg mode. */
611 tree type; /* Type of the arg or 0 if lib support. */
612 int named; /* Whether or not the argument was named. */
613 {
614 if (TARGET_DEBUG)
615 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
616 GET_MODE_NAME (mode), named);
617 if (! TARGET_MEMPARM
618 && named
619 && type
620 && ! MUST_PASS_IN_STACK (mode, type))
621 {
622 /* Look for float, double, or long double argument. */
623 if (mode == QFmode || mode == HFmode)
624 cum->floats++;
625 /* Look for integer, enumeral, boolean, char, or pointer argument. */
626 else if (mode == QImode || mode == Pmode)
627 cum->ints++;
628 }
629 else if (! TARGET_MEMPARM && ! type)
630 {
631 /* Handle libcall arguments. */
632 if (mode == QFmode || mode == HFmode)
633 cum->floats++;
634 else if (mode == QImode || mode == Pmode)
635 cum->ints++;
636 }
637 return;
638 }
639
640
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* Current arg information.  */
     enum machine_mode mode;	/* Current arg mode.  */
     tree type;			/* Type of the arg or 0 if lib support.  */
     int named;			/* != 0 for normal args, == 0 for ... args.  */
{
  int reg = 0;			/* Default to passing argument on stack.  */

  /* First call: convert the float/int counts gathered by
     c4x_init_cumulative_args into register quotas, then reset the
     counts so they can track arguments actually assigned.  */
  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed 
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ? 
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM 
      && named 
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
734
735
736 void
737 c4x_va_start (stdarg_p, valist, nextarg)
738 int stdarg_p;
739 tree valist;
740 rtx nextarg;
741 {
742 nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);
743
744 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
745 }
746
747
748 /* C[34]x arguments grow in weird ways (downwards) that the standard
749 varargs stuff can't handle.. */
750 rtx
751 c4x_va_arg (valist, type)
752 tree valist, type;
753 {
754 tree t;
755
756 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
757 build_int_2 (int_size_in_bytes (type), 0));
758 TREE_SIDE_EFFECTS (t) = 1;
759
760 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
761 }
762
763
764 static int
765 c4x_isr_reg_used_p (regno)
766 unsigned int regno;
767 {
768 /* Don't save/restore FP or ST, we handle them separately. */
769 if (regno == FRAME_POINTER_REGNUM
770 || IS_ST_REGNO (regno))
771 return 0;
772
773 /* We could be a little smarter abut saving/restoring DP.
774 We'll only save if for the big memory model or if
775 we're paranoid. ;-) */
776 if (IS_DP_REGNO (regno))
777 return ! TARGET_SMALL || TARGET_PARANOID;
778
779 /* Only save/restore regs in leaf function that are used. */
780 if (c4x_leaf_function)
781 return regs_ever_live[regno] && fixed_regs[regno] == 0;
782
783 /* Only save/restore regs that are used by the ISR and regs
784 that are likely to be used by functions the ISR calls
785 if they are not fixed. */
786 return IS_EXT_REGNO (regno)
787 || ((regs_ever_live[regno] || call_used_regs[regno])
788 && fixed_regs[regno] == 0);
789 }
790
791
792 static int
793 c4x_leaf_function_p ()
794 {
795 /* A leaf function makes no calls, so we only need
796 to save/restore the registers we actually use.
797 For the global variable leaf_function to be set, we need
798 to define LEAF_REGISTERS and all that it entails.
799 Let's check ourselves... */
800
801 if (lookup_attribute ("leaf_pretend",
802 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
803 return 1;
804
805 /* Use the leaf_pretend attribute at your own risk. This is a hack
806 to speed up ISRs that call a function infrequently where the
807 overhead of saving and restoring the additional registers is not
808 warranted. You must save and restore the additional registers
809 required by the called function. Caveat emptor. Here's enough
810 rope... */
811
812 if (leaf_function_p ())
813 return 1;
814
815 return 0;
816 }
817
818
819 static int
820 c4x_assembler_function_p ()
821 {
822 tree type;
823
824 type = TREE_TYPE (current_function_decl);
825 return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
826 || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
827 }
828
829
830 int
831 c4x_interrupt_function_p ()
832 {
833 if (lookup_attribute ("interrupt",
834 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
835 return 1;
836
837 /* Look for TI style c_intnn. */
838 return current_function_name[0] == 'c'
839 && current_function_name[1] == '_'
840 && current_function_name[2] == 'i'
841 && current_function_name[3] == 'n'
842 && current_function_name[4] == 't'
843 && ISDIGIT (current_function_name[5])
844 && ISDIGIT (current_function_name[6]);
845 }
846
/* Emit the RTL for the current function's prologue.  Three cases:
   assembler/naked functions get nothing; interrupt functions save ST,
   the frame pointer, every register c4x_isr_reg_used_p selects, clear
   the repeat-mode flag and possibly reload DP; ordinary functions set
   up AR3 as frame pointer (when needed), bump SP by the frame size
   (multi-insn sequence via R1 when it exceeds 32767 words) and push
   the call-saved registers that are live.  */

void
c4x_expand_prologue ()
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __assembler__ function don't build a prologue.  */
  if (c4x_assembler_function_p ())
    {
      return;
    }
  
  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Cache the leaf decision for c4x_isr_reg_used_p.  */
      c4x_leaf_function = c4x_leaf_function_p ();
      
      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
	{
	  /* Push old AR3 and make it point at the incoming SP, then
	     advance SP over the locals.  */
	  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
				       gen_rtx_REG (QImode, SP_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* We require that an ISR uses fewer than 32768 words of
	     local variables, otherwise we have to go to lots of
	     effort to save a register, load it with the desired size,
	     adjust the stack pointer, and then restore the modified
	     register.  Frankly, I think it is a poor ISR that
	     requires more than 32767 words of local temporary
	     storage!  */
	  if (size > 32767)
	    error ("ISR %s requires %d words of local vars, max is 32767",
		   current_function_name, size);

	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Save every register the ISR (or its callees) might clobber.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (c4x_isr_reg_used_p (regno))
	    {
	      if (regno == DP_REGNO)
		{
		  insn = emit_insn (gen_push_dp ());
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  /* Extended registers need both their 32-bit and
		     40-bit halves preserved.  */
		  if (IS_EXT_REGNO (regno))
		    {
		      insn = emit_insn (gen_pushqf
					(gen_rtx_REG (QFmode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	    }
	}
      /* We need to clear the repeat mode flag if the ISR is
	 going to use a RPTB instruction or uses the RC, RS, or RE
	 registers.  */
      if (regs_ever_live[RC_REGNO]
	  || regs_ever_live[RS_REGNO]
	  || regs_ever_live[RE_REGNO])
	{
	  insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      
      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
	{
	  insn = emit_insn (gen_set_ldp_prologue
			    (gen_rtx_REG (QImode, DP_REGNO),
			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Standard frame: push old AR3, AR3 := SP.  */
	      insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
					   gen_rtx_REG (QImode, SP_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dont_push_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used, we don't need to push it.  */
	      dont_push_ar3 = 1;
	    }
	}
      else
	{
	  /* If we use ar3, we need to push it.  */
	  dont_push_ar3 = 0;
	  if ((size != 0) || (current_function_args_size != 0))
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}
      
      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to increment SP.  Build the frame size in R1 (high part
	     then OR in the low 16 bits) and add it to SP.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
					     gen_rtx_REG (QImode, R1_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
					gen_rtx_REG (QImode, R1_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R1_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32767 words, so we can directly
	     add the number.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      
      /* Push the live call-saved registers.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_pushqi
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
		{
		  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}
    }
}
1035
1036
/* Expand the function epilogue as RTL.  Three cases:
   1. __assembler__ functions: emit only the return insn.
   2. __interrupt__ functions: pop every ISR-used register (mirror of
      the interrupt prologue), deallocate the frame, pop ST, and emit
      the return-from-interrupt insn.
   3. Normal functions: restore call-saved registers, restore the old
      frame pointer, deallocate the frame, and return (possibly
      indirectly through R2 when the return address was reloaded).  */

void
c4x_expand_epilogue()
{
  int regno;
  int jump = 0;			/* Nonzero -> return indirectly via R2.  */
  int dont_pop_ar3;		/* Nonzero -> AR3 restored by other means.  */
  rtx insn;
  int size = get_frame_size ();

  /* For __assembler__ function build no epilogue.  */
  if (c4x_assembler_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Pop in reverse order of the interrupt prologue's pushes.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
		 that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  /* Extended-precision register: restore the float part
		     as well as the integer part.  */
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      if (size)
	{
	  /* Deallocate the local frame, then restore the frame pointer.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Load the saved return address (at *(AR3 - 1)) into R2
		 so we can return indirectly after the frame is gone.  */
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  GEN_INT(-1)))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
		 so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;	/* AR3 is restored via a load below.  */
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
		 pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are ommitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
	 where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      /* The prologue pushed both halves; pop the
			 integer part too.  */
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  Build the full 32-bit size in R3 first.  */
	  if (TARGET_C3X)
	    {
	      /* C3x has no 32-bit immediate OR; materialize the high
		 half with a load plus logical shift.  */
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
					gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  /* Return through R2 (loaded with the return address above).  */
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  insn = emit_jump_insn (gen_return_from_epilogue ());
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1234
1235
1236 int
1237 c4x_null_epilogue_p ()
1238 {
1239 int regno;
1240
1241 if (reload_completed
1242 && ! c4x_assembler_function_p ()
1243 && ! c4x_interrupt_function_p ()
1244 && ! current_function_calls_alloca
1245 && ! current_function_args_size
1246 && ! (optimize < 2)
1247 && ! get_frame_size ())
1248 {
1249 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1250 if (regs_ever_live[regno] && ! call_used_regs[regno]
1251 && (regno != AR3_REGNO))
1252 return 0;
1253 return 1;
1254 }
1255 return 0;
1256 }
1257
1258
/* Expand a move between OPERANDS[0] and OPERANDS[1] in MODE, rewriting
   the operands into legitimate forms for the C[34]x.  Return 1 if the
   move has been fully emitted here (the caller should emit nothing
   more); return 0 if the caller should emit the normal move pattern
   using the (possibly updated) operands.  */

int
c4x_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  /* Mem-to-mem moves need the source in a register first, except for
     small (stik) constants being stored (but not pushed).  */
  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
	 address.  */
      op1 = XEXP (op1, 1);
    }
  else if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
	{
	  /* Alias analysis seems to do a better job if we force
	     constant addresses to memory after reload.  */
	  emit_insn (gen_load_immed_address (op0, op1));
	  return 1;
	}
      else
	{
	  /* Stick symbol or label address into the constant pool.  */
	  op1 = force_const_mem (Pmode, op1);
	}
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
	 constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  /* Same transformation for the destination.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  /* Mixed int/float subreg destination: use the special patterns
     that clobber CC.  */
  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
	op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
	{
	  op0 = copy_rtx (op0);
	  PUT_MODE (op0, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Mixed int/float subreg source: likewise.  */
  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
	op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
	{
	  op1 = copy_rtx (op1);
	  PUT_MODE (op1, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Constants that fit neither a 16-bit immediate nor a "high"
     constant need a multi-insn load sequence.  */
  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
1406
1407
/* Emit a call to LIBCALL computing OPERANDS[0] from OPERANDS[1]
   (and OPERANDS[2] when NOPERANDS is 3).  DMODE is the destination
   mode, SMODE the mode of the source operand(s).  An equivalent rtx
   built from CODE is attached via emit_libcall_block so the optimizers
   can treat the call as the corresponding operation.  NOPERANDS must
   be 2 or 3; anything else aborts.  */

void
c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode dmode;
     enum machine_mode smode;
     int noperands;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      /* Unary operation.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
				     operands[1], smode);
      equiv = gen_rtx (code, dmode, operands[1]);
      break;

    case 3:
      /* Binary operation.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
				     operands[1], smode, operands[2], smode);
      equiv = gen_rtx (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
1444
1445
/* Convenience wrapper for the common three-operand case where the
   source and destination share the same MODE.  */

void
c4x_emit_libcall3 (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
1455
1456
/* Emit a call to LIBCALL computing the high part of a widening
   multiply: OPERANDS[0] = high word of OPERANDS[1] * OPERANDS[2].
   CODE is SIGN_EXTEND or ZERO_EXTEND (it wraps the source operands
   in the equivalent rtx); MODE is the operand/result mode.  The
   attached equivalent is (truncate (lshiftrt (mult ...) 32)).  */

void
c4x_emit_libcall_mulhi (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx (code, HImode, operands[1]),
                                                            gen_rtx (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
1481
1482
1483 /* Set the SYMBOL_REF_FLAG for a function decl. However, wo do not
1484 yet use this info. */
1485
/* Target hook: mark the SYMBOL_REF of every FUNCTION_DECL by setting
   SYMBOL_REF_FLAG on its DECL_RTL address.  FIRST is ignored.  */

static void
c4x_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
}
1494
1495
/* Return nonzero if ADDR is a legitimate memory address for MODE.
   STRICT nonzero means only hard registers acceptable as base/index
   are allowed; otherwise pseudos are allowed too.  The address is
   decomposed into BASE (AR0-AR7), INDX (IR0/IR1) and DISP, which are
   then validated individually at the bottom.  */

int
c4x_check_legit_addr (mode, addr, strict)
     enum machine_mode mode;
     rtx addr;
     int strict;
{
  rtx base = NULL_RTX;		/* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;		/* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;		/* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
	 allow SP here---push_operand should recognise an operand
	 being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;

      /* Fall through.  POST_INC is valid for any mode; the others
	 share its base-register check.  */
    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
	return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	if (! REG_P (op0)
	    || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
	  return 0;
	/* The modified register must also be the base of the
	   plus/minus expression.  */
	base = XEXP (op1, 0);
	if (base != op0)
	  return 0;
	if (REG_P (XEXP (op1, 1)))
	  indx = XEXP (op1, 1);
	else
	  disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);
	enum rtx_code code0 = GET_CODE (op0);

	switch (code0)
	  {
	  case REG:
	    if (REG_P (op1))
	      {
		base = op0;	/* Base + index.  */
		indx = op1;
		/* Swap if the operands came in index-first order.  */
		if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
		  {
		    base = op1;
		    indx = op0;
		  }
	      }
	    else
	      {
		base = op0;	/* Base + displacement.  */
		disp = op1;
	      }
	    break;

	  default:
	    return 0;
	  }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* HImode and HFmode direct memory references aren't truly
	   offsettable (consider case at end of data page).  We
	   probably get better code by loading a pointer and using an
	   indirect memory reference.  */
	if (mode == HImode || mode == HFmode)
	  return 0;

	if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
	  return 0;

	if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
	  return 1;

	if (GET_CODE (op1) == CONST)
	  return 1;
	return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
	return 1;
      /* These need to be converted to a LO_SUM (...).
	 LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
	return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
	return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
	return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
	return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
	return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
	return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
	return 0;
      if (mode == HImode || mode == HFmode)
	{
	  /* The offset displacement must be legitimate.  */
	  if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
	    return 0;
	}
      else
	{
	  if (! IS_DISP8_CONST (INTVAL (disp)))
	    return 0;
	}
      /* Can't add an index with a disp.  */
      if (indx)
	return 0;
    }
  return 1;
}
1691
1692
1693 rtx
1694 c4x_legitimize_address (orig, mode)
1695 rtx orig ATTRIBUTE_UNUSED;
1696 enum machine_mode mode ATTRIBUTE_UNUSED;
1697 {
1698 if (GET_CODE (orig) == SYMBOL_REF
1699 || GET_CODE (orig) == LABEL_REF)
1700 {
1701 if (mode == HImode || mode == HFmode)
1702 {
1703 /* We need to force the address into
1704 a register so that it is offsettable. */
1705 rtx addr_reg = gen_reg_rtx (Pmode);
1706 emit_move_insn (addr_reg, orig);
1707 return addr_reg;
1708 }
1709 else
1710 {
1711 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1712
1713 if (! TARGET_SMALL)
1714 emit_insn (gen_set_ldp (dp_reg, orig));
1715
1716 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1717 }
1718 }
1719
1720 return NULL_RTX;
1721 }
1722
1723
1724 /* Provide the costs of an addressing mode that contains ADDR.
1725 If ADDR is not a valid address, its cost is irrelevant.
1726 This is used in cse and loop optimisation to determine
1727 if it is worthwhile storing a common address into a register.
1728 Unfortunately, the C4x address cost depends on other operands. */
1729
/* Return the cost (1 = cheapest) of addressing mode ADDR.  Plain and
   autoincrement register addressing cost 1; register + register or a
   small unsigned constant cost 2; larger displacements cost 3; direct
   symbolic addressing costs the most.  Anything unhandled falls out
   of the switch and costs 4.  */

int
c4x_address_cost (addr)
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
	rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
	  return TARGET_SMALL ? 3 : 4;

	if (GET_CODE (op1) == CONST)
	  {
	    rtx offset = const0_rtx;

	    op1 = eliminate_constant_term (op1, &offset);

	    /* ??? These costs need rethinking...  */
	    if (GET_CODE (op1) == LABEL_REF)
	      return 3;

	    if (GET_CODE (op1) != SYMBOL_REF)
	      return 4;

	    if (INTVAL (offset) == 0)
	      return 3;

	    return 4;
	  }
	fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
	register rtx op0 = XEXP (addr, 0);
	register rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op0) != REG)
	  break;

	switch (GET_CODE (op1))
	  {
	  default:
	    break;

	  case REG:
	    /* This cost for REG+REG must be greater than the cost
	       for REG if we want autoincrement addressing modes.  */
	    return 2;

	  case CONST_INT:
	    /* The following tries to improve GIV combination
	       in strength reduce but appears not to help.  */
	    if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
	      return 1;

	    if (IS_DISP1_CONST (INTVAL (op1)))
	      return 1;

	    if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
	      return 2;

	    return 3;
	  }
      }
      /* Fall through to the default cost.  */
    default:
      break;
    }

  return 4;
}
1819
1820
1821 rtx
1822 c4x_gen_compare_reg (code, x, y)
1823 enum rtx_code code;
1824 rtx x, y;
1825 {
1826 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1827 rtx cc_reg;
1828
1829 if (mode == CC_NOOVmode
1830 && (code == LE || code == GE || code == LT || code == GT))
1831 return NULL_RTX;
1832
1833 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1834 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1835 gen_rtx_COMPARE (mode, x, y)));
1836 return cc_reg;
1837 }
1838
1839 char *
1840 c4x_output_cbranch (form, seq)
1841 const char *form;
1842 rtx seq;
1843 {
1844 int delayed = 0;
1845 int annultrue = 0;
1846 int annulfalse = 0;
1847 rtx delay;
1848 char *cp;
1849 static char str[100];
1850
1851 if (final_sequence)
1852 {
1853 delay = XVECEXP (final_sequence, 0, 1);
1854 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1855 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1856 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1857 }
1858 strcpy (str, form);
1859 cp = &str [strlen (str)];
1860 if (delayed)
1861 {
1862 *cp++ = '%';
1863 *cp++ = '#';
1864 }
1865 if (annultrue)
1866 {
1867 *cp++ = 'a';
1868 *cp++ = 't';
1869 }
1870 if (annulfalse)
1871 {
1872 *cp++ = 'a';
1873 *cp++ = 'f';
1874 }
1875 *cp++ = '\t';
1876 *cp++ = '%';
1877 *cp++ = 'l';
1878 *cp++ = '1';
1879 *cp = 0;
1880 return str;
1881 }
1882
/* Print operand OP to FILE for the assembler, modified by LETTER.
   Letter codes: '#' emits "d" for a delayed branch; 'A' prefixes '@'
   for direct addresses; 'H' prints the bare symbolic constant; 'I'
   prints the reversed condition; 'L' prints log2 of a constant; 'N'
   prints the ones complement; 'K'/'M' emit an ldp/ldpk for direct
   addresses; 'O' prints the offset (second word) of an address or
   the next register; 'U' emits "u" for indirect calls.  With no
   (handled) letter, OP is printed according to its rtx code.  */

void
c4x_print_operand (file, op, letter)
     FILE *file;		/* File to write to.  */
     rtx op;			/* Operand to print.  */
     int letter;		/* %<letter> or 0.  */
{
  rtx op1;
  enum rtx_code code;

  switch (letter)
    {
    case '#':			/* Delayed.  */
      if (final_sequence)
	asm_fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':			/* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
	asm_fprintf (file, "@");
      break;

    case 'H':			/* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':			/* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':			/* Log 2 of constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':			/* Ones complement of small constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, "%d", ~INTVAL (op));
      return;

    case 'K':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
	  && code == MEM
	  && GET_CODE (XEXP (op, 0)) == LO_SUM
	  && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
	{
	  op1 = XEXP (XEXP (op, 0), 1);
	  if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
	    {
	      asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	      output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
	      asm_fprintf (file, "\n");
	    }
	}
      return;

    case 'M':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL	/* Only used in asm statements.  */
	  && code == MEM
	  && (GET_CODE (XEXP (op, 0)) == CONST
	      || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
	{
	  asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	  output_address (XEXP (op, 0));
	  asm_fprintf (file, "\n\t");
	}
      return;

    case 'O':			/* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
	break;
      else if (code == MEM)
	/* Print the address of the word after this one.  */
	output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
	/* Print the next register of a register pair.  */
	fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
	fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':			/* Call.  */
      break;

    case 'U':			/* Call/callu.  */
      if (code != SYMBOL_REF)
	asm_fprintf (file, "u");
      return;

    default:
      break;
    }

  /* No special letter handling (or a fall through above): print the
     operand according to its rtx code.  Comparison codes print the
     C4x condition mnemonic.  */
  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && ! TARGET_TI)
	fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
	fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
	char str[30];
	REAL_VALUE_TYPE r;

	REAL_VALUE_FROM_CONST_DOUBLE (r, op);
	REAL_VALUE_TO_DECIMAL (r, "%20f", str);
	fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, "%d", INTVAL (op));
      break;

    case NE:
      asm_fprintf (file, "ne");
      break;

    case EQ:
      asm_fprintf (file, "eq");
      break;

    case GE:
      asm_fprintf (file, "ge");
      break;

    case GT:
      asm_fprintf (file, "gt");
      break;

    case LE:
      asm_fprintf (file, "le");
      break;

    case LT:
      asm_fprintf (file, "lt");
      break;

    case GEU:
      asm_fprintf (file, "hs");
      break;

    case GTU:
      asm_fprintf (file, "hi");
      break;

    case LEU:
      asm_fprintf (file, "ls");
      break;

    case LTU:
      asm_fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
2065
2066
/* Print memory address ADDR to FILE in C4x assembler syntax:
   "*ARn" register indirect, "*++ARn"/"*ARn++" etc. for pre/post
   modification, "*+ARn(d)"/"*-ARn(d)" for displacements, and "@sym"
   for direct addressing.  LO_SUM with the DP register recurses on
   its symbolic half.  Invalid forms abort via fatal_insn.  */

void
c4x_print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
		   INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  /* A negative increment prints as a post-decrement.  */
	  fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
		   -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
		   INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  /* A negative increment prints as a pre-decrement.  */
	  fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
		   -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:			/* Indirect with displacement.  */
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0))
	  {
	    if (REG_P (op1))
	      {
		if (IS_INDEX_REG (op0))
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op1)],
			     reg_names[REGNO (op0)]);	/* Index + base.  */
		  }
		else
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op0)],
			     reg_names[REGNO (op1)]);	/* Base + index.  */
		  }
	      }
	    else if (INTVAL (op1) < 0)
	      {
		fprintf (file, "*-%s(%d)",
			 reg_names[REGNO (op0)],
			 -INTVAL (op1));	/* Base - displacement.  */
	      }
	    else
	      {
		fprintf (file, "*+%s(%d)",
			 reg_names[REGNO (op0)],
			 INTVAL (op1));	/* Base + displacement.  */
	      }
	  }
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* DP-relative direct address: print the symbolic part.  */
	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  c4x_print_operand_address (file, op1);
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
2205
2206
2207 /* Return nonzero if the floating point operand will fit
2208 in the immediate field. */
2209
/* Return nonzero if the floating point constant OP will fit in the
   C[34]x short immediate format: an exponent in [-7, 7] with the low
   12 mantissa bits zero, or exponent -128 which encodes 0.0.  */

static int
c4x_immed_float_p (op)
     rtx op;
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;			/* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;			/* Precision doesn't fit.  */
  return (exponent <= 7)	/* Positive exp.  */
    && (exponent >= -7);	/* Negative exp.  */
}
2236
2237
2238 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2239 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2240
2241 None of the last four instructions from the bottom of the block can
2242 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2243 BcondAT or RETIcondD.
2244
2245 This routine scans the four previous insns for a jump insn, and if
2246 one is found, returns 1 so that we bung in a nop instruction.
2247 This simple minded strategy will add a nop, when it may not
2248 be required. Say when there is a JUMP_INSN near the end of the
2249 block that doesn't get converted into a delayed branch.
2250
2251 Note that we cannot have a call insn, since we don't generate
2252 repeat loops with calls in them (although I suppose we could, but
2253 there's no benefit.)
2254
2255 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2256
/* Return 1 if a NOP must be inserted before INSN, the rptb_end insn of
   a repeat block: either there is a label at the end of the loop, or a
   jump insn appears within the last four instructions of the block
   (see the restrictions documented in the comment above).  */

int
c4x_rptb_nop_p (insn)
     rtx insn;
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
	   || GET_CODE (insn) == USE
	   || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Examine the previous four real insns.  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  /* Reached the top of the loop: only an empty loop (i == 0)
	     needs the NOP.  */
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	};

      /* If we have a jump instruction we should insert a NOP.  If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
2298
2299
2300 /* The C4x looping instruction needs to be emitted at the top of the
2301 loop. Emitting the true RTL for a looping instruction at the top of
2302 the loop can cause problems with flow analysis. So instead, a dummy
2303 doloop insn is emitted at the end of the loop. This routine checks
2304 for the presence of this doloop insn and then searches back to the
2305 top of the loop, where it inserts the true looping insn (provided
2306 there are no instructions in the loop which would cause problems).
2307 Any additional labels can be emitted at this point. In addition, if
2308 the desired loop count register was not allocated, this routine does
2309 nothing.
2310
2311 Before we can create a repeat block looping instruction we have to
2312 verify that there are no jumps outside the loop and no jumps outside
2313 the loop go into this loop. This can happen in the basic blocks reorder
2314 pass. The C4x cpu can not handle this. */
2315
2316 static int
2317 c4x_label_ref_used_p (x, code_label)
2318 rtx x, code_label;
2319 {
2320 enum rtx_code code;
2321 int i, j;
2322 const char *fmt;
2323
2324 if (x == 0)
2325 return 0;
2326
2327 code = GET_CODE (x);
2328 if (code == LABEL_REF)
2329 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2330
2331 fmt = GET_RTX_FORMAT (code);
2332 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2333 {
2334 if (fmt[i] == 'e')
2335 {
2336 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2337 return 1;
2338 }
2339 else if (fmt[i] == 'E')
2340 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2341 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2342 return 1;
2343 }
2344 return 0;
2345 }
2346
2347
/* Return nonzero if the repeat block ending at the rptb_end insn
   INSN and starting at START_LABEL may be converted into a RPTB
   (or RPTS) loop: START_LABEL must be reachable searching backwards
   from INSN, and no jump may cross the block boundary in either
   direction.  */

static int
c4x_rptb_valid_p (insn, start_label)
     rtx insn, start_label;
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label. */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* Note found then we can not use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass. */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump jumps inside this block then we must fail. */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* INSN is a label before the block; no jump inside the
	     block may reference it.  */
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* INSN is a label after the block; no jump inside the
	     block may reference it.  */
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* If any jump jumps outside this block then we must fail. */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* INSN is a label inside the block; no jump before or
	     after the block may reference it.  */
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }

  /* All checks OK. */
  return 1;
}
2407
2408
/* Given the dummy rptb_end insn INSN, emit the real looping insn
   (rptb_top or rpts_top) at the top of the loop together with fresh
   start and end labels, redirecting jumps that targeted the old
   start label.  If the loop counter was not allocated to RC, do
   nothing; if the block fails c4x_rptb_valid_p, fall back to an
   explicit decrement-compare-branch sequence.  */

void
c4x_rptb_insert (insn)
     rtx insn;
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movstr pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop. */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end). */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We can not use the rptb insn.  Replace it so reorg can use
	 the delay slots of the jump insn. */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
      emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk back to the old start label, redirecting any jump to it
     onto the new start label along the way.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Prefer the single-instruction RPTS form when enabled and legal.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
2468
2469
2470 /* This function is a C4x special called immediately before delayed
2471 branch scheduling. We fix up RTPB style loops that didn't get RC
2472 allocated as the loop counter. */
2473
void
c4x_process_after_reload (first)
     rtx first;
{
  rtx insn;

  /* Walk the whole insn chain starting at FIRST.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn. */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  /* Skip insns that match no pattern.  */
	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop. */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here. Otherwise the calls to
	     force_const_mem will not work for load_immed_address. */
	  old = insn;

	  /* Don't split the insn if it has been deleted. */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing. */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
2518
2519
2520 static int
2521 c4x_a_register (op)
2522 rtx op;
2523 {
2524 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2525 }
2526
2527
2528 static int
2529 c4x_x_register (op)
2530 rtx op;
2531 {
2532 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2533 }
2534
2535
2536 static int
2537 c4x_immed_int_constant (op)
2538 rtx op;
2539 {
2540 if (GET_CODE (op) != CONST_INT)
2541 return 0;
2542
2543 return GET_MODE (op) == VOIDmode
2544 || GET_MODE_CLASS (op) == MODE_INT
2545 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2546 }
2547
2548
2549 static int
2550 c4x_immed_float_constant (op)
2551 rtx op;
2552 {
2553 if (GET_CODE (op) != CONST_DOUBLE)
2554 return 0;
2555
2556 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2557 present this only means that a MEM rtx has been generated. It does
2558 not mean the rtx is really in memory. */
2559
2560 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2561 }
2562
2563
2564 int
2565 c4x_shiftable_constant (op)
2566 rtx op;
2567 {
2568 int i;
2569 int mask;
2570 int val = INTVAL (op);
2571
2572 for (i = 0; i < 16; i++)
2573 {
2574 if (val & (1 << i))
2575 break;
2576 }
2577 mask = ((0xffff >> i) << 16) | 0xffff;
2578 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2579 : (val >> i) & mask))
2580 return i;
2581 return -1;
2582 }
2583
2584
2585 int
2586 c4x_H_constant (op)
2587 rtx op;
2588 {
2589 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2590 }
2591
2592
2593 int
2594 c4x_I_constant (op)
2595 rtx op;
2596 {
2597 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2598 }
2599
2600
2601 int
2602 c4x_J_constant (op)
2603 rtx op;
2604 {
2605 if (TARGET_C3X)
2606 return 0;
2607 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2608 }
2609
2610
2611 static int
2612 c4x_K_constant (op)
2613 rtx op;
2614 {
2615 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2616 return 0;
2617 return IS_INT5_CONST (INTVAL (op));
2618 }
2619
2620
2621 int
2622 c4x_L_constant (op)
2623 rtx op;
2624 {
2625 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2626 }
2627
2628
2629 static int
2630 c4x_N_constant (op)
2631 rtx op;
2632 {
2633 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2634 }
2635
2636
2637 static int
2638 c4x_O_constant (op)
2639 rtx op;
2640 {
2641 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2642 }
2643
2644
2645 /* The constraints do not have to check the register class,
2646 except when needed to discriminate between the constraints.
2647 The operand has been checked by the predicates to be valid. */
2648
2649 /* ARx + 9-bit signed const or IRn
2650 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2651 We don't include the pre/post inc/dec forms here since
2652 they are handled by the <> constraints. */
2653
2654 int
2655 c4x_Q_constraint (op)
2656 rtx op;
2657 {
2658 enum machine_mode mode = GET_MODE (op);
2659
2660 if (GET_CODE (op) != MEM)
2661 return 0;
2662 op = XEXP (op, 0);
2663 switch (GET_CODE (op))
2664 {
2665 case REG:
2666 return 1;
2667
2668 case PLUS:
2669 {
2670 rtx op0 = XEXP (op, 0);
2671 rtx op1 = XEXP (op, 1);
2672
2673 if (! REG_P (op0))
2674 return 0;
2675
2676 if (REG_P (op1))
2677 return 1;
2678
2679 if (GET_CODE (op1) != CONST_INT)
2680 return 0;
2681
2682 /* HImode and HFmode must be offsettable. */
2683 if (mode == HImode || mode == HFmode)
2684 return IS_DISP8_OFF_CONST (INTVAL (op1));
2685
2686 return IS_DISP8_CONST (INTVAL (op1));
2687 }
2688 break;
2689
2690 default:
2691 break;
2692 }
2693 return 0;
2694 }
2695
2696
2697 /* ARx + 5-bit unsigned const
2698 *ARx, *+ARx(n) for n < 32. */
2699
2700 int
2701 c4x_R_constraint (op)
2702 rtx op;
2703 {
2704 enum machine_mode mode = GET_MODE (op);
2705
2706 if (TARGET_C3X)
2707 return 0;
2708 if (GET_CODE (op) != MEM)
2709 return 0;
2710 op = XEXP (op, 0);
2711 switch (GET_CODE (op))
2712 {
2713 case REG:
2714 return 1;
2715
2716 case PLUS:
2717 {
2718 rtx op0 = XEXP (op, 0);
2719 rtx op1 = XEXP (op, 1);
2720
2721 if (! REG_P (op0))
2722 return 0;
2723
2724 if (GET_CODE (op1) != CONST_INT)
2725 return 0;
2726
2727 /* HImode and HFmode must be offsettable. */
2728 if (mode == HImode || mode == HFmode)
2729 return IS_UINT5_CONST (INTVAL (op1) + 1);
2730
2731 return IS_UINT5_CONST (INTVAL (op1));
2732 }
2733 break;
2734
2735 default:
2736 break;
2737 }
2738 return 0;
2739 }
2740
2741
2742 static int
2743 c4x_R_indirect (op)
2744 rtx op;
2745 {
2746 enum machine_mode mode = GET_MODE (op);
2747
2748 if (TARGET_C3X || GET_CODE (op) != MEM)
2749 return 0;
2750
2751 op = XEXP (op, 0);
2752 switch (GET_CODE (op))
2753 {
2754 case REG:
2755 return IS_ADDR_OR_PSEUDO_REG (op);
2756
2757 case PLUS:
2758 {
2759 rtx op0 = XEXP (op, 0);
2760 rtx op1 = XEXP (op, 1);
2761
2762 /* HImode and HFmode must be offsettable. */
2763 if (mode == HImode || mode == HFmode)
2764 return IS_ADDR_OR_PSEUDO_REG (op0)
2765 && GET_CODE (op1) == CONST_INT
2766 && IS_UINT5_CONST (INTVAL (op1) + 1);
2767
2768 return REG_P (op0)
2769 && IS_ADDR_OR_PSEUDO_REG (op0)
2770 && GET_CODE (op1) == CONST_INT
2771 && IS_UINT5_CONST (INTVAL (op1));
2772 }
2773 break;
2774
2775 default:
2776 break;
2777 }
2778 return 0;
2779 }
2780
2781
2782 /* ARx + 1-bit unsigned const or IRn
2783 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2784 We don't include the pre/post inc/dec forms here since
2785 they are handled by the <> constraints. */
2786
int
c4x_S_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      /* *ARx.  */
      return 1;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* The modification must have the form (reg = reg +/- x).  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	/* Only register modification amounts (IRn) qualify here.  */
	return REG_P (op0) && REG_P (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated. */
      }
      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (!REG_P (op0))
	  return 0;

	/* *+ARx(IRn) / *-ARx(IRn).  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable. */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP1_OFF_CONST (INTVAL (op1));

	return IS_DISP1_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2845
2846
/* Like c4x_S_constraint, but additionally require address-class
   base registers (or pseudos); also accepts auto inc/dec forms.  */

static int
c4x_S_indirect (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through: check the base register itself.  */

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The modification must have the form (reg = reg +/- x).  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated. */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable. */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Base and index registers may appear in either order.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
2921
2922
2923 /* Direct memory operand. */
2924
2925 int
2926 c4x_T_constraint (op)
2927 rtx op;
2928 {
2929 if (GET_CODE (op) != MEM)
2930 return 0;
2931 op = XEXP (op, 0);
2932
2933 if (GET_CODE (op) != LO_SUM)
2934 {
2935 /* Allow call operands. */
2936 return GET_CODE (op) == SYMBOL_REF
2937 && GET_MODE (op) == Pmode
2938 && SYMBOL_REF_FLAG (op);
2939 }
2940
2941 /* HImode and HFmode are not offsettable. */
2942 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2943 return 0;
2944
2945 if ((GET_CODE (XEXP (op, 0)) == REG)
2946 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2947 return c4x_U_constraint (XEXP (op, 1));
2948
2949 return 0;
2950 }
2951
2952
2953 /* Symbolic operand. */
2954
2955 int
2956 c4x_U_constraint (op)
2957 rtx op;
2958 {
2959 /* Don't allow direct addressing to an arbitrary constant. */
2960 return GET_CODE (op) == CONST
2961 || GET_CODE (op) == SYMBOL_REF
2962 || GET_CODE (op) == LABEL_REF;
2963 }
2964
2965
2966 int
2967 c4x_autoinc_operand (op, mode)
2968 rtx op;
2969 enum machine_mode mode ATTRIBUTE_UNUSED;
2970 {
2971 if (GET_CODE (op) == MEM)
2972 {
2973 enum rtx_code code = GET_CODE (XEXP (op, 0));
2974
2975 if (code == PRE_INC
2976 || code == PRE_DEC
2977 || code == POST_INC
2978 || code == POST_DEC
2979 || code == PRE_MODIFY
2980 || code == POST_MODIFY
2981 )
2982 return 1;
2983 }
2984 return 0;
2985 }
2986
2987
2988 /* Match any operand. */
2989
2990 int
2991 any_operand (op, mode)
2992 register rtx op ATTRIBUTE_UNUSED;
2993 enum machine_mode mode ATTRIBUTE_UNUSED;
2994 {
2995 return 1;
2996 }
2997
2998
2999 /* Nonzero if OP is a floating point value with value 0.0. */
3000
int
fp_zero_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  REAL_VALUE_TYPE r;

  if (GET_CODE (op) != CONST_DOUBLE)
    return 0;
  /* Extract the host real value and compare it against 0.0.  */
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  return REAL_VALUES_EQUAL (r, dconst0);
}
3013
3014
/* Return 1 if OP is a constant valid as an immediate in mode MODE:
   an immediate float for QFmode/HFmode, or a 16-bit signed or
   "high" constant for QImode (and Pmode).  HImode constants are
   never immediates.  */

int
const_operand (op, mode)
     register rtx op;
     register enum machine_mode mode;
{
  switch (mode)
    {
    case QFmode:
    case HFmode:
      if (GET_CODE (op) != CONST_DOUBLE
	  || GET_MODE (op) != mode
	  || GET_MODE_CLASS (mode) != MODE_FLOAT)
	return 0;

      return c4x_immed_float_p (op);

#if Pmode != QImode
    case Pmode:
#endif
    case QImode:
      if (GET_CODE (op) == CONSTANT_P_RTX)
	return 1;

      if (GET_CODE (op) != CONST_INT
	  || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
	  || GET_MODE_CLASS (mode) != MODE_INT)
	return 0;

      return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));

    case HImode:
      return 0;

    default:
      return 0;
    }
}
3052
3053
3054 int
3055 stik_const_operand (op, mode)
3056 rtx op;
3057 enum machine_mode mode ATTRIBUTE_UNUSED;
3058 {
3059 return c4x_K_constant (op);
3060 }
3061
3062
3063 int
3064 not_const_operand (op, mode)
3065 rtx op;
3066 enum machine_mode mode ATTRIBUTE_UNUSED;
3067 {
3068 return c4x_N_constant (op);
3069 }
3070
3071
3072 int
3073 reg_operand (op, mode)
3074 rtx op;
3075 enum machine_mode mode;
3076 {
3077 if (GET_CODE (op) == SUBREG
3078 && GET_MODE (op) == QFmode)
3079 return 0;
3080 return register_operand (op, mode);
3081 }
3082
3083
int
mixed_subreg_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Allow a (subreg:QF (reg:QI)) or (subreg:QF (reg:HI)) that can
     be generated for a union of an int and a float.
     NOTE(review): the previous comment here said (subreg:HF
     (reg:HI)), but the code tests QFmode -- confirm which was
     intended.  */
  if (GET_CODE (op) == SUBREG
      && (GET_MODE (op) == QFmode)
      && (GET_MODE (SUBREG_REG (op)) == QImode
	  || GET_MODE (SUBREG_REG (op)) == HImode))
    return 1;
  return 0;
}
3098
3099
3100 int
3101 reg_imm_operand (op, mode)
3102 rtx op;
3103 enum machine_mode mode ATTRIBUTE_UNUSED;
3104 {
3105 if (REG_P (op) || CONSTANT_P (op))
3106 return 1;
3107 return 0;
3108 }
3109
3110
/* Return 1 if OP is a register, a constant, or a memory reference
   whose address has no auto increment/decrement or modify side
   effect.  */

int
not_modify_reg (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): there is no break here, so a PLUS whose second
	 operand is neither a REG nor a CONST_INT falls into the
	 LO_SUM test below (accepted only when the base is DP) --
	 confirm this fallthrough is intentional.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
3157
3158
3159 int
3160 not_rc_reg (op, mode)
3161 rtx op;
3162 enum machine_mode mode ATTRIBUTE_UNUSED;
3163 {
3164 if (REG_P (op) && REGNO (op) == RC_REGNO)
3165 return 0;
3166 return 1;
3167 }
3168
3169
3170 /* Extended precision register R0-R1. */
3171
3172 int
3173 r0r1_reg_operand (op, mode)
3174 rtx op;
3175 enum machine_mode mode;
3176 {
3177 if (! reg_operand (op, mode))
3178 return 0;
3179 if (GET_CODE (op) == SUBREG)
3180 op = SUBREG_REG (op);
3181 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3182 }
3183
3184
3185 /* Extended precision register R2-R3. */
3186
3187 int
3188 r2r3_reg_operand (op, mode)
3189 rtx op;
3190 enum machine_mode mode;
3191 {
3192 if (! reg_operand (op, mode))
3193 return 0;
3194 if (GET_CODE (op) == SUBREG)
3195 op = SUBREG_REG (op);
3196 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3197 }
3198
3199
3200 /* Low extended precision register R0-R7. */
3201
3202 int
3203 ext_low_reg_operand (op, mode)
3204 rtx op;
3205 enum machine_mode mode;
3206 {
3207 if (! reg_operand (op, mode))
3208 return 0;
3209 if (GET_CODE (op) == SUBREG)
3210 op = SUBREG_REG (op);
3211 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3212 }
3213
3214
3215 /* Extended precision register. */
3216
3217 int
3218 ext_reg_operand (op, mode)
3219 rtx op;
3220 enum machine_mode mode;
3221 {
3222 if (! reg_operand (op, mode))
3223 return 0;
3224 if (GET_CODE (op) == SUBREG)
3225 op = SUBREG_REG (op);
3226 if (! REG_P (op))
3227 return 0;
3228 return IS_EXT_OR_PSEUDO_REG (op);
3229 }
3230
3231
3232 /* Standard precision register. */
3233
3234 int
3235 std_reg_operand (op, mode)
3236 rtx op;
3237 enum machine_mode mode;
3238 {
3239 if (! reg_operand (op, mode))
3240 return 0;
3241 if (GET_CODE (op) == SUBREG)
3242 op = SUBREG_REG (op);
3243 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3244 }
3245
3246 /* Standard precision or normal register. */
3247
3248 int
3249 std_or_reg_operand (op, mode)
3250 rtx op;
3251 enum machine_mode mode;
3252 {
3253 if (reload_in_progress)
3254 return std_reg_operand (op, mode);
3255 return reg_operand (op, mode);
3256 }
3257
3258 /* Address register. */
3259
int
addr_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept an address register (or pseudo).
     NOTE(review): unlike index_reg_operand below, this does not
     strip a SUBREG before the class check, so c4x_a_register
     rejects any SUBREG that reg_operand let through -- confirm the
     asymmetry is intended.  */
  if (! reg_operand (op, mode))
    return 0;
  return c4x_a_register (op);
}
3269
3270
3271 /* Index register. */
3272
3273 int
3274 index_reg_operand (op, mode)
3275 rtx op;
3276 enum machine_mode mode;
3277 {
3278 if (! reg_operand (op, mode))
3279 return 0;
3280 if (GET_CODE (op) == SUBREG)
3281 op = SUBREG_REG (op);
3282 return c4x_x_register (op);
3283 }
3284
3285
3286 /* DP register. */
3287
3288 int
3289 dp_reg_operand (op, mode)
3290 rtx op;
3291 enum machine_mode mode ATTRIBUTE_UNUSED;
3292 {
3293 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3294 }
3295
3296
3297 /* SP register. */
3298
3299 int
3300 sp_reg_operand (op, mode)
3301 rtx op;
3302 enum machine_mode mode ATTRIBUTE_UNUSED;
3303 {
3304 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3305 }
3306
3307
3308 /* ST register. */
3309
3310 int
3311 st_reg_operand (op, mode)
3312 register rtx op;
3313 enum machine_mode mode ATTRIBUTE_UNUSED;
3314 {
3315 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3316 }
3317
3318
3319 /* RC register. */
3320
3321 int
3322 rc_reg_operand (op, mode)
3323 register rtx op;
3324 enum machine_mode mode ATTRIBUTE_UNUSED;
3325 {
3326 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3327 }
3328
3329
3330 int
3331 call_address_operand (op, mode)
3332 rtx op;
3333 enum machine_mode mode ATTRIBUTE_UNUSED;
3334 {
3335 return (REG_P (op) || symbolic_address_operand (op, mode));
3336 }
3337
3338
3339 /* Symbolic address operand. */
3340
3341 int
3342 symbolic_address_operand (op, mode)
3343 register rtx op;
3344 enum machine_mode mode ATTRIBUTE_UNUSED;
3345 {
3346 switch (GET_CODE (op))
3347 {
3348 case CONST:
3349 case SYMBOL_REF:
3350 case LABEL_REF:
3351 return 1;
3352 default:
3353 return 0;
3354 }
3355 }
3356
3357
3358 /* Check dst operand of a move instruction. */
3359
3360 int
3361 dst_operand (op, mode)
3362 rtx op;
3363 enum machine_mode mode;
3364 {
3365 if (GET_CODE (op) == SUBREG
3366 && mixed_subreg_operand (op, mode))
3367 return 0;
3368
3369 if (REG_P (op))
3370 return reg_operand (op, mode);
3371
3372 return nonimmediate_operand (op, mode);
3373 }
3374
3375
3376 /* Check src operand of two operand arithmetic instructions. */
3377
/* Return 1 if OP is a valid source operand for two operand
   arithmetic instructions in mode MODE: a register (excluding
   forbidden mixed subregs), a 16-bit signed immediate, an immediate
   float, or a general operand that is not a symbolic reference.  */

int
src_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == SUBREG
      && mixed_subreg_operand (op, mode))
    return 0;

  if (REG_P (op))
    return reg_operand (op, mode);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Integer immediates are only valid in QImode/Pmode/HImode and
     must fit in 16 signed bits.  */
  if (GET_CODE (op) == CONST_INT)
    return (mode == QImode || mode == Pmode || mode == HImode)
      && c4x_I_constant (op);

  /* We don't like CONST_DOUBLE integers. */
  if (GET_CODE (op) == CONST_DOUBLE)
    return c4x_H_constant (op);

  /* Disallow symbolic addresses.  Only the predicate
     symbolic_address_operand will match these. */
  if (GET_CODE (op) == SYMBOL_REF
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == CONST)
    return 0;

  /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
     access to symbolic addresses.  These operands will get forced
     into a register and the movqi expander will generate a
     HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
  if (GET_CODE (op) == MEM
      && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	   || GET_CODE (XEXP (op, 0)) == LABEL_REF
	   || GET_CODE (XEXP (op, 0)) == CONST)))
    return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;

  return general_operand (op, mode);
}
3420
3421
3422 int
3423 src_hi_operand (op, mode)
3424 rtx op;
3425 enum machine_mode mode;
3426 {
3427 if (c4x_O_constant (op))
3428 return 1;
3429 return src_operand (op, mode);
3430 }
3431
3432
3433 /* Check src operand of two operand logical instructions. */
3434
3435 int
3436 lsrc_operand (op, mode)
3437 rtx op;
3438 enum machine_mode mode;
3439 {
3440 if (mode == VOIDmode)
3441 mode = GET_MODE (op);
3442
3443 if (mode != QImode && mode != Pmode)
3444 fatal_insn ("mode not QImode", op);
3445
3446 if (GET_CODE (op) == CONST_INT)
3447 return c4x_L_constant (op) || c4x_J_constant (op);
3448
3449 return src_operand (op, mode);
3450 }
3451
3452
3453 /* Check src operand of two operand tricky instructions. */
3454
3455 int
3456 tsrc_operand (op, mode)
3457 rtx op;
3458 enum machine_mode mode;
3459 {
3460 if (mode == VOIDmode)
3461 mode = GET_MODE (op);
3462
3463 if (mode != QImode && mode != Pmode)
3464 fatal_insn ("mode not QImode", op);
3465
3466 if (GET_CODE (op) == CONST_INT)
3467 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3468
3469 return src_operand (op, mode);
3470 }
3471
3472
3473 /* Check src operand of two operand non immedidate instructions. */
3474
3475 int
3476 nonimmediate_src_operand (op, mode)
3477 rtx op;
3478 enum machine_mode mode;
3479 {
3480 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3481 return 0;
3482
3483 return src_operand (op, mode);
3484 }
3485
3486
3487 /* Check logical src operand of two operand non immedidate instructions. */
3488
3489 int
3490 nonimmediate_lsrc_operand (op, mode)
3491 rtx op;
3492 enum machine_mode mode;
3493 {
3494 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3495 return 0;
3496
3497 return lsrc_operand (op, mode);
3498 }
3499
3500
3501 int
3502 reg_or_const_operand (op, mode)
3503 rtx op;
3504 enum machine_mode mode;
3505 {
3506 return reg_operand (op, mode) || const_operand (op, mode);
3507 }
3508
3509
3510 /* Check for indirect operands allowable in parallel instruction. */
3511
3512 int
3513 par_ind_operand (op, mode)
3514 rtx op;
3515 enum machine_mode mode;
3516 {
3517 if (mode != VOIDmode && mode != GET_MODE (op))
3518 return 0;
3519
3520 return c4x_S_indirect (op);
3521 }
3522
3523
3524 /* Check for operands allowable in parallel instruction. */
3525
3526 int
3527 parallel_operand (op, mode)
3528 rtx op;
3529 enum machine_mode mode;
3530 {
3531 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3532 }
3533
3534
/* Decompose the S-class indirect memory reference OP into its base
   register number (*BASE), whether the address has a side effect
   (*INCDEC), its index register number (*INDEX, 0 if none), and its
   constant displacement (*DISP).  Aborts via fatal_insn if OP is
   not a valid S-class reference.  */

static void
c4x_S_address_parse (op, base, incdec, index, disp)
     rtx op;
     int *base;
     int *incdec;
     int *index;
     int *disp;
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		/* *+ARx(IRn).  */
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		/* *+ARx(n) with a displacement of 0 or 1.  */
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    /* Commuted form: index register first.  */
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fallthrough. */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
3641
3642
3643 int
3644 c4x_address_conflict (op0, op1, store0, store1)
3645 rtx op0;
3646 rtx op1;
3647 int store0;
3648 int store1;
3649 {
3650 int base0;
3651 int base1;
3652 int incdec0;
3653 int incdec1;
3654 int index0;
3655 int index1;
3656 int disp0;
3657 int disp1;
3658
3659 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3660 return 1;
3661
3662 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3663 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3664
3665 if (store0 && store1)
3666 {
3667 /* If we have two stores in parallel to the same address, then
3668 the C4x only executes one of the stores. This is unlikely to
3669 cause problems except when writing to a hardware device such
3670 as a FIFO since the second write will be lost. The user
3671 should flag the hardware location as being volatile so that
3672 we don't do this optimisation. While it is unlikely that we
3673 have an aliased address if both locations are not marked
3674 volatile, it is probably safer to flag a potential conflict
3675 if either location is volatile. */
3676 if (! flag_argument_noalias)
3677 {
3678 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3679 return 1;
3680 }
3681 }
3682
3683 /* If have a parallel load and a store to the same address, the load
3684 is performed first, so there is no conflict. Similarly, there is
3685 no conflict if have parallel loads from the same address. */
3686
3687 /* Cannot use auto increment or auto decrement twice for same
3688 base register. */
3689 if (base0 == base1 && incdec0 && incdec0)
3690 return 1;
3691
3692 /* It might be too confusing for GCC if we have use a base register
3693 with a side effect and a memory reference using the same register
3694 in parallel. */
3695 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3696 return 1;
3697
3698 /* We can not optimize the case where op1 and op2 refer to the same
3699 address. */
3700 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3701 return 1;
3702
3703 /* No conflict. */
3704 return 0;
3705 }
3706
3707
3708 /* Check for while loop inside a decrement and branch loop. */
3709
3710 int
3711 c4x_label_conflict (insn, jump, db)
3712 rtx insn;
3713 rtx jump;
3714 rtx db;
3715 {
3716 while (insn)
3717 {
3718 if (GET_CODE (insn) == CODE_LABEL)
3719 {
3720 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3721 return 1;
3722 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3723 return 0;
3724 }
3725 insn = PREV_INSN (insn);
3726 }
3727 return 1;
3728 }
3729
3730
3731 /* Validate combination of operands for parallel load/store instructions. */
3732
3733 int
3734 valid_parallel_load_store (operands, mode)
3735 rtx *operands;
3736 enum machine_mode mode ATTRIBUTE_UNUSED;
3737 {
3738 rtx op0 = operands[0];
3739 rtx op1 = operands[1];
3740 rtx op2 = operands[2];
3741 rtx op3 = operands[3];
3742
3743 if (GET_CODE (op0) == SUBREG)
3744 op0 = SUBREG_REG (op0);
3745 if (GET_CODE (op1) == SUBREG)
3746 op1 = SUBREG_REG (op1);
3747 if (GET_CODE (op2) == SUBREG)
3748 op2 = SUBREG_REG (op2);
3749 if (GET_CODE (op3) == SUBREG)
3750 op3 = SUBREG_REG (op3);
3751
3752 /* The patterns should only allow ext_low_reg_operand() or
3753 par_ind_operand() operands. Thus of the 4 operands, only 2
3754 should be REGs and the other 2 should be MEMs. */
3755
3756 /* This test prevents the multipack pass from using this pattern if
3757 op0 is used as an index or base register in op2 or op3, since
3758 this combination will require reloading. */
3759 if (GET_CODE (op0) == REG
3760 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3761 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3762 return 0;
3763
3764 /* LDI||LDI. */
3765 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3766 return (REGNO (op0) != REGNO (op2))
3767 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3768 && ! c4x_address_conflict (op1, op3, 0, 0);
3769
3770 /* STI||STI. */
3771 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3772 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3773 && ! c4x_address_conflict (op0, op2, 1, 1);
3774
3775 /* LDI||STI. */
3776 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3777 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3778 && ! c4x_address_conflict (op1, op2, 0, 1);
3779
3780 /* STI||LDI. */
3781 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3782 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3783 && ! c4x_address_conflict (op0, op3, 1, 0);
3784
3785 return 0;
3786 }
3787
3788
3789 int
3790 valid_parallel_operands_4 (operands, mode)
3791 rtx *operands;
3792 enum machine_mode mode ATTRIBUTE_UNUSED;
3793 {
3794 rtx op0 = operands[0];
3795 rtx op2 = operands[2];
3796
3797 if (GET_CODE (op0) == SUBREG)
3798 op0 = SUBREG_REG (op0);
3799 if (GET_CODE (op2) == SUBREG)
3800 op2 = SUBREG_REG (op2);
3801
3802 /* This test prevents the multipack pass from using this pattern if
3803 op0 is used as an index or base register in op2, since this combination
3804 will require reloading. */
3805 if (GET_CODE (op0) == REG
3806 && GET_CODE (op2) == MEM
3807 && reg_mentioned_p (op0, XEXP (op2, 0)))
3808 return 0;
3809
3810 return 1;
3811 }
3812
3813
3814 int
3815 valid_parallel_operands_5 (operands, mode)
3816 rtx *operands;
3817 enum machine_mode mode ATTRIBUTE_UNUSED;
3818 {
3819 int regs = 0;
3820 rtx op0 = operands[0];
3821 rtx op1 = operands[1];
3822 rtx op2 = operands[2];
3823 rtx op3 = operands[3];
3824
3825 if (GET_CODE (op0) == SUBREG)
3826 op0 = SUBREG_REG (op0);
3827 if (GET_CODE (op1) == SUBREG)
3828 op1 = SUBREG_REG (op1);
3829 if (GET_CODE (op2) == SUBREG)
3830 op2 = SUBREG_REG (op2);
3831
3832 /* The patterns should only allow ext_low_reg_operand() or
3833 par_ind_operand() operands. Operands 1 and 2 may be commutative
3834 but only one of them can be a register. */
3835 if (GET_CODE (op1) == REG)
3836 regs++;
3837 if (GET_CODE (op2) == REG)
3838 regs++;
3839
3840 if (regs != 1)
3841 return 0;
3842
3843 /* This test prevents the multipack pass from using this pattern if
3844 op0 is used as an index or base register in op3, since this combination
3845 will require reloading. */
3846 if (GET_CODE (op0) == REG
3847 && GET_CODE (op3) == MEM
3848 && reg_mentioned_p (op0, XEXP (op3, 0)))
3849 return 0;
3850
3851 return 1;
3852 }
3853
3854
3855 int
3856 valid_parallel_operands_6 (operands, mode)
3857 rtx *operands;
3858 enum machine_mode mode ATTRIBUTE_UNUSED;
3859 {
3860 int regs = 0;
3861 rtx op0 = operands[0];
3862 rtx op1 = operands[1];
3863 rtx op2 = operands[2];
3864 rtx op4 = operands[4];
3865 rtx op5 = operands[5];
3866
3867 if (GET_CODE (op1) == SUBREG)
3868 op1 = SUBREG_REG (op1);
3869 if (GET_CODE (op2) == SUBREG)
3870 op2 = SUBREG_REG (op2);
3871 if (GET_CODE (op4) == SUBREG)
3872 op4 = SUBREG_REG (op4);
3873 if (GET_CODE (op5) == SUBREG)
3874 op5 = SUBREG_REG (op5);
3875
3876 /* The patterns should only allow ext_low_reg_operand() or
3877 par_ind_operand() operands. Thus of the 4 input operands, only 2
3878 should be REGs and the other 2 should be MEMs. */
3879
3880 if (GET_CODE (op1) == REG)
3881 regs++;
3882 if (GET_CODE (op2) == REG)
3883 regs++;
3884 if (GET_CODE (op4) == REG)
3885 regs++;
3886 if (GET_CODE (op5) == REG)
3887 regs++;
3888
3889 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3890 Perhaps we should count the MEMs as well? */
3891 if (regs != 2)
3892 return 0;
3893
3894 /* This test prevents the multipack pass from using this pattern if
3895 op0 is used as an index or base register in op4 or op5, since
3896 this combination will require reloading. */
3897 if (GET_CODE (op0) == REG
3898 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3899 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3900 return 0;
3901
3902 return 1;
3903 }
3904
3905
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.
   Returns nonzero when the combination is directly encodable.  */

static int
c4x_valid_operands (code, operands, mode, force)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int force;
{
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* COMPARE has no destination, so the two sources are operands 0/1;
     all other codes put the sources in operands 1/2.  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  /* Look through SUBREGs.  */
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  /* Two registers are always OK.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory operands must both be S-mode or both R-mode indirect.  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* Identical codes other than REG/REG and MEM/MEM (handled above)
     are not a valid combination.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:

	  /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	     into a stack slot memory address comprising a PLUS and a
	     constant.  */
	case ADDRESSOF:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
    }

  /* We assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  /* Here op2 is a register and op1 is not; mirror the checks above.  */
  switch (code1)
    {
    case CONST_INT:
      if (c4x_J_constant (op1) && c4x_R_indirect (op2))
	return 1;
      break;

    case CONST_DOUBLE:
      if (! c4x_H_constant (op1))
	return 0;
      break;

      /* Any valid memory operand screened by src_operand is OK.  */
    case MEM:
#if 0
      if (code2 != REG)
	return 0;
#endif
      break;

      /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	 into a stack slot memory address comprising a PLUS and a
	 constant.  */
    case ADDRESSOF:
      break;

    default:
      abort ();
      break;
    }

  /* Check that we have a valid destination register for a two operand
     instruction.  */
  return ! force || REGNO (op1) == REGNO (operands[0]);
}
4032
4033
4034 int valid_operands (code, operands, mode)
4035 enum rtx_code code;
4036 rtx *operands;
4037 enum machine_mode mode;
4038 {
4039
4040 /* If we are not optimizing then we have to let anything go and let
4041 reload fix things up. instantiate_decl in function.c can produce
4042 invalid insns by changing the offset of a memory operand from a
4043 valid one into an invalid one, when the second operand is also a
4044 memory operand. The alternative is not to allow two memory
4045 operands for an insn when not optimizing. The problem only rarely
4046 occurs, for example with the C-torture program DFcmp.c. */
4047
4048 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
4049 }
4050
4051
/* Massage OPERANDS for operation CODE in MODE into a combination the
   machine can encode, forcing values into registers as needed.
   Mutates the OPERANDS array in place and may emit moves.  Always
   returns 1.  */

int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimisation enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      /* If the combination is still invalid, force the first compare
	 operand into a register.  */
      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  /* Two-operand form: route the result through operand 0 so
	     that source and destination registers match.  */
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4139
4140
4141 /* The following predicates are used for instruction scheduling. */
4142
4143 int
4144 group1_reg_operand (op, mode)
4145 rtx op;
4146 enum machine_mode mode;
4147 {
4148 if (mode != VOIDmode && mode != GET_MODE (op))
4149 return 0;
4150 if (GET_CODE (op) == SUBREG)
4151 op = SUBREG_REG (op);
4152 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4153 }
4154
4155
4156 int
4157 group1_mem_operand (op, mode)
4158 rtx op;
4159 enum machine_mode mode;
4160 {
4161 if (mode != VOIDmode && mode != GET_MODE (op))
4162 return 0;
4163
4164 if (GET_CODE (op) == MEM)
4165 {
4166 op = XEXP (op, 0);
4167 if (GET_CODE (op) == PLUS)
4168 {
4169 rtx op0 = XEXP (op, 0);
4170 rtx op1 = XEXP (op, 1);
4171
4172 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4173 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4174 return 1;
4175 }
4176 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4177 return 1;
4178 }
4179
4180 return 0;
4181 }
4182
4183
4184 /* Return true if any one of the address registers. */
4185
4186 int
4187 arx_reg_operand (op, mode)
4188 rtx op;
4189 enum machine_mode mode;
4190 {
4191 if (mode != VOIDmode && mode != GET_MODE (op))
4192 return 0;
4193 if (GET_CODE (op) == SUBREG)
4194 op = SUBREG_REG (op);
4195 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4196 }
4197
4198
4199 static int
4200 c4x_arn_reg_operand (op, mode, regno)
4201 rtx op;
4202 enum machine_mode mode;
4203 unsigned int regno;
4204 {
4205 if (mode != VOIDmode && mode != GET_MODE (op))
4206 return 0;
4207 if (GET_CODE (op) == SUBREG)
4208 op = SUBREG_REG (op);
4209 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4210 }
4211
4212
/* Common helper for the arN/irN memory predicates: nonzero if OP is a
   MEM whose address involves a register that, after reload, must be
   hard register REGNO.  Before reload any register matches.  */

static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  op = XEXP (op, 0);
	  /* Fall through: check the auto-incremented register itself.  */

	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Either the modified register or the second operand of the
	     embedded expression (XEXP (op, 1)) may be the match.  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    /* Base + index: either side may be the register.  */
	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
4266
4267
/* Nonzero if OP is a register; after reload it must be AR0.  */

int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
4275
4276
/* Nonzero if OP is a MEM whose address uses AR0 (enforced after reload).  */

int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
4284
4285
/* Nonzero if OP is a register; after reload it must be AR1.  */

int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
4293
4294
/* Nonzero if OP is a MEM whose address uses AR1 (enforced after reload).  */

int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
4302
4303
/* Nonzero if OP is a register; after reload it must be AR2.  */

int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
4311
4312
/* Nonzero if OP is a MEM whose address uses AR2 (enforced after reload).  */

int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
4320
4321
/* Nonzero if OP is a register; after reload it must be AR3.  */

int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
4329
4330
/* Nonzero if OP is a MEM whose address uses AR3 (enforced after reload).  */

int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
4338
4339
/* Nonzero if OP is a register; after reload it must be AR4.  */

int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
4347
4348
/* Nonzero if OP is a MEM whose address uses AR4 (enforced after reload).  */

int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
4356
4357
/* Nonzero if OP is a register; after reload it must be AR5.  */

int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
4365
4366
/* Nonzero if OP is a MEM whose address uses AR5 (enforced after reload).  */

int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
4374
4375
/* Nonzero if OP is a register; after reload it must be AR6.  */

int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
4383
4384
/* Nonzero if OP is a MEM whose address uses AR6 (enforced after reload).  */

int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
4392
4393
/* Nonzero if OP is a register; after reload it must be AR7.  */

int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
4401
4402
/* Nonzero if OP is a MEM whose address uses AR7 (enforced after reload).  */

int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
4410
4411
/* Nonzero if OP is a register; after reload it must be IR0.  */

int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
4419
4420
/* Nonzero if OP is a MEM whose address uses IR0 (enforced after reload).  */

int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
4428
4429
/* Nonzero if OP is a register; after reload it must be IR1.  */

int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
4437
4438
/* Nonzero if OP is a MEM whose address uses IR1 (enforced after reload).  */

int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4446
4447
/* This is similar to operand_subword but allows autoincrement
   addressing: return subword I of OP (which must be HImode or
   HFmode).  Aborts via fatal_insn on address forms that cannot be
   split safely.  */

rtx
c4x_operand_subword (op, i, validate_address, mode)
     rtx op;
     int i;
     int validate_address;
     enum machine_mode mode;
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  /* An HFmode register cannot be split into subwords here.  */
  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE(review): this inner `mode' intentionally shadows the
	 parameter — it is the mode of the *address*, not of OP.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      /* Each subword of an HImode/HFmode value is a QImode/QFmode
	 quantity.  */
      submode = mode;
      if (mode == HImode)
	submode = QImode;
      else if (mode == HFmode)
	submode = QFmode;

      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	  return gen_rtx_MEM (submode, XEXP (op, 0));

	case POST_DEC:
	case PRE_DEC:
	case PRE_MODIFY:
	case POST_MODIFY:
	  /* We could handle these with some difficulty.
	     e.g., *p-- => *(p-=2); *(p+1).  */
	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

	case SYMBOL_REF:
	case LABEL_REF:
	case CONST:
	case CONST_INT:
	  fatal_insn ("c4x_operand_subword: invalid address", op);

	  /* Even though offsettable_address_p considers (MEM
	     (LO_SUM)) to be offsettable, it is not safe if the
	     address is at the end of the data page since we also have
	     to fix up the associated high PART.  In this case where
	     we are trying to split a HImode or HFmode memory
	     reference, we would have to emit another insn to reload a
	     new HIGH value.  It's easier to disable LO_SUM memory references
	     in HImode or HFmode and we probably get better code.  */
	case LO_SUM:
	  fatal_insn ("c4x_operand_subword: address not offsettable", op);

	default:
	  break;
	}
    }

  /* Fall back to the generic subword extraction.  */
  return operand_subword (op, i, validate_address, mode);
}
4514
/* Singly-linked list node recording a symbol name, used to track the
   .ref directives to be emitted at end of assembly output.  */

struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Names declared global in this file.  */
static struct name_list *global_head;
/* Names referenced externally (candidates for .ref directives).  */
static struct name_list *extern_head;
4523
4524
4525 /* Add NAME to list of global symbols and remove from external list if
4526 present on external list. */
4527
4528 void
4529 c4x_global_label (name)
4530 const char *name;
4531 {
4532 struct name_list *p, *last;
4533
4534 /* Do not insert duplicate names, so linearly search through list of
4535 existing names. */
4536 p = global_head;
4537 while (p)
4538 {
4539 if (strcmp (p->name, name) == 0)
4540 return;
4541 p = p->next;
4542 }
4543 p = (struct name_list *) permalloc (sizeof *p);
4544 p->next = global_head;
4545 p->name = name;
4546 global_head = p;
4547
4548 /* Remove this name from ref list if present. */
4549 last = NULL;
4550 p = extern_head;
4551 while (p)
4552 {
4553 if (strcmp (p->name, name) == 0)
4554 {
4555 if (last)
4556 last->next = p->next;
4557 else
4558 extern_head = p->next;
4559 break;
4560 }
4561 last = p;
4562 p = p->next;
4563 }
4564 }
4565
4566
4567 /* Add NAME to list of external symbols. */
4568
4569 void
4570 c4x_external_ref (name)
4571 const char *name;
4572 {
4573 struct name_list *p;
4574
4575 /* Do not insert duplicate names. */
4576 p = extern_head;
4577 while (p)
4578 {
4579 if (strcmp (p->name, name) == 0)
4580 return;
4581 p = p->next;
4582 }
4583
4584 /* Do not insert ref if global found. */
4585 p = global_head;
4586 while (p)
4587 {
4588 if (strcmp (p->name, name) == 0)
4589 return;
4590 p = p->next;
4591 }
4592 p = (struct name_list *) permalloc (sizeof *p);
4593 p->next = extern_head;
4594 p->name = name;
4595 extern_head = p;
4596 }
4597
4598
4599 void
4600 c4x_file_end (fp)
4601 FILE *fp;
4602 {
4603 struct name_list *p;
4604
4605 /* Output all external names that are not global. */
4606 p = extern_head;
4607 while (p)
4608 {
4609 fprintf (fp, "\t.ref\t");
4610 assemble_name (fp, p->name);
4611 fprintf (fp, "\n");
4612 p = p->next;
4613 }
4614 fprintf (fp, "\t.end\n");
4615 }
4616
4617
/* Search LIST (a TREE_LIST keyed by identifiers) for an entry whose
   key matches DECL's name; if found, prepend an attribute named
   ATTRIB with that entry's value onto *ATTRIBUTES.  */

static void
c4x_check_attribute (attrib, list, decl, attributes)
     const char *attrib;
     tree list, decl, *attributes;
{
  /* NOTE(review): this compares IDENTIFIER_POINTER values by address,
     which relies on identifier strings being shared/interned —
     confirm against how the lists are built.  */
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4631
4632
4633 static void
4634 c4x_insert_attributes (decl, attributes)
4635 tree decl, *attributes;
4636 {
4637 switch (TREE_CODE (decl))
4638 {
4639 case FUNCTION_DECL:
4640 c4x_check_attribute ("section", code_tree, decl, attributes);
4641 c4x_check_attribute ("const", pure_tree, decl, attributes);
4642 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4643 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4644 break;
4645
4646 case VAR_DECL:
4647 c4x_check_attribute ("section", data_tree, decl, attributes);
4648 break;
4649
4650 default:
4651 break;
4652 }
4653 }
4654
/* Table of valid machine attributes.  The all-NULL final entry is the
   sentinel terminating the table.  All three attributes apply only to
   function types and take no arguments.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  /* FIXME: code elsewhere in this file treats "naked" as a synonym of
     "interrupt"; should it be accepted here?  */
  { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
4666
4667 /* Handle an attribute requiring a FUNCTION_TYPE;
4668 arguments as in struct attribute_spec.handler. */
4669 static tree
4670 c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4671 tree *node;
4672 tree name;
4673 tree args ATTRIBUTE_UNUSED;
4674 int flags ATTRIBUTE_UNUSED;
4675 bool *no_add_attrs;
4676 {
4677 if (TREE_CODE (*node) != FUNCTION_TYPE)
4678 {
4679 warning ("`%s' attribute only applies to functions",
4680 IDENTIFIER_POINTER (name));
4681 *no_add_attrs = true;
4682 }
4683
4684 return NULL_TREE;
4685 }
4686
4687
/* !!! FIXME to emit RPTS correctly.  */

/* Decide whether the repeat block started by INSN can use the
   single-instruction RPTS form: the block must contain exactly one
   insn followed by the rptb_end pattern, and either TARGET_RPTS is on
   or OP is a repeat count for which TARGET_RPTS_CYCLES allows it.  */

int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  if (! INSN_P (insn))
    return 0;

  /* The second real insn must already be the rptb_end pattern, i.e.
     the loop body is a single instruction.  */
  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4731
4732
/* Check if register r11 is used as the destination of an insn.
   Recursively scans X (an insn or arbitrary rtx) and returns nonzero
   when a reference to R11 appears in a destination position.  */

static int
c4x_r11_set_p(x)
     rtx x;
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a SEQUENCE (presumably a delay-slot group — confirm), only
     the last insn of the sequence is examined.  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* For a single-set insn, narrow the search to its destination.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Otherwise walk every sub-rtx.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4770
4771
4772 /* The c4x sometimes has a problem when the insn before the laj insn
4773 sets the r11 register. Check for this situation. */
4774
4775 int
4776 c4x_check_laj_p (insn)
4777 rtx insn;
4778 {
4779 insn = prev_nonnote_insn (insn);
4780
4781 /* If this is the start of the function no nop is needed. */
4782 if (insn == 0)
4783 return 0;
4784
4785 /* If the previous insn is a code label we have to insert a nop. This
4786 could be a jump or table jump. We can find the normal jumps by
4787 scanning the function but this will not find table jumps. */
4788 if (GET_CODE (insn) == CODE_LABEL)
4789 return 1;
4790
4791 /* If the previous insn sets register r11 we have to insert a nop. */
4792 if (c4x_r11_set_p (insn))
4793 return 1;
4794
4795 /* No nop needed. */
4796 return 0;
4797 }
4798
4799
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use occurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use occurs a single cycle.  */

#define	SET_USE_COST	3
#define	SETLDA_USE_COST	2
#define	READ_USE_COST	2

static int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  */
      if (TARGET_C3X)
	{
	  /* The C3x has a single group of auxiliary registers; two
	     attribute checks suffice.  */
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
4946
4947 void
4948 c4x_init_builtins ()
4949 {
4950 tree endlink = void_list_node;
4951
4952 builtin_function ("fast_ftoi",
4953 build_function_type
4954 (integer_type_node,
4955 tree_cons (NULL_TREE, double_type_node, endlink)),
4956 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
4957 builtin_function ("ansi_ftoi",
4958 build_function_type
4959 (integer_type_node,
4960 tree_cons (NULL_TREE, double_type_node, endlink)),
4961 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
4962 if (TARGET_C3X)
4963 builtin_function ("fast_imult",
4964 build_function_type
4965 (integer_type_node,
4966 tree_cons (NULL_TREE, integer_type_node,
4967 tree_cons (NULL_TREE,
4968 integer_type_node, endlink))),
4969 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
4970 else
4971 {
4972 builtin_function ("toieee",
4973 build_function_type
4974 (double_type_node,
4975 tree_cons (NULL_TREE, double_type_node, endlink)),
4976 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
4977 builtin_function ("frieee",
4978 build_function_type
4979 (double_type_node,
4980 tree_cons (NULL_TREE, double_type_node, endlink)),
4981 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
4982 builtin_function ("fast_invf",
4983 build_function_type
4984 (double_type_node,
4985 tree_cons (NULL_TREE, double_type_node, endlink)),
4986 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
4987 }
4988 }
4989
4990
/* Expand a call EXP to one of the machine-specific builtins registered
   in c4x_init_builtins.  TARGET is a suggested place for the result
   (it may be 0 or otherwise unsuitable, in which case a fresh
   register is allocated).  SUBTARGET, MODE and IGNORE are unused.
   Returns an rtx holding the result, or NULL_RTX if the builtin is
   not handled (e.g. not valid for the current target variant).

   NOTE(review): the order of the expand_expr / protect_from_queue /
   emit_insn calls in each case fixes the order of the emitted RTL;
   do not reorder them.  */

rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  /* Dig the FUNCTION_DECL and its builtin code out of the CALL_EXPR.  */
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      /* fast_ftoi (double): float->int truncation via the
	 fixqfqi_clobber pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      /* ansi_ftoi (double): the ANSI-conforming float->int
	 conversion, using the regular fix_truncqfqi2 pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult (int, int): C3x-only 24-bit multiply
	 (mulqi3_24_clobber pattern).  Falls through to NULL_RTX on
	 a C4x, where the builtin is never registered anyway.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* toieee (double): C4x-only conversion, presumably from the
	 native C4x float format to IEEE — confirm against the
	 toieee pattern in c4x.md.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* frieee (double): C4x-only conversion in the opposite
	 direction.  The operand is forced into memory below, so the
	 frieee pattern apparently requires a memory operand — TODO
	 confirm against c4x.md.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      /* Make sure a named variable argument lives in memory rather
	 than in a pseudo register.  */
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      /* If the value still ended up in a register, spill it to a
	 fresh stack slot.  */
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* fast_invf (double): C4x-only fast reciprocal via the
	 rcpfqf_clobber pattern.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  /* Unrecognized builtin, or one not valid on this target variant.  */
  return NULL_RTX;
}
5082
5083 static void
5084 c4x_asm_named_section (name, flags)
5085 const char *name;
5086 unsigned int flags ATTRIBUTE_UNUSED;
5087 {
5088 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
5089 }