1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
3 Free Software Foundation, Inc.
4
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
24
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
39 #include "output.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "libfuncs.h"
44 #include "flags.h"
45 #include "loop.h"
46 #include "recog.h"
47 #include "ggc.h"
48 #include "cpplib.h"
49 #include "toplev.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
53
54 rtx smulhi3_libfunc;
55 rtx umulhi3_libfunc;
56 rtx fix_truncqfhi2_libfunc;
57 rtx fixuns_truncqfhi2_libfunc;
58 rtx fix_trunchfhi2_libfunc;
59 rtx fixuns_trunchfhi2_libfunc;
60 rtx floathiqf2_libfunc;
61 rtx floatunshiqf2_libfunc;
62 rtx floathihf2_libfunc;
63 rtx floatunshihf2_libfunc;
64
65 static int c4x_leaf_function;
66
67 static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68
69 /* Array of the smallest class containing reg number REGNO, indexed by
70 REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
71 registers are available and set the class to NO_REGS for registers
72 that the target switches say are unavailable. */
73
74 enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
75 {
76 /* Reg Modes Saved. */
77 R0R1_REGS, /* R0 QI, QF, HF No. */
78 R0R1_REGS, /* R1 QI, QF, HF No. */
79 R2R3_REGS, /* R2 QI, QF, HF No. */
80 R2R3_REGS, /* R3 QI, QF, HF No. */
81 EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
82 EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
83 EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
84 EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
85 ADDR_REGS, /* AR0 QI No. */
86 ADDR_REGS, /* AR1 QI No. */
87 ADDR_REGS, /* AR2 QI No. */
88 ADDR_REGS, /* AR3 QI QI. */
89 ADDR_REGS, /* AR4 QI QI. */
90 ADDR_REGS, /* AR5 QI QI. */
91 ADDR_REGS, /* AR6 QI QI. */
92 ADDR_REGS, /* AR7 QI QI. */
93 DP_REG, /* DP QI No. */
94 INDEX_REGS, /* IR0 QI No. */
95 INDEX_REGS, /* IR1 QI No. */
96 BK_REG, /* BK QI QI. */
97 SP_REG, /* SP QI No. */
98 ST_REG, /* ST CC No. */
99 NO_REGS, /* DIE/IE No. */
100 NO_REGS, /* IIE/IF No. */
101 NO_REGS, /* IIF/IOF No. */
102 INT_REGS, /* RS QI No. */
103 INT_REGS, /* RE QI No. */
104 RC_REG, /* RC QI No. */
105 EXT_REGS, /* R8 QI, QF, HF QI. */
106 EXT_REGS, /* R9 QI, QF, HF No. */
107 EXT_REGS, /* R10 QI, QF, HF No. */
108 EXT_REGS, /* R11 QI, QF, HF No. */
109 };
110
111 enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
112 {
113 /* Reg Modes Saved. */
114 HFmode, /* R0 QI, QF, HF No. */
115 HFmode, /* R1 QI, QF, HF No. */
116 HFmode, /* R2 QI, QF, HF No. */
117 HFmode, /* R3 QI, QF, HF No. */
118 QFmode, /* R4 QI, QF, HF QI. */
119 QFmode, /* R5 QI, QF, HF QI. */
120 QImode, /* R6 QI, QF, HF QF. */
121 QImode, /* R7 QI, QF, HF QF. */
122 QImode, /* AR0 QI No. */
123 QImode, /* AR1 QI No. */
124 QImode, /* AR2 QI No. */
125 QImode, /* AR3 QI QI. */
126 QImode, /* AR4 QI QI. */
127 QImode, /* AR5 QI QI. */
128 QImode, /* AR6 QI QI. */
129 QImode, /* AR7 QI QI. */
130 VOIDmode, /* DP QI No. */
131 QImode, /* IR0 QI No. */
132 QImode, /* IR1 QI No. */
133 QImode, /* BK QI QI. */
134 VOIDmode, /* SP QI No. */
135 VOIDmode, /* ST CC No. */
136 VOIDmode, /* DIE/IE No. */
137 VOIDmode, /* IIE/IF No. */
138 VOIDmode, /* IIF/IOF No. */
139 QImode, /* RS QI No. */
140 QImode, /* RE QI No. */
141 VOIDmode, /* RC QI No. */
142 QFmode, /* R8 QI, QF, HF QI. */
143 HFmode, /* R9 QI, QF, HF No. */
144 HFmode, /* R10 QI, QF, HF No. */
145 HFmode, /* R11 QI, QF, HF No. */
146 };
147
148
149 /* Test and compare insns in c4x.md store the information needed to
150 generate branch and scc insns here. */
151
152 rtx c4x_compare_op0;
153 rtx c4x_compare_op1;
154
155 const char *c4x_rpts_cycles_string;
156 int c4x_rpts_cycles = 0; /* Max. cycles for RPTS. */
157 const char *c4x_cpu_version_string;
158 int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
159
160 /* Pragma definitions. */
161
162 tree code_tree = NULL_TREE;
163 tree data_tree = NULL_TREE;
164 tree pure_tree = NULL_TREE;
165 tree noreturn_tree = NULL_TREE;
166 tree interrupt_tree = NULL_TREE;
167 tree naked_tree = NULL_TREE;
168
169 /* Forward declarations */
170 static int c4x_isr_reg_used_p (unsigned int);
171 static int c4x_leaf_function_p (void);
172 static int c4x_naked_function_p (void);
173 static int c4x_immed_float_p (rtx);
174 static int c4x_a_register (rtx);
175 static int c4x_x_register (rtx);
176 static int c4x_immed_int_constant (rtx);
177 static int c4x_immed_float_constant (rtx);
178 static int c4x_K_constant (rtx);
179 static int c4x_N_constant (rtx);
180 static int c4x_O_constant (rtx);
181 static int c4x_R_indirect (rtx);
182 static int c4x_S_indirect (rtx);
183 static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
184 static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
185 static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
186 static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
187 static void c4x_file_start (void);
188 static void c4x_file_end (void);
189 static void c4x_check_attribute (const char *, tree, tree, tree *);
190 static int c4x_r11_set_p (rtx);
191 static int c4x_rptb_valid_p (rtx, rtx);
192 static void c4x_reorg (void);
193 static int c4x_label_ref_used_p (rtx, rtx);
194 static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
195 const struct attribute_spec c4x_attribute_table[];
196 static void c4x_insert_attributes (tree, tree *);
197 static void c4x_asm_named_section (const char *, unsigned int);
198 static int c4x_adjust_cost (rtx, rtx, rtx, int);
199 static void c4x_globalize_label (FILE *, const char *);
200 static bool c4x_rtx_costs (rtx, int, int, int *);
201 static int c4x_address_cost (rtx);
202 static void c4x_init_libfuncs (void);
203 static void c4x_external_libcall (rtx);
204 static rtx c4x_struct_value_rtx (tree, int);
205 \f
206 /* Initialize the GCC target structure. */
207 #undef TARGET_ASM_BYTE_OP
208 #define TARGET_ASM_BYTE_OP "\t.word\t"
209 #undef TARGET_ASM_ALIGNED_HI_OP
210 #define TARGET_ASM_ALIGNED_HI_OP NULL
211 #undef TARGET_ASM_ALIGNED_SI_OP
212 #define TARGET_ASM_ALIGNED_SI_OP NULL
213 #undef TARGET_ASM_FILE_START
214 #define TARGET_ASM_FILE_START c4x_file_start
215 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
216 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
217 #undef TARGET_ASM_FILE_END
218 #define TARGET_ASM_FILE_END c4x_file_end
219
220 #undef TARGET_ASM_EXTERNAL_LIBCALL
221 #define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
222
223 #undef TARGET_ATTRIBUTE_TABLE
224 #define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
225
226 #undef TARGET_INSERT_ATTRIBUTES
227 #define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
228
229 #undef TARGET_INIT_BUILTINS
230 #define TARGET_INIT_BUILTINS c4x_init_builtins
231
232 #undef TARGET_EXPAND_BUILTIN
233 #define TARGET_EXPAND_BUILTIN c4x_expand_builtin
234
235 #undef TARGET_SCHED_ADJUST_COST
236 #define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
237
238 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
239 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
240
241 #undef TARGET_ASM_GLOBALIZE_LABEL
242 #define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
243
244 #undef TARGET_RTX_COSTS
245 #define TARGET_RTX_COSTS c4x_rtx_costs
246 #undef TARGET_ADDRESS_COST
247 #define TARGET_ADDRESS_COST c4x_address_cost
248
249 #undef TARGET_MACHINE_DEPENDENT_REORG
250 #define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
251
252 #undef TARGET_INIT_LIBFUNCS
253 #define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
254
255 #undef TARGET_STRUCT_VALUE_RTX
256 #define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
257
258 struct gcc_target targetm = TARGET_INITIALIZER;
259 \f
260 /* Override command line options.
261 Called once after all options have been parsed.
262 Mostly we process the processor
263 type and sometimes adjust other TARGET_ options. */
264
265 void
266 c4x_override_options (void)
267 {
268 if (c4x_rpts_cycles_string)
269 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
270 else
271 c4x_rpts_cycles = 0;
272
273 if (TARGET_C30)
274 c4x_cpu_version = 30;
275 else if (TARGET_C31)
276 c4x_cpu_version = 31;
277 else if (TARGET_C32)
278 c4x_cpu_version = 32;
279 else if (TARGET_C33)
280 c4x_cpu_version = 33;
281 else if (TARGET_C40)
282 c4x_cpu_version = 40;
283 else if (TARGET_C44)
284 c4x_cpu_version = 44;
285 else
286 c4x_cpu_version = 40;
287
288 /* -mcpu=xx overrides -m40 etc. */
289 if (c4x_cpu_version_string)
290 {
291 const char *p = c4x_cpu_version_string;
292
293 /* Also allow -mcpu=c30 etc. */
294 if (*p == 'c' || *p == 'C')
295 p++;
296 c4x_cpu_version = atoi (p);
297 }
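/* Illustrative note: with the parsing above, -mcpu=c40, -mcpu=C40 and
   -mcpu=40 all leave c4x_cpu_version equal to 40. */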
298
299 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
300 C40_FLAG | C44_FLAG);
301
302 switch (c4x_cpu_version)
303 {
304 case 30: target_flags |= C30_FLAG; break;
305 case 31: target_flags |= C31_FLAG; break;
306 case 32: target_flags |= C32_FLAG; break;
307 case 33: target_flags |= C33_FLAG; break;
308 case 40: target_flags |= C40_FLAG; break;
309 case 44: target_flags |= C44_FLAG; break;
310 default:
311 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
312 c4x_cpu_version = 40;
313 target_flags |= C40_FLAG;
314 }
315
316 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
317 target_flags |= C3X_FLAG;
318 else
319 target_flags &= ~C3X_FLAG;
320
321 /* Convert foo / 8.0 into foo * 0.125, etc. */
322 set_fast_math_flags (1);
323
324 /* We should phase out the following at some stage.
325 This provides compatibility with the old -mno-aliases option. */
326 if (! TARGET_ALIASES && ! flag_argument_noalias)
327 flag_argument_noalias = 1;
328 }
329
330
331 /* This is called before c4x_override_options. */
332
333 void
334 c4x_optimization_options (int level ATTRIBUTE_UNUSED,
335 int size ATTRIBUTE_UNUSED)
336 {
337 /* Scheduling before register allocation can screw up global
338 register allocation, especially for functions that use MPY||ADD
339 instructions. The benefit we get by scheduling before
340 register allocation is probably marginal anyhow. */
341 flag_schedule_insns = 0;
342 }
343
344
345 /* Write an ASCII string. */
346
347 #define C4X_ASCII_LIMIT 40
348
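/* As a rough illustration of the output produced below, the string
   "Hi\n" would be emitted as something like
        .byte   "Hi",10
   i.e. printable characters are grouped into quoted strings of at most
   C4X_ASCII_LIMIT characters and anything else is emitted as a decimal
   byte value, all separated by commas. */
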
349 void
350 c4x_output_ascii (FILE *stream, const char *ptr, int len)
351 {
352 char sbuf[C4X_ASCII_LIMIT + 1];
353 int s, l, special, first = 1, onlys;
354
355 if (len)
356 fprintf (stream, "\t.byte\t");
357
358 for (s = l = 0; len > 0; --len, ++ptr)
359 {
360 onlys = 0;
361
362 /* Escape " and \ with a \". */
363 special = *ptr == '\"' || *ptr == '\\';
364
365 /* If printable, add to the buffer. */
366 if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
367 {
368 if (special)
369 sbuf[s++] = '\\';
370 sbuf[s++] = *ptr;
371 if (s < C4X_ASCII_LIMIT - 1)
372 continue;
373 onlys = 1;
374 }
375 if (s)
376 {
377 if (first)
378 first = 0;
379 else
380 {
381 fputc (',', stream);
382 l++;
383 }
384
385 sbuf[s] = 0;
386 fprintf (stream, "\"%s\"", sbuf);
387 l += s + 2;
388 if (TARGET_TI && l >= 80 && len > 1)
389 {
390 fprintf (stream, "\n\t.byte\t");
391 first = 1;
392 l = 0;
393 }
394
395 s = 0;
396 }
397 if (onlys)
398 continue;
399
400 if (first)
401 first = 0;
402 else
403 {
404 fputc (',', stream);
405 l++;
406 }
407
408 fprintf (stream, "%d", *ptr);
409 l += 3;
410 if (TARGET_TI && l >= 80 && len > 1)
411 {
412 fprintf (stream, "\n\t.byte\t");
413 first = 1;
414 l = 0;
415 }
416 }
417 if (s)
418 {
419 if (! first)
420 fputc (',', stream);
421
422 sbuf[s] = 0;
423 fprintf (stream, "\"%s\"", sbuf);
424 s = 0;
425 }
426 fputc ('\n', stream);
427 }
428
429
430 int
431 c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
432 {
433 switch (mode)
434 {
435 #if Pmode != QImode
436 case Pmode: /* Pointer (24/32 bits). */
437 #endif
438 case QImode: /* Integer (32 bits). */
439 return IS_INT_REGNO (regno);
440
441 case QFmode: /* Float, Double (32 bits). */
442 case HFmode: /* Long Double (40 bits). */
443 return IS_EXT_REGNO (regno);
444
445 case CCmode: /* Condition Codes. */
446 case CC_NOOVmode: /* Condition Codes. */
447 return IS_ST_REGNO (regno);
448
449 case HImode: /* Long Long (64 bits). */
450 /* We need two registers to store long longs. Note that
451 it is much easier to constrain the first register
452 to start on an even boundary. */
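      /* For example (illustrative): an HImode value may live in the
         pair R0:R1 (regno 0), but a pair starting at R1 (regno 1) is
         rejected by the even-boundary test below. */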
453 return IS_INT_REGNO (regno)
454 && IS_INT_REGNO (regno + 1)
455 && (regno & 1) == 0;
456
457 default:
458 return 0; /* We don't support these modes. */
459 }
460
461 return 0;
462 }
463
464 /* Return nonzero if REGNO1 can be renamed to REGNO2. */
465 int
466 c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
467 {
468 /* We cannot copy call-saved registers from mode QI into QF or from
469 mode QF into QI. */
470 if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
471 return 0;
472 if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
473 return 0;
474 /* We cannot copy from an extended (40 bit) register to a standard
475 (32 bit) register because we only set the condition codes for
476 extended registers. */
477 if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
478 return 0;
479 if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
480 return 0;
481 return 1;
482 }
483
484 /* The TI C3x C compiler register argument runtime model uses 6 registers,
485 AR2, R2, R3, RC, RS, RE.
486
487 The first two floating point arguments (float, double, long double)
488 that are found scanning from left to right are assigned to R2 and R3.
489
490 The remaining integer (char, short, int, long) or pointer arguments
491 are assigned to the remaining registers in the order AR2, R2, R3,
492 RC, RS, RE when scanning left to right, except for the last named
493 argument prior to an ellipsis denoting a variable number of
494 arguments. We don't have to worry about the latter condition since
495 function.c treats the last named argument as anonymous (unnamed).
496
497 All arguments that cannot be passed in registers are pushed onto
498 the stack in reverse order (right to left). GCC handles that for us.
499
500 c4x_init_cumulative_args() is called at the start, so we can parse
501 the args to see how many floating point arguments and how many
502 integer (or pointer) arguments there are. c4x_function_arg() is
503 then called (sometimes repeatedly) for each argument (parsed left
504 to right) to obtain the register to pass the argument in, or zero
505 if the argument is to be passed on the stack. Once the compiler is
506 happy, c4x_function_arg_advance() is called.
507
508 Don't use R0 to pass arguments in, since we use 0 to indicate a stack
509 argument. */
510
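/* Illustrative reading of the model described above: for a prototyped
   call such as
        void f (float a, int b, float c, int d);
   A and C go in R2 and R3, while B and D use the row of the integer
   table below selected by the number of register float arguments
   (here row 2), i.e. AR2 and RC.  This note is only a sketch of how
   the tables that follow are used. */
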
511 static const int c4x_int_reglist[3][6] =
512 {
513 {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
514 {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
515 {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
516 };
517
518 static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
519
520
521 /* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
522 function whose data type is FNTYPE.
523 For a library call, FNTYPE is 0. */
524
525 void
526 c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
527 {
528 tree param, next_param;
529
530 cum->floats = cum->ints = 0;
531 cum->init = 0;
532 cum->var = 0;
533 cum->args = 0;
534
535 if (TARGET_DEBUG)
536 {
537 fprintf (stderr, "\nc4x_init_cumulative_args (");
538 if (fntype)
539 {
540 tree ret_type = TREE_TYPE (fntype);
541
542 fprintf (stderr, "fntype code = %s, ret code = %s",
543 tree_code_name[(int) TREE_CODE (fntype)],
544 tree_code_name[(int) TREE_CODE (ret_type)]);
545 }
546 else
547 fprintf (stderr, "no fntype");
548
549 if (libname)
550 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
551 }
552
553 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
554
555 for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
556 param; param = next_param)
557 {
558 tree type;
559
560 next_param = TREE_CHAIN (param);
561
562 type = TREE_VALUE (param);
563 if (type && type != void_type_node)
564 {
565 enum machine_mode mode;
566
567 /* If the last arg doesn't have void type then we have
568 variable arguments. */
569 if (! next_param)
570 cum->var = 1;
571
572 if ((mode = TYPE_MODE (type)))
573 {
574 if (! MUST_PASS_IN_STACK (mode, type))
575 {
576 /* Look for float, double, or long double argument. */
577 if (mode == QFmode || mode == HFmode)
578 cum->floats++;
579 /* Look for integer, enumeral, boolean, char, or pointer
580 argument. */
581 else if (mode == QImode || mode == Pmode)
582 cum->ints++;
583 }
584 }
585 cum->args++;
586 }
587 }
588
589 if (TARGET_DEBUG)
590 fprintf (stderr, "%s%s, args = %d)\n",
591 cum->prototype ? ", prototype" : "",
592 cum->var ? ", variable args" : "",
593 cum->args);
594 }
595
596
597 /* Update the data in CUM to advance over an argument
598 of mode MODE and data type TYPE.
599 (TYPE is null for libcalls where that information may not be available.) */
600
601 void
602 c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
603 tree type, int named)
604 {
605 if (TARGET_DEBUG)
606 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
607 GET_MODE_NAME (mode), named);
608 if (! TARGET_MEMPARM
609 && named
610 && type
611 && ! MUST_PASS_IN_STACK (mode, type))
612 {
613 /* Look for float, double, or long double argument. */
614 if (mode == QFmode || mode == HFmode)
615 cum->floats++;
616 /* Look for integer, enumeral, boolean, char, or pointer argument. */
617 else if (mode == QImode || mode == Pmode)
618 cum->ints++;
619 }
620 else if (! TARGET_MEMPARM && ! type)
621 {
622 /* Handle libcall arguments. */
623 if (mode == QFmode || mode == HFmode)
624 cum->floats++;
625 else if (mode == QImode || mode == Pmode)
626 cum->ints++;
627 }
628 return;
629 }
630
631
632 /* Define where to put the arguments to a function. Value is zero to
633 push the argument on the stack, or a hard register in which to
634 store the argument.
635
636 MODE is the argument's machine mode.
637 TYPE is the data type of the argument (as a tree).
638 This is null for libcalls where that information may
639 not be available.
640 CUM is a variable of type CUMULATIVE_ARGS which gives info about
641 the preceding args and about the function being called.
642 NAMED is nonzero if this argument is a named parameter
643 (otherwise it is an extra parameter matching an ellipsis). */
644
645 struct rtx_def *
646 c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
647 tree type, int named)
648 {
649 int reg = 0; /* Default to passing argument on stack. */
650
651 if (! cum->init)
652 {
653 /* We can handle at most 2 floats in R2, R3. */
654 cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
655
656 /* We can handle at most 6 integers minus number of floats passed
657 in registers. */
658 cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
659 6 - cum->maxfloats : cum->ints;
660
661 /* If there is no prototype, assume all the arguments are integers. */
662 if (! cum->prototype)
663 cum->maxints = 6;
664
665 cum->ints = cum->floats = 0;
666 cum->init = 1;
667 }
668
669 /* This marks the last argument. We don't need to pass this through
670 to the call insn. */
671 if (type == void_type_node)
672 return 0;
673
674 if (! TARGET_MEMPARM
675 && named
676 && type
677 && ! MUST_PASS_IN_STACK (mode, type))
678 {
679 /* Look for float, double, or long double argument. */
680 if (mode == QFmode || mode == HFmode)
681 {
682 if (cum->floats < cum->maxfloats)
683 reg = c4x_fp_reglist[cum->floats];
684 }
685 /* Look for integer, enumeral, boolean, char, or pointer argument. */
686 else if (mode == QImode || mode == Pmode)
687 {
688 if (cum->ints < cum->maxints)
689 reg = c4x_int_reglist[cum->maxfloats][cum->ints];
690 }
691 }
692 else if (! TARGET_MEMPARM && ! type)
693 {
694 /* We could use a different argument calling model for libcalls,
695 since we're only calling functions in libgcc. Thus we could
696 pass arguments for long longs in registers rather than on the
697 stack. In the meantime, use the odd TI format. We make the
698 assumption that we won't have more than two floating point
699 args, six integer args, and that all the arguments are of the
700 same mode. */
701 if (mode == QFmode || mode == HFmode)
702 reg = c4x_fp_reglist[cum->floats];
703 else if (mode == QImode || mode == Pmode)
704 reg = c4x_int_reglist[0][cum->ints];
705 }
706
707 if (TARGET_DEBUG)
708 {
709 fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
710 GET_MODE_NAME (mode), named);
711 if (reg)
712 fprintf (stderr, ", reg=%s", reg_names[reg]);
713 else
714 fprintf (stderr, ", stack");
715 fprintf (stderr, ")\n");
716 }
717 if (reg)
718 return gen_rtx_REG (mode, reg);
719 else
720 return NULL_RTX;
721 }
722
723 /* C[34]x arguments grow in weird ways (downwards) that the standard
724 varargs stuff can't handle. */
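/* Conceptually (sketch), each va_arg access does
        valist -= int_size_in_bytes (type);  use *valist;
   which is what the PREDECREMENT_EXPR built below expresses. */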
725 rtx
726 c4x_va_arg (tree valist, tree type)
727 {
728 tree t;
729
730 t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
731 build_int_2 (int_size_in_bytes (type), 0));
732 TREE_SIDE_EFFECTS (t) = 1;
733
734 return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
735 }
736
737
738 static int
739 c4x_isr_reg_used_p (unsigned int regno)
740 {
741 /* Don't save/restore FP or ST, we handle them separately. */
742 if (regno == FRAME_POINTER_REGNUM
743 || IS_ST_REGNO (regno))
744 return 0;
745
746 /* We could be a little smarter about saving/restoring DP.
747 We'll only save it for the big memory model or if
748 we're paranoid. ;-) */
749 if (IS_DP_REGNO (regno))
750 return ! TARGET_SMALL || TARGET_PARANOID;
751
752 /* In a leaf function, only save/restore the regs that are actually used. */
753 if (c4x_leaf_function)
754 return regs_ever_live[regno] && fixed_regs[regno] == 0;
755
756 /* Only save/restore regs that are used by the ISR and regs
757 that are likely to be used by functions the ISR calls
758 if they are not fixed. */
759 return IS_EXT_REGNO (regno)
760 || ((regs_ever_live[regno] || call_used_regs[regno])
761 && fixed_regs[regno] == 0);
762 }
763
764
765 static int
766 c4x_leaf_function_p (void)
767 {
768 /* A leaf function makes no calls, so we only need
769 to save/restore the registers we actually use.
770 For the global variable leaf_function to be set, we need
771 to define LEAF_REGISTERS and all that it entails.
772 Let's check ourselves.... */
773
774 if (lookup_attribute ("leaf_pretend",
775 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
776 return 1;
777
778 /* Use the leaf_pretend attribute at your own risk. This is a hack
779 to speed up ISRs that call a function infrequently where the
780 overhead of saving and restoring the additional registers is not
781 warranted. You must save and restore the additional registers
782 required by the called function. Caveat emptor. Here's enough
783 rope... */
784
785 if (leaf_function_p ())
786 return 1;
787
788 return 0;
789 }
790
791
792 static int
793 c4x_naked_function_p (void)
794 {
795 tree type;
796
797 type = TREE_TYPE (current_function_decl);
798 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
799 }
800
801
802 int
803 c4x_interrupt_function_p (void)
804 {
805 const char *cfun_name;
806 if (lookup_attribute ("interrupt",
807 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
808 return 1;
809
810 /* Look for TI style c_intnn. */
811 cfun_name = current_function_name ();
812 return cfun_name[0] == 'c'
813 && cfun_name[1] == '_'
814 && cfun_name[2] == 'i'
815 && cfun_name[3] == 'n'
816 && cfun_name[4] == 't'
817 && ISDIGIT (cfun_name[5])
818 && ISDIGIT (cfun_name[6]);
819 }
820
821 void
822 c4x_expand_prologue (void)
823 {
824 unsigned int regno;
825 int size = get_frame_size ();
826 rtx insn;
827
828 /* In functions where ar3 is not used but a frame pointer is still
829 specified, the frame pointer is not adjusted (at -O2 or higher) and
830 this flag is used so we won't needlessly push the frame pointer. */
831 int dont_push_ar3;
832
833 /* For a __naked__ function, don't build a prologue. */
834 if (c4x_naked_function_p ())
835 {
836 return;
837 }
838
839 /* For an __interrupt__ function, build a specific prologue. */
840 if (c4x_interrupt_function_p ())
841 {
842 c4x_leaf_function = c4x_leaf_function_p ();
843
844 insn = emit_insn (gen_push_st ());
845 RTX_FRAME_RELATED_P (insn) = 1;
846 if (size)
847 {
848 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
849 RTX_FRAME_RELATED_P (insn) = 1;
850 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
851 gen_rtx_REG (QImode, SP_REGNO)));
852 RTX_FRAME_RELATED_P (insn) = 1;
853 /* We require that an ISR uses fewer than 32768 words of
854 local variables, otherwise we have to go to lots of
855 effort to save a register, load it with the desired size,
856 adjust the stack pointer, and then restore the modified
857 register. Frankly, I think it is a poor ISR that
858 requires more than 32767 words of local temporary
859 storage! */
860 if (size > 32767)
861 error ("ISR %s requires %d words of local vars, max is 32767",
862 current_function_name (), size);
863
864 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
865 gen_rtx_REG (QImode, SP_REGNO),
866 GEN_INT (size)));
867 RTX_FRAME_RELATED_P (insn) = 1;
868 }
869 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
870 {
871 if (c4x_isr_reg_used_p (regno))
872 {
873 if (regno == DP_REGNO)
874 {
875 insn = emit_insn (gen_push_dp ());
876 RTX_FRAME_RELATED_P (insn) = 1;
877 }
878 else
879 {
880 insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
881 RTX_FRAME_RELATED_P (insn) = 1;
882 if (IS_EXT_REGNO (regno))
883 {
884 insn = emit_insn (gen_pushqf
885 (gen_rtx_REG (QFmode, regno)));
886 RTX_FRAME_RELATED_P (insn) = 1;
887 }
888 }
889 }
890 }
891 /* We need to clear the repeat mode flag if the ISR is
892 going to use a RPTB instruction or uses the RC, RS, or RE
893 registers. */
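      /* Illustrative note: the mask 0x100 used below corresponds to the
         repeat-mode (RM) bit of the status register ST. */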
894 if (regs_ever_live[RC_REGNO]
895 || regs_ever_live[RS_REGNO]
896 || regs_ever_live[RE_REGNO])
897 {
898 insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
899 RTX_FRAME_RELATED_P (insn) = 1;
900 }
901
902 /* Reload DP reg if we are paranoid about some turkey
903 violating small memory model rules. */
904 if (TARGET_SMALL && TARGET_PARANOID)
905 {
906 insn = emit_insn (gen_set_ldp_prologue
907 (gen_rtx_REG (QImode, DP_REGNO),
908 gen_rtx_SYMBOL_REF (QImode, "data_sec")));
909 RTX_FRAME_RELATED_P (insn) = 1;
910 }
911 }
912 else
913 {
914 if (frame_pointer_needed)
915 {
916 if ((size != 0)
917 || (current_function_args_size != 0)
918 || (optimize < 2))
919 {
920 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
921 RTX_FRAME_RELATED_P (insn) = 1;
922 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
923 gen_rtx_REG (QImode, SP_REGNO)));
924 RTX_FRAME_RELATED_P (insn) = 1;
925 dont_push_ar3 = 1;
926 }
927 else
928 {
929 /* Since ar3 is not used, we don't need to push it. */
930 dont_push_ar3 = 1;
931 }
932 }
933 else
934 {
935 /* If we use ar3, we need to push it. */
936 dont_push_ar3 = 0;
937 if ((size != 0) || (current_function_args_size != 0))
938 {
939 /* If we are omitting the frame pointer, we still have
940 to make space for it so the offsets are correct
941 unless we don't use anything on the stack at all. */
942 size += 1;
943 }
944 }
945
946 if (size > 32767)
947 {
948 /* Local vars are too big; it will take multiple operations
949 to increment SP. */
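      /* Net effect of the sequence below (sketch): build SIZE in R1 in
         two 16-bit halves, since it does not fit a 16-bit immediate,
         and then add R1 to SP. */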
950 if (TARGET_C3X)
951 {
952 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
953 GEN_INT(size >> 16)));
954 RTX_FRAME_RELATED_P (insn) = 1;
955 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
956 gen_rtx_REG (QImode, R1_REGNO),
957 GEN_INT(-16)));
958 RTX_FRAME_RELATED_P (insn) = 1;
959 }
960 else
961 {
962 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
963 GEN_INT(size & ~0xffff)));
964 RTX_FRAME_RELATED_P (insn) = 1;
965 }
966 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
967 gen_rtx_REG (QImode, R1_REGNO),
968 GEN_INT(size & 0xffff)));
969 RTX_FRAME_RELATED_P (insn) = 1;
970 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
971 gen_rtx_REG (QImode, SP_REGNO),
972 gen_rtx_REG (QImode, R1_REGNO)));
973 RTX_FRAME_RELATED_P (insn) = 1;
974 }
975 else if (size != 0)
976 {
977 /* Local vars take up less than 32768 words, so we can directly
978 add the number. */
979 insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
980 gen_rtx_REG (QImode, SP_REGNO),
981 GEN_INT (size)));
982 RTX_FRAME_RELATED_P (insn) = 1;
983 }
984
985 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
986 {
987 if (regs_ever_live[regno] && ! call_used_regs[regno])
988 {
989 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
990 {
991 if (TARGET_PRESERVE_FLOAT)
992 {
993 insn = emit_insn (gen_pushqi
994 (gen_rtx_REG (QImode, regno)));
995 RTX_FRAME_RELATED_P (insn) = 1;
996 }
997 insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
998 RTX_FRAME_RELATED_P (insn) = 1;
999 }
1000 else if ((! dont_push_ar3) || (regno != AR3_REGNO))
1001 {
1002 insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
1003 RTX_FRAME_RELATED_P (insn) = 1;
1004 }
1005 }
1006 }
1007 }
1008 }
1009
1010
1011 void
1012 c4x_expand_epilogue(void)
1013 {
1014 int regno;
1015 int jump = 0;
1016 int dont_pop_ar3;
1017 rtx insn;
1018 int size = get_frame_size ();
1019
1020 /* For a __naked__ function, build no epilogue. */
1021 if (c4x_naked_function_p ())
1022 {
1023 insn = emit_jump_insn (gen_return_from_epilogue ());
1024 RTX_FRAME_RELATED_P (insn) = 1;
1025 return;
1026 }
1027
1028 /* For an __interrupt__ function, build a specific epilogue. */
1029 if (c4x_interrupt_function_p ())
1030 {
1031 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
1032 {
1033 if (! c4x_isr_reg_used_p (regno))
1034 continue;
1035 if (regno == DP_REGNO)
1036 {
1037 insn = emit_insn (gen_pop_dp ());
1038 RTX_FRAME_RELATED_P (insn) = 1;
1039 }
1040 else
1041 {
1042 /* We have to use an unspec because the compiler would otherwise
1043 delete restores of registers that are not call-saved. */
1044 if (IS_EXT_REGNO (regno))
1045 {
1046 insn = emit_insn (gen_popqf_unspec
1047 (gen_rtx_REG (QFmode, regno)));
1048 RTX_FRAME_RELATED_P (insn) = 1;
1049 }
1050 insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
1051 RTX_FRAME_RELATED_P (insn) = 1;
1052 }
1053 }
1054 if (size)
1055 {
1056 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1057 gen_rtx_REG (QImode, SP_REGNO),
1058 GEN_INT(size)));
1059 RTX_FRAME_RELATED_P (insn) = 1;
1060 insn = emit_insn (gen_popqi
1061 (gen_rtx_REG (QImode, AR3_REGNO)));
1062 RTX_FRAME_RELATED_P (insn) = 1;
1063 }
1064 insn = emit_insn (gen_pop_st ());
1065 RTX_FRAME_RELATED_P (insn) = 1;
1066 insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
1067 RTX_FRAME_RELATED_P (insn) = 1;
1068 }
1069 else
1070 {
1071 if (frame_pointer_needed)
1072 {
1073 if ((size != 0)
1074 || (current_function_args_size != 0)
1075 || (optimize < 2))
1076 {
1077 insn = emit_insn
1078 (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
1079 gen_rtx_MEM (QImode,
1080 gen_rtx_PLUS
1081 (QImode, gen_rtx_REG (QImode,
1082 AR3_REGNO),
1083 constm1_rtx))));
1084 RTX_FRAME_RELATED_P (insn) = 1;
1085
1086 /* The return address and the old FP are still on the stack,
1087 so account for those two words in the stack adjustment. */
1088 size += 2;
1089 jump = 1;
1090 dont_pop_ar3 = 1;
1091 }
1092 else
1093 {
1094 /* Since ar3 is not used for anything, we don't need to
1095 pop it. */
1096 dont_pop_ar3 = 1;
1097 }
1098 }
1099 else
1100 {
1101 dont_pop_ar3 = 0; /* If we use ar3, we need to pop it. */
1102 if (size || current_function_args_size)
1103 {
1104 /* If we are omitting the frame pointer, we still have
1105 to make space for it so the offsets are correct
1106 unless we don't use anything on the stack at all. */
1107 size += 1;
1108 }
1109 }
1110
1111 /* Now restore the saved registers, putting in the delayed branch
1112 where required. */
1113 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1114 {
1115 if (regs_ever_live[regno] && ! call_used_regs[regno])
1116 {
1117 if (regno == AR3_REGNO && dont_pop_ar3)
1118 continue;
1119
1120 if (IS_FLOAT_CALL_SAVED_REGNO (regno))
1121 {
1122 insn = emit_insn (gen_popqf_unspec
1123 (gen_rtx_REG (QFmode, regno)));
1124 RTX_FRAME_RELATED_P (insn) = 1;
1125 if (TARGET_PRESERVE_FLOAT)
1126 {
1127 insn = emit_insn (gen_popqi_unspec
1128 (gen_rtx_REG (QImode, regno)));
1129 RTX_FRAME_RELATED_P (insn) = 1;
1130 }
1131 }
1132 else
1133 {
1134 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1136 }
1137 }
1138 }
1139
1140 if (frame_pointer_needed)
1141 {
1142 if ((size != 0)
1143 || (current_function_args_size != 0)
1144 || (optimize < 2))
1145 {
1146 /* Restore the old FP. */
1147 insn = emit_insn
1148 (gen_movqi
1149 (gen_rtx_REG (QImode, AR3_REGNO),
1150 gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));
1151
1152 RTX_FRAME_RELATED_P (insn) = 1;
1153 }
1154 }
1155
1156 if (size > 32767)
1157 {
1158 /* Local vars are too big; it will take multiple operations
1159 to decrement SP. */
1160 if (TARGET_C3X)
1161 {
1162 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1163 GEN_INT(size >> 16)));
1164 RTX_FRAME_RELATED_P (insn) = 1;
1165 insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
1166 gen_rtx_REG (QImode, R3_REGNO),
1167 GEN_INT(-16)));
1168 RTX_FRAME_RELATED_P (insn) = 1;
1169 }
1170 else
1171 {
1172 insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
1173 GEN_INT(size & ~0xffff)));
1174 RTX_FRAME_RELATED_P (insn) = 1;
1175 }
1176 insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
1177 gen_rtx_REG (QImode, R3_REGNO),
1178 GEN_INT(size & 0xffff)));
1179 RTX_FRAME_RELATED_P (insn) = 1;
1180 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1181 gen_rtx_REG (QImode, SP_REGNO),
1182 gen_rtx_REG (QImode, R3_REGNO)));
1183 RTX_FRAME_RELATED_P (insn) = 1;
1184 }
1185 else if (size != 0)
1186 {
1187 /* Local vars take up less than 32768 words, so we can directly
1188 subtract the number. */
1189 insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
1190 gen_rtx_REG (QImode, SP_REGNO),
1191 GEN_INT(size)));
1192 RTX_FRAME_RELATED_P (insn) = 1;
1193 }
1194
1195 if (jump)
1196 {
1197 insn = emit_jump_insn (gen_return_indirect_internal
1198 (gen_rtx_REG (QImode, R2_REGNO)));
1199 RTX_FRAME_RELATED_P (insn) = 1;
1200 }
1201 else
1202 {
1203 insn = emit_jump_insn (gen_return_from_epilogue ());
1204 RTX_FRAME_RELATED_P (insn) = 1;
1205 }
1206 }
1207 }
1208
1209
1210 int
1211 c4x_null_epilogue_p (void)
1212 {
1213 int regno;
1214
1215 if (reload_completed
1216 && ! c4x_naked_function_p ()
1217 && ! c4x_interrupt_function_p ()
1218 && ! current_function_calls_alloca
1219 && ! current_function_args_size
1220 && ! (optimize < 2)
1221 && ! get_frame_size ())
1222 {
1223 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1224 if (regs_ever_live[regno] && ! call_used_regs[regno]
1225 && (regno != AR3_REGNO))
1226 return 1;
1227 return 0;
1228 }
1229 return 1;
1230 }
1231
1232
1233 int
1234 c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
1235 {
1236 rtx op0 = operands[0];
1237 rtx op1 = operands[1];
1238
1239 if (! reload_in_progress
1240 && ! REG_P (op0)
1241 && ! REG_P (op1)
1242 && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
1243 op1 = force_reg (mode, op1);
1244
1245 if (GET_CODE (op1) == LO_SUM
1246 && GET_MODE (op1) == Pmode
1247 && dp_reg_operand (XEXP (op1, 0), mode))
1248 {
1249 /* expand_increment will sometimes create a LO_SUM immediate
1250 address. Undo this silliness. */
1251 op1 = XEXP (op1, 1);
1252 }
1253
1254 if (symbolic_address_operand (op1, mode))
1255 {
1256 if (TARGET_LOAD_ADDRESS)
1257 {
1258 /* Alias analysis seems to do a better job if we force
1259 constant addresses to memory after reload. */
1260 emit_insn (gen_load_immed_address (op0, op1));
1261 return 1;
1262 }
1263 else
1264 {
1265 /* Stick symbol or label address into the constant pool. */
1266 op1 = force_const_mem (Pmode, op1);
1267 }
1268 }
1269 else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
1270 {
1271 /* We could be a lot smarter about loading some of these
1272 constants... */
1273 op1 = force_const_mem (mode, op1);
1274 }
1275
1276 /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
1277 and emit associated (HIGH (SYMREF)) if large memory model.
1278 c4x_legitimize_address could be used to do this,
1279 perhaps by calling validize_address. */
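/* E.g. (sketch): (mem (symbol_ref "foo")) becomes
   (mem (lo_sum (reg:Pmode DP) (symbol_ref "foo"))), preceded by a set
   of DP when not using the small memory model. */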
1280 if (TARGET_EXPOSE_LDP
1281 && ! (reload_in_progress || reload_completed)
1282 && GET_CODE (op1) == MEM
1283 && symbolic_address_operand (XEXP (op1, 0), Pmode))
1284 {
1285 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1286 if (! TARGET_SMALL)
1287 emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
1288 op1 = change_address (op1, mode,
1289 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
1290 }
1291
1292 if (TARGET_EXPOSE_LDP
1293 && ! (reload_in_progress || reload_completed)
1294 && GET_CODE (op0) == MEM
1295 && symbolic_address_operand (XEXP (op0, 0), Pmode))
1296 {
1297 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1298 if (! TARGET_SMALL)
1299 emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
1300 op0 = change_address (op0, mode,
1301 gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
1302 }
1303
1304 if (GET_CODE (op0) == SUBREG
1305 && mixed_subreg_operand (op0, mode))
1306 {
1307 /* We should only generate these mixed mode patterns
1308 during RTL generation. If we need to do it later on
1309 then we'll have to emit patterns that won't clobber CC. */
1310 if (reload_in_progress || reload_completed)
1311 abort ();
1312 if (GET_MODE (SUBREG_REG (op0)) == QImode)
1313 op0 = SUBREG_REG (op0);
1314 else if (GET_MODE (SUBREG_REG (op0)) == HImode)
1315 {
1316 op0 = copy_rtx (op0);
1317 PUT_MODE (op0, QImode);
1318 }
1319 else
1320 abort ();
1321
1322 if (mode == QFmode)
1323 emit_insn (gen_storeqf_int_clobber (op0, op1));
1324 else
1325 abort ();
1326 return 1;
1327 }
1328
1329 if (GET_CODE (op1) == SUBREG
1330 && mixed_subreg_operand (op1, mode))
1331 {
1332 /* We should only generate these mixed mode patterns
1333 during RTL generation. If we need to do it later on
1334 then we'll have to emit patterns that won't clobber CC. */
1335 if (reload_in_progress || reload_completed)
1336 abort ();
1337 if (GET_MODE (SUBREG_REG (op1)) == QImode)
1338 op1 = SUBREG_REG (op1);
1339 else if (GET_MODE (SUBREG_REG (op1)) == HImode)
1340 {
1341 op1 = copy_rtx (op1);
1342 PUT_MODE (op1, QImode);
1343 }
1344 else
1345 abort ();
1346
1347 if (mode == QFmode)
1348 emit_insn (gen_loadqf_int_clobber (op0, op1));
1349 else
1350 abort ();
1351 return 1;
1352 }
1353
1354 if (mode == QImode
1355 && reg_operand (op0, mode)
1356 && const_int_operand (op1, mode)
1357 && ! IS_INT16_CONST (INTVAL (op1))
1358 && ! IS_HIGH_CONST (INTVAL (op1)))
1359 {
1360 emit_insn (gen_loadqi_big_constant (op0, op1));
1361 return 1;
1362 }
1363
1364 if (mode == HImode
1365 && reg_operand (op0, mode)
1366 && const_int_operand (op1, mode))
1367 {
1368 emit_insn (gen_loadhi_big_constant (op0, op1));
1369 return 1;
1370 }
1371
1372 /* Adjust operands in case we have modified them. */
1373 operands[0] = op0;
1374 operands[1] = op1;
1375
1376 /* Emit normal pattern. */
1377 return 0;
1378 }
1379
1380
1381 void
1382 c4x_emit_libcall (rtx libcall, enum rtx_code code,
1383 enum machine_mode dmode, enum machine_mode smode,
1384 int noperands, rtx *operands)
1385 {
1386 rtx ret;
1387 rtx insns;
1388 rtx equiv;
1389
1390 start_sequence ();
1391 switch (noperands)
1392 {
1393 case 2:
1394 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1395 operands[1], smode);
1396 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
1397 break;
1398
1399 case 3:
1400 ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1401 operands[1], smode, operands[2], smode);
1402 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
1403 break;
1404
1405 default:
1406 abort ();
1407 }
1408
1409 insns = get_insns ();
1410 end_sequence ();
1411 emit_libcall_block (insns, operands[0], ret, equiv);
1412 }
1413
1414
1415 void
1416 c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
1417 enum machine_mode mode, rtx *operands)
1418 {
1419 c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1420 }
1421
1422
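/* Emit a libcall returning the high part of a widening multiply.
   Conceptually (sketch): operands[0] = ((HImode) operands[1]
   * (HImode) operands[2]) >> 32, with the widening signed or unsigned
   according to CODE; the EQUIV rtx constructed below records this. */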
1423 void
1424 c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
1425 enum machine_mode mode, rtx *operands)
1426 {
1427 rtx ret;
1428 rtx insns;
1429 rtx equiv;
1430
1431 start_sequence ();
1432 ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1433 operands[1], mode, operands[2], mode);
1434 equiv = gen_rtx_TRUNCATE (mode,
1435 gen_rtx_LSHIFTRT (HImode,
1436 gen_rtx_MULT (HImode,
1437 gen_rtx_fmt_e (code, HImode, operands[1]),
1438 gen_rtx_fmt_e (code, HImode, operands[2])),
1439 GEN_INT (32)));
1440 insns = get_insns ();
1441 end_sequence ();
1442 emit_libcall_block (insns, operands[0], ret, equiv);
1443 }
1444
1445
1446 int
1447 c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
1448 {
1449 rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1450 rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1451 rtx disp = NULL_RTX; /* Displacement. */
1452 enum rtx_code code;
1453
1454 code = GET_CODE (addr);
1455 switch (code)
1456 {
1457 /* Register indirect with auto increment/decrement. We don't
1458 allow SP here---push_operand should recognize an operand
1459 being pushed on the stack. */
1460
1461 case PRE_DEC:
1462 case PRE_INC:
1463 case POST_DEC:
1464 if (mode != QImode && mode != QFmode)
1465 return 0;
1466
1467 case POST_INC:
1468 base = XEXP (addr, 0);
1469 if (! REG_P (base))
1470 return 0;
1471 break;
1472
1473 case PRE_MODIFY:
1474 case POST_MODIFY:
1475 {
1476 rtx op0 = XEXP (addr, 0);
1477 rtx op1 = XEXP (addr, 1);
1478
1479 if (mode != QImode && mode != QFmode)
1480 return 0;
1481
1482 if (! REG_P (op0)
1483 || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1484 return 0;
1485 base = XEXP (op1, 0);
1486 if (! REG_P (base))
1487 return 0;
1488 if (REGNO (base) != REGNO (op0))
1489 return 0;
1490 if (REG_P (XEXP (op1, 1)))
1491 indx = XEXP (op1, 1);
1492 else
1493 disp = XEXP (op1, 1);
1494 }
1495 break;
1496
1497 /* Register indirect. */
1498 case REG:
1499 base = addr;
1500 break;
1501
1502 /* Register indirect with displacement or index. */
1503 case PLUS:
1504 {
1505 rtx op0 = XEXP (addr, 0);
1506 rtx op1 = XEXP (addr, 1);
1507 enum rtx_code code0 = GET_CODE (op0);
1508
1509 switch (code0)
1510 {
1511 case REG:
1512 if (REG_P (op1))
1513 {
1514 base = op0; /* Base + index. */
1515 indx = op1;
1516 if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1517 {
1518 base = op1;
1519 indx = op0;
1520 }
1521 }
1522 else
1523 {
1524 base = op0; /* Base + displacement. */
1525 disp = op1;
1526 }
1527 break;
1528
1529 default:
1530 return 0;
1531 }
1532 }
1533 break;
1534
1535 /* Direct addressing with DP register. */
1536 case LO_SUM:
1537 {
1538 rtx op0 = XEXP (addr, 0);
1539 rtx op1 = XEXP (addr, 1);
1540
1541 /* HImode and HFmode direct memory references aren't truly
1542 offsettable (consider case at end of data page). We
1543 probably get better code by loading a pointer and using an
1544 indirect memory reference. */
1545 if (mode == HImode || mode == HFmode)
1546 return 0;
1547
1548 if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1549 return 0;
1550
1551 if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1552 return 1;
1553
1554 if (GET_CODE (op1) == CONST)
1555 return 1;
1556 return 0;
1557 }
1558 break;
1559
1560 /* Direct addressing with some work for the assembler... */
1561 case CONST:
1562 /* Direct addressing. */
1563 case LABEL_REF:
1564 case SYMBOL_REF:
1565 if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1566 return 1;
1567 /* These need to be converted to a LO_SUM (...).
1568 LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1569 return 0;
1570
1571 /* Do not allow direct memory access to absolute addresses.
1572 This is more pain than it's worth, especially for the
1573 small memory model where we can't guarantee that
1574 this address is within the data page---we don't want
1575 to modify the DP register in the small memory model,
1576 even temporarily, since an interrupt can sneak in.... */
1577 case CONST_INT:
1578 return 0;
1579
1580 /* Indirect indirect addressing. */
1581 case MEM:
1582 return 0;
1583
1584 case CONST_DOUBLE:
1585 fatal_insn ("using CONST_DOUBLE for address", addr);
1586
1587 default:
1588 return 0;
1589 }
1590
1591 /* Validate the base register. */
1592 if (base)
1593 {
1594 /* Check that the address is offsettable for HImode and HFmode. */
1595 if (indx && (mode == HImode || mode == HFmode))
1596 return 0;
1597
1598 /* Handle DP based stuff. */
1599 if (REGNO (base) == DP_REGNO)
1600 return 1;
1601 if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1602 return 0;
1603 else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1604 return 0;
1605 }
1606
1607 /* Now validate the index register. */
1608 if (indx)
1609 {
1610 if (GET_CODE (indx) != REG)
1611 return 0;
1612 if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1613 return 0;
1614 else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1615 return 0;
1616 }
1617
1618 /* Validate displacement. */
1619 if (disp)
1620 {
1621 if (GET_CODE (disp) != CONST_INT)
1622 return 0;
1623 if (mode == HImode || mode == HFmode)
1624 {
1625 /* The offset displacement must be legitimate. */
1626 if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1627 return 0;
1628 }
1629 else
1630 {
1631 if (! IS_DISP8_CONST (INTVAL (disp)))
1632 return 0;
1633 }
1634 /* Can't add an index with a disp. */
1635 if (indx)
1636 return 0;
1637 }
1638 return 1;
1639 }
1640
1641
1642 rtx
1643 c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1644 enum machine_mode mode ATTRIBUTE_UNUSED)
1645 {
1646 if (GET_CODE (orig) == SYMBOL_REF
1647 || GET_CODE (orig) == LABEL_REF)
1648 {
1649 if (mode == HImode || mode == HFmode)
1650 {
1651 /* We need to force the address into
1652 a register so that it is offsettable. */
1653 rtx addr_reg = gen_reg_rtx (Pmode);
1654 emit_move_insn (addr_reg, orig);
1655 return addr_reg;
1656 }
1657 else
1658 {
1659 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1660
1661 if (! TARGET_SMALL)
1662 emit_insn (gen_set_ldp (dp_reg, orig));
1663
1664 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1665 }
1666 }
1667
1668 return NULL_RTX;
1669 }
1670
1671
1672 /* Provide the costs of an addressing mode that contains ADDR.
1673 If ADDR is not a valid address, its cost is irrelevant.
1674 This is used in cse and loop optimization to determine
1675 if it is worthwhile storing a common address into a register.
1676 Unfortunately, the C4x address cost depends on other operands. */
1677
1678 static int
1679 c4x_address_cost (rtx addr)
1680 {
1681 switch (GET_CODE (addr))
1682 {
1683 case REG:
1684 return 1;
1685
1686 case POST_INC:
1687 case POST_DEC:
1688 case PRE_INC:
1689 case PRE_DEC:
1690 return 1;
1691
1692 /* These shouldn't be directly generated. */
1693 case SYMBOL_REF:
1694 case LABEL_REF:
1695 case CONST:
1696 return 10;
1697
1698 case LO_SUM:
1699 {
1700 rtx op1 = XEXP (addr, 1);
1701
1702 if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1703 return TARGET_SMALL ? 3 : 4;
1704
1705 if (GET_CODE (op1) == CONST)
1706 {
1707 rtx offset = const0_rtx;
1708
1709 op1 = eliminate_constant_term (op1, &offset);
1710
1711 /* ??? These costs need rethinking... */
1712 if (GET_CODE (op1) == LABEL_REF)
1713 return 3;
1714
1715 if (GET_CODE (op1) != SYMBOL_REF)
1716 return 4;
1717
1718 if (INTVAL (offset) == 0)
1719 return 3;
1720
1721 return 4;
1722 }
1723 fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1724 }
1725 break;
1726
1727 case PLUS:
1728 {
1729 register rtx op0 = XEXP (addr, 0);
1730 register rtx op1 = XEXP (addr, 1);
1731
1732 if (GET_CODE (op0) != REG)
1733 break;
1734
1735 switch (GET_CODE (op1))
1736 {
1737 default:
1738 break;
1739
1740 case REG:
1741 /* This cost for REG+REG must be greater than the cost
1742 for REG if we want autoincrement addressing modes. */
1743 return 2;
1744
1745 case CONST_INT:
1746 /* The following tries to improve GIV combination
1747 in strength reduce but appears not to help. */
1748 if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1749 return 1;
1750
1751 if (IS_DISP1_CONST (INTVAL (op1)))
1752 return 1;
1753
1754 if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1755 return 2;
1756
1757 return 3;
1758 }
1759 }
1760 default:
1761 break;
1762 }
1763
1764 return 4;
1765 }
1766
1767
1768 rtx
1769 c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
1770 {
1771 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1772 rtx cc_reg;
1773
1774 if (mode == CC_NOOVmode
1775 && (code == LE || code == GE || code == LT || code == GT))
1776 return NULL_RTX;
1777
1778 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1779 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1780 gen_rtx_COMPARE (mode, x, y)));
1781 return cc_reg;
1782 }
1783
1784 char *
1785 c4x_output_cbranch (const char *form, rtx seq)
1786 {
1787 int delayed = 0;
1788 int annultrue = 0;
1789 int annulfalse = 0;
1790 rtx delay;
1791 char *cp;
1792 static char str[100];
1793
1794 if (final_sequence)
1795 {
1796 delay = XVECEXP (final_sequence, 0, 1);
1797 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1798 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1799 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1800 }
1801 strcpy (str, form);
1802 cp = &str [strlen (str)];
1803 if (delayed)
1804 {
1805 *cp++ = '%';
1806 *cp++ = '#';
1807 }
1808 if (annultrue)
1809 {
1810 *cp++ = 'a';
1811 *cp++ = 't';
1812 }
1813 if (annulfalse)
1814 {
1815 *cp++ = 'a';
1816 *cp++ = 'f';
1817 }
1818 *cp++ = '\t';
1819 *cp++ = '%';
1820 *cp++ = 'l';
1821 *cp++ = '1';
1822 *cp = 0;
1823 return str;
1824 }
1825
1826 void
1827 c4x_print_operand (FILE *file, rtx op, int letter)
1828 {
1829 rtx op1;
1830 enum rtx_code code;
1831
1832 switch (letter)
1833 {
1834 case '#': /* Delayed. */
1835 if (final_sequence)
1836 fprintf (file, "d");
1837 return;
1838 }
1839
1840 code = GET_CODE (op);
1841 switch (letter)
1842 {
1843 case 'A': /* Direct address. */
1844 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1845 fprintf (file, "@");
1846 break;
1847
1848 case 'H': /* Sethi. */
1849 output_addr_const (file, op);
1850 return;
1851
1852 case 'I': /* Reversed condition. */
1853 code = reverse_condition (code);
1854 break;
1855
1856 case 'L': /* Log 2 of constant. */
1857 if (code != CONST_INT)
1858 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1859 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1860 return;
1861
1862 case 'N': /* One's complement of a small constant. */
1863 if (code != CONST_INT)
1864 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1865 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
1866 return;
1867
1868 case 'K': /* Generate ldp(k) if direct address. */
1869 if (! TARGET_SMALL
1870 && code == MEM
1871 && GET_CODE (XEXP (op, 0)) == LO_SUM
1872 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1873 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1874 {
1875 op1 = XEXP (XEXP (op, 0), 1);
1876 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1877 {
1878 fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1879 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1880 fprintf (file, "\n");
1881 }
1882 }
1883 return;
1884
1885 case 'M': /* Generate ldp(k) if direct address. */
1886 if (! TARGET_SMALL /* Only used in asm statements. */
1887 && code == MEM
1888 && (GET_CODE (XEXP (op, 0)) == CONST
1889 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1890 {
1891 fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1892 output_address (XEXP (op, 0));
1893 fprintf (file, "\n\t");
1894 }
1895 return;
1896
1897 case 'O': /* Offset address. */
1898 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1899 break;
1900 else if (code == MEM)
1901 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1902 else if (code == REG)
1903 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1904 else
1905 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1906 return;
1907
1908 case 'C': /* Call. */
1909 break;
1910
1911 case 'U': /* Call/callu. */
1912 if (code != SYMBOL_REF)
1913 fprintf (file, "u");
1914 return;
1915
1916 default:
1917 break;
1918 }
1919
1920 switch (code)
1921 {
1922 case REG:
1923 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1924 && ! TARGET_TI)
1925 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1926 else
1927 fprintf (file, "%s", reg_names[REGNO (op)]);
1928 break;
1929
1930 case MEM:
1931 output_address (XEXP (op, 0));
1932 break;
1933
1934 case CONST_DOUBLE:
1935 {
1936 char str[64];
1937
1938 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1939 sizeof (str), 0, 1);
1940 fprintf (file, "%s", str);
1941 }
1942 break;
1943
1944 case CONST_INT:
1945 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
1946 break;
1947
1948 case NE:
1949 fprintf (file, "ne");
1950 break;
1951
1952 case EQ:
1953 fprintf (file, "eq");
1954 break;
1955
1956 case GE:
1957 fprintf (file, "ge");
1958 break;
1959
1960 case GT:
1961 fprintf (file, "gt");
1962 break;
1963
1964 case LE:
1965 fprintf (file, "le");
1966 break;
1967
1968 case LT:
1969 fprintf (file, "lt");
1970 break;
1971
1972 case GEU:
1973 fprintf (file, "hs");
1974 break;
1975
1976 case GTU:
1977 fprintf (file, "hi");
1978 break;
1979
1980 case LEU:
1981 fprintf (file, "ls");
1982 break;
1983
1984 case LTU:
1985 fprintf (file, "lo");
1986 break;
1987
1988 case SYMBOL_REF:
1989 output_addr_const (file, op);
1990 break;
1991
1992 case CONST:
1993 output_addr_const (file, XEXP (op, 0));
1994 break;
1995
1996 case CODE_LABEL:
1997 break;
1998
1999 default:
2000 fatal_insn ("c4x_print_operand: Bad operand case", op);
2001 break;
2002 }
2003 }
2004
2005
2006 void
2007 c4x_print_operand_address (FILE *file, rtx addr)
2008 {
2009 switch (GET_CODE (addr))
2010 {
2011 case REG:
2012 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2013 break;
2014
2015 case PRE_DEC:
2016 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2017 break;
2018
2019 case POST_INC:
2020 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2021 break;
2022
2023 case POST_MODIFY:
2024 {
2025 rtx op0 = XEXP (XEXP (addr, 1), 0);
2026 rtx op1 = XEXP (XEXP (addr, 1), 1);
2027
2028 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2029 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2030 reg_names[REGNO (op1)]);
2031 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2032 fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2033 reg_names[REGNO (op0)], INTVAL (op1));
2034 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2035 fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2036 reg_names[REGNO (op0)], -INTVAL (op1));
2037 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2038 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2039 reg_names[REGNO (op1)]);
2040 else
2041 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2042 }
2043 break;
2044
2045 case PRE_MODIFY:
2046 {
2047 rtx op0 = XEXP (XEXP (addr, 1), 0);
2048 rtx op1 = XEXP (XEXP (addr, 1), 1);
2049
2050 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2051 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2052 reg_names[REGNO (op1)]);
2053 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2054 fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2055 reg_names[REGNO (op0)], INTVAL (op1));
2056 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2057 fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2058 reg_names[REGNO (op0)], -INTVAL (op1));
2059 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2060 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2061 reg_names[REGNO (op1)]);
2062 else
2063 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2064 }
2065 break;
2066
2067 case PRE_INC:
2068 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2069 break;
2070
2071 case POST_DEC:
2072 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2073 break;
2074
2075 case PLUS: /* Indirect with displacement. */
2076 {
2077 rtx op0 = XEXP (addr, 0);
2078 rtx op1 = XEXP (addr, 1);
2079
2080 if (REG_P (op0))
2081 {
2082 if (REG_P (op1))
2083 {
2084 if (IS_INDEX_REG (op0))
2085 {
2086 fprintf (file, "*+%s(%s)",
2087 reg_names[REGNO (op1)],
2088 reg_names[REGNO (op0)]); /* Index + base. */
2089 }
2090 else
2091 {
2092 fprintf (file, "*+%s(%s)",
2093 reg_names[REGNO (op0)],
2094 reg_names[REGNO (op1)]); /* Base + index. */
2095 }
2096 }
2097 else if (INTVAL (op1) < 0)
2098 {
2099 fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
2100 reg_names[REGNO (op0)],
2101 -INTVAL (op1)); /* Base - displacement. */
2102 }
2103 else
2104 {
2105 fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
2106 reg_names[REGNO (op0)],
2107 INTVAL (op1)); /* Base + displacement. */
2108 }
2109 }
2110 else
2111 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2112 }
2113 break;
2114
2115 case LO_SUM:
2116 {
2117 rtx op0 = XEXP (addr, 0);
2118 rtx op1 = XEXP (addr, 1);
2119
2120 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2121 c4x_print_operand_address (file, op1);
2122 else
2123 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2124 }
2125 break;
2126
2127 case CONST:
2128 case SYMBOL_REF:
2129 case LABEL_REF:
2130 fprintf (file, "@");
2131 output_addr_const (file, addr);
2132 break;
2133
2134 /* We shouldn't access CONST_INT addresses. */
2135 case CONST_INT:
2136
2137 default:
2138 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2139 break;
2140 }
2141 }
2142
2143
2144 /* Return nonzero if the floating point operand will fit
2145 in the immediate field. */
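/* Illustrative note (an assumption about the hardware encoding, not taken
   from the TI documentation): the tests below appear to match a 16-bit
   short immediate float made up of a 4-bit exponent, a sign bit and an
   11-bit mantissa.  Hence the sign-extended 8-bit exponent must lie in
   [-7, 7], with -128 denoting 0.0, and the low 12 bits of the 23-bit
   single-precision mantissa must be zero.  */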
2146
2147 static int
2148 c4x_immed_float_p (rtx op)
2149 {
2150 long convval[2];
2151 int exponent;
2152 REAL_VALUE_TYPE r;
2153
2154 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2155 if (GET_MODE (op) == HFmode)
2156 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2157 else
2158 {
2159 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2160 convval[1] = 0;
2161 }
2162
2163 /* Sign extend exponent. */
2164 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2165 if (exponent == -128)
2166 return 1; /* 0.0 */
2167 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2168 return 0; /* Precision doesn't fit. */
2169 return (exponent <= 7) /* Positive exp. */
2170 && (exponent >= -7); /* Negative exp. */
2171 }
2172
2173
2174 /* The last instruction in a repeat block cannot be a Bcond, DBcond,
2175 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2176
2177 None of the last four instructions from the bottom of the block can
2178 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2179 BcondAT or RETIcondD.
2180
2181 This routine scans the four previous insns for a jump insn, and if
2182 one is found, returns 1 so that we bung in a nop instruction.
2183 This simple-minded strategy may add a nop when one is not
2184 required, say when there is a JUMP_INSN near the end of the
2185 block that does not get converted into a delayed branch.
2186
2187 Note that we cannot have a call insn, since we don't generate
2188 repeat loops with calls in them (although I suppose we could, but
2189 there's no benefit.)
2190
2191 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
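/* For illustration only (a sketch, not actual compiler output): if the
   body of a repeat block ends with a conditional jump,

           rptb    L2
   L1:     ...
           bcond   L3              ; jump near the end of the block
   L2:

   and that jump may become a delayed branch, this routine answers 1 and
   the caller emits a nop, pushing the branch out of the restricted final
   words of the block.  */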
2192
2193 int
2194 c4x_rptb_nop_p (rtx insn)
2195 {
2196 rtx start_label;
2197 int i;
2198
2199 /* Extract the start label from the jump pattern (rptb_end). */
2200 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2201
2202 /* If there is a label at the end of the loop we must insert
2203 a NOP. */
2204 do {
2205 insn = previous_insn (insn);
2206 } while (GET_CODE (insn) == NOTE
2207 || GET_CODE (insn) == USE
2208 || GET_CODE (insn) == CLOBBER);
2209 if (GET_CODE (insn) == CODE_LABEL)
2210 return 1;
2211
2212 for (i = 0; i < 4; i++)
2213 {
2214 /* Search back for prev non-note and non-label insn. */
2215 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2216 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
2217 {
2218 if (insn == start_label)
2219 return i == 0;
2220
2221 insn = previous_insn (insn);
2222 }
2223
2224 /* If we have a jump instruction we should insert a NOP. If we
2225 hit repeat block top we should only insert a NOP if the loop
2226 is empty. */
2227 if (GET_CODE (insn) == JUMP_INSN)
2228 return 1;
2229 insn = previous_insn (insn);
2230 }
2231 return 0;
2232 }
2233
2234
2235 /* The C4x looping instruction needs to be emitted at the top of the
2236 loop. Emitting the true RTL for a looping instruction at the top of
2237 the loop can cause problems with flow analysis. So instead, a dummy
2238 doloop insn is emitted at the end of the loop. This routine checks
2239 for the presence of this doloop insn and then searches back to the
2240 top of the loop, where it inserts the true looping insn (provided
2241 there are no instructions in the loop which would cause problems).
2242 Any additional labels can be emitted at this point. In addition, if
2243 the desired loop count register was not allocated, this routine does
2244 nothing.
2245
2246 Before we can create a repeat block looping instruction we have to
2247 verify that no jump inside the loop jumps out of it and that no jump
2248 from outside jumps into it. Such jumps can be introduced by the basic
2249 block reordering pass, and the C4x CPU cannot handle them. */
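/* Roughly, the intended transformation is (an illustrative sketch of the
   RTL-level rewrite, not literal output):

      before reorg                    after c4x_rptb_insert

      L_start:                        (rptb_top  L_new_start L_end)
         <loop body>                  L_new_start:
         (rptb_end  RC L_start)          <loop body>
                                         (rptb_end  RC L_start)
                                      L_end:

   If RC was not allocated as the counter, or a jump crosses the block
   boundary, the dummy rptb_end is instead replaced by an explicit
   decrement, compare and branch sequence.  */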
2250
2251 static int
2252 c4x_label_ref_used_p (rtx x, rtx code_label)
2253 {
2254 enum rtx_code code;
2255 int i, j;
2256 const char *fmt;
2257
2258 if (x == 0)
2259 return 0;
2260
2261 code = GET_CODE (x);
2262 if (code == LABEL_REF)
2263 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2264
2265 fmt = GET_RTX_FORMAT (code);
2266 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2267 {
2268 if (fmt[i] == 'e')
2269 {
2270 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2271 return 1;
2272 }
2273 else if (fmt[i] == 'E')
2274 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2275 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2276 return 1;
2277 }
2278 return 0;
2279 }
2280
2281
2282 static int
2283 c4x_rptb_valid_p (rtx insn, rtx start_label)
2284 {
2285 rtx end = insn;
2286 rtx start;
2287 rtx tmp;
2288
2289 /* Find the start label. */
2290 for (; insn; insn = PREV_INSN (insn))
2291 if (insn == start_label)
2292 break;
2293
2294 /* If not found then we cannot use a rptb or rpts. The label was
2295 probably moved by the basic block reorder pass. */
2296 if (! insn)
2297 return 0;
2298
2299 start = insn;
2300 /* If any jump inside this block jumps to a label outside it, we must fail. */
2301 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2302 {
2303 if (GET_CODE (insn) == CODE_LABEL)
2304 {
2305 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
2306 if (GET_CODE (tmp) == JUMP_INSN
2307 && c4x_label_ref_used_p (tmp, insn))
2308 return 0;
2309 }
2310 }
2311 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2312 {
2313 if (GET_CODE (insn) == CODE_LABEL)
2314 {
2315 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN (tmp))
2316 if (GET_CODE (tmp) == JUMP_INSN
2317 && c4x_label_ref_used_p (tmp, insn))
2318 return 0;
2319 }
2320 }
2321 /* If any jump from outside this block jumps into it, we must fail. */
2322 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2323 {
2324 if (GET_CODE (insn) == CODE_LABEL)
2325 {
2326 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN (tmp))
2327 if (GET_CODE (tmp) == JUMP_INSN
2328 && c4x_label_ref_used_p (tmp, insn))
2329 return 0;
2330 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN (tmp))
2331 if (GET_CODE (tmp) == JUMP_INSN
2332 && c4x_label_ref_used_p (tmp, insn))
2333 return 0;
2334 }
2335 }
2336
2337 /* All checks OK. */
2338 return 1;
2339 }
2340
2341
2342 void
2343 c4x_rptb_insert (rtx insn)
2344 {
2345 rtx end_label;
2346 rtx start_label;
2347 rtx new_start_label;
2348 rtx count_reg;
2349
2350 /* If the count register has not been allocated to RC, say if
2351 there is a movstr pattern in the loop, then do not insert a
2352 RPTB instruction. Instead we emit a decrement and branch
2353 at the end of the loop. */
2354 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2355 if (REGNO (count_reg) != RC_REGNO)
2356 return;
2357
2358 /* Extract the start label from the jump pattern (rptb_end). */
2359 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2360
2361 if (! c4x_rptb_valid_p (insn, start_label))
2362 {
2363 /* We can not use the rptb insn. Replace it so reorg can use
2364 the delay slots of the jump insn. */
2365 emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
2366 emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
2367 emit_insn_before (gen_bge (start_label), insn);
2368 LABEL_NUSES (start_label)++;
2369 delete_insn (insn);
2370 return;
2371 }
2372
2373 end_label = gen_label_rtx ();
2374 LABEL_NUSES (end_label)++;
2375 emit_label_after (end_label, insn);
2376
2377 new_start_label = gen_label_rtx ();
2378 LABEL_NUSES (new_start_label)++;
2379
2380 for (; insn; insn = PREV_INSN (insn))
2381 {
2382 if (insn == start_label)
2383 break;
2384 if (GET_CODE (insn) == JUMP_INSN
2385 && JUMP_LABEL (insn) == start_label)
2386 redirect_jump (insn, new_start_label, 0);
2387 }
2388 if (! insn)
2389 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2390
2391 emit_label_after (new_start_label, insn);
2392
2393 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
2394 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
2395 else
2396 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2397 if (LABEL_NUSES (start_label) == 0)
2398 delete_insn (start_label);
2399 }
2400
2401
2402 /* We need to use direct addressing for large constants and addresses
2403 that cannot fit within an instruction. We must check for these
2404 after the final jump optimization pass, since this may
2405 introduce a local_move insn for a SYMBOL_REF. This pass
2406 must come before delayed branch slot filling since it can generate
2407 additional instructions.
2408
2409 This function also fixes up RPTB style loops that didn't get RC
2410 allocated as the loop counter. */
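/* For example (illustrative only), a QImode constant that does not satisfy
   the 16-bit I constraint cannot be encoded as an immediate; splitting the
   insn here lets force_const_mem place it in the constant pool so that it
   can be fetched with direct (@label) addressing.  */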
2411
2412 static void
2413 c4x_reorg (void)
2414 {
2415 rtx insn;
2416
2417 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2418 {
2419 /* Look for insn. */
2420 if (INSN_P (insn))
2421 {
2422 int insn_code_number;
2423 rtx old;
2424
2425 insn_code_number = recog_memoized (insn);
2426
2427 if (insn_code_number < 0)
2428 continue;
2429
2430 /* Insert the RTX for RPTB at the top of the loop
2431 and a label at the end of the loop. */
2432 if (insn_code_number == CODE_FOR_rptb_end)
2433 c4x_rptb_insert (insn);
2434
2435 /* We need to split the insn here. Otherwise the calls to
2436 force_const_mem will not work for load_immed_address. */
2437 old = insn;
2438
2439 /* Don't split the insn if it has been deleted. */
2440 if (! INSN_DELETED_P (old))
2441 insn = try_split (PATTERN (old), old, 1);
2442
2443 /* When not optimizing, the old insn will still be left around
2444 with only the 'deleted' bit set. Transform it into a note
2445 to avoid confusion of subsequent processing. */
2446 if (INSN_DELETED_P (old))
2447 {
2448 PUT_CODE (old, NOTE);
2449 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2450 NOTE_SOURCE_FILE (old) = 0;
2451 }
2452 }
2453 }
2454 }
2455
2456
2457 static int
2458 c4x_a_register (rtx op)
2459 {
2460 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2461 }
2462
2463
2464 static int
2465 c4x_x_register (rtx op)
2466 {
2467 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2468 }
2469
2470
2471 static int
2472 c4x_immed_int_constant (rtx op)
2473 {
2474 if (GET_CODE (op) != CONST_INT)
2475 return 0;
2476
2477 return GET_MODE (op) == VOIDmode
2478 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2479 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2480 }
2481
2482
2483 static int
2484 c4x_immed_float_constant (rtx op)
2485 {
2486 if (GET_CODE (op) != CONST_DOUBLE)
2487 return 0;
2488
2489 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2490 present this only means that a MEM rtx has been generated. It does
2491 not mean the rtx is really in memory. */
2492
2493 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2494 }
2495
2496
2497 int
2498 c4x_shiftable_constant (rtx op)
2499 {
2500 int i;
2501 int mask;
2502 int val = INTVAL (op);
2503
2504 for (i = 0; i < 16; i++)
2505 {
2506 if (val & (1 << i))
2507 break;
2508 }
2509 mask = ((0xffff >> i) << 16) | 0xffff;
2510 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2511 : (val >> i) & mask))
2512 return i;
2513 return -1;
2514 }
2515
2516
2517 int
2518 c4x_H_constant (rtx op)
2519 {
2520 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2521 }
2522
2523
2524 int
2525 c4x_I_constant (rtx op)
2526 {
2527 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2528 }
2529
2530
2531 int
2532 c4x_J_constant (rtx op)
2533 {
2534 if (TARGET_C3X)
2535 return 0;
2536 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2537 }
2538
2539
2540 static int
2541 c4x_K_constant (rtx op)
2542 {
2543 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2544 return 0;
2545 return IS_INT5_CONST (INTVAL (op));
2546 }
2547
2548
2549 int
2550 c4x_L_constant (rtx op)
2551 {
2552 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2553 }
2554
2555
2556 static int
2557 c4x_N_constant (rtx op)
2558 {
2559 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2560 }
2561
2562
2563 static int
2564 c4x_O_constant (rtx op)
2565 {
2566 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2567 }
2568
2569
2570 /* The constraints do not have to check the register class,
2571 except when needed to discriminate between the constraints.
2572 The operand has been checked by the predicates to be valid. */
2573
2574 /* ARx + 9-bit signed const or IRn
2575 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2576 We don't include the pre/post inc/dec forms here since
2577 they are handled by the <> constraints. */
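/* For example (illustrative only), (mem:QI (plus (reg:QI AR0) (const_int 100)))
   satisfies this constraint; c4x_print_operand_address above emits it as
   *+AR0(100), while a displacement of -100 would come out as *-AR0(100).  */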
2578
2579 int
2580 c4x_Q_constraint (rtx op)
2581 {
2582 enum machine_mode mode = GET_MODE (op);
2583
2584 if (GET_CODE (op) != MEM)
2585 return 0;
2586 op = XEXP (op, 0);
2587 switch (GET_CODE (op))
2588 {
2589 case REG:
2590 return 1;
2591
2592 case PLUS:
2593 {
2594 rtx op0 = XEXP (op, 0);
2595 rtx op1 = XEXP (op, 1);
2596
2597 if (! REG_P (op0))
2598 return 0;
2599
2600 if (REG_P (op1))
2601 return 1;
2602
2603 if (GET_CODE (op1) != CONST_INT)
2604 return 0;
2605
2606 /* HImode and HFmode must be offsettable. */
2607 if (mode == HImode || mode == HFmode)
2608 return IS_DISP8_OFF_CONST (INTVAL (op1));
2609
2610 return IS_DISP8_CONST (INTVAL (op1));
2611 }
2612 break;
2613
2614 default:
2615 break;
2616 }
2617 return 0;
2618 }
2619
2620
2621 /* ARx + 5-bit unsigned const
2622 *ARx, *+ARx(n) for n < 32. */
2623
2624 int
2625 c4x_R_constraint (rtx op)
2626 {
2627 enum machine_mode mode = GET_MODE (op);
2628
2629 if (TARGET_C3X)
2630 return 0;
2631 if (GET_CODE (op) != MEM)
2632 return 0;
2633 op = XEXP (op, 0);
2634 switch (GET_CODE (op))
2635 {
2636 case REG:
2637 return 1;
2638
2639 case PLUS:
2640 {
2641 rtx op0 = XEXP (op, 0);
2642 rtx op1 = XEXP (op, 1);
2643
2644 if (! REG_P (op0))
2645 return 0;
2646
2647 if (GET_CODE (op1) != CONST_INT)
2648 return 0;
2649
2650 /* HImode and HFmode must be offsettable. */
2651 if (mode == HImode || mode == HFmode)
2652 return IS_UINT5_CONST (INTVAL (op1) + 1);
2653
2654 return IS_UINT5_CONST (INTVAL (op1));
2655 }
2656 break;
2657
2658 default:
2659 break;
2660 }
2661 return 0;
2662 }
2663
2664
2665 static int
2666 c4x_R_indirect (rtx op)
2667 {
2668 enum machine_mode mode = GET_MODE (op);
2669
2670 if (TARGET_C3X || GET_CODE (op) != MEM)
2671 return 0;
2672
2673 op = XEXP (op, 0);
2674 switch (GET_CODE (op))
2675 {
2676 case REG:
2677 return IS_ADDR_OR_PSEUDO_REG (op);
2678
2679 case PLUS:
2680 {
2681 rtx op0 = XEXP (op, 0);
2682 rtx op1 = XEXP (op, 1);
2683
2684 /* HImode and HFmode must be offsettable. */
2685 if (mode == HImode || mode == HFmode)
2686 return IS_ADDR_OR_PSEUDO_REG (op0)
2687 && GET_CODE (op1) == CONST_INT
2688 && IS_UINT5_CONST (INTVAL (op1) + 1);
2689
2690 return REG_P (op0)
2691 && IS_ADDR_OR_PSEUDO_REG (op0)
2692 && GET_CODE (op1) == CONST_INT
2693 && IS_UINT5_CONST (INTVAL (op1));
2694 }
2695 break;
2696
2697 default:
2698 break;
2699 }
2700 return 0;
2701 }
2702
2703
2704 /* ARx + 1-bit unsigned const or IRn
2705 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2706 We don't include the pre/post inc/dec forms here since
2707 they are handled by the <> constraints. */
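/* For example (illustrative only), (mem:QI (plus (reg AR2) (reg IR1)))
   matches and is printed as *+AR2(IR1), while
   (mem:QI (plus (reg AR2) (const_int 1))) is printed as *+AR2(1).  */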
2708
2709 int
2710 c4x_S_constraint (rtx op)
2711 {
2712 enum machine_mode mode = GET_MODE (op);
2713 if (GET_CODE (op) != MEM)
2714 return 0;
2715 op = XEXP (op, 0);
2716 switch (GET_CODE (op))
2717 {
2718 case REG:
2719 return 1;
2720
2721 case PRE_MODIFY:
2722 case POST_MODIFY:
2723 {
2724 rtx op0 = XEXP (op, 0);
2725 rtx op1 = XEXP (op, 1);
2726
2727 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2728 || (op0 != XEXP (op1, 0)))
2729 return 0;
2730
2731 op0 = XEXP (op1, 0);
2732 op1 = XEXP (op1, 1);
2733 return REG_P (op0) && REG_P (op1);
2734 /* Pre or post_modify with a displacement of 0 or 1
2735 should not be generated. */
2736 }
2737 break;
2738
2739 case PLUS:
2740 {
2741 rtx op0 = XEXP (op, 0);
2742 rtx op1 = XEXP (op, 1);
2743
2744 if (!REG_P (op0))
2745 return 0;
2746
2747 if (REG_P (op1))
2748 return 1;
2749
2750 if (GET_CODE (op1) != CONST_INT)
2751 return 0;
2752
2753 /* HImode and HFmode must be offsettable. */
2754 if (mode == HImode || mode == HFmode)
2755 return IS_DISP1_OFF_CONST (INTVAL (op1));
2756
2757 return IS_DISP1_CONST (INTVAL (op1));
2758 }
2759 break;
2760
2761 default:
2762 break;
2763 }
2764 return 0;
2765 }
2766
2767
2768 static int
2769 c4x_S_indirect (rtx op)
2770 {
2771 enum machine_mode mode = GET_MODE (op);
2772 if (GET_CODE (op) != MEM)
2773 return 0;
2774
2775 op = XEXP (op, 0);
2776 switch (GET_CODE (op))
2777 {
2778 case PRE_DEC:
2779 case POST_DEC:
2780 if (mode != QImode && mode != QFmode)
2781 return 0;
2782 case PRE_INC:
2783 case POST_INC:
2784 op = XEXP (op, 0);
2785
2786 case REG:
2787 return IS_ADDR_OR_PSEUDO_REG (op);
2788
2789 case PRE_MODIFY:
2790 case POST_MODIFY:
2791 {
2792 rtx op0 = XEXP (op, 0);
2793 rtx op1 = XEXP (op, 1);
2794
2795 if (mode != QImode && mode != QFmode)
2796 return 0;
2797
2798 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2799 || (op0 != XEXP (op1, 0)))
2800 return 0;
2801
2802 op0 = XEXP (op1, 0);
2803 op1 = XEXP (op1, 1);
2804 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2805 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
2806 /* Pre or post_modify with a displacement of 0 or 1
2807 should not be generated. */
2808 }
2809
2810 case PLUS:
2811 {
2812 rtx op0 = XEXP (op, 0);
2813 rtx op1 = XEXP (op, 1);
2814
2815 if (REG_P (op0))
2816 {
2817 /* HImode and HFmode must be offsettable. */
2818 if (mode == HImode || mode == HFmode)
2819 return IS_ADDR_OR_PSEUDO_REG (op0)
2820 && GET_CODE (op1) == CONST_INT
2821 && IS_DISP1_OFF_CONST (INTVAL (op1));
2822
2823 if (REG_P (op1))
2824 return (IS_INDEX_OR_PSEUDO_REG (op1)
2825 && IS_ADDR_OR_PSEUDO_REG (op0))
2826 || (IS_ADDR_OR_PSEUDO_REG (op1)
2827 && IS_INDEX_OR_PSEUDO_REG (op0));
2828
2829 return IS_ADDR_OR_PSEUDO_REG (op0)
2830 && GET_CODE (op1) == CONST_INT
2831 && IS_DISP1_CONST (INTVAL (op1));
2832 }
2833 }
2834 break;
2835
2836 default:
2837 break;
2838 }
2839 return 0;
2840 }
2841
2842
2843 /* Direct memory operand. */
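/* For example (illustrative only), once the movqi expander has exposed the
   DP load, (mem:QI (lo_sum (reg:QI DP) (symbol_ref "foo"))) satisfies this
   constraint and the address is printed as @foo by
   c4x_print_operand_address.  */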
2844
2845 int
2846 c4x_T_constraint (rtx op)
2847 {
2848 if (GET_CODE (op) != MEM)
2849 return 0;
2850 op = XEXP (op, 0);
2851
2852 if (GET_CODE (op) != LO_SUM)
2853 {
2854 /* Allow call operands. */
2855 return GET_CODE (op) == SYMBOL_REF
2856 && GET_MODE (op) == Pmode
2857 && SYMBOL_REF_FUNCTION_P (op);
2858 }
2859
2860 /* HImode and HFmode are not offsettable. */
2861 if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2862 return 0;
2863
2864 if ((GET_CODE (XEXP (op, 0)) == REG)
2865 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2866 return c4x_U_constraint (XEXP (op, 1));
2867
2868 return 0;
2869 }
2870
2871
2872 /* Symbolic operand. */
2873
2874 int
2875 c4x_U_constraint (rtx op)
2876 {
2877 /* Don't allow direct addressing to an arbitrary constant. */
2878 return GET_CODE (op) == CONST
2879 || GET_CODE (op) == SYMBOL_REF
2880 || GET_CODE (op) == LABEL_REF;
2881 }
2882
2883
2884 int
2885 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2886 {
2887 if (GET_CODE (op) == MEM)
2888 {
2889 enum rtx_code code = GET_CODE (XEXP (op, 0));
2890
2891 if (code == PRE_INC
2892 || code == PRE_DEC
2893 || code == POST_INC
2894 || code == POST_DEC
2895 || code == PRE_MODIFY
2896 || code == POST_MODIFY
2897 )
2898 return 1;
2899 }
2900 return 0;
2901 }
2902
2903
2904 /* Match any operand. */
2905
2906 int
2907 any_operand (register rtx op ATTRIBUTE_UNUSED,
2908 enum machine_mode mode ATTRIBUTE_UNUSED)
2909 {
2910 return 1;
2911 }
2912
2913
2914 /* Nonzero if OP is a floating point value with value 0.0. */
2915
2916 int
2917 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2918 {
2919 REAL_VALUE_TYPE r;
2920
2921 if (GET_CODE (op) != CONST_DOUBLE)
2922 return 0;
2923 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2924 return REAL_VALUES_EQUAL (r, dconst0);
2925 }
2926
2927
2928 int
2929 const_operand (register rtx op, register enum machine_mode mode)
2930 {
2931 switch (mode)
2932 {
2933 case QFmode:
2934 case HFmode:
2935 if (GET_CODE (op) != CONST_DOUBLE
2936 || GET_MODE (op) != mode
2937 || GET_MODE_CLASS (mode) != MODE_FLOAT)
2938 return 0;
2939
2940 return c4x_immed_float_p (op);
2941
2942 #if Pmode != QImode
2943 case Pmode:
2944 #endif
2945 case QImode:
2946 if (GET_CODE (op) != CONST_INT
2947 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
2948 || GET_MODE_CLASS (mode) != MODE_INT)
2949 return 0;
2950
2951 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
2952
2953 case HImode:
2954 return 0;
2955
2956 default:
2957 return 0;
2958 }
2959 }
2960
2961
2962 int
2963 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2964 {
2965 return c4x_K_constant (op);
2966 }
2967
2968
2969 int
2970 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2971 {
2972 return c4x_N_constant (op);
2973 }
2974
2975
2976 int
2977 reg_operand (rtx op, enum machine_mode mode)
2978 {
2979 if (GET_CODE (op) == SUBREG
2980 && GET_MODE (op) == QFmode)
2981 return 0;
2982 return register_operand (op, mode);
2983 }
2984
2985
2986 int
2987 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2988 {
2989 /* Allow (subreg:HF (reg:HI)) that can be generated for a union of an
2990 int and a long double. */
2991 if (GET_CODE (op) == SUBREG
2992 && (GET_MODE (op) == QFmode)
2993 && (GET_MODE (SUBREG_REG (op)) == QImode
2994 || GET_MODE (SUBREG_REG (op)) == HImode))
2995 return 1;
2996 return 0;
2997 }
2998
2999
3000 int
3001 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3002 {
3003 if (REG_P (op) || CONSTANT_P (op))
3004 return 1;
3005 return 0;
3006 }
3007
3008
3009 int
3010 not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3011 {
3012 if (REG_P (op) || CONSTANT_P (op))
3013 return 1;
3014 if (GET_CODE (op) != MEM)
3015 return 0;
3016 op = XEXP (op, 0);
3017 switch (GET_CODE (op))
3018 {
3019 case REG:
3020 return 1;
3021
3022 case PLUS:
3023 {
3024 rtx op0 = XEXP (op, 0);
3025 rtx op1 = XEXP (op, 1);
3026
3027 if (! REG_P (op0))
3028 return 0;
3029
3030 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3031 return 1;
3032 }
3033
3034 case LO_SUM:
3035 {
3036 rtx op0 = XEXP (op, 0);
3037
3038 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3039 return 1;
3040 }
3041 break;
3042
3043 case CONST:
3044 case SYMBOL_REF:
3045 case LABEL_REF:
3046 return 1;
3047
3048 default:
3049 break;
3050 }
3051 return 0;
3052 }
3053
3054
3055 int
3056 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3057 {
3058 if (REG_P (op) && REGNO (op) == RC_REGNO)
3059 return 0;
3060 return 1;
3061 }
3062
3063
3064 /* Extended precision register R0-R1. */
3065
3066 int
3067 r0r1_reg_operand (rtx op, enum machine_mode mode)
3068 {
3069 if (! reg_operand (op, mode))
3070 return 0;
3071 if (GET_CODE (op) == SUBREG)
3072 op = SUBREG_REG (op);
3073 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3074 }
3075
3076
3077 /* Extended precision register R2-R3. */
3078
3079 int
3080 r2r3_reg_operand (rtx op, enum machine_mode mode)
3081 {
3082 if (! reg_operand (op, mode))
3083 return 0;
3084 if (GET_CODE (op) == SUBREG)
3085 op = SUBREG_REG (op);
3086 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3087 }
3088
3089
3090 /* Low extended precision register R0-R7. */
3091
3092 int
3093 ext_low_reg_operand (rtx op, enum machine_mode mode)
3094 {
3095 if (! reg_operand (op, mode))
3096 return 0;
3097 if (GET_CODE (op) == SUBREG)
3098 op = SUBREG_REG (op);
3099 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3100 }
3101
3102
3103 /* Extended precision register. */
3104
3105 int
3106 ext_reg_operand (rtx op, enum machine_mode mode)
3107 {
3108 if (! reg_operand (op, mode))
3109 return 0;
3110 if (GET_CODE (op) == SUBREG)
3111 op = SUBREG_REG (op);
3112 if (! REG_P (op))
3113 return 0;
3114 return IS_EXT_OR_PSEUDO_REG (op);
3115 }
3116
3117
3118 /* Standard precision register. */
3119
3120 int
3121 std_reg_operand (rtx op, enum machine_mode mode)
3122 {
3123 if (! reg_operand (op, mode))
3124 return 0;
3125 if (GET_CODE (op) == SUBREG)
3126 op = SUBREG_REG (op);
3127 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3128 }
3129
3130 /* Standard precision or normal register. */
3131
3132 int
3133 std_or_reg_operand (rtx op, enum machine_mode mode)
3134 {
3135 if (reload_in_progress)
3136 return std_reg_operand (op, mode);
3137 return reg_operand (op, mode);
3138 }
3139
3140 /* Address register. */
3141
3142 int
3143 addr_reg_operand (rtx op, enum machine_mode mode)
3144 {
3145 if (! reg_operand (op, mode))
3146 return 0;
3147 return c4x_a_register (op);
3148 }
3149
3150
3151 /* Index register. */
3152
3153 int
3154 index_reg_operand (rtx op, enum machine_mode mode)
3155 {
3156 if (! reg_operand (op, mode))
3157 return 0;
3158 if (GET_CODE (op) == SUBREG)
3159 op = SUBREG_REG (op);
3160 return c4x_x_register (op);
3161 }
3162
3163
3164 /* DP register. */
3165
3166 int
3167 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3168 {
3169 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3170 }
3171
3172
3173 /* SP register. */
3174
3175 int
3176 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3177 {
3178 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3179 }
3180
3181
3182 /* ST register. */
3183
3184 int
3185 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3186 {
3187 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3188 }
3189
3190
3191 /* RC register. */
3192
3193 int
3194 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3195 {
3196 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3197 }
3198
3199
3200 int
3201 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3202 {
3203 return (REG_P (op) || symbolic_address_operand (op, mode));
3204 }
3205
3206
3207 /* Symbolic address operand. */
3208
3209 int
3210 symbolic_address_operand (register rtx op,
3211 enum machine_mode mode ATTRIBUTE_UNUSED)
3212 {
3213 switch (GET_CODE (op))
3214 {
3215 case CONST:
3216 case SYMBOL_REF:
3217 case LABEL_REF:
3218 return 1;
3219 default:
3220 return 0;
3221 }
3222 }
3223
3224
3225 /* Check dst operand of a move instruction. */
3226
3227 int
3228 dst_operand (rtx op, enum machine_mode mode)
3229 {
3230 if (GET_CODE (op) == SUBREG
3231 && mixed_subreg_operand (op, mode))
3232 return 0;
3233
3234 if (REG_P (op))
3235 return reg_operand (op, mode);
3236
3237 return nonimmediate_operand (op, mode);
3238 }
3239
3240
3241 /* Check src operand of two operand arithmetic instructions. */
3242
3243 int
3244 src_operand (rtx op, enum machine_mode mode)
3245 {
3246 if (GET_CODE (op) == SUBREG
3247 && mixed_subreg_operand (op, mode))
3248 return 0;
3249
3250 if (REG_P (op))
3251 return reg_operand (op, mode);
3252
3253 if (mode == VOIDmode)
3254 mode = GET_MODE (op);
3255
3256 if (GET_CODE (op) == CONST_INT)
3257 return (mode == QImode || mode == Pmode || mode == HImode)
3258 && c4x_I_constant (op);
3259
3260 /* We don't like CONST_DOUBLE integers. */
3261 if (GET_CODE (op) == CONST_DOUBLE)
3262 return c4x_H_constant (op);
3263
3264 /* Disallow symbolic addresses. Only the predicate
3265 symbolic_address_operand will match these. */
3266 if (GET_CODE (op) == SYMBOL_REF
3267 || GET_CODE (op) == LABEL_REF
3268 || GET_CODE (op) == CONST)
3269 return 0;
3270
3271 /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
3272 access to symbolic addresses. These operands will get forced
3273 into a register and the movqi expander will generate a
3274 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
3275 if (GET_CODE (op) == MEM
3276 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3277 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3278 || GET_CODE (XEXP (op, 0)) == CONST)))
3279 return ! TARGET_EXPOSE_LDP
3280 && ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
3281
3282 return general_operand (op, mode);
3283 }
3284
3285
3286 int
3287 src_hi_operand (rtx op, enum machine_mode mode)
3288 {
3289 if (c4x_O_constant (op))
3290 return 1;
3291 return src_operand (op, mode);
3292 }
3293
3294
3295 /* Check src operand of two operand logical instructions. */
3296
3297 int
3298 lsrc_operand (rtx op, enum machine_mode mode)
3299 {
3300 if (mode == VOIDmode)
3301 mode = GET_MODE (op);
3302
3303 if (mode != QImode && mode != Pmode)
3304 fatal_insn ("mode not QImode", op);
3305
3306 if (GET_CODE (op) == CONST_INT)
3307 return c4x_L_constant (op) || c4x_J_constant (op);
3308
3309 return src_operand (op, mode);
3310 }
3311
3312
3313 /* Check src operand of two operand tricky instructions. */
3314
3315 int
3316 tsrc_operand (rtx op, enum machine_mode mode)
3317 {
3318 if (mode == VOIDmode)
3319 mode = GET_MODE (op);
3320
3321 if (mode != QImode && mode != Pmode)
3322 fatal_insn ("mode not QImode", op);
3323
3324 if (GET_CODE (op) == CONST_INT)
3325 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3326
3327 return src_operand (op, mode);
3328 }
3329
3330
3331 /* Check src operand of two operand non-immediate instructions. */
3332
3333 int
3334 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3335 {
3336 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3337 return 0;
3338
3339 return src_operand (op, mode);
3340 }
3341
3342
3343 /* Check logical src operand of two operand non-immediate instructions. */
3344
3345 int
3346 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3347 {
3348 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3349 return 0;
3350
3351 return lsrc_operand (op, mode);
3352 }
3353
3354
3355 int
3356 reg_or_const_operand (rtx op, enum machine_mode mode)
3357 {
3358 return reg_operand (op, mode) || const_operand (op, mode);
3359 }
3360
3361
3362 /* Check for indirect operands allowable in parallel instruction. */
3363
3364 int
3365 par_ind_operand (rtx op, enum machine_mode mode)
3366 {
3367 if (mode != VOIDmode && mode != GET_MODE (op))
3368 return 0;
3369
3370 return c4x_S_indirect (op);
3371 }
3372
3373
3374 /* Check for operands allowable in parallel instruction. */
3375
3376 int
3377 parallel_operand (rtx op, enum machine_mode mode)
3378 {
3379 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3380 }
3381
3382
3383 static void
3384 c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
3385 {
3386 *base = 0;
3387 *incdec = 0;
3388 *index = 0;
3389 *disp = 0;
3390
3391 if (GET_CODE (op) != MEM)
3392 fatal_insn ("invalid indirect memory address", op);
3393
3394 op = XEXP (op, 0);
3395 switch (GET_CODE (op))
3396 {
3397 case PRE_DEC:
3398 *base = REGNO (XEXP (op, 0));
3399 *incdec = 1;
3400 *disp = -1;
3401 return;
3402
3403 case POST_DEC:
3404 *base = REGNO (XEXP (op, 0));
3405 *incdec = 1;
3406 *disp = 0;
3407 return;
3408
3409 case PRE_INC:
3410 *base = REGNO (XEXP (op, 0));
3411 *incdec = 1;
3412 *disp = 1;
3413 return;
3414
3415 case POST_INC:
3416 *base = REGNO (XEXP (op, 0));
3417 *incdec = 1;
3418 *disp = 0;
3419 return;
3420
3421 case POST_MODIFY:
3422 *base = REGNO (XEXP (op, 0));
3423 if (REG_P (XEXP (XEXP (op, 1), 1)))
3424 {
3425 *index = REGNO (XEXP (XEXP (op, 1), 1));
3426 *disp = 0; /* ??? */
3427 }
3428 else
3429 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3430 *incdec = 1;
3431 return;
3432
3433 case PRE_MODIFY:
3434 *base = REGNO (XEXP (op, 0));
3435 if (REG_P (XEXP (XEXP (op, 1), 1)))
3436 {
3437 *index = REGNO (XEXP (XEXP (op, 1), 1));
3438 *disp = 1; /* ??? */
3439 }
3440 else
3441 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3442 *incdec = 1;
3443
3444 return;
3445
3446 case REG:
3447 *base = REGNO (op);
3448 return;
3449
3450 case PLUS:
3451 {
3452 rtx op0 = XEXP (op, 0);
3453 rtx op1 = XEXP (op, 1);
3454
3455 if (c4x_a_register (op0))
3456 {
3457 if (c4x_x_register (op1))
3458 {
3459 *base = REGNO (op0);
3460 *index = REGNO (op1);
3461 return;
3462 }
3463 else if ((GET_CODE (op1) == CONST_INT
3464 && IS_DISP1_CONST (INTVAL (op1))))
3465 {
3466 *base = REGNO (op0);
3467 *disp = INTVAL (op1);
3468 return;
3469 }
3470 }
3471 else if (c4x_x_register (op0) && c4x_a_register (op1))
3472 {
3473 *base = REGNO (op1);
3474 *index = REGNO (op0);
3475 return;
3476 }
3477 }
3478 /* Fall through. */
3479
3480 default:
3481 fatal_insn ("invalid indirect (S) memory address", op);
3482 }
3483 }
3484
3485
3486 int
3487 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3488 {
3489 int base0;
3490 int base1;
3491 int incdec0;
3492 int incdec1;
3493 int index0;
3494 int index1;
3495 int disp0;
3496 int disp1;
3497
3498 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3499 return 1;
3500
3501 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3502 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3503
3504 if (store0 && store1)
3505 {
3506 /* If we have two stores in parallel to the same address, then
3507 the C4x only executes one of the stores. This is unlikely to
3508 cause problems except when writing to a hardware device such
3509 as a FIFO since the second write will be lost. The user
3510 should flag the hardware location as being volatile so that
3511 we don't do this optimization. While it is unlikely that we
3512 have an aliased address if both locations are not marked
3513 volatile, it is probably safer to flag a potential conflict
3514 if either location is volatile. */
3515 if (! flag_argument_noalias)
3516 {
3517 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3518 return 1;
3519 }
3520 }
3521
3522 /* If we have a parallel load and a store to the same address, the load
3523 is performed first, so there is no conflict. Similarly, there is
3524 no conflict if we have parallel loads from the same address. */
3525
3526 /* Cannot use auto increment or auto decrement twice for same
3527 base register. */
3528 if (base0 == base1 && incdec0 && incdec1)
3529 return 1;
3530
3531 /* It might be too confusing for GCC if we use a base register
3532 with a side effect and a memory reference using the same register
3533 in parallel. */
3534 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3535 return 1;
3536
3537 /* We cannot optimize the case where op0 and op1 refer to the same
3538 address. */
3539 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3540 return 1;
3541
3542 /* No conflict. */
3543 return 0;
3544 }
3545
3546
3547 /* Check for while loop inside a decrement and branch loop. */
3548
3549 int
3550 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3551 {
3552 while (insn)
3553 {
3554 if (GET_CODE (insn) == CODE_LABEL)
3555 {
3556 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3557 return 1;
3558 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3559 return 0;
3560 }
3561 insn = PREV_INSN (insn);
3562 }
3563 return 1;
3564 }
3565
3566
3567 /* Validate combination of operands for parallel load/store instructions. */
3568
3569 int
3570 valid_parallel_load_store (rtx *operands,
3571 enum machine_mode mode ATTRIBUTE_UNUSED)
3572 {
3573 rtx op0 = operands[0];
3574 rtx op1 = operands[1];
3575 rtx op2 = operands[2];
3576 rtx op3 = operands[3];
3577
3578 if (GET_CODE (op0) == SUBREG)
3579 op0 = SUBREG_REG (op0);
3580 if (GET_CODE (op1) == SUBREG)
3581 op1 = SUBREG_REG (op1);
3582 if (GET_CODE (op2) == SUBREG)
3583 op2 = SUBREG_REG (op2);
3584 if (GET_CODE (op3) == SUBREG)
3585 op3 = SUBREG_REG (op3);
3586
3587 /* The patterns should only allow ext_low_reg_operand() or
3588 par_ind_operand() operands. Thus of the 4 operands, only 2
3589 should be REGs and the other 2 should be MEMs. */
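/* For example (illustrative only), a parallel pair along the lines of

        ldi     *+ar0(1),r0
   ||   ldi     *ar1,r1

   needs two distinct destination registers and two non-conflicting
   addresses, which is what the checks below enforce.  */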
3590
3591 /* This test prevents the multipack pass from using this pattern if
3592 op0 is used as an index or base register in op2 or op3, since
3593 this combination will require reloading. */
3594 if (GET_CODE (op0) == REG
3595 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3596 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3597 return 0;
3598
3599 /* LDI||LDI. */
3600 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3601 return (REGNO (op0) != REGNO (op2))
3602 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3603 && ! c4x_address_conflict (op1, op3, 0, 0);
3604
3605 /* STI||STI. */
3606 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3607 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3608 && ! c4x_address_conflict (op0, op2, 1, 1);
3609
3610 /* LDI||STI. */
3611 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3612 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3613 && ! c4x_address_conflict (op1, op2, 0, 1);
3614
3615 /* STI||LDI. */
3616 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3617 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3618 && ! c4x_address_conflict (op0, op3, 1, 0);
3619
3620 return 0;
3621 }
3622
3623
3624 int
3625 valid_parallel_operands_4 (rtx *operands,
3626 enum machine_mode mode ATTRIBUTE_UNUSED)
3627 {
3628 rtx op0 = operands[0];
3629 rtx op2 = operands[2];
3630
3631 if (GET_CODE (op0) == SUBREG)
3632 op0 = SUBREG_REG (op0);
3633 if (GET_CODE (op2) == SUBREG)
3634 op2 = SUBREG_REG (op2);
3635
3636 /* This test prevents the multipack pass from using this pattern if
3637 op0 is used as an index or base register in op2, since this combination
3638 will require reloading. */
3639 if (GET_CODE (op0) == REG
3640 && GET_CODE (op2) == MEM
3641 && reg_mentioned_p (op0, XEXP (op2, 0)))
3642 return 0;
3643
3644 return 1;
3645 }
3646
3647
3648 int
3649 valid_parallel_operands_5 (rtx *operands,
3650 enum machine_mode mode ATTRIBUTE_UNUSED)
3651 {
3652 int regs = 0;
3653 rtx op0 = operands[0];
3654 rtx op1 = operands[1];
3655 rtx op2 = operands[2];
3656 rtx op3 = operands[3];
3657
3658 if (GET_CODE (op0) == SUBREG)
3659 op0 = SUBREG_REG (op0);
3660 if (GET_CODE (op1) == SUBREG)
3661 op1 = SUBREG_REG (op1);
3662 if (GET_CODE (op2) == SUBREG)
3663 op2 = SUBREG_REG (op2);
3664
3665 /* The patterns should only allow ext_low_reg_operand() or
3666 par_ind_operand() operands. Operands 1 and 2 may be commutative
3667 but only one of them can be a register. */
3668 if (GET_CODE (op1) == REG)
3669 regs++;
3670 if (GET_CODE (op2) == REG)
3671 regs++;
3672
3673 if (regs != 1)
3674 return 0;
3675
3676 /* This test prevents the multipack pass from using this pattern if
3677 op0 is used as an index or base register in op3, since this combination
3678 will require reloading. */
3679 if (GET_CODE (op0) == REG
3680 && GET_CODE (op3) == MEM
3681 && reg_mentioned_p (op0, XEXP (op3, 0)))
3682 return 0;
3683
3684 return 1;
3685 }
3686
3687
3688 int
3689 valid_parallel_operands_6 (rtx *operands,
3690 enum machine_mode mode ATTRIBUTE_UNUSED)
3691 {
3692 int regs = 0;
3693 rtx op0 = operands[0];
3694 rtx op1 = operands[1];
3695 rtx op2 = operands[2];
3696 rtx op4 = operands[4];
3697 rtx op5 = operands[5];
3698
3699 if (GET_CODE (op1) == SUBREG)
3700 op1 = SUBREG_REG (op1);
3701 if (GET_CODE (op2) == SUBREG)
3702 op2 = SUBREG_REG (op2);
3703 if (GET_CODE (op4) == SUBREG)
3704 op4 = SUBREG_REG (op4);
3705 if (GET_CODE (op5) == SUBREG)
3706 op5 = SUBREG_REG (op5);
3707
3708 /* The patterns should only allow ext_low_reg_operand() or
3709 par_ind_operand() operands. Thus of the 4 input operands, only 2
3710 should be REGs and the other 2 should be MEMs. */
3711
3712 if (GET_CODE (op1) == REG)
3713 regs++;
3714 if (GET_CODE (op2) == REG)
3715 regs++;
3716 if (GET_CODE (op4) == REG)
3717 regs++;
3718 if (GET_CODE (op5) == REG)
3719 regs++;
3720
3721 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3722 Perhaps we should count the MEMs as well? */
3723 if (regs != 2)
3724 return 0;
3725
3726 /* This test prevents the multipack pass from using this pattern if
3727 op0 is used as an index or base register in op4 or op5, since
3728 this combination will require reloading. */
3729 if (GET_CODE (op0) == REG
3730 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3731 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3732 return 0;
3733
3734 return 1;
3735 }
3736
3737
3738 /* Validate combination of src operands. Note that the operands have
3739 been screened by the src_operand predicate. We just have to check
3740 that the combination of operands is valid. If FORCE is set, ensure
3741 that the destination regno is valid if we have a 2 operand insn. */
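/* For example (illustrative only), the three-operand forms can combine two
   register sources freely, while a two-operand instruction reuses its
   destination as the first source; the REGNO (op1) == REGNO (op0) tests
   below presumably reflect that restriction when FORCE is set.  */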
3742
3743 static int
3744 c4x_valid_operands (enum rtx_code code, rtx *operands,
3745 enum machine_mode mode ATTRIBUTE_UNUSED,
3746 int force)
3747 {
3748 rtx op0;
3749 rtx op1;
3750 rtx op2;
3751 enum rtx_code code1;
3752 enum rtx_code code2;
3753
3754
3755 /* FIXME, why can't we tighten the operands for IF_THEN_ELSE? */
3756 if (code == IF_THEN_ELSE)
3757 return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);
3758
3759 if (code == COMPARE)
3760 {
3761 op1 = operands[0];
3762 op2 = operands[1];
3763 }
3764 else
3765 {
3766 op1 = operands[1];
3767 op2 = operands[2];
3768 }
3769
3770 op0 = operands[0];
3771
3772 if (GET_CODE (op0) == SUBREG)
3773 op0 = SUBREG_REG (op0);
3774 if (GET_CODE (op1) == SUBREG)
3775 op1 = SUBREG_REG (op1);
3776 if (GET_CODE (op2) == SUBREG)
3777 op2 = SUBREG_REG (op2);
3778
3779 code1 = GET_CODE (op1);
3780 code2 = GET_CODE (op2);
3781
3782
3783 if (code1 == REG && code2 == REG)
3784 return 1;
3785
3786 if (code1 == MEM && code2 == MEM)
3787 {
3788 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
3789 return 1;
3790 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
3791 }
3792
3793 /* We cannot handle two MEMs or two CONSTS, etc. */
3794 if (code1 == code2)
3795 return 0;
3796
3797 if (code1 == REG)
3798 {
3799 switch (code2)
3800 {
3801 case CONST_INT:
3802 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3803 return 1;
3804 break;
3805
3806 case CONST_DOUBLE:
3807 if (! c4x_H_constant (op2))
3808 return 0;
3809 break;
3810
3811 /* Any valid memory operand screened by src_operand is OK. */
3812 case MEM:
3813 break;
3814
3815 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3816 into a stack slot memory address comprising a PLUS and a
3817 constant. */
3818 case ADDRESSOF:
3819 break;
3820
3821 default:
3822 fatal_insn ("c4x_valid_operands: Internal error", op2);
3823 break;
3824 }
3825
3826 if (GET_CODE (op0) == SCRATCH)
3827 return 1;
3828
3829 if (!REG_P (op0))
3830 return 0;
3831
3832 /* Check that we have a valid destination register for a two operand
3833 instruction. */
3834 return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
3835 }
3836
3837
3838 /* Check non-commutative operators. */
3839 if (code == ASHIFTRT || code == LSHIFTRT
3840 || code == ASHIFT || code == COMPARE)
3841 return code2 == REG
3842 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3843
3844
3845 /* Assume MINUS is commutative since the subtract patterns
3846 also support the reverse subtract instructions. Since op1
3847 is not a register, and op2 is a register, op1 can only
3848 be a restricted memory operand for a shift instruction. */
3849 if (code2 == REG)
3850 {
3851 switch (code1)
3852 {
3853 case CONST_INT:
3854 break;
3855
3856 case CONST_DOUBLE:
3857 if (! c4x_H_constant (op1))
3858 return 0;
3859 break;
3860
3861 /* Any valid memory operand screened by src_operand is OK. */
3862 case MEM:
3863 break;
3864
3865 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3866 into a stack slot memory address comprising a PLUS and a
3867 constant. */
3868 case ADDRESSOF:
3869 break;
3870
3871 default:
3872 abort ();
3873 break;
3874 }
3875
3876 if (GET_CODE (op0) == SCRATCH)
3877 return 1;
3878
3879 if (!REG_P (op0))
3880 return 0;
3881
3882 /* Check that we have a valid destination register for a two operand
3883 instruction. */
3884 return ! force || REGNO (op1) == REGNO (op0);
3885 }
3886
3887 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3888 return 1;
3889
3890 return 0;
3891 }
3892
3893
3894 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3895 {
3896
3897 /* If we are not optimizing then we have to let anything go and let
3898 reload fix things up. instantiate_decl in function.c can produce
3899 invalid insns by changing the offset of a memory operand from a
3900 valid one into an invalid one, when the second operand is also a
3901 memory operand. The alternative is not to allow two memory
3902 operands for an insn when not optimizing. The problem only rarely
3903 occurs, for example with the C-torture program DFcmp.c. */
3904
3905 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3906 }
3907
3908
3909 int
3910 legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3911 {
3912 /* Compare only has 2 operands. */
3913 if (code == COMPARE)
3914 {
3915 /* During RTL generation, force constants into pseudos so that
3916 they can get hoisted out of loops. This will tie up an extra
3917 register but can save an extra cycle. Only do this if loop
3918 optimization is enabled. (We cannot pull this trick for add and
3919 sub instructions since the flow pass won't find
3920 autoincrements etc.) This allows us to generate compare
3921 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
3922 of LDI *AR0++, R0; CMPI 42, R0.
3923
3924 Note that expand_binops will try to load an expensive constant
3925 into a register if it is used within a loop. Unfortunately,
3926 the cost mechanism doesn't allow us to look at the other
3927 operand to decide whether the constant is expensive. */
3928
3929 if (! reload_in_progress
3930 && TARGET_HOIST
3931 && optimize > 0
3932 && GET_CODE (operands[1]) == CONST_INT
3933 && preserve_subexpressions_p ()
3934 && rtx_cost (operands[1], code) > 1)
3935 operands[1] = force_reg (mode, operands[1]);
3936
3937 if (! reload_in_progress
3938 && ! c4x_valid_operands (code, operands, mode, 0))
3939 operands[0] = force_reg (mode, operands[0]);
3940 return 1;
3941 }
3942
3943 /* We cannot do this for ADDI/SUBI insns since we will
3944 prevent the flow pass from finding autoincrement addressing
3945 opportunities. */
3946 if (! reload_in_progress
3947 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
3948 && TARGET_HOIST
3949 && optimize > 1
3950 && GET_CODE (operands[2]) == CONST_INT
3951 && preserve_subexpressions_p ()
3952 && rtx_cost (operands[2], code) > 1)
3953 operands[2] = force_reg (mode, operands[2]);
3954
3955 /* We can get better code on a C30 if we force constant shift counts
3956 into a register. This way they can get hoisted out of loops,
3957 tying up a register but saving an instruction. The downside is
3958 that they may get allocated to an address or index register, and
3959 thus we will get a pipeline conflict if there is a nearby
3960 indirect address using an address register.
3961
3962 Note that expand_binops will not try to load an expensive constant
3963 into a register if it is used within a loop for a shift insn. */
3964
3965 if (! reload_in_progress
3966 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
3967 {
3968 /* If the operand combination is invalid, we force operand1 into a
3969 register, preventing reload from having to do this at a
3970 later stage. */
3971 operands[1] = force_reg (mode, operands[1]);
3972 if (TARGET_FORCE)
3973 {
3974 emit_move_insn (operands[0], operands[1]);
3975 operands[1] = copy_rtx (operands[0]);
3976 }
3977 else
3978 {
3979 /* Just in case... */
3980 if (! c4x_valid_operands (code, operands, mode, 0))
3981 operands[2] = force_reg (mode, operands[2]);
3982 }
3983 }
3984
3985 /* Right shifts require a negative shift count, but GCC expects
3986 a positive count, so we emit a NEG. */
3987 if ((code == ASHIFTRT || code == LSHIFTRT)
3988 && (GET_CODE (operands[2]) != CONST_INT))
3989 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
3990
3991
3992 /* When the shift count is 32 or more, the result is
3993 implementation dependent. We truncate the shift count to
3994 fit in 5 bits so that we do not emit invalid code when
3995 optimizing---such as trying to generate lhu2 with 20021124-1.c. */
3996 if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
3997 && (GET_CODE (operands[2]) == CONST_INT))
3998 && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
3999 operands[2]
4000 = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));
4001
4002 return 1;
4003 }
4004
4005
4006 /* The following predicates are used for instruction scheduling. */
4007
4008 int
4009 group1_reg_operand (rtx op, enum machine_mode mode)
4010 {
4011 if (mode != VOIDmode && mode != GET_MODE (op))
4012 return 0;
4013 if (GET_CODE (op) == SUBREG)
4014 op = SUBREG_REG (op);
4015 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4016 }
4017
4018
4019 int
4020 group1_mem_operand (rtx op, enum machine_mode mode)
4021 {
4022 if (mode != VOIDmode && mode != GET_MODE (op))
4023 return 0;
4024
4025 if (GET_CODE (op) == MEM)
4026 {
4027 op = XEXP (op, 0);
4028 if (GET_CODE (op) == PLUS)
4029 {
4030 rtx op0 = XEXP (op, 0);
4031 rtx op1 = XEXP (op, 1);
4032
4033 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4034 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4035 return 1;
4036 }
4037 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4038 return 1;
4039 }
4040
4041 return 0;
4042 }
4043
4044
4045 /* Return true if OP is any one of the address registers. */
4046
4047 int
4048 arx_reg_operand (rtx op, enum machine_mode mode)
4049 {
4050 if (mode != VOIDmode && mode != GET_MODE (op))
4051 return 0;
4052 if (GET_CODE (op) == SUBREG)
4053 op = SUBREG_REG (op);
4054 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4055 }
4056
4057
4058 static int
4059 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4060 {
4061 if (mode != VOIDmode && mode != GET_MODE (op))
4062 return 0;
4063 if (GET_CODE (op) == SUBREG)
4064 op = SUBREG_REG (op);
4065 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4066 }
4067
4068
4069 static int
4070 c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
4071 {
4072 if (mode != VOIDmode && mode != GET_MODE (op))
4073 return 0;
4074
4075 if (GET_CODE (op) == MEM)
4076 {
4077 op = XEXP (op, 0);
4078 switch (GET_CODE (op))
4079 {
4080 case PRE_DEC:
4081 case POST_DEC:
4082 case PRE_INC:
4083 case POST_INC:
4084 op = XEXP (op, 0);
4085
4086 case REG:
4087 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4088
4089 case PRE_MODIFY:
4090 case POST_MODIFY:
4091 if (REG_P (XEXP (op, 0)) && (! reload_completed
4092 || (REGNO (XEXP (op, 0)) == regno)))
4093 return 1;
4094 if (REG_P (XEXP (XEXP (op, 1), 1))
4095 && (! reload_completed
4096 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
4097 return 1;
4098 break;
4099
4100 case PLUS:
4101 {
4102 rtx op0 = XEXP (op, 0);
4103 rtx op1 = XEXP (op, 1);
4104
4105 if ((REG_P (op0) && (! reload_completed
4106 || (REGNO (op0) == regno)))
4107 || (REG_P (op1) && (! reload_completed
4108 || (REGNO (op1) == regno))))
4109 return 1;
4110 }
4111 break;
4112
4113 default:
4114 break;
4115 }
4116 }
4117 return 0;
4118 }
4119
4120
4121 int
4122 ar0_reg_operand (rtx op, enum machine_mode mode)
4123 {
4124 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4125 }
4126
4127
4128 int
4129 ar0_mem_operand (rtx op, enum machine_mode mode)
4130 {
4131 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4132 }
4133
4134
4135 int
4136 ar1_reg_operand (rtx op, enum machine_mode mode)
4137 {
4138 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4139 }
4140
4141
4142 int
4143 ar1_mem_operand (rtx op, enum machine_mode mode)
4144 {
4145 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4146 }
4147
4148
4149 int
4150 ar2_reg_operand (rtx op, enum machine_mode mode)
4151 {
4152 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4153 }
4154
4155
4156 int
4157 ar2_mem_operand (rtx op, enum machine_mode mode)
4158 {
4159 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4160 }
4161
4162
4163 int
4164 ar3_reg_operand (rtx op, enum machine_mode mode)
4165 {
4166 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4167 }
4168
4169
4170 int
4171 ar3_mem_operand (rtx op, enum machine_mode mode)
4172 {
4173 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4174 }
4175
4176
4177 int
4178 ar4_reg_operand (rtx op, enum machine_mode mode)
4179 {
4180 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4181 }
4182
4183
4184 int
4185 ar4_mem_operand (rtx op, enum machine_mode mode)
4186 {
4187 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4188 }
4189
4190
4191 int
4192 ar5_reg_operand (rtx op, enum machine_mode mode)
4193 {
4194 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4195 }
4196
4197
4198 int
4199 ar5_mem_operand (rtx op, enum machine_mode mode)
4200 {
4201 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4202 }
4203
4204
4205 int
4206 ar6_reg_operand (rtx op, enum machine_mode mode)
4207 {
4208 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4209 }
4210
4211
4212 int
4213 ar6_mem_operand (rtx op, enum machine_mode mode)
4214 {
4215 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4216 }
4217
4218
4219 int
4220 ar7_reg_operand (rtx op, enum machine_mode mode)
4221 {
4222 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4223 }
4224
4225
4226 int
4227 ar7_mem_operand (rtx op, enum machine_mode mode)
4228 {
4229 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4230 }
4231
4232
4233 int
4234 ir0_reg_operand (rtx op, enum machine_mode mode)
4235 {
4236 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4237 }
4238
4239
4240 int
4241 ir0_mem_operand (rtx op, enum machine_mode mode)
4242 {
4243 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4244 }
4245
4246
4247 int
4248 ir1_reg_operand (rtx op, enum machine_mode mode)
4249 {
4250 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4251 }
4252
4253
4254 int
4255 ir1_mem_operand (rtx op, enum machine_mode mode)
4256 {
4257 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4258 }
4259
4260
4261 /* This is similar to operand_subword but allows autoincrement
4262 addressing. */
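/* For example (illustrative only), asking for either subword of
   (mem:HF (post_inc (reg:QI AR0))) yields (mem:QF (post_inc (reg:QI AR0)));
   the post-increment itself advances the address between the two QFmode
   accesses, so no explicit word offset is added.  */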
4263
4264 rtx
4265 c4x_operand_subword (rtx op, int i, int validate_address,
4266 enum machine_mode mode)
4267 {
4268 if (mode != HImode && mode != HFmode)
4269 fatal_insn ("c4x_operand_subword: invalid mode", op);
4270
4271 if (mode == HFmode && REG_P (op))
4272 fatal_insn ("c4x_operand_subword: invalid operand", op);
4273
4274 if (GET_CODE (op) == MEM)
4275 {
4276 enum rtx_code code = GET_CODE (XEXP (op, 0));
4277 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4278 enum machine_mode submode;
4279
4280 submode = mode;
4281 if (mode == HImode)
4282 submode = QImode;
4283 else if (mode == HFmode)
4284 submode = QFmode;
4285
4286 switch (code)
4287 {
4288 case POST_INC:
4289 case PRE_INC:
4290 return gen_rtx_MEM (submode, XEXP (op, 0));
4291
4292 case POST_DEC:
4293 case PRE_DEC:
4294 case PRE_MODIFY:
4295 case POST_MODIFY:
4296 /* We could handle these with some difficulty.
4297 e.g., *p-- => *(p-=2); *(p+1). */
4298 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4299
4300 case SYMBOL_REF:
4301 case LABEL_REF:
4302 case CONST:
4303 case CONST_INT:
4304 fatal_insn ("c4x_operand_subword: invalid address", op);
4305
4306 /* Even though offsettable_address_p considers (MEM
4307 (LO_SUM)) to be offsettable, it is not safe if the
4308 address is at the end of the data page, since we also have
4309 to fix up the associated high part. When splitting a
4310 HImode or HFmode memory reference here, we would have to
4311 emit another insn to reload a new HIGH value. It is easier
4312 to disable LO_SUM memory references in HImode and HFmode;
4313 we probably get better code that way. */
4314 case LO_SUM:
4315 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4316
4317 default:
4318 break;
4319 }
4320 }
4321
4322 return operand_subword (op, i, validate_address, mode);
4323 }
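
/* For illustration only (a sketch of the POST_INC case above, not a
   definitive description): splitting the HImode reference
   (mem:HI (post_inc:QI (reg:QI ar0))) yields
   (mem:QI (post_inc:QI (reg:QI ar0))) for both subwords; the
   autoincrement itself advances the address between the two accesses,
   so no explicit offset is needed for the second word.  */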
4324
4325 struct name_list
4326 {
4327 struct name_list *next;
4328 const char *name;
4329 };
4330
4331 static struct name_list *global_head;
4332 static struct name_list *extern_head;
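
/* global_head records names that have already been emitted as globals;
   extern_head records externally referenced names that still need a
   .ref directive from c4x_file_end.  */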
4333
4334
4335 /* Add NAME to the list of global symbols and remove it from the
4336 external list if it is present there. */
4337
4338 void
4339 c4x_global_label (const char *name)
4340 {
4341 struct name_list *p, *last;
4342
4343 /* Do not insert duplicate names; search linearly through the list
4344 of existing names first. */
4345 p = global_head;
4346 while (p)
4347 {
4348 if (strcmp (p->name, name) == 0)
4349 return;
4350 p = p->next;
4351 }
4352 p = (struct name_list *) xmalloc (sizeof *p);
4353 p->next = global_head;
4354 p->name = name;
4355 global_head = p;
4356
4357 /* Remove this name from ref list if present. */
4358 last = NULL;
4359 p = extern_head;
4360 while (p)
4361 {
4362 if (strcmp (p->name, name) == 0)
4363 {
4364 if (last)
4365 last->next = p->next;
4366 else
4367 extern_head = p->next;
4368 break;
4369 }
4370 last = p;
4371 p = p->next;
4372 }
4373 }
4374
4375
4376 /* Add NAME to list of external symbols. */
4377
4378 void
4379 c4x_external_ref (const char *name)
4380 {
4381 struct name_list *p;
4382
4383 /* Do not insert duplicate names. */
4384 p = extern_head;
4385 while (p)
4386 {
4387 if (strcmp (p->name, name) == 0)
4388 return;
4389 p = p->next;
4390 }
4391
4392 /* Do not insert a reference if the name is already on the global list. */
4393 p = global_head;
4394 while (p)
4395 {
4396 if (strcmp (p->name, name) == 0)
4397 return;
4398 p = p->next;
4399 }
4400 p = (struct name_list *) xmalloc (sizeof *p);
4401 p->next = extern_head;
4402 p->name = name;
4403 extern_head = p;
4404 }
4405
4406 /* We need a data section we can identify so that we can reset the
4407 DP register to a valid data pointer in the small memory model.
4408 This is only required for ISRs, and only if we are paranoid that
4409 someone may have changed this register behind our back. */
4410 static void
4411 c4x_file_start (void)
4412 {
4413 int dspversion = 0;
4414 if (TARGET_C30) dspversion = 30;
4415 if (TARGET_C31) dspversion = 31;
4416 if (TARGET_C32) dspversion = 32;
4417 if (TARGET_C33) dspversion = 33;
4418 if (TARGET_C40) dspversion = 40;
4419 if (TARGET_C44) dspversion = 44;
4420
4421 default_file_start ();
4422 fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
4423 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4424 }
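
/* For example, when targeting the C40 the c4x-specific part of the
   preamble emitted above looks roughly like:

	.version	40

	.data
   data_sec:
*/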
4425
4426
4427 static void
4428 c4x_file_end (void)
4429 {
4430 struct name_list *p;
4431
4432 /* Output all external names that are not global. */
4433 p = extern_head;
4434 while (p)
4435 {
4436 fprintf (asm_out_file, "\t.ref\t");
4437 assemble_name (asm_out_file, p->name);
4438 fprintf (asm_out_file, "\n");
4439 p = p->next;
4440 }
4441 fprintf (asm_out_file, "\t.end\n");
4442 }
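
/* For a hypothetical symbol `foo' that was referenced but never
   defined or made global, the epilogue above would look roughly like
   (modulo any user label prefix applied by assemble_name):

	.ref	foo
	.end
*/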
4443
4444
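/* Search LIST (a list keyed by declaration name, such as code_tree or
   data_tree below) for an entry matching DECL's name and, if one is
   found, prepend attribute ATTRIB with the recorded value to
   *ATTRIBUTES.  */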
4445 static void
4446 c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
4447 {
4448 while (list != NULL_TREE
4449 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4450 != IDENTIFIER_POINTER (DECL_NAME (decl)))
4451 list = TREE_CHAIN (list);
4452 if (list)
4453 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4454 *attributes);
4455 }
4456
4457
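/* Attach to DECL any attributes that have been recorded for it in the
   code_tree, data_tree, pure_tree, noreturn_tree, interrupt_tree and
   naked_tree lists.  */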
4458 static void
4459 c4x_insert_attributes (tree decl, tree *attributes)
4460 {
4461 switch (TREE_CODE (decl))
4462 {
4463 case FUNCTION_DECL:
4464 c4x_check_attribute ("section", code_tree, decl, attributes);
4465 c4x_check_attribute ("const", pure_tree, decl, attributes);
4466 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4467 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4468 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4469 break;
4470
4471 case VAR_DECL:
4472 c4x_check_attribute ("section", data_tree, decl, attributes);
4473 break;
4474
4475 default:
4476 break;
4477 }
4478 }
4479
4480 /* Table of valid machine attributes. */
4481 const struct attribute_spec c4x_attribute_table[] =
4482 {
4483 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4484 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4485 { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4486 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4487 { NULL, 0, 0, false, false, false, NULL }
4488 };
4489
4490 /* Handle an attribute requiring a FUNCTION_TYPE;
4491 arguments as in struct attribute_spec.handler. */
4492 static tree
4493 c4x_handle_fntype_attribute (tree *node, tree name,
4494 tree args ATTRIBUTE_UNUSED,
4495 int flags ATTRIBUTE_UNUSED,
4496 bool *no_add_attrs)
4497 {
4498 if (TREE_CODE (*node) != FUNCTION_TYPE)
4499 {
4500 warning ("`%s' attribute only applies to functions",
4501 IDENTIFIER_POINTER (name));
4502 *no_add_attrs = true;
4503 }
4504
4505 return NULL_TREE;
4506 }
4507
4508
4509 /* !!! FIXME to emit RPTS correctly. */
4510
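/* Return nonzero if the repeat block started by INSN may be emitted
   as an RPTS instruction: the RPTB insn must be directly followed by
   the top label, then by a single real insn, and then by the rptb_end
   pattern.  OP is the iteration count; if TARGET_RPTS is not set,
   RPTS is only used when OP is a CONST_INT accepted by
   TARGET_RPTS_CYCLES.  */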
4511 int
4512 c4x_rptb_rpts_p (rtx insn, rtx op)
4513 {
4514 /* The next insn should be our label marking where the
4515 repeat block starts. */
4516 insn = NEXT_INSN (insn);
4517 if (GET_CODE (insn) != CODE_LABEL)
4518 {
4519 /* Some insns may have been shifted between the RPTB insn
4520 and the top label... They were probably destined to be
4521 hoisted out of the loop. For now, leave them where they are,
4522 refuse to use RPTS, and abort with a diagnostic if TARGET_DEBUG
4523 is set. We should probably move these insns back before the repeat block insn. */
4524 if (TARGET_DEBUG)
4525 fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved\n",
4526 insn);
4527 return 0;
4528 }
4529
4530 /* Skip any notes. */
4531 insn = next_nonnote_insn (insn);
4532
4533 /* This should be our first insn in the loop. */
4534 if (! INSN_P (insn))
4535 return 0;
4536
4537 /* Skip any notes. */
4538 insn = next_nonnote_insn (insn);
4539
4540 if (! INSN_P (insn))
4541 return 0;
4542
4543 if (recog_memoized (insn) != CODE_FOR_rptb_end)
4544 return 0;
4545
4546 if (TARGET_RPTS)
4547 return 1;
4548
4549 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4550 }
4551
4552
4553 /* Check if register r11 is used as the destination of an insn. */
4554
4555 static int
4556 c4x_r11_set_p (rtx x)
4557 {
4558 rtx set;
4559 int i, j;
4560 const char *fmt;
4561
4562 if (x == 0)
4563 return 0;
4564
4565 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
4566 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4567
4568 if (INSN_P (x) && (set = single_set (x)))
4569 x = SET_DEST (set);
4570
4571 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
4572 return 1;
4573
4574 fmt = GET_RTX_FORMAT (GET_CODE (x));
4575 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4576 {
4577 if (fmt[i] == 'e')
4578 {
4579 if (c4x_r11_set_p (XEXP (x, i)))
4580 return 1;
4581 }
4582 else if (fmt[i] == 'E')
4583 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4584 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4585 return 1;
4586 }
4587 return 0;
4588 }
4589
4590
4591 /* The c4x sometimes has a problem when the insn before the laj insn sets
4592 the r11 register, presumably because laj itself uses r11 for the return address; check for this situation. */
4593
4594 int
4595 c4x_check_laj_p (rtx insn)
4596 {
4597 insn = prev_nonnote_insn (insn);
4598
4599 /* If this is the start of the function no nop is needed. */
4600 if (insn == 0)
4601 return 0;
4602
4603 /* If the previous insn is a code label, we have to insert a nop: the
4604 label could be the target of a jump or a table jump. We can find the
4605 normal jumps by scanning the function, but that will not find table jumps. */
4606 if (GET_CODE (insn) == CODE_LABEL)
4607 return 1;
4608
4609 /* If the previous insn sets register r11 we have to insert a nop. */
4610 if (c4x_r11_set_p (insn))
4611 return 1;
4612
4613 /* No nop needed. */
4614 return 0;
4615 }
4616
4617
4618 /* Adjust the cost of a scheduling dependency. Return the new cost of
4619 a dependency LINK of INSN on DEP_INSN. COST is the current cost.
4620 A set of an address register followed by a use incurs a 2 cycle
4621 stall (reduced to a single cycle on the c40 by using LDA), while
4622 a read of an address register followed by a use incurs a single cycle stall. */
4623
4624 #define SET_USE_COST 3
4625 #define SETLDA_USE_COST 2
4626 #define READ_USE_COST 2
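/* Presumably these are the issue cycle plus the stall cycles described
   above: a plain set followed by a use costs 3, a set via LDA costs 2,
   and a read followed by a use costs 2.  */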
4627
4628 static int
4629 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4630 {
4631 /* Don't worry about this until we know what registers have been
4632 assigned. */
4633 if (flag_schedule_insns == 0 && ! reload_completed)
4634 return 0;
4635
4636 /* How do we handle dependencies where a read followed by another
4637 read causes a pipeline stall? For example, a read of ar0 followed
4638 by the use of ar0 for a memory reference. It looks like we
4639 need to extend the scheduler to handle this case. */
4640
4641 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4642 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4643 so only deal with insns we know about. */
4644 if (recog_memoized (dep_insn) < 0)
4645 return 0;
4646
4647 if (REG_NOTE_KIND (link) == 0)
4648 {
4649 int max = 0;
4650
4651 /* Data dependency; DEP_INSN writes a register that INSN reads some
4652 cycles later. */
4653 if (TARGET_C3X)
4654 {
4655 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4656 max = SET_USE_COST > max ? SET_USE_COST : max;
4657 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4658 max = READ_USE_COST > max ? READ_USE_COST : max;
4659 }
4660 else
4661 {
4662 /* This could be significantly optimized. We should look
4663 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4664 insn uses ar0-ar7. We then test if the same register
4665 is used. The tricky bit is that some operands will
4666 use several registers... */
4667 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4668 max = SET_USE_COST > max ? SET_USE_COST : max;
4669 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4670 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4671 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4672 max = READ_USE_COST > max ? READ_USE_COST : max;
4673
4674 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4675 max = SET_USE_COST > max ? SET_USE_COST : max;
4676 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4677 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4678 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4679 max = READ_USE_COST > max ? READ_USE_COST : max;
4680
4681 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4682 max = SET_USE_COST > max ? SET_USE_COST : max;
4683 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4684 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4685 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4686 max = READ_USE_COST > max ? READ_USE_COST : max;
4687
4688 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4689 max = SET_USE_COST > max ? SET_USE_COST : max;
4690 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4691 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4692 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4693 max = READ_USE_COST > max ? READ_USE_COST : max;
4694
4695 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4696 max = SET_USE_COST > max ? SET_USE_COST : max;
4697 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4698 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4699 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4700 max = READ_USE_COST > max ? READ_USE_COST : max;
4701
4702 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4703 max = SET_USE_COST > max ? SET_USE_COST : max;
4704 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4705 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4706 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4707 max = READ_USE_COST > max ? READ_USE_COST : max;
4708
4709 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4710 max = SET_USE_COST > max ? SET_USE_COST : max;
4711 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4712 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4713 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4714 max = READ_USE_COST > max ? READ_USE_COST : max;
4715
4716 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4717 max = SET_USE_COST > max ? SET_USE_COST : max;
4718 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4719 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4720 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4721 max = READ_USE_COST > max ? READ_USE_COST : max;
4722
4723 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4724 max = SET_USE_COST > max ? SET_USE_COST : max;
4725 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4726 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4727
4728 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4729 max = SET_USE_COST > max ? SET_USE_COST : max;
4730 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4731 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4732 }
4733
4734 if (max)
4735 cost = max;
4736
4737 /* For other data dependencies, the default cost specified in the
4738 md is correct. */
4739 return cost;
4740 }
4741 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4742 {
4743 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4744 cycles later. */
4745
4746 /* For c4x anti dependencies, the cost is 0. */
4747 return 0;
4748 }
4749 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4750 {
4751 /* Output dependency; DEP_INSN writes a register that INSN writes some
4752 cycles later. */
4753
4754 /* For c4x output dependencies, the cost is 0. */
4755 return 0;
4756 }
4757 else
4758 abort ();
4759 }
4760
4761 void
4762 c4x_init_builtins (void)
4763 {
4764 tree endlink = void_list_node;
4765
4766 builtin_function ("fast_ftoi",
4767 build_function_type
4768 (integer_type_node,
4769 tree_cons (NULL_TREE, double_type_node, endlink)),
4770 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4771 builtin_function ("ansi_ftoi",
4772 build_function_type
4773 (integer_type_node,
4774 tree_cons (NULL_TREE, double_type_node, endlink)),
4775 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
4776 if (TARGET_C3X)
4777 builtin_function ("fast_imult",
4778 build_function_type
4779 (integer_type_node,
4780 tree_cons (NULL_TREE, integer_type_node,
4781 tree_cons (NULL_TREE,
4782 integer_type_node, endlink))),
4783 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
4784 else
4785 {
4786 builtin_function ("toieee",
4787 build_function_type
4788 (double_type_node,
4789 tree_cons (NULL_TREE, double_type_node, endlink)),
4790 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4791 builtin_function ("frieee",
4792 build_function_type
4793 (double_type_node,
4794 tree_cons (NULL_TREE, double_type_node, endlink)),
4795 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4796 builtin_function ("fast_invf",
4797 build_function_type
4798 (double_type_node,
4799 tree_cons (NULL_TREE, double_type_node, endlink)),
4800 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
4801 }
4802 }
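
/* For reference, the builtins registered above are callable from C
   roughly as:

	int fast_ftoi (double);
	int ansi_ftoi (double);
	int fast_imult (int, int);	C3x only
	double toieee (double);		C4x only
	double frieee (double);		C4x only
	double fast_invf (double);	C4x only
*/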
4803
4804
4805 rtx
4806 c4x_expand_builtin (tree exp, rtx target,
4807 rtx subtarget ATTRIBUTE_UNUSED,
4808 enum machine_mode mode ATTRIBUTE_UNUSED,
4809 int ignore ATTRIBUTE_UNUSED)
4810 {
4811 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4812 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4813 tree arglist = TREE_OPERAND (exp, 1);
4814 tree arg0, arg1;
4815 rtx r0, r1;
4816
4817 switch (fcode)
4818 {
4819 case C4X_BUILTIN_FIX:
4820 arg0 = TREE_VALUE (arglist);
4821 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4822 r0 = protect_from_queue (r0, 0);
4823 if (! target || ! register_operand (target, QImode))
4824 target = gen_reg_rtx (QImode);
4825 emit_insn (gen_fixqfqi_clobber (target, r0));
4826 return target;
4827
4828 case C4X_BUILTIN_FIX_ANSI:
4829 arg0 = TREE_VALUE (arglist);
4830 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4831 r0 = protect_from_queue (r0, 0);
4832 if (! target || ! register_operand (target, QImode))
4833 target = gen_reg_rtx (QImode);
4834 emit_insn (gen_fix_truncqfqi2 (target, r0));
4835 return target;
4836
4837 case C4X_BUILTIN_MPYI:
4838 if (! TARGET_C3X)
4839 break;
4840 arg0 = TREE_VALUE (arglist);
4841 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4842 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4843 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4844 r0 = protect_from_queue (r0, 0);
4845 r1 = protect_from_queue (r1, 0);
4846 if (! target || ! register_operand (target, QImode))
4847 target = gen_reg_rtx (QImode);
4848 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4849 return target;
4850
4851 case C4X_BUILTIN_TOIEEE:
4852 if (TARGET_C3X)
4853 break;
4854 arg0 = TREE_VALUE (arglist);
4855 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4856 r0 = protect_from_queue (r0, 0);
4857 if (! target || ! register_operand (target, QFmode))
4858 target = gen_reg_rtx (QFmode);
4859 emit_insn (gen_toieee (target, r0));
4860 return target;
4861
4862 case C4X_BUILTIN_FRIEEE:
4863 if (TARGET_C3X)
4864 break;
4865 arg0 = TREE_VALUE (arglist);
4866 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
4867 put_var_into_stack (arg0, /*rescan=*/true);
4868 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4869 r0 = protect_from_queue (r0, 0);
4870 if (register_operand (r0, QFmode))
4871 {
4872 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4873 emit_move_insn (r1, r0);
4874 r0 = r1;
4875 }
4876 if (! target || ! register_operand (target, QFmode))
4877 target = gen_reg_rtx (QFmode);
4878 emit_insn (gen_frieee (target, r0));
4879 return target;
4880
4881 case C4X_BUILTIN_RCPF:
4882 if (TARGET_C3X)
4883 break;
4884 arg0 = TREE_VALUE (arglist);
4885 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4886 r0 = protect_from_queue (r0, 0);
4887 if (! target || ! register_operand (target, QFmode))
4888 target = gen_reg_rtx (QFmode);
4889 emit_insn (gen_rcpfqf_clobber (target, r0));
4890 return target;
4891 }
4892 return NULL_RTX;
4893 }
4894
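/* Give the arithmetic support routines their c4x-specific names and
   initialize the extra HImode/HFmode conversion libfuncs declared at
   the top of this file.  */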
4895 static void
4896 c4x_init_libfuncs (void)
4897 {
4898 set_optab_libfunc (smul_optab, QImode, "__mulqi3");
4899 set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
4900 set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
4901 set_optab_libfunc (smod_optab, QImode, "__modqi3");
4902 set_optab_libfunc (umod_optab, QImode, "__umodqi3");
4903 set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
4904 set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
4905 set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
4906 set_optab_libfunc (smul_optab, HImode, "__mulhi3");
4907 set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
4908 set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
4909 set_optab_libfunc (smod_optab, HImode, "__modhi3");
4910 set_optab_libfunc (umod_optab, HImode, "__umodhi3");
4911 set_optab_libfunc (ffs_optab, QImode, "__ffs");
4912 smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
4913 umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
4914 fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
4915 fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
4916 fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
4917 fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
4918 floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
4919 floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
4920 floathihf2_libfunc = init_one_libfunc ("__floathihf2");
4921 floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
4922 }
4923
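/* Switch to section NAME, using the TI assembler's .sect directive.  */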
4924 static void
4925 c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED)
4926 {
4927 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4928 }
4929
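/* Globalize NAME in the default way and also record it on the global
   list so that c4x_file_end does not emit a .ref for it.  */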
4930 static void
4931 c4x_globalize_label (FILE *stream, const char *name)
4932 {
4933 default_globalize_label (stream, name);
4934 c4x_global_label (name);
4935 }
4936 \f
4937 #define SHIFT_CODE_P(C) \
4938 ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
4939 #define LOGICAL_CODE_P(C) \
4940 ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4941
4942 /* Compute a (partial) cost for rtx X. Return true if the complete
4943 cost has been computed, and false if subexpressions should be
4944 scanned. In either case, *TOTAL contains the cost result. */
4945
4946 static bool
4947 c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
4948 {
4949 HOST_WIDE_INT val;
4950
4951 switch (code)
4952 {
4953 /* Some small integers are effectively free for the C40. We should
4954 also take into account whether we are using the small memory model;
4955 with the big memory model an extra insn is required for a constant
4956 loaded from memory. */
4957
4958 case CONST_INT:
4959 val = INTVAL (x);
4960 if (c4x_J_constant (x))
4961 *total = 0;
4962 else if (! TARGET_C3X
4963 && outer_code == AND
4964 && (val == 255 || val == 65535))
4965 *total = 0;
4966 else if (! TARGET_C3X
4967 && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
4968 && (val == 16 || val == 24))
4969 *total = 0;
4970 else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
4971 *total = 3;
4972 else if (LOGICAL_CODE_P (outer_code)
4973 ? c4x_L_constant (x) : c4x_I_constant (x))
4974 *total = 2;
4975 else
4976 *total = 4;
4977 return true;
4978
4979 case CONST:
4980 case LABEL_REF:
4981 case SYMBOL_REF:
4982 *total = 4;
4983 return true;
4984
4985 case CONST_DOUBLE:
4986 if (c4x_H_constant (x))
4987 *total = 2;
4988 else if (GET_MODE (x) == QFmode)
4989 *total = 4;
4990 else
4991 *total = 8;
4992 return true;
4993
4994 /* ??? Note that we return true rather than false, so that rtx_cost
4995 doesn't include the constant costs. Otherwise expand_mult will
4996 think that it is cheaper to synthesize a multiply than to
4997 use a multiply instruction. This is probably because the algorithm
4998 in synth_mult doesn't take into account the loading of the operands,
4999 whereas the calculation of mult_cost does. */
5000 case PLUS:
5001 case MINUS:
5002 case AND:
5003 case IOR:
5004 case XOR:
5005 case ASHIFT:
5006 case ASHIFTRT:
5007 case LSHIFTRT:
5008 *total = COSTS_N_INSNS (1);
5009 return true;
5010
5011 case MULT:
5012 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5013 || TARGET_MPYI ? 1 : 14);
5014 return true;
5015
5016 case DIV:
5017 case UDIV:
5018 case MOD:
5019 case UMOD:
5020 *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
5021 ? 15 : 50);
5022 return true;
5023
5024 default:
5025 return false;
5026 }
5027 }
5028 \f
5029 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
5030
5031 static void
5032 c4x_external_libcall (rtx fun)
5033 {
5034 /* This is only needed to keep asm30 happy for ___divqf3 etc. */
5035 c4x_external_ref (XSTR (fun, 0));
5036 }
5037
5038 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
5039
5040 static rtx
5041 c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
5042 int incoming ATTRIBUTE_UNUSED)
5043 {
5044 return gen_rtx_REG (Pmode, AR0_REGNO);
5045 }