24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
98cb9b5b 2 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "tree.h"
30#include "rtl.h"
31#include "regs.h"
32#include "hard-reg-set.h"
24833e1a 33#include "insn-config.h"
34#include "conditions.h"
35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
38#include "function.h"
39#include "expr.h"
40#include "optabs.h"
41#include "libfuncs.h"
42#include "recog.h"
0b205f4c 43#include "diagnostic-core.h"
24833e1a 44#include "toplev.h"
45#include "reload.h"
46#include "df.h"
47#include "ggc.h"
48#include "tm_p.h"
49#include "debug.h"
50#include "target.h"
51#include "target-def.h"
52#include "langhooks.h"
53\f
6bb30542 54static void rx_print_operand (FILE *, rtx, int);
55
67e66e16 56enum rx_cpu_types rx_cpu_type = RX600;
57\f
24833e1a 58/* Return true if OP is a reference to an object in a small data area. */
59
60static bool
61rx_small_data_operand (rtx op)
62{
63 if (rx_small_data_limit == 0)
64 return false;
65
66 if (GET_CODE (op) == SYMBOL_REF)
67 return SYMBOL_REF_SMALL_P (op);
68
69 return false;
70}
71
72static bool
73rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
74{
75 if (RTX_OK_FOR_BASE (x, strict))
76 /* Register Indirect. */
77 return true;
78
79 if (GET_MODE_SIZE (mode) == 4
80 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
81 /* Pre-decrement Register Indirect or
82 Post-increment Register Indirect. */
83 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
84
85 if (GET_CODE (x) == PLUS)
86 {
87 rtx arg1 = XEXP (x, 0);
88 rtx arg2 = XEXP (x, 1);
89 rtx index = NULL_RTX;
90
91 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
92 index = arg2;
93 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
94 index = arg1;
95 else
96 return false;
97
98 switch (GET_CODE (index))
99 {
100 case CONST_INT:
101 {
102 /* Register Relative: REG + INT.
103 Only positive, mode-aligned, mode-sized
104 displacements are allowed. */
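 /* For an SImode access, for example, the accepted displacements
 are the multiples of four from 0 up to 65535 * 4 inclusive. */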
105 HOST_WIDE_INT val = INTVAL (index);
106 int factor;
107
108 if (val < 0)
109 return false;
110
111 switch (GET_MODE_SIZE (mode))
112 {
113 default:
114 case 4: factor = 4; break;
115 case 2: factor = 2; break;
116 case 1: factor = 1; break;
117 }
118
119 if (val > (65535 * factor))
120 return false;
121 return (val % factor) == 0;
122 }
123
124 case REG:
125 /* Unscaled Indexed Register Indirect: REG + REG
126 Size has to be "QI", REG has to be valid. */
127 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
128
129 case MULT:
130 {
131 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
132 Factor has to equal the mode size, REG has to be valid. */
133 rtx factor;
134
135 factor = XEXP (index, 1);
136 index = XEXP (index, 0);
137
138 return REG_P (index)
139 && RTX_OK_FOR_BASE (index, strict)
140 && CONST_INT_P (factor)
141 && GET_MODE_SIZE (mode) == INTVAL (factor);
142 }
143
144 default:
145 return false;
146 }
147 }
148
149 /* Small data area accesses turn into register relative offsets. */
150 return rx_small_data_operand (x);
151}
152
 153/* Returns TRUE for simple memory addresses, i.e. ones
 154 that do not involve indexed (register + register) addressing
 155 or pre/post increment/decrement. */
156
157bool
158rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
159{
160 rtx base, index;
161
162 if (! rx_is_legitimate_address
163 (mode, mem, reload_in_progress || reload_completed))
164 return false;
165
166 switch (GET_CODE (mem))
167 {
168 case REG:
169 /* Simple memory addresses are OK. */
170 return true;
171
172 case PRE_DEC:
173 case POST_INC:
174 return false;
175
176 case PLUS:
177 /* Only allow REG+INT addressing. */
178 base = XEXP (mem, 0);
179 index = XEXP (mem, 1);
180
181 return RX_REG_P (base) && CONST_INT_P (index);
182
183 case SYMBOL_REF:
184 /* Can happen when small data is being supported.
185 Assume that it will be resolved into GP+INT. */
186 return true;
187
188 default:
189 gcc_unreachable ();
190 }
191}
192
193bool
194rx_is_mode_dependent_addr (rtx addr)
195{
196 if (GET_CODE (addr) == CONST)
197 addr = XEXP (addr, 0);
198
199 switch (GET_CODE (addr))
200 {
201 /* --REG and REG++ only work in SImode. */
202 case PRE_DEC:
203 case POST_INC:
204 return true;
205
206 case MINUS:
207 case PLUS:
208 if (! REG_P (XEXP (addr, 0)))
209 return true;
210
211 addr = XEXP (addr, 1);
212
213 switch (GET_CODE (addr))
214 {
215 case REG:
216 /* REG+REG only works in SImode. */
217 return true;
218
219 case CONST_INT:
 220 /* REG+INT is only mode independent if INT is a
 221 positive multiple of 4 that fits into 8 bits. */
222 if (((INTVAL (addr) & 3) == 0)
223 && IN_RANGE (INTVAL (addr), 4, 252))
224 return false;
225 return true;
226
227 case SYMBOL_REF:
228 case LABEL_REF:
229 return true;
230
231 case MULT:
232 gcc_assert (REG_P (XEXP (addr, 0)));
233 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
234 /* REG+REG*SCALE is always mode dependent. */
235 return true;
236
237 default:
238 /* Not recognized, so treat as mode dependent. */
239 return true;
240 }
241
242 case CONST_INT:
243 case SYMBOL_REF:
244 case LABEL_REF:
245 case REG:
246 /* These are all mode independent. */
247 return false;
248
249 default:
250 /* Everything else is unrecognized,
251 so treat as mode dependent. */
252 return true;
253 }
254}
255\f
24833e1a 256/* Output to stdio stream FILE the assembler
 257 syntax for an instruction operand that is a memory
 258 reference whose address is ADDR. */
259
6bb30542 260static void
24833e1a 261rx_print_operand_address (FILE * file, rtx addr)
262{
263 switch (GET_CODE (addr))
264 {
265 case REG:
266 fprintf (file, "[");
267 rx_print_operand (file, addr, 0);
268 fprintf (file, "]");
269 break;
270
271 case PRE_DEC:
272 fprintf (file, "[-");
273 rx_print_operand (file, XEXP (addr, 0), 0);
274 fprintf (file, "]");
275 break;
276
277 case POST_INC:
278 fprintf (file, "[");
279 rx_print_operand (file, XEXP (addr, 0), 0);
280 fprintf (file, "+]");
281 break;
282
283 case PLUS:
284 {
285 rtx arg1 = XEXP (addr, 0);
286 rtx arg2 = XEXP (addr, 1);
287 rtx base, index;
288
289 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
290 base = arg1, index = arg2;
291 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
292 base = arg2, index = arg1;
293 else
294 {
295 rx_print_operand (file, arg1, 0);
296 fprintf (file, " + ");
297 rx_print_operand (file, arg2, 0);
298 break;
299 }
300
301 if (REG_P (index) || GET_CODE (index) == MULT)
302 {
303 fprintf (file, "[");
304 rx_print_operand (file, index, 'A');
305 fprintf (file, ",");
306 }
307 else /* GET_CODE (index) == CONST_INT */
308 {
309 rx_print_operand (file, index, 'A');
310 fprintf (file, "[");
311 }
312 rx_print_operand (file, base, 0);
313 fprintf (file, "]");
314 break;
315 }
316
317 case LABEL_REF:
318 case SYMBOL_REF:
319 case CONST:
320 fprintf (file, "#");
321 default:
322 output_addr_const (file, addr);
323 break;
324 }
325}
326
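/* Output VAL to FILE; values in the range [-64, 64] are printed in
 decimal, anything larger in hex - e.g. 100 becomes "0x64", or
 "064H" when the AS100 syntax is selected. */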
327static void
328rx_print_integer (FILE * file, HOST_WIDE_INT val)
329{
330 if (IN_RANGE (val, -64, 64))
331 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
332 else
333 fprintf (file,
334 TARGET_AS100_SYNTAX
335 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
336 val);
337}
338
339static bool
340rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
341{
342 const char * op = integer_asm_op (size, is_aligned);
343
344 if (! CONST_INT_P (x))
345 return default_assemble_integer (x, size, is_aligned);
346
347 if (op == NULL)
348 return false;
349 fputs (op, asm_out_file);
350
351 rx_print_integer (asm_out_file, INTVAL (x));
352 fputc ('\n', asm_out_file);
353 return true;
354}
355
356
357int rx_float_compare_mode;
358
359/* Handles the insertion of a single operand into the assembler output.
360 The %<letter> directives supported are:
361
362 %A Print an operand without a leading # character.
363 %B Print an integer comparison name.
364 %C Print a control register name.
365 %F Print a condition code flag name.
366 %H Print high part of a DImode register, integer or address.
367 %L Print low part of a DImode register, integer or address.
6bb30542 368 %N Print the negation of the immediate value.
24833e1a 369 %Q If the operand is a MEM, then correctly generate
370 register indirect or register relative addressing. */
371
6bb30542 372static void
24833e1a 373rx_print_operand (FILE * file, rtx op, int letter)
374{
375 switch (letter)
376 {
377 case 'A':
378 /* Print an operand without a leading #. */
379 if (MEM_P (op))
380 op = XEXP (op, 0);
381
382 switch (GET_CODE (op))
383 {
384 case LABEL_REF:
385 case SYMBOL_REF:
386 output_addr_const (file, op);
387 break;
388 case CONST_INT:
389 fprintf (file, "%ld", (long) INTVAL (op));
390 break;
391 default:
392 rx_print_operand (file, op, 0);
393 break;
394 }
395 break;
396
397 case 'B':
398 switch (GET_CODE (op))
399 {
400 case LT: fprintf (file, "lt"); break;
401 case GE: fprintf (file, "ge"); break;
402 case GT: fprintf (file, "gt"); break;
403 case LE: fprintf (file, "le"); break;
404 case GEU: fprintf (file, "geu"); break;
405 case LTU: fprintf (file, "ltu"); break;
406 case GTU: fprintf (file, "gtu"); break;
407 case LEU: fprintf (file, "leu"); break;
408 case EQ: fprintf (file, "eq"); break;
409 case NE: fprintf (file, "ne"); break;
410 default: debug_rtx (op); gcc_unreachable ();
411 }
412 break;
413
414 case 'C':
415 gcc_assert (CONST_INT_P (op));
416 switch (INTVAL (op))
417 {
418 case 0: fprintf (file, "psw"); break;
419 case 2: fprintf (file, "usp"); break;
420 case 3: fprintf (file, "fpsw"); break;
421 case 4: fprintf (file, "cpen"); break;
422 case 8: fprintf (file, "bpsw"); break;
423 case 9: fprintf (file, "bpc"); break;
424 case 0xa: fprintf (file, "isp"); break;
425 case 0xb: fprintf (file, "fintv"); break;
426 case 0xc: fprintf (file, "intb"); break;
427 default:
98cb9b5b 428 warning (0, "unrecognized control register number: %d - using 'psw'",
6bb30542 429 (int) INTVAL (op));
98cb9b5b 430 fprintf (file, "psw");
431 break;
24833e1a 432 }
433 break;
434
435 case 'F':
436 gcc_assert (CONST_INT_P (op));
437 switch (INTVAL (op))
438 {
439 case 0: case 'c': case 'C': fprintf (file, "C"); break;
440 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
441 case 2: case 's': case 'S': fprintf (file, "S"); break;
442 case 3: case 'o': case 'O': fprintf (file, "O"); break;
443 case 8: case 'i': case 'I': fprintf (file, "I"); break;
444 case 9: case 'u': case 'U': fprintf (file, "U"); break;
445 default:
446 gcc_unreachable ();
447 }
448 break;
449
450 case 'H':
6bb30542 451 switch (GET_CODE (op))
24833e1a 452 {
6bb30542 453 case REG:
454 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
455 break;
456 case CONST_INT:
457 {
458 HOST_WIDE_INT v = INTVAL (op);
67e66e16 459
6bb30542 460 fprintf (file, "#");
461 /* Trickery to avoid problems with shifting 32 bits at a time. */
462 v = v >> 16;
463 v = v >> 16;
464 rx_print_integer (file, v);
465 break;
466 }
467 case CONST_DOUBLE:
24833e1a 468 fprintf (file, "#");
6bb30542 469 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
470 break;
471 case MEM:
24833e1a 472 if (! WORDS_BIG_ENDIAN)
473 op = adjust_address (op, SImode, 4);
474 output_address (XEXP (op, 0));
6bb30542 475 break;
476 default:
477 gcc_unreachable ();
24833e1a 478 }
479 break;
480
481 case 'L':
6bb30542 482 switch (GET_CODE (op))
24833e1a 483 {
6bb30542 484 case REG:
485 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
486 break;
487 case CONST_INT:
24833e1a 488 fprintf (file, "#");
489 rx_print_integer (file, INTVAL (op) & 0xffffffff);
6bb30542 490 break;
491 case CONST_DOUBLE:
492 fprintf (file, "#");
493 rx_print_integer (file, CONST_DOUBLE_LOW (op));
494 break;
495 case MEM:
24833e1a 496 if (WORDS_BIG_ENDIAN)
497 op = adjust_address (op, SImode, 4);
498 output_address (XEXP (op, 0));
6bb30542 499 break;
500 default:
501 gcc_unreachable ();
24833e1a 502 }
503 break;
504
39349585 505 case 'N':
506 gcc_assert (CONST_INT_P (op));
507 fprintf (file, "#");
508 rx_print_integer (file, - INTVAL (op));
509 break;
510
24833e1a 511 case 'Q':
512 if (MEM_P (op))
513 {
514 HOST_WIDE_INT offset;
515
516 op = XEXP (op, 0);
517
518 if (REG_P (op))
519 offset = 0;
520 else if (GET_CODE (op) == PLUS)
521 {
522 rtx displacement;
523
524 if (REG_P (XEXP (op, 0)))
525 {
526 displacement = XEXP (op, 1);
527 op = XEXP (op, 0);
528 }
529 else
530 {
531 displacement = XEXP (op, 0);
532 op = XEXP (op, 1);
533 gcc_assert (REG_P (op));
534 }
535
536 gcc_assert (CONST_INT_P (displacement));
537 offset = INTVAL (displacement);
538 gcc_assert (offset >= 0);
539
540 fprintf (file, "%ld", offset);
541 }
542 else
543 gcc_unreachable ();
544
545 fprintf (file, "[");
546 rx_print_operand (file, op, 0);
547 fprintf (file, "].");
548
549 switch (GET_MODE_SIZE (GET_MODE (op)))
550 {
551 case 1:
552 gcc_assert (offset < 65535 * 1);
553 fprintf (file, "B");
554 break;
555 case 2:
556 gcc_assert (offset % 2 == 0);
557 gcc_assert (offset < 65535 * 2);
558 fprintf (file, "W");
559 break;
560 default:
561 gcc_assert (offset % 4 == 0);
562 gcc_assert (offset < 65535 * 4);
563 fprintf (file, "L");
564 break;
565 }
566 break;
567 }
568
569 /* Fall through. */
570
571 default:
572 switch (GET_CODE (op))
573 {
574 case MULT:
575 /* Should be the scaled part of an
576 indexed register indirect address. */
577 {
578 rtx base = XEXP (op, 0);
579 rtx index = XEXP (op, 1);
580
 581 /* Check for a swapped index register and scaling factor.
582 Not sure if this can happen, but be prepared to handle it. */
583 if (CONST_INT_P (base) && REG_P (index))
584 {
585 rtx tmp = base;
586 base = index;
587 index = tmp;
588 }
589
590 gcc_assert (REG_P (base));
591 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
592 gcc_assert (CONST_INT_P (index));
 593 /* Do not try to verify the value of the scale factor as it is
 594 based on the mode of the MEM, not the mode of the MULT
 595 (which will always be SImode). */
596 fprintf (file, "%s", reg_names [REGNO (base)]);
597 break;
598 }
599
600 case MEM:
601 output_address (XEXP (op, 0));
602 break;
603
604 case PLUS:
605 output_address (op);
606 break;
607
608 case REG:
609 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
610 fprintf (file, "%s", reg_names [REGNO (op)]);
611 break;
612
613 case SUBREG:
614 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
615 fprintf (file, "%s", reg_names [subreg_regno (op)]);
616 break;
617
618 /* This will only be single precision.... */
619 case CONST_DOUBLE:
620 {
621 unsigned long val;
622 REAL_VALUE_TYPE rv;
623
624 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
625 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
626 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
627 break;
628 }
629
630 case CONST_INT:
631 fprintf (file, "#");
632 rx_print_integer (file, INTVAL (op));
633 break;
634
635 case SYMBOL_REF:
636 case CONST:
637 case LABEL_REF:
638 case CODE_LABEL:
639 case UNSPEC:
640 rx_print_operand_address (file, op);
641 break;
642
643 default:
644 gcc_unreachable ();
645 }
646 break;
647 }
648}
649
650/* Returns an assembler template for a move instruction. */
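/* For example, loading the constant 5 into an SImode register gives
 the template "mov.L\t%1, %0", which prints as "mov.L\t#5, r1" when
 operand 0 is r1. */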
651
652char *
653rx_gen_move_template (rtx * operands, bool is_movu)
654{
6bb30542 655 static char out_template [64];
24833e1a 656 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
657 const char * src_template;
658 const char * dst_template;
659 rtx dest = operands[0];
660 rtx src = operands[1];
661
662 /* Decide which extension, if any, should be given to the move instruction. */
663 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
664 {
665 case QImode:
666 /* The .B extension is not valid when
667 loading an immediate into a register. */
668 if (! REG_P (dest) || ! CONST_INT_P (src))
669 extension = ".B";
670 break;
671 case HImode:
672 if (! REG_P (dest) || ! CONST_INT_P (src))
673 /* The .W extension is not valid when
674 loading an immediate into a register. */
675 extension = ".W";
676 break;
677 case SFmode:
678 case SImode:
679 extension = ".L";
680 break;
681 case VOIDmode:
682 /* This mode is used by constants. */
683 break;
684 default:
685 debug_rtx (src);
686 gcc_unreachable ();
687 }
688
689 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
690 src_template = "%%gp(%A1)[r13]";
691 else
692 src_template = "%1";
693
694 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
695 dst_template = "%%gp(%A0)[r13]";
696 else
697 dst_template = "%0";
698
6bb30542 699 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
24833e1a 700 extension, src_template, dst_template);
6bb30542 701 return out_template;
24833e1a 702}
703
704/* Returns an assembler template for a conditional branch instruction. */
705
706const char *
707rx_gen_cond_branch_template (rtx condition, bool reversed)
708{
709 enum rtx_code code = GET_CODE (condition);
710
24833e1a 711 if (reversed)
712 {
713 if (rx_float_compare_mode)
714 code = reverse_condition_maybe_unordered (code);
715 else
716 code = reverse_condition (code);
717 }
718
719 /* We do not worry about encoding the branch length here as GAS knows
720 how to choose the smallest version, and how to expand a branch that
721 is to a destination that is out of range. */
722
723 switch (code)
724 {
725 case UNEQ: return "bo\t1f\n\tbeq\t%0\n1:";
726 case LTGT: return "bo\t1f\n\tbne\t%0\n1:";
727 case UNLT: return "bo\t1f\n\tbn\t%0\n1:";
728 case UNGE: return "bo\t1f\n\tbpz\t%0\n1:";
729 case UNLE: return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
730 case UNGT: return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
731 case UNORDERED: return "bo\t%0";
732 case ORDERED: return "bno\t%0";
733
734 case LT: return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
735 case GE: return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
736 case GT: return "bgt\t%0";
737 case LE: return "ble\t%0";
738 case GEU: return "bgeu\t%0";
739 case LTU: return "bltu\t%0";
740 case GTU: return "bgtu\t%0";
741 case LEU: return "bleu\t%0";
742 case EQ: return "beq\t%0";
743 case NE: return "bne\t%0";
744 default:
745 gcc_unreachable ();
746 }
747}
748\f
749/* Return VALUE rounded up to the next ALIGNMENT boundary. */
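/* ALIGNMENT must be a power of two for the mask arithmetic to work;
 for example, rx_round_up (13, 4) yields 16. */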
750
751static inline unsigned int
752rx_round_up (unsigned int value, unsigned int alignment)
753{
754 alignment -= 1;
755 return (value + alignment) & (~ alignment);
756}
757
758/* Return the number of bytes in the argument registers
759 occupied by an argument of type TYPE and mode MODE. */
760
ee4e8428 761static unsigned int
24833e1a 762rx_function_arg_size (Mmode mode, const_tree type)
763{
764 unsigned int num_bytes;
765
766 num_bytes = (mode == BLKmode)
767 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
768 return rx_round_up (num_bytes, UNITS_PER_WORD);
769}
770
771#define NUM_ARG_REGS 4
772#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
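
/* The first MAX_NUM_ARG_BYTES of arguments are passed in registers r1
 through r4; anything beyond that is passed on the stack. */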
773
774/* Return an RTL expression describing the register holding a function
775 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
776 be passed on the stack. CUM describes the previous parameters to the
777 function and NAMED is false if the parameter is part of a variable
778 parameter list, or the last named parameter before the start of a
779 variable parameter list. */
780
ee4e8428 781static rtx
24833e1a 782rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
783{
784 unsigned int next_reg;
785 unsigned int bytes_so_far = *cum;
786 unsigned int size;
787 unsigned int rounded_size;
788
789 /* An exploded version of rx_function_arg_size. */
790 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
6bb30542 791 /* If the size is not known it cannot be passed in registers. */
792 if (size < 1)
793 return NULL_RTX;
24833e1a 794
795 rounded_size = rx_round_up (size, UNITS_PER_WORD);
796
797 /* Don't pass this arg via registers if there
798 are insufficient registers to hold all of it. */
799 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
800 return NULL_RTX;
801
802 /* Unnamed arguments and the last named argument in a
803 variadic function are always passed on the stack. */
804 if (!named)
805 return NULL_RTX;
806
807 /* Structures must occupy an exact number of registers,
808 otherwise they are passed on the stack. */
809 if ((type == NULL || AGGREGATE_TYPE_P (type))
810 && (size % UNITS_PER_WORD) != 0)
811 return NULL_RTX;
812
813 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
814
815 return gen_rtx_REG (mode, next_reg);
816}
817
ee4e8428 818static void
819rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type,
820 bool named ATTRIBUTE_UNUSED)
821{
822 *cum += rx_function_arg_size (mode, type);
823}
824
24833e1a 825/* Return an RTL describing where a function return value of type RET_TYPE
826 is held. */
827
828static rtx
829rx_function_value (const_tree ret_type,
830 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
831 bool outgoing ATTRIBUTE_UNUSED)
832{
833 return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
834}
835
836static bool
837rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
838{
839 HOST_WIDE_INT size;
840
841 if (TYPE_MODE (type) != BLKmode
842 && ! AGGREGATE_TYPE_P (type))
843 return false;
844
845 size = int_size_in_bytes (type);
846 /* Large structs and those whose size is not an
847 exact multiple of 4 are returned in memory. */
848 return size < 1
849 || size > 16
850 || (size % UNITS_PER_WORD) != 0;
851}
852
853static rtx
854rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
855 int incoming ATTRIBUTE_UNUSED)
856{
857 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
858}
859
860static bool
861rx_return_in_msb (const_tree valtype)
862{
863 return TARGET_BIG_ENDIAN_DATA
864 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
865}
866
867/* Returns true if the provided function has the specified attribute. */
868
869static inline bool
870has_func_attr (const_tree decl, const char * func_attr)
871{
872 if (decl == NULL_TREE)
873 decl = current_function_decl;
874
875 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
876}
877
67e66e16 878/* Returns true if the provided function has the "fast_interrupt" attribute. */
24833e1a 879
880static inline bool
881is_fast_interrupt_func (const_tree decl)
882{
67e66e16 883 return has_func_attr (decl, "fast_interrupt");
24833e1a 884}
885
67e66e16 886/* Returns true if the provided function has the "interrupt" attribute. */
24833e1a 887
888static inline bool
67e66e16 889is_interrupt_func (const_tree decl)
24833e1a 890{
67e66e16 891 return has_func_attr (decl, "interrupt");
24833e1a 892}
893
894/* Returns true if the provided function has the "naked" attribute. */
895
896static inline bool
897is_naked_func (const_tree decl)
898{
899 return has_func_attr (decl, "naked");
900}
901\f
902static bool use_fixed_regs = false;
903
904void
905rx_conditional_register_usage (void)
906{
907 static bool using_fixed_regs = false;
908
909 if (rx_small_data_limit > 0)
910 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
911
912 if (use_fixed_regs != using_fixed_regs)
913 {
914 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
915 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
916
917 if (use_fixed_regs)
918 {
24833e1a 919 unsigned int r;
920
24833e1a 921 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
922 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
e4d9e8e5 923
924 /* This is for fast interrupt handlers. Any register in
925 the range r10 to r13 (inclusive) that is currently
926 marked as fixed is now a viable, call-used register. */
24833e1a 927 for (r = 10; r <= 13; r++)
928 if (fixed_regs[r])
929 {
930 fixed_regs[r] = 0;
931 call_used_regs[r] = 1;
24833e1a 932 }
933
e4d9e8e5 934 /* Mark r7 as fixed. This is just a hack to avoid
935 altering the reg_alloc_order array so that the newly
936 freed r10-r13 registers are the preferred registers. */
937 fixed_regs[7] = call_used_regs[7] = 1;
24833e1a 938 }
939 else
940 {
941 /* Restore the normal register masks. */
942 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
943 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
944 }
945
946 using_fixed_regs = use_fixed_regs;
947 }
948}
949
950/* Perform any actions necessary before starting to compile FNDECL.
951 For the RX we use this to make sure that we have the correct
952 set of register masks selected. If FNDECL is NULL then we are
953 compiling top level things. */
954
955static void
956rx_set_current_function (tree fndecl)
957{
958 /* Remember the last target of rx_set_current_function. */
959 static tree rx_previous_fndecl;
67e66e16 960 bool prev_was_fast_interrupt;
961 bool current_is_fast_interrupt;
24833e1a 962
963 /* Only change the context if the function changes. This hook is called
964 several times in the course of compiling a function, and we don't want
965 to slow things down too much or call target_reinit when it isn't safe. */
966 if (fndecl == rx_previous_fndecl)
967 return;
968
67e66e16 969 prev_was_fast_interrupt
24833e1a 970 = rx_previous_fndecl
971 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
67e66e16 972
973 current_is_fast_interrupt
24833e1a 974 = fndecl ? is_fast_interrupt_func (fndecl) : false;
975
67e66e16 976 if (prev_was_fast_interrupt != current_is_fast_interrupt)
24833e1a 977 {
67e66e16 978 use_fixed_regs = current_is_fast_interrupt;
24833e1a 979 target_reinit ();
980 }
67e66e16 981
24833e1a 982 rx_previous_fndecl = fndecl;
983}
984\f
 985/* Typical stack layout should look like this after the function's prologue:
986
987 | |
988 -- ^
989 | | \ |
990 | | arguments saved | Increasing
991 | | on the stack | addresses
992 PARENT arg pointer -> | | /
993 -------------------------- ---- -------------------
994 CHILD |ret | return address
995 --
996 | | \
997 | | call saved
998 | | registers
999 | | /
1000 --
1001 | | \
1002 | | local
1003 | | variables
1004 frame pointer -> | | /
1005 --
1006 | | \
1007 | | outgoing | Decreasing
1008 | | arguments | addresses
1009 current stack pointer -> | | / |
1010 -------------------------- ---- ------------------ V
1011 | | */
1012
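/* Count the number of set bits in X using the classic parallel (SWAR)
 reduction: pair counts, then nibble counts, then byte counts are
 summed; for example, bit_count (0xf0) returns 4. */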
1013static unsigned int
1014bit_count (unsigned int x)
1015{
1016 const unsigned int m1 = 0x55555555;
1017 const unsigned int m2 = 0x33333333;
1018 const unsigned int m4 = 0x0f0f0f0f;
1019
1020 x -= (x >> 1) & m1;
1021 x = (x & m2) + ((x >> 2) & m2);
1022 x = (x + (x >> 4)) & m4;
1023 x += x >> 8;
1024
1025 return (x + (x >> 16)) & 0x3f;
1026}
1027
e4d9e8e5 1028#define MUST_SAVE_ACC_REGISTER \
1029 (TARGET_SAVE_ACC_REGISTER \
1030 && (is_interrupt_func (NULL_TREE) \
1031 || is_fast_interrupt_func (NULL_TREE)))
1032
24833e1a 1033/* Returns either the lowest numbered and highest numbered registers that
1034 occupy the call-saved area of the stack frame, if the registers are
1035 stored as a contiguous block, or else a bitmask of the individual
1036 registers if they are stored piecemeal.
1037
1038 Also computes the size of the frame and the size of the outgoing
1039 arguments block (in bytes). */
1040
1041static void
1042rx_get_stack_layout (unsigned int * lowest,
1043 unsigned int * highest,
1044 unsigned int * register_mask,
1045 unsigned int * frame_size,
1046 unsigned int * stack_size)
1047{
1048 unsigned int reg;
1049 unsigned int low;
1050 unsigned int high;
1051 unsigned int fixed_reg = 0;
1052 unsigned int save_mask;
1053 unsigned int pushed_mask;
1054 unsigned int unneeded_pushes;
1055
e4d9e8e5 1056 if (is_naked_func (NULL_TREE))
24833e1a 1057 {
1058 /* Naked functions do not create their own stack frame.
e4d9e8e5 1059 Instead the programmer must do that for us. */
24833e1a 1060 * lowest = 0;
1061 * highest = 0;
1062 * register_mask = 0;
1063 * frame_size = 0;
1064 * stack_size = 0;
1065 return;
1066 }
1067
9d2f1b03 1068 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
24833e1a 1069 {
21cde6ec 1070 if ((df_regs_ever_live_p (reg)
1071 /* Always save all call clobbered registers inside interrupt
1072 handlers, even if they are not live - they may be used in
1073 routines called from this one. */
1074 || (call_used_regs[reg] && is_interrupt_func (NULL_TREE)))
24833e1a 1075 && (! call_used_regs[reg]
 1076 /* Even call clobbered registers must
67e66e16 1077 be pushed inside interrupt handlers. */
e4d9e8e5 1078 || is_interrupt_func (NULL_TREE)
1079 /* Likewise for fast interrupt handlers, except registers r10 -
1080 r13. These are normally call-saved, but may have been set
1081 to call-used by rx_conditional_register_usage. If so then
1082 they can be used in the fast interrupt handler without
1083 saving them on the stack. */
1084 || (is_fast_interrupt_func (NULL_TREE)
1085 && ! IN_RANGE (reg, 10, 13))))
24833e1a 1086 {
1087 if (low == 0)
1088 low = reg;
1089 high = reg;
1090
1091 save_mask |= 1 << reg;
1092 }
1093
1094 /* Remember if we see a fixed register
1095 after having found the low register. */
1096 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1097 fixed_reg = reg;
1098 }
1099
e4d9e8e5 1100 /* If we have to save the accumulator register, make sure
1101 that at least two registers are pushed into the frame. */
1102 if (MUST_SAVE_ACC_REGISTER
1103 && bit_count (save_mask) < 2)
1104 {
1105 save_mask |= (1 << 13) | (1 << 14);
1106 if (low == 0)
1107 low = 13;
bc9bb967 1108 if (high == 0 || low == high)
1109 high = low + 1;
e4d9e8e5 1110 }
1111
24833e1a 1112 /* Decide if it would be faster to fill in the call-saved area of the stack
1113 frame using multiple PUSH instructions instead of a single PUSHM
1114 instruction.
1115
1116 SAVE_MASK is a bitmask of the registers that must be stored in the
1117 call-save area. PUSHED_MASK is a bitmask of the registers that would
1118 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1119 is a bitmask of those registers in pushed_mask that are not in
1120 save_mask.
1121
1122 We use a simple heuristic that says that it is better to use
1123 multiple PUSH instructions if the number of unnecessary pushes is
1124 greater than the number of necessary pushes.
1125
1126 We also use multiple PUSH instructions if there are any fixed registers
1127 between LOW and HIGH. The only way that this can happen is if the user
 1128 has specified -ffixed-<reg-name> on the command line and in such
1129 circumstances we do not want to touch the fixed registers at all.
1130
1131 FIXME: Is it worth improving this heuristic ? */
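 /* For example, LOW = 6 and HIGH = 9 give a PUSHED_MASK of 0x3c0,
 i.e. bits 6 through 9 set. */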
1132 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1133 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1134
1135 if ((fixed_reg && fixed_reg <= high)
1136 || (optimize_function_for_speed_p (cfun)
1137 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1138 {
1139 /* Use multiple pushes. */
1140 * lowest = 0;
1141 * highest = 0;
1142 * register_mask = save_mask;
1143 }
1144 else
1145 {
1146 /* Use one push multiple instruction. */
1147 * lowest = low;
1148 * highest = high;
1149 * register_mask = 0;
1150 }
1151
1152 * frame_size = rx_round_up
1153 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1154
1155 if (crtl->args.size > 0)
1156 * frame_size += rx_round_up
1157 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1158
1159 * stack_size = rx_round_up
1160 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1161}
1162
1163/* Generate a PUSHM instruction that matches the given operands. */
1164
1165void
1166rx_emit_stack_pushm (rtx * operands)
1167{
1168 HOST_WIDE_INT last_reg;
1169 rtx first_push;
1170
1171 gcc_assert (CONST_INT_P (operands[0]));
1172 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1173
1174 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1175 first_push = XVECEXP (operands[1], 0, 1);
1176 gcc_assert (SET_P (first_push));
1177 first_push = SET_SRC (first_push);
1178 gcc_assert (REG_P (first_push));
1179
1180 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
67e66e16 1181 reg_names [REGNO (first_push) - last_reg],
1182 reg_names [REGNO (first_push)]);
24833e1a 1183}
1184
1185/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
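/* For example, LOW = 6 and HIGH = 8 produce a four element vector:
 SP = SP - 12 followed by stores of r8, r7 and r6 at offsets -4, -8
 and -12 from the incoming stack pointer. */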
1186
1187static rtx
1188gen_rx_store_vector (unsigned int low, unsigned int high)
1189{
1190 unsigned int i;
1191 unsigned int count = (high - low) + 2;
1192 rtx vector;
1193
1194 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1195
1196 XVECEXP (vector, 0, 0) =
1197 gen_rtx_SET (SImode, stack_pointer_rtx,
1198 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1199 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1200
1201 for (i = 0; i < count - 1; i++)
1202 XVECEXP (vector, 0, i + 1) =
1203 gen_rtx_SET (SImode,
1204 gen_rtx_MEM (SImode,
67e66e16 1205 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1206 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1207 gen_rtx_REG (SImode, high - i));
24833e1a 1208 return vector;
1209}
1210
67e66e16 1211/* Mark INSN as being frame related. If it is a PARALLEL
1212 then mark each element as being frame related as well. */
1213
1214static void
1215mark_frame_related (rtx insn)
1216{
1217 RTX_FRAME_RELATED_P (insn) = 1;
1218 insn = PATTERN (insn);
1219
1220 if (GET_CODE (insn) == PARALLEL)
1221 {
1222 unsigned int i;
1223
61fc50a0 1224 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1225 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1226 }
1227}
1228
24833e1a 1229void
1230rx_expand_prologue (void)
1231{
1232 unsigned int stack_size;
1233 unsigned int frame_size;
1234 unsigned int mask;
1235 unsigned int low;
1236 unsigned int high;
67e66e16 1237 unsigned int reg;
24833e1a 1238 rtx insn;
1239
1240 /* Naked functions use their own, programmer provided prologues. */
e4d9e8e5 1241 if (is_naked_func (NULL_TREE))
24833e1a 1242 return;
1243
1244 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1245
1246 /* If we use any of the callee-saved registers, save them now. */
1247 if (mask)
1248 {
24833e1a 1249 /* Push registers in reverse order. */
9d2f1b03 1250 for (reg = CC_REGNUM; reg --;)
24833e1a 1251 if (mask & (1 << reg))
1252 {
1253 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
67e66e16 1254 mark_frame_related (insn);
24833e1a 1255 }
1256 }
1257 else if (low)
1258 {
1259 if (high == low)
1260 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1261 else
1262 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1263 * UNITS_PER_WORD),
1264 gen_rx_store_vector (low, high)));
67e66e16 1265 mark_frame_related (insn);
1266 }
1267
e4d9e8e5 1268 if (MUST_SAVE_ACC_REGISTER)
67e66e16 1269 {
1270 unsigned int acc_high, acc_low;
1271
1272 /* Interrupt handlers have to preserve the accumulator
1273 register if so requested by the user. Use the first
e4d9e8e5 1274 two pushed registers as intermediaries. */
67e66e16 1275 if (mask)
1276 {
1277 acc_low = acc_high = 0;
1278
9d2f1b03 1279 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1280 if (mask & (1 << reg))
1281 {
1282 if (acc_low == 0)
1283 acc_low = reg;
1284 else
1285 {
1286 acc_high = reg;
1287 break;
1288 }
1289 }
1290
1291 /* We have assumed that there are at least two registers pushed... */
1292 gcc_assert (acc_high != 0);
1293
1294 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1295 We just assume that they are zero. */
1296 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1297 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1298 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1299 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1300 }
1301 else
1302 {
1303 acc_low = low;
1304 acc_high = low + 1;
1305
1306 /* We have assumed that there are at least two registers pushed... */
1307 gcc_assert (acc_high <= high);
1308
1309 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1310 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1311 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1312 gen_rx_store_vector (acc_low, acc_high)));
1313 }
24833e1a 1314 }
1315
1316 /* If needed, set up the frame pointer. */
1317 if (frame_pointer_needed)
1318 {
1319 if (frame_size)
1320 insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1321 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1322 else
1323 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1324
1325 RTX_FRAME_RELATED_P (insn) = 1;
1326 }
1327
1328 insn = NULL_RTX;
1329
1330 /* Allocate space for the outgoing args.
1331 If the stack frame has not already been set up then handle this as well. */
1332 if (stack_size)
1333 {
1334 if (frame_size)
1335 {
1336 if (frame_pointer_needed)
1337 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1338 GEN_INT (- (HOST_WIDE_INT)
1339 stack_size)));
1340 else
1341 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1342 GEN_INT (- (HOST_WIDE_INT)
1343 (frame_size + stack_size))));
1344 }
1345 else
1346 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1347 GEN_INT (- (HOST_WIDE_INT) stack_size)));
1348 }
1349 else if (frame_size)
1350 {
1351 if (! frame_pointer_needed)
1352 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1353 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1354 else
1355 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1356 }
1357
1358 if (insn != NULL_RTX)
1359 RTX_FRAME_RELATED_P (insn) = 1;
1360}
1361
1362static void
1363rx_output_function_prologue (FILE * file,
1364 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1365{
1366 if (is_fast_interrupt_func (NULL_TREE))
1367 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1368
67e66e16 1369 if (is_interrupt_func (NULL_TREE))
1370 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
24833e1a 1371
1372 if (is_naked_func (NULL_TREE))
1373 asm_fprintf (file, "\t; Note: Naked Function\n");
1374
1375 if (cfun->static_chain_decl != NULL)
1376 asm_fprintf (file, "\t; Note: Nested function declared "
1377 "inside another function.\n");
1378
1379 if (crtl->calls_eh_return)
1380 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1381}
1382
1383/* Generate a POPM or RTSD instruction that matches the given operands. */
1384
1385void
1386rx_emit_stack_popm (rtx * operands, bool is_popm)
1387{
1388 HOST_WIDE_INT stack_adjust;
1389 HOST_WIDE_INT last_reg;
1390 rtx first_push;
1391
1392 gcc_assert (CONST_INT_P (operands[0]));
1393 stack_adjust = INTVAL (operands[0]);
1394
1395 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1396 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1397
1398 first_push = XVECEXP (operands[1], 0, 1);
1399 gcc_assert (SET_P (first_push));
1400 first_push = SET_DEST (first_push);
1401 gcc_assert (REG_P (first_push));
1402
1403 if (is_popm)
1404 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1405 reg_names [REGNO (first_push)],
1406 reg_names [REGNO (first_push) + last_reg]);
1407 else
1408 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1409 (int) stack_adjust,
1410 reg_names [REGNO (first_push)],
1411 reg_names [REGNO (first_push) + last_reg]);
1412}
1413
1414/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
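/* For example, ADJUST = 16, LOW = 6 and HIGH = 7 produce: SP = SP + 16,
 r6 = [SP], r7 = [SP + 4], followed by a return - the RTL counterpart of
 an "rtsd #16, r6-r7" instruction. */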
1415
1416static rtx
1417gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1418{
1419 unsigned int i;
1420 unsigned int bias = 3;
1421 unsigned int count = (high - low) + bias;
1422 rtx vector;
1423
1424 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1425
1426 XVECEXP (vector, 0, 0) =
1427 gen_rtx_SET (SImode, stack_pointer_rtx,
1428 plus_constant (stack_pointer_rtx, adjust));
1429
1430 for (i = 0; i < count - 2; i++)
1431 XVECEXP (vector, 0, i + 1) =
1432 gen_rtx_SET (SImode,
1433 gen_rtx_REG (SImode, low + i),
1434 gen_rtx_MEM (SImode,
1435 i == 0 ? stack_pointer_rtx
1436 : plus_constant (stack_pointer_rtx,
1437 i * UNITS_PER_WORD)));
1438
1439 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1440
1441 return vector;
1442}
1443
1444/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1445
1446static rtx
1447gen_rx_popm_vector (unsigned int low, unsigned int high)
1448{
1449 unsigned int i;
1450 unsigned int count = (high - low) + 2;
1451 rtx vector;
1452
1453 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1454
1455 XVECEXP (vector, 0, 0) =
1456 gen_rtx_SET (SImode, stack_pointer_rtx,
1457 plus_constant (stack_pointer_rtx,
1458 (count - 1) * UNITS_PER_WORD));
1459
1460 for (i = 0; i < count - 1; i++)
1461 XVECEXP (vector, 0, i + 1) =
1462 gen_rtx_SET (SImode,
1463 gen_rtx_REG (SImode, low + i),
1464 gen_rtx_MEM (SImode,
1465 i == 0 ? stack_pointer_rtx
1466 : plus_constant (stack_pointer_rtx,
1467 i * UNITS_PER_WORD)));
1468
1469 return vector;
1470}
1471
1472void
1473rx_expand_epilogue (bool is_sibcall)
1474{
1475 unsigned int low;
1476 unsigned int high;
1477 unsigned int frame_size;
1478 unsigned int stack_size;
1479 unsigned int register_mask;
1480 unsigned int regs_size;
67e66e16 1481 unsigned int reg;
24833e1a 1482 unsigned HOST_WIDE_INT total_size;
1483
61fc50a0 1484 /* FIXME: We do not support indirect sibcalls at the moment because we
1485 cannot guarantee that the register holding the function address is a
1486 call-used register. If it is a call-saved register then the stack
1487 pop instructions generated in the epilogue will corrupt the address
1488 before it is used.
1489
1490 Creating a new call-used-only register class works but then the
1491 reload pass gets stuck because it cannot always find a call-used
1492 register for spilling sibcalls.
1493
1494 The other possible solution is for this pass to scan forward for the
1495 sibcall instruction (if it has been generated) and work out if it
1496 is an indirect sibcall using a call-saved register. If it is then
 1497 the address can be copied into a call-used register in this epilogue
1498 code and the sibcall instruction modified to use that register. */
1499
24833e1a 1500 if (is_naked_func (NULL_TREE))
1501 {
61fc50a0 1502 gcc_assert (! is_sibcall);
1503
24833e1a 1504 /* Naked functions use their own, programmer provided epilogues.
1505 But, in order to keep gcc happy we have to generate some kind of
1506 epilogue RTL. */
1507 emit_jump_insn (gen_naked_return ());
1508 return;
1509 }
1510
1511 rx_get_stack_layout (& low, & high, & register_mask,
1512 & frame_size, & stack_size);
1513
1514 total_size = frame_size + stack_size;
1515 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1516
1517 /* See if we are unable to use the special stack frame deconstruct and
1518 return instructions. In most cases we can use them, but the exceptions
1519 are:
1520
1521 - Sibling calling functions deconstruct the frame but do not return to
1522 their caller. Instead they branch to their sibling and allow their
1523 return instruction to return to this function's parent.
1524
67e66e16 1525 - Fast and normal interrupt handling functions have to use special
24833e1a 1526 return instructions.
1527
1528 - Functions where we have pushed a fragmented set of registers into the
1529 call-save area must have the same set of registers popped. */
1530 if (is_sibcall
1531 || is_fast_interrupt_func (NULL_TREE)
67e66e16 1532 || is_interrupt_func (NULL_TREE)
24833e1a 1533 || register_mask)
1534 {
1535 /* Cannot use the special instructions - deconstruct by hand. */
1536 if (total_size)
1537 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1538 GEN_INT (total_size)));
1539
e4d9e8e5 1540 if (MUST_SAVE_ACC_REGISTER)
24833e1a 1541 {
67e66e16 1542 unsigned int acc_low, acc_high;
1543
1544 /* Reverse the saving of the accumulator register onto the stack.
1545 Note we must adjust the saved "low" accumulator value as it
1546 is really the middle 32-bits of the accumulator. */
1547 if (register_mask)
1548 {
1549 acc_low = acc_high = 0;
9d2f1b03 1550
1551 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1552 if (register_mask & (1 << reg))
1553 {
1554 if (acc_low == 0)
1555 acc_low = reg;
1556 else
1557 {
1558 acc_high = reg;
1559 break;
1560 }
1561 }
1562 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1563 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1564 }
1565 else
1566 {
1567 acc_low = low;
1568 acc_high = low + 1;
1569 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1570 gen_rx_popm_vector (acc_low, acc_high)));
1571 }
1572
1573 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1574 gen_rtx_REG (SImode, acc_low),
1575 GEN_INT (16)));
1576 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1577 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1578 }
24833e1a 1579
67e66e16 1580 if (register_mask)
1581 {
9d2f1b03 1582 for (reg = 0; reg < CC_REGNUM; reg ++)
24833e1a 1583 if (register_mask & (1 << reg))
1584 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1585 }
1586 else if (low)
1587 {
1588 if (high == low)
1589 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1590 else
1591 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1592 gen_rx_popm_vector (low, high)));
1593 }
1594
1595 if (is_fast_interrupt_func (NULL_TREE))
61fc50a0 1596 {
1597 gcc_assert (! is_sibcall);
1598 emit_jump_insn (gen_fast_interrupt_return ());
1599 }
67e66e16 1600 else if (is_interrupt_func (NULL_TREE))
61fc50a0 1601 {
1602 gcc_assert (! is_sibcall);
1603 emit_jump_insn (gen_exception_return ());
1604 }
24833e1a 1605 else if (! is_sibcall)
1606 emit_jump_insn (gen_simple_return ());
1607
1608 return;
1609 }
1610
1611 /* If we allocated space on the stack, free it now. */
1612 if (total_size)
1613 {
1614 unsigned HOST_WIDE_INT rtsd_size;
1615
1616 /* See if we can use the RTSD instruction. */
1617 rtsd_size = total_size + regs_size;
1618 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1619 {
1620 if (low)
1621 emit_jump_insn (gen_pop_and_return
1622 (GEN_INT (rtsd_size),
1623 gen_rx_rtsd_vector (rtsd_size, low, high)));
1624 else
1625 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1626
1627 return;
1628 }
1629
1630 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1631 GEN_INT (total_size)));
1632 }
1633
1634 if (low)
1635 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1636 gen_rx_rtsd_vector (regs_size,
1637 low, high)));
1638 else
1639 emit_jump_insn (gen_simple_return ());
1640}
1641
1642
 1643/* Compute the offset (in bytes) between FROM (arg pointer
1644 or frame pointer) and TO (frame pointer or stack pointer).
1645 See ASCII art comment at the start of rx_expand_prologue
1646 for more information. */
1647
1648int
1649rx_initial_elimination_offset (int from, int to)
1650{
1651 unsigned int low;
1652 unsigned int high;
1653 unsigned int frame_size;
1654 unsigned int stack_size;
1655 unsigned int mask;
1656
1657 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1658
1659 if (from == ARG_POINTER_REGNUM)
1660 {
1661 /* Extend the computed size of the stack frame to
1662 include the registers pushed in the prologue. */
1663 if (low)
1664 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1665 else
1666 frame_size += bit_count (mask) * UNITS_PER_WORD;
1667
1668 /* Remember to include the return address. */
1669 frame_size += 1 * UNITS_PER_WORD;
1670
1671 if (to == FRAME_POINTER_REGNUM)
1672 return frame_size;
1673
1674 gcc_assert (to == STACK_POINTER_REGNUM);
1675 return frame_size + stack_size;
1676 }
1677
1678 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1679 return stack_size;
1680}
1681
24833e1a 1682/* Decide if a variable should go into one of the small data sections. */
1683
1684static bool
1685rx_in_small_data (const_tree decl)
1686{
1687 int size;
1688 const_tree section;
1689
1690 if (rx_small_data_limit == 0)
1691 return false;
1692
1693 if (TREE_CODE (decl) != VAR_DECL)
1694 return false;
1695
1696 /* We do not put read-only variables into a small data area because
1697 they would be placed with the other read-only sections, far away
1698 from the read-write data sections, and we only have one small
1699 data area pointer.
1700 Similarly commons are placed in the .bss section which might be
 1701 far away from (and out of alignment with respect to) the .data section. */
1702 if (TREE_READONLY (decl) || DECL_COMMON (decl))
1703 return false;
1704
1705 section = DECL_SECTION_NAME (decl);
1706 if (section)
1707 {
1708 const char * const name = TREE_STRING_POINTER (section);
1709
1710 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1711 }
1712
1713 size = int_size_in_bytes (TREE_TYPE (decl));
1714
1715 return (size > 0) && (size <= rx_small_data_limit);
1716}
1717
1718/* Return a section for X.
1719 The only special thing we do here is to honor small data. */
1720
1721static section *
1722rx_select_rtx_section (enum machine_mode mode,
1723 rtx x,
1724 unsigned HOST_WIDE_INT align)
1725{
1726 if (rx_small_data_limit > 0
1727 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1728 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1729 return sdata_section;
1730
1731 return default_elf_select_rtx_section (mode, x, align);
1732}
1733
1734static section *
1735rx_select_section (tree decl,
1736 int reloc,
1737 unsigned HOST_WIDE_INT align)
1738{
1739 if (rx_small_data_limit > 0)
1740 {
1741 switch (categorize_decl_for_section (decl, reloc))
1742 {
1743 case SECCAT_SDATA: return sdata_section;
1744 case SECCAT_SBSS: return sbss_section;
1745 case SECCAT_SRODATA:
1746 /* Fall through. We do not put small, read only
1747 data into the C_2 section because we are not
1748 using the C_2 section. We do not use the C_2
1749 section because it is located with the other
1750 read-only data sections, far away from the read-write
1751 data sections and we only have one small data
1752 pointer (r13). */
1753 default:
1754 break;
1755 }
1756 }
1757
1758 /* If we are supporting the Renesas assembler
1759 we cannot use mergeable sections. */
1760 if (TARGET_AS100_SYNTAX)
1761 switch (categorize_decl_for_section (decl, reloc))
1762 {
1763 case SECCAT_RODATA_MERGE_CONST:
1764 case SECCAT_RODATA_MERGE_STR_INIT:
1765 case SECCAT_RODATA_MERGE_STR:
1766 return readonly_data_section;
1767
1768 default:
1769 break;
1770 }
1771
1772 return default_elf_select_section (decl, reloc, align);
1773}
1774\f
1775enum rx_builtin
1776{
1777 RX_BUILTIN_BRK,
1778 RX_BUILTIN_CLRPSW,
1779 RX_BUILTIN_INT,
1780 RX_BUILTIN_MACHI,
1781 RX_BUILTIN_MACLO,
1782 RX_BUILTIN_MULHI,
1783 RX_BUILTIN_MULLO,
1784 RX_BUILTIN_MVFACHI,
1785 RX_BUILTIN_MVFACMI,
1786 RX_BUILTIN_MVFC,
1787 RX_BUILTIN_MVTACHI,
1788 RX_BUILTIN_MVTACLO,
1789 RX_BUILTIN_MVTC,
67e66e16 1790 RX_BUILTIN_MVTIPL,
24833e1a 1791 RX_BUILTIN_RACW,
1792 RX_BUILTIN_REVW,
1793 RX_BUILTIN_RMPA,
1794 RX_BUILTIN_ROUND,
1795 RX_BUILTIN_SAT,
1796 RX_BUILTIN_SETPSW,
1797 RX_BUILTIN_WAIT,
1798 RX_BUILTIN_max
1799};
1800
1801static void
1802rx_init_builtins (void)
1803{
1804#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1805 add_builtin_function ("__builtin_rx_" LC_NAME, \
1806 build_function_type_list (RET_TYPE##_type_node, \
1807 ARG_TYPE##_type_node, \
1808 NULL_TREE), \
1809 RX_BUILTIN_##UC_NAME, \
1810 BUILT_IN_MD, NULL, NULL_TREE)
1811
1812#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1813 add_builtin_function ("__builtin_rx_" LC_NAME, \
1814 build_function_type_list (RET_TYPE##_type_node, \
1815 ARG_TYPE1##_type_node,\
1816 ARG_TYPE2##_type_node,\
1817 NULL_TREE), \
1818 RX_BUILTIN_##UC_NAME, \
1819 BUILT_IN_MD, NULL, NULL_TREE)
1820
1821#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1822 add_builtin_function ("__builtin_rx_" LC_NAME, \
1823 build_function_type_list (RET_TYPE##_type_node, \
1824 ARG_TYPE1##_type_node,\
1825 ARG_TYPE2##_type_node,\
1826 ARG_TYPE3##_type_node,\
1827 NULL_TREE), \
1828 RX_BUILTIN_##UC_NAME, \
1829 BUILT_IN_MD, NULL, NULL_TREE)
1830
1831 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1832 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1833 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1834 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1835 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1836 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1837 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1838 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1839 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1840 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1841 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1842 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1843 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1844 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1845 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 1846 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 1847 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1848 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1849 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1850 ADD_RX_BUILTIN1 (SAT, "sat", intSI, intSI);
1851 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
1852}
1853
24833e1a 1854static rtx
1855rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1856{
1857 if (reg && ! REG_P (arg))
1858 arg = force_reg (SImode, arg);
1859
1860 emit_insn (gen_func (arg));
1861
1862 return NULL_RTX;
1863}
1864
1865static rtx
1866rx_expand_builtin_mvtc (tree exp)
1867{
1868 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1869 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1870
1871 if (! CONST_INT_P (arg1))
1872 return NULL_RTX;
1873
1874 if (! REG_P (arg2))
1875 arg2 = force_reg (SImode, arg2);
1876
1877 emit_insn (gen_mvtc (arg1, arg2));
1878
1879 return NULL_RTX;
1880}
1881
1882static rtx
1883rx_expand_builtin_mvfc (tree t_arg, rtx target)
1884{
1885 rtx arg = expand_normal (t_arg);
1886
1887 if (! CONST_INT_P (arg))
1888 return NULL_RTX;
1889
e4d9e8e5 1890 if (target == NULL_RTX)
1891 return NULL_RTX;
1892
24833e1a 1893 if (! REG_P (target))
1894 target = force_reg (SImode, target);
1895
1896 emit_insn (gen_mvfc (target, arg));
1897
1898 return target;
1899}
1900
67e66e16 1901static rtx
1902rx_expand_builtin_mvtipl (rtx arg)
1903{
1904 /* The RX610 does not support the MVTIPL instruction. */
1905 if (rx_cpu_type == RX610)
1906 return NULL_RTX;
1907
1908 if (! CONST_INT_P (arg) || ! IN_RANGE (arg, 0, (1 << 4) - 1))
1909 return NULL_RTX;
1910
1911 emit_insn (gen_mvtipl (arg));
1912
1913 return NULL_RTX;
1914}
1915
24833e1a 1916static rtx
1917rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1918{
1919 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1920 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1921
1922 if (! REG_P (arg1))
1923 arg1 = force_reg (SImode, arg1);
1924
1925 if (! REG_P (arg2))
1926 arg2 = force_reg (SImode, arg2);
1927
1928 emit_insn (gen_func (arg1, arg2));
1929
1930 return NULL_RTX;
1931}
1932
1933static rtx
1934rx_expand_int_builtin_1_arg (rtx arg,
1935 rtx target,
1936 rtx (* gen_func)(rtx, rtx),
1937 bool mem_ok)
1938{
1939 if (! REG_P (arg))
1940 if (!mem_ok || ! MEM_P (arg))
1941 arg = force_reg (SImode, arg);
1942
1943 if (target == NULL_RTX || ! REG_P (target))
1944 target = gen_reg_rtx (SImode);
1945
1946 emit_insn (gen_func (target, arg));
1947
1948 return target;
1949}
1950
1951static rtx
1952rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1953{
1954 if (target == NULL_RTX || ! REG_P (target))
1955 target = gen_reg_rtx (SImode);
1956
1957 emit_insn (gen_func (target));
1958
1959 return target;
1960}
1961
1962static rtx
1963rx_expand_builtin_round (rtx arg, rtx target)
1964{
1965 if ((! REG_P (arg) && ! MEM_P (arg))
1966 || GET_MODE (arg) != SFmode)
1967 arg = force_reg (SFmode, arg);
1968
1969 if (target == NULL_RTX || ! REG_P (target))
1970 target = gen_reg_rtx (SImode);
1971
1972 emit_insn (gen_lrintsf2 (target, arg));
1973
1974 return target;
1975}
1976
1977static rtx
1978rx_expand_builtin (tree exp,
1979 rtx target,
1980 rtx subtarget ATTRIBUTE_UNUSED,
1981 enum machine_mode mode ATTRIBUTE_UNUSED,
1982 int ignore ATTRIBUTE_UNUSED)
1983{
1984 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 1985 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 1986 rtx op = arg ? expand_normal (arg) : NULL_RTX;
1987 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
1988
1989 switch (fcode)
1990 {
1991 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
1992 case RX_BUILTIN_CLRPSW: return rx_expand_void_builtin_1_arg
1993 (op, gen_clrpsw, false);
1994 case RX_BUILTIN_SETPSW: return rx_expand_void_builtin_1_arg
1995 (op, gen_setpsw, false);
1996 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
1997 (op, gen_int, false);
1998 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
1999 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2000 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2001 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2002 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2003 (target, gen_mvfachi);
2004 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2005 (target, gen_mvfacmi);
2006 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2007 (op, gen_mvtachi, true);
2008 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2009 (op, gen_mvtaclo, true);
2010 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2011 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2012 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2013 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2014 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2015 (op, gen_racw, false);
2016 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2017 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2018 (op, target, gen_revw, false);
2019 case RX_BUILTIN_SAT: return rx_expand_int_builtin_1_arg
2020 (op, target, gen_sat, false);
2021 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2022
2023 default:
2024 internal_error ("bad builtin code");
2025 break;
2026 }
2027
2028 return NULL_RTX;
2029}
2030\f
2031/* Place an element into a constructor or destructor section.
2032 Like default_ctor_section_asm_out_constructor in varasm.c
2033 except that it uses .init_array (or .fini_array) and it
2034 handles constructor priorities. */
2035
2036static void
2037rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2038{
2039 section * s;
2040
2041 if (priority != DEFAULT_INIT_PRIORITY)
2042 {
2043 char buf[18];
2044
2045 sprintf (buf, "%s.%.5u",
2046 is_ctor ? ".init_array" : ".fini_array",
2047 priority);
2048 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2049 }
2050 else if (is_ctor)
2051 s = ctors_section;
2052 else
2053 s = dtors_section;
2054
2055 switch_to_section (s);
2056 assemble_align (POINTER_SIZE);
2057 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2058}
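
/* For example, a constructor registered with priority 101 is emitted into
   a section named ".init_array.00101" (the "%.5u" format above), whereas
   one with DEFAULT_INIT_PRIORITY goes into the plain ctors_section.  */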
2059
2060static void
2061rx_elf_asm_constructor (rtx symbol, int priority)
2062{
2063 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2064}
2065
2066static void
2067rx_elf_asm_destructor (rtx symbol, int priority)
2068{
2069 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2070}
2071\f
67e66e16 2072/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2073
2074static tree
2075rx_handle_func_attribute (tree * node,
2076 tree name,
2077 tree args,
2078 int flags ATTRIBUTE_UNUSED,
2079 bool * no_add_attrs)
2080{
2081 gcc_assert (DECL_P (* node));
2082 gcc_assert (args == NULL_TREE);
2083
2084 if (TREE_CODE (* node) != FUNCTION_DECL)
2085 {
2086 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2087 name);
2088 * no_add_attrs = true;
2089 }
2090
2091 /* FIXME: We ought to check for conflicting attributes. */
2092
2093 /* FIXME: We ought to check that the interrupt and exception
2094 handler attributes have been applied to void functions. */
2095 return NULL_TREE;
2096}
2097
2098/* Table of RX specific attributes. */
2099const struct attribute_spec rx_attribute_table[] =
2100{
2101 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
24833e1a 2102 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
67e66e16 2103 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
24833e1a 2104 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2105 { NULL, 0, 0, false, false, false, NULL }
2106};
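
/* Illustrative use from user code (a sketch; the attribute names are the
   ones registered in the table above):

       void handler (void) __attribute__ ((interrupt));
       void quick_handler (void) __attribute__ ((fast_interrupt));
       void raw_entry (void) __attribute__ ((naked));
*/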
2107
98cb9b5b 2108/* Extra processing for target specific command line options. */
2109
2110static bool
2111rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2112{
2113 switch (code)
2114 {
2115 case OPT_mint_register_:
2116 switch (value)
2117 {
2118 case 4:
2119 fixed_regs[10] = call_used_regs [10] = 1;
2120 /* Fall through. */
2121 case 3:
2122 fixed_regs[11] = call_used_regs [11] = 1;
2123 /* Fall through. */
2124 case 2:
2125 fixed_regs[12] = call_used_regs [12] = 1;
2126 /* Fall through. */
2127 case 1:
2128 fixed_regs[13] = call_used_regs [13] = 1;
2129 /* Fall through. */
2130 case 0:
2131 return true;
2132 default:
2133 return false;
2134 }
2135 break;
2136
2137 case OPT_mmax_constant_size_:
2138 /* Make sure that the -mmax-constant-size option is in range. */
2139 return value >= 0 && value <= 4;
2140
2141 case OPT_mcpu_:
98cb9b5b 2142 if (strcasecmp (arg, "RX610") == 0)
2143 rx_cpu_type = RX610;
2144 else if (strcasecmp (arg, "RX200") == 0)
2145 {
2146 target_flags |= MASK_NO_USE_FPU;
2147 rx_cpu_type = RX200;
2148 }
2149 else if (strcasecmp (arg, "RX600") != 0)
2150 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
2151 break;
2152
2153 case OPT_fpu:
2154 if (rx_cpu_type == RX200)
2155 error ("The RX200 cpu does not have FPU hardware");
2156 break;
2157
2158 default:
2159 break;
2160 }
2161
2162 return true;
2163}
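
/* Command-line sketch of the options handled above (spellings assumed to
   match rx.opt):

       -mint-register=3        reserve r13, r12 and r11 for interrupt handlers
       -mmax-constant-size=2   allow constant operands of at most 2 bytes
       -mcpu=rx200             select the RX200 and disable FPU instructions
       -fpu                    enable FPU instructions (rejected for the RX200)
*/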
2164
2165void
2166rx_set_optimization_options (void)
2167{
2168 static bool first_time = TRUE;
2169 static bool saved_allow_rx_fpu = TRUE;
2170
2171 if (first_time)
2172 {
2173 /* If this is the first time through and the user has not disabled
2174 the use of RX FPU hardware then enable unsafe math optimizations,
2175 since the FPU instructions themselves are unsafe. */
2176 if (TARGET_USE_FPU)
2177 set_fast_math_flags (true);
2178
2179 /* FIXME: For some unknown reason LTO compression is not working,
2180 at least on my local system. So set the default compression
2181 level to none, for now. */
2182 if (flag_lto_compression_level == -1)
2183 flag_lto_compression_level = 0;
2184
2185 saved_allow_rx_fpu = ALLOW_RX_FPU_INSNS;
2186 first_time = FALSE;
2187 }
2188 else
2189 {
2190 /* Alert the user if they are changing the optimization options
2191 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2192 if (TARGET_USE_FPU
2193 && ! fast_math_flags_set_p ())
2194 warning (0, "RX FPU instructions are not IEEE compliant");
2195
2196 if (saved_allow_rx_fpu != ALLOW_RX_FPU_INSNS)
2197 error ("Changing the FPU insns/math optimizations pairing is not supported");
2198 }
2199}
2200
1af17d44 2201static void
2202rx_option_override (void)
2203{
2204 /* This target defaults to strict volatile bitfields. */
2205 if (flag_strict_volatile_bitfields < 0)
2206 flag_strict_volatile_bitfields = 1;
2207}
2208
98cb9b5b 2209\f
24833e1a 2210static bool
2211rx_allocate_stack_slots_for_args (void)
2212{
2213 /* Naked functions should not allocate stack slots for arguments. */
2214 return ! is_naked_func (NULL_TREE);
2215}
2216
2217static bool
2218rx_func_attr_inlinable (const_tree decl)
2219{
2220 return ! is_fast_interrupt_func (decl)
67e66e16 2221 && ! is_interrupt_func (decl)
24833e1a 2222 && ! is_naked_func (decl);
2223}
2224
61fc50a0 2225/* Return nonzero if it is OK to make a tail-call to DECL,
2226 a function_decl, or NULL if this is an indirect call, using EXP.  */
2227
2228static bool
e4d9e8e5 2229rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2230{
2231 /* Do not allow indirect tailcalls. The
2232 sibcall patterns do not support them. */
2233 if (decl == NULL)
2234 return false;
2235
2236 /* Never tailcall from inside interrupt handlers or naked functions. */
2237 if (is_fast_interrupt_func (NULL_TREE)
2238 || is_interrupt_func (NULL_TREE)
2239 || is_naked_func (NULL_TREE))
2240 return false;
2241
2242 return true;
2243}
2244
24833e1a 2245static void
2246rx_file_start (void)
2247{
2248 if (! TARGET_AS100_SYNTAX)
2249 default_file_start ();
2250}
2251
2252static bool
2253rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2254{
2255 return TRUE;
2256}
2257
2258/* Try to generate code for the "insv" pattern which inserts bits
2259 into a word.
2260 operands[0] => Location to be altered.
2261 operands[1] => Number of bits to change.
2262 operands[2] => Starting bit.
2263 operands[3] => Value to insert.
2264 Returns TRUE if successful, FALSE otherwise. */
2265
2266bool
2267rx_expand_insv (rtx * operands)
2268{
2269 if (INTVAL (operands[1]) != 1
2270 || ! CONST_INT_P (operands[3]))
2271 return false;
2272
2273 if (MEM_P (operands[0])
2274 && INTVAL (operands[2]) > 7)
2275 return false;
2276
2277 switch (INTVAL (operands[3]))
2278 {
2279 case 0:
2280 if (MEM_P (operands[0]))
2281 emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2282 operands[2]));
2283 else
2284 emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2285 break;
2286 case 1:
2287 case -1:
2288 if (MEM_P (operands[0]))
2289 emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2290 operands[2]));
2291 else
2292 emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2293 break;
2294 default:
2295 return false;
2296 }
2297 return true;
2298}
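
/* Worked example (a sketch): a store of the constant 1 into a single-bit
   field, e.g.

       struct { unsigned int flag : 1; } s;
       s.flag = 1;

   is expanded via gen_bitset (the RX BSET instruction), while storing 0
   uses gen_bitclr (BCLR).  Multi-bit inserts, non-constant values and
   memory operands with a bit position above 7 are rejected so that the
   generic expansion is used instead.  */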
2299\f
2300/* Returns true if X is a legitimate constant for an immediate
2301 operand on the RX. X is already known to satisfy CONSTANT_P. */
2302
2303bool
2304rx_is_legitimate_constant (rtx x)
2305{
2306 HOST_WIDE_INT val;
2307
2308 switch (GET_CODE (x))
2309 {
2310 case CONST:
2311 x = XEXP (x, 0);
2312
2313 if (GET_CODE (x) == PLUS)
2314 {
2315 if (! CONST_INT_P (XEXP (x, 1)))
2316 return false;
2317
2318 /* GCC would not pass us CONST_INT + CONST_INT so we
2319 know that we have {SYMBOL|LABEL} + CONST_INT. */
2320 x = XEXP (x, 0);
2321 gcc_assert (! CONST_INT_P (x));
2322 }
2323
2324 switch (GET_CODE (x))
2325 {
2326 case LABEL_REF:
2327 case SYMBOL_REF:
2328 return true;
2329
2330 /* One day we may have to handle UNSPEC constants here. */
2331 default:
2332 /* FIXME: Can this ever happen ? */
2333 abort ();
2334 return false;
2335 }
2336 break;
2337
2338 case LABEL_REF:
2339 case SYMBOL_REF:
2340 return true;
2341 case CONST_DOUBLE:
09bb92cc 2342 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2343 case CONST_VECTOR:
2344 return false;
2345 default:
2346 gcc_assert (CONST_INT_P (x));
2347 break;
2348 }
2349
09bb92cc 2350 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
24833e1a 2351 /* If there is no constraint on the size of constants
2352 used as operands, then any value is legitimate. */
2353 return true;
2354
2355 val = INTVAL (x);
2356
2357 /* rx_max_constant_size specifies the maximum number
2358 of bytes that can be used to hold a signed value. */
2359 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2360 ( 1 << (rx_max_constant_size * 8)));
2361}
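
/* Worked example of the range check above: with rx_max_constant_size == 1
   (e.g. -mmax-constant-size=1) the accepted interval is
   [-1 << 8, 1 << 8], i.e. -256 .. 256, while a value of 0 or 4 places no
   limit on integer constants.  */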
2362
24833e1a 2363static int
2364rx_address_cost (rtx addr, bool speed)
2365{
2366 rtx a, b;
2367
2368 if (GET_CODE (addr) != PLUS)
2369 return COSTS_N_INSNS (1);
2370
2371 a = XEXP (addr, 0);
2372 b = XEXP (addr, 1);
2373
2374 if (REG_P (a) && REG_P (b))
2375 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2376 return COSTS_N_INSNS (4);
2377
2378 if (speed)
2379 /* [REG+OFF] is just as fast as [REG]. */
2380 return COSTS_N_INSNS (1);
2381
2382 if (CONST_INT_P (b)
2383 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2384 /* Try to discourage REG + <large OFF> when optimizing for size. */
2385 return COSTS_N_INSNS (2);
2386
2387 return COSTS_N_INSNS (1);
2388}
2389
2390static bool
2391rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2392{
2393 /* We can always eliminate to the frame pointer.
2394 We can eliminate to the stack pointer unless a frame
2395 pointer is needed. */
2396
2397 return to == FRAME_POINTER_REGNUM
2398 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2399}
2400\f
2401
2402static void
2403rx_trampoline_template (FILE * file)
2404{
2405 /* Output assembler code for a block containing the constant
2406 part of a trampoline, leaving space for the variable parts.
2407
2408 On the RX, (where r8 is the static chain regnum) the trampoline
2409 looks like:
2410
2411 mov #<static chain value>, r8
2412 mov #<function's address>, r9
2413 jmp r9
2414
2415 In big-endian-data mode, however, instructions are read into the CPU
2416 4 bytes at a time. These bytes are then swapped around before being
2417 passed to the decoder. So...we must partition our trampoline into
2418 4 byte packets and swap these packets around so that the instruction
2419 reader will reverse the process. But, in order to avoid splitting
2420 the 32-bit constants across these packet boundaries (which would make
2421 inserting them into the constructed trampoline very difficult), we have
2422 to pad the instruction sequence with NOP insns, i.e.:
2423
2424 nop
2425 nop
2426 mov.l #<...>, r8
2427 nop
2428 nop
2429 mov.l #<...>, r9
2430 jmp r9
2431 nop
2432 nop */
2433
2434 if (! TARGET_BIG_ENDIAN_DATA)
2435 {
2436 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2437 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2438 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2439 }
2440 else
2441 {
2442 char r8 = '0' + STATIC_CHAIN_REGNUM;
2443 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2444
2445 if (TARGET_AS100_SYNTAX)
2446 {
2447 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2448 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2449 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2450 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2451 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2452 }
2453 else
2454 {
2455 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2456 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2457 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2458 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2459 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2460 }
2461 }
2462}
2463
2464static void
2465rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2466{
2467 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2468
2469 emit_block_move (tramp, assemble_trampoline_template (),
2470 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2471
2472 if (TARGET_BIG_ENDIAN_DATA)
2473 {
2474 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2475 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2476 }
2477 else
2478 {
2479 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2480 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2481 }
2482}
2483\f
9d2f1b03 2484
2485static enum machine_mode
2486rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2487{
2488 if (m1 == CCmode)
2489 return m2;
2490 if (m2 == CCmode)
2491 return m1;
2492 if (m1 == m2)
2493 return m1;
2494 if (m1 == CC_ZSmode)
2495 return m1;
2496 if (m2 == CC_ZSmode)
2497 return m2;
2498 return VOIDmode;
2499}
2500
2501#define CC_FLAG_S (1 << 0)
2502#define CC_FLAG_Z (1 << 1)
2503#define CC_FLAG_O (1 << 2)
2504#define CC_FLAG_C (1 << 3)
2505
2506static unsigned int
2507flags_needed_for_conditional (rtx conditional)
2508{
2509 switch (GET_CODE (conditional))
2510 {
2511 case LE:
2512 case GT: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2513
2514 case LEU:
2515 case GTU: return CC_FLAG_Z | CC_FLAG_C;
2516
2517 case LT:
2518 case GE: return CC_FLAG_S | CC_FLAG_O;
2519
2520 case LTU:
2521 case GEU: return CC_FLAG_C;
2522
2523 case EQ:
2524 case NE: return CC_FLAG_Z;
2525
2526 default: gcc_unreachable ();
2527 }
2528}
2529
2530static unsigned int
2531flags_from_mode (enum machine_mode mode)
2532{
2533 switch (mode)
2534 {
2535 case CCmode: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2536 case CC_ZSmode: return CC_FLAG_S | CC_FLAG_Z;
2537 case CC_ZSOmode: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2538 case CC_ZSCmode: return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2539 default: gcc_unreachable ();
2540 }
2541}
2542
2543/* Returns true if a compare insn is redundant because it
2544 would only set flags that are already set correctly. */
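
/* A sketch of the situation this looks for (assuming the md patterns
   record the PSW flags as described):

       add     #1, r1       arithmetic insn that also sets PSW flags
       cmp     #0, r1       candidate compare against zero
       beq     .Lskip       conditional branch needing only the Z flag

   If the flags recorded by the ADD cover everything the branch needs
   (here just Z), the CMP is redundant and can be deleted.  */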
2545
2546bool
2547rx_compare_redundant (rtx cmp)
2548{
2549 unsigned int flags_needed;
2550 unsigned int flags_set;
2551 rtx next;
2552 rtx prev;
2553 rtx source;
2554 rtx dest;
2555 static rtx cc_reg = NULL_RTX;
2556
2557 if (cc_reg == NULL_RTX)
2558 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
2559
2560 /* We can only eliminate compares against 0. */
2561 if (GET_CODE (XEXP (SET_SRC (PATTERN (cmp)), 1)) != CONST_INT
2562 || INTVAL (XEXP (SET_SRC (PATTERN (cmp)), 1)) != 0)
2563 return false;
2564
2565 /* Locate the branch insn that follows the
2566 compare and which tests the bits in the PSW. */
2567 next = cmp;
2568 do
2569 {
2570 /* If we have found an insn that sets or clobbers the CC
2571 register and it was not the IF_THEN_ELSE insn that we
2572 are looking for, then the comparison is redundant. */
2573 if (next != cmp && reg_mentioned_p (cc_reg, PATTERN (next)))
2574 return true;
2575
2576 next = next_nonnote_insn (next);
2577
2578 /* If we run out of insns without finding the
2579 user then the comparison is unnecessary. */
2580 if (next == NULL_RTX)
2581 return true;
2582
2583 /* If we have found another comparison
2584 insn then the first one is redundant. */
2585 if (INSN_P (next)
2586 && GET_CODE (PATTERN (next)) == SET
2587 && REG_P (SET_DEST (PATTERN (next)))
2588 && REGNO (SET_DEST (PATTERN (next))) == CC_REGNUM)
2589 return true;
2590
2591 /* If we have found another arithmetic/logic insn that
2592 sets the PSW flags then the comparison is redundant. */
2593 if (INSN_P (next)
2594 && GET_CODE (PATTERN (next)) == PARALLEL
2595 && GET_CODE (XVECEXP (PATTERN (next), 0, 1)) == SET
2596 && REG_P (SET_DEST (XVECEXP (PATTERN (next), 0, 1)))
2597 && REGNO (SET_DEST (XVECEXP (PATTERN (next), 0, 1))) == CC_REGNUM)
2598 return true;
2599
2600 /* If we have found an unconditional branch then the
2601 PSW flags might be carried along with the jump, so
2602 the comparison is necessary. */
2603 if (INSN_P (next) && JUMP_P (next))
2604 {
2605 if (GET_CODE (PATTERN (next)) != SET)
2606 /* If the jump does not involve setting the PC
2607 then it is a return of some kind, and we know
2608 that the comparison is not used. */
2609 return true;
2610
2611 if (GET_CODE (SET_SRC (PATTERN (next))) != IF_THEN_ELSE)
2612 return false;
2613 }
2614 }
2615 while (! INSN_P (next)
2616 || DEBUG_INSN_P (next)
2617 || GET_CODE (PATTERN (next)) != SET
2618 || GET_CODE (SET_SRC (PATTERN (next))) != IF_THEN_ELSE);
2619
2620 flags_needed = flags_needed_for_conditional (XEXP (SET_SRC (PATTERN (next)), 0));
2621
2622 /* Now look to see if there was a previous
2623 instruction which set the PSW bits. */
2624 source = XEXP (SET_SRC (PATTERN (cmp)), 0);
2625 prev = cmp;
2626 do
2627 {
2628 /* If this insn uses/sets/clobbers the CC register
2629 and it is not the insn that we are looking for
2630 below, then we must need the comparison. */
2631 if (prev != cmp && reg_mentioned_p (cc_reg, PATTERN (prev)))
2632 return false;
2633
2634 prev = prev_nonnote_insn (prev);
2635
2636 if (prev == NULL_RTX)
2637 return false;
2638
2639 /* If we encounter an insn which changes the contents of
2640 the register which is the source of the comparison then
2641 we will definitely need the comparison. */
2642 if (INSN_P (prev)
2643 && GET_CODE (PATTERN (prev)) == SET
2644 && rtx_equal_p (SET_DEST (PATTERN (prev)), source))
2645 {
2646 /* Unless this instruction is a simple register move
2647 instruction. In which case we can continue our
2648 scan backwards, but now using the *source* of this
2649 set instruction. */
2650 if (REG_P (SET_SRC (PATTERN (prev))))
2651 source = SET_SRC (PATTERN (prev));
2652 /* We can also survive a sign-extension if the test is
2653 for EQ/NE. Note the same does not apply to zero-
2654 extension as this can turn a non-zero bit-pattern
2655 into zero. */
2656 else if (flags_needed == CC_FLAG_Z
2657 && GET_CODE (SET_SRC (PATTERN (prev))) == SIGN_EXTEND)
2658 source = XEXP (SET_SRC (PATTERN (prev)), 0);
2659 else
2660 return false;
2661 }
2662
2663 /* A label means a possible branch into the
2664 code here, so we have to stop scanning. */
2665 if (LABEL_P (prev))
2666 return false;
2667 }
2668 while (! INSN_P (prev)
2669 || DEBUG_INSN_P (prev)
2670 || GET_CODE (PATTERN (prev)) != PARALLEL
2671 || GET_CODE (XVECEXP (PATTERN (prev), 0, 1)) != SET
2672 || ! REG_P (SET_DEST (XVECEXP (PATTERN (prev), 0, 1)))
2673 || REGNO (SET_DEST (XVECEXP (PATTERN (prev), 0, 1))) != CC_REGNUM);
2674
2675 flags_set = flags_from_mode (GET_MODE (SET_DEST (XVECEXP (PATTERN (prev), 0, 1))));
2676
2677 dest = SET_DEST (XVECEXP (PATTERN (prev), 0, 0));
2678 /* The destination of the previous arithmetic/logic instruction
2679 must match the source in the comparison operation. For registers
2680 we ignore the mode as there may have been a sign-extension involved. */
2681 if (! rtx_equal_p (source, dest))
2682 {
2683 if (REG_P (source) && REG_P (dest) && REGNO (dest) == REGNO (source))
2684 ;
2685 else
2686 return false;
2687 }
2688
2689 return ((flags_set & flags_needed) == flags_needed);
2690}
2691
2692static int
2693rx_memory_move_cost (enum machine_mode mode, enum reg_class regclass, bool in)
2694{
2695 return 2 + memory_move_secondary_cost (mode, regclass, in);
2696}
2697\f
24833e1a 2698#undef TARGET_FUNCTION_VALUE
2699#define TARGET_FUNCTION_VALUE rx_function_value
2700
2701#undef TARGET_RETURN_IN_MSB
2702#define TARGET_RETURN_IN_MSB rx_return_in_msb
2703
2704#undef TARGET_IN_SMALL_DATA_P
2705#define TARGET_IN_SMALL_DATA_P rx_in_small_data
2706
2707#undef TARGET_RETURN_IN_MEMORY
2708#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2709
2710#undef TARGET_HAVE_SRODATA_SECTION
2711#define TARGET_HAVE_SRODATA_SECTION true
2712
2713#undef TARGET_ASM_SELECT_RTX_SECTION
2714#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2715
2716#undef TARGET_ASM_SELECT_SECTION
2717#define TARGET_ASM_SELECT_SECTION rx_select_section
2718
2719#undef TARGET_INIT_BUILTINS
2720#define TARGET_INIT_BUILTINS rx_init_builtins
2721
2722#undef TARGET_EXPAND_BUILTIN
2723#define TARGET_EXPAND_BUILTIN rx_expand_builtin
2724
2725#undef TARGET_ASM_CONSTRUCTOR
2726#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2727
2728#undef TARGET_ASM_DESTRUCTOR
2729#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2730
2731#undef TARGET_STRUCT_VALUE_RTX
2732#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2733
2734#undef TARGET_ATTRIBUTE_TABLE
2735#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2736
2737#undef TARGET_ASM_FILE_START
2738#define TARGET_ASM_FILE_START rx_file_start
2739
2740#undef TARGET_MS_BITFIELD_LAYOUT_P
2741#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2742
2743#undef TARGET_LEGITIMATE_ADDRESS_P
2744#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2745
2746#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2747#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2748
2749#undef TARGET_ASM_FUNCTION_PROLOGUE
2750#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2751
2752#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2753#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2754
61fc50a0 2755#undef TARGET_FUNCTION_OK_FOR_SIBCALL
2756#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2757
ee4e8428 2758#undef TARGET_FUNCTION_ARG
2759#define TARGET_FUNCTION_ARG rx_function_arg
2760
2761#undef TARGET_FUNCTION_ARG_ADVANCE
2762#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
2763
24833e1a 2764#undef TARGET_SET_CURRENT_FUNCTION
2765#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2766
2767#undef TARGET_HANDLE_OPTION
2768#define TARGET_HANDLE_OPTION rx_handle_option
2769
2770#undef TARGET_ASM_INTEGER
2771#define TARGET_ASM_INTEGER rx_assemble_integer
2772
2773#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2774#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2775
2776#undef TARGET_MAX_ANCHOR_OFFSET
2777#define TARGET_MAX_ANCHOR_OFFSET 32
2778
2779#undef TARGET_ADDRESS_COST
2780#define TARGET_ADDRESS_COST rx_address_cost
2781
2782#undef TARGET_CAN_ELIMINATE
2783#define TARGET_CAN_ELIMINATE rx_can_eliminate
2784
2785#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2786#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2787
2788#undef TARGET_TRAMPOLINE_INIT
2789#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2790
6bb30542 2791#undef TARGET_PRINT_OPERAND
2792#define TARGET_PRINT_OPERAND rx_print_operand
2793
2794#undef TARGET_PRINT_OPERAND_ADDRESS
2795#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
2796
9d2f1b03 2797#undef TARGET_CC_MODES_COMPATIBLE
2798#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
2799
2800#undef TARGET_MEMORY_MOVE_COST
2801#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
2802
1af17d44 2803#undef TARGET_OPTION_OVERRIDE
2804#define TARGET_OPTION_OVERRIDE rx_option_override
2805
24833e1a 2806struct gcc_target targetm = TARGET_INITIALIZER;
2807
2808/* #include "gt-rx.h" */