24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
95272799 2 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "tree.h"
30#include "rtl.h"
31#include "regs.h"
32#include "hard-reg-set.h"
24833e1a 33#include "insn-config.h"
34#include "conditions.h"
35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
38#include "function.h"
39#include "expr.h"
40#include "optabs.h"
41#include "libfuncs.h"
42#include "recog.h"
0b205f4c 43#include "diagnostic-core.h"
24833e1a 44#include "toplev.h"
45#include "reload.h"
46#include "df.h"
47#include "ggc.h"
48#include "tm_p.h"
49#include "debug.h"
50#include "target.h"
51#include "target-def.h"
52#include "langhooks.h"
53\f
6bb30542 54static void rx_print_operand (FILE *, rtx, int);
55
ccfccd66 56#define CC_FLAG_S (1 << 0)
57#define CC_FLAG_Z (1 << 1)
58#define CC_FLAG_O (1 << 2)
59#define CC_FLAG_C (1 << 3)
60#define CC_FLAG_FP (1 << 4) /* fake, to differentiate CC_Fmode */
61
62static unsigned int flags_from_mode (enum machine_mode mode);
63static unsigned int flags_from_code (enum rtx_code code);
64
67e66e16 65enum rx_cpu_types rx_cpu_type = RX600;
66\f
24833e1a 67/* Return true if OP is a reference to an object in a small data area. */
68
69static bool
70rx_small_data_operand (rtx op)
71{
72 if (rx_small_data_limit == 0)
73 return false;
74
75 if (GET_CODE (op) == SYMBOL_REF)
76 return SYMBOL_REF_SMALL_P (op);
77
78 return false;
79}
80
81static bool
82rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
83{
84 if (RTX_OK_FOR_BASE (x, strict))
85 /* Register Indirect. */
86 return true;
87
88 if (GET_MODE_SIZE (mode) == 4
89 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
90 /* Pre-decrement Register Indirect or
91 Post-increment Register Indirect. */
92 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
93
94 if (GET_CODE (x) == PLUS)
95 {
96 rtx arg1 = XEXP (x, 0);
97 rtx arg2 = XEXP (x, 1);
98 rtx index = NULL_RTX;
99
100 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
101 index = arg2;
102 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
103 index = arg1;
104 else
105 return false;
106
107 switch (GET_CODE (index))
108 {
109 case CONST_INT:
110 {
111 /* Register Relative: REG + INT.
112 Only positive, mode-aligned, mode-sized
113 displacements are allowed. */
114 HOST_WIDE_INT val = INTVAL (index);
115 int factor;
116
117 if (val < 0)
118 return false;
119
120 switch (GET_MODE_SIZE (mode))
121 {
122 default:
123 case 4: factor = 4; break;
124 case 2: factor = 2; break;
125 case 1: factor = 1; break;
126 }
127
128 if (val > (65535 * factor))
129 return false;
130 return (val % factor) == 0;
131 }
132
133 case REG:
134 /* Unscaled Indexed Register Indirect: REG + REG
135 Size has to be "QI", REG has to be valid. */
136 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
137
138 case MULT:
139 {
140 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
141 Factor has to equal the mode size, REG has to be valid. */
142 rtx factor;
143
144 factor = XEXP (index, 1);
145 index = XEXP (index, 0);
146
147 return REG_P (index)
148 && RTX_OK_FOR_BASE (index, strict)
149 && CONST_INT_P (factor)
150 && GET_MODE_SIZE (mode) == INTVAL (factor);
151 }
152
153 default:
154 return false;
155 }
156 }
157
158 /* Small data area accesses turn into register relative offsets. */
159 return rx_small_data_operand (x);
160}
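/* Worked example of the displacement rules above (illustrative only, derived
   from the checks in this function rather than from the RX ISA manual):
   for an SImode access the factor is 4, so (plus (reg) (const_int 8)) is
   accepted, (plus (reg) (const_int 6)) is rejected because 6 is not a
   multiple of 4, and (plus (reg) (const_int 262144)) is rejected because
   it exceeds 65535 * 4.  */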
161
 162/* Returns TRUE for simple memory addresses, i.e. ones
163 that do not involve register indirect addressing
164 or pre/post increment/decrement. */
165
166bool
167rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
168{
169 rtx base, index;
170
171 if (! rx_is_legitimate_address
172 (mode, mem, reload_in_progress || reload_completed))
173 return false;
174
175 switch (GET_CODE (mem))
176 {
177 case REG:
178 /* Simple memory addresses are OK. */
179 return true;
180
181 case PRE_DEC:
182 case POST_INC:
183 return false;
184
185 case PLUS:
186 /* Only allow REG+INT addressing. */
187 base = XEXP (mem, 0);
188 index = XEXP (mem, 1);
189
190 return RX_REG_P (base) && CONST_INT_P (index);
191
192 case SYMBOL_REF:
193 /* Can happen when small data is being supported.
194 Assume that it will be resolved into GP+INT. */
195 return true;
196
197 default:
198 gcc_unreachable ();
199 }
200}
201
202bool
203rx_is_mode_dependent_addr (rtx addr)
204{
205 if (GET_CODE (addr) == CONST)
206 addr = XEXP (addr, 0);
207
208 switch (GET_CODE (addr))
209 {
210 /* --REG and REG++ only work in SImode. */
211 case PRE_DEC:
212 case POST_INC:
213 return true;
214
215 case MINUS:
216 case PLUS:
217 if (! REG_P (XEXP (addr, 0)))
218 return true;
219
220 addr = XEXP (addr, 1);
221
222 switch (GET_CODE (addr))
223 {
224 case REG:
225 /* REG+REG only works in SImode. */
226 return true;
227
228 case CONST_INT:
 229	  /* REG+INT is only mode independent if INT is a
 230	     positive multiple of 4 that will fit into 8 bits.  */
231 if (((INTVAL (addr) & 3) == 0)
232 && IN_RANGE (INTVAL (addr), 4, 252))
233 return false;
234 return true;
235
236 case SYMBOL_REF:
237 case LABEL_REF:
238 return true;
239
240 case MULT:
241 gcc_assert (REG_P (XEXP (addr, 0)));
242 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
243 /* REG+REG*SCALE is always mode dependent. */
244 return true;
245
246 default:
247 /* Not recognized, so treat as mode dependent. */
248 return true;
249 }
250
251 case CONST_INT:
252 case SYMBOL_REF:
253 case LABEL_REF:
254 case REG:
255 /* These are all mode independent. */
256 return false;
257
258 default:
259 /* Everything else is unrecognized,
260 so treat as mode dependent. */
261 return true;
262 }
263}
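/* For illustration (derived from the checks above, not from the hardware
   manual): an address of the form REG + 8 stays valid whatever the access
   mode, since 8 is a positive multiple of 4 inside the 4..252 window,
   whereas REG + 2 and REG + 256 are treated as mode dependent.  */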
264\f
24833e1a 265/* A C compound statement to output to stdio stream FILE the
266 assembler syntax for an instruction operand that is a memory
267 reference whose address is ADDR. */
268
6bb30542 269static void
24833e1a 270rx_print_operand_address (FILE * file, rtx addr)
271{
272 switch (GET_CODE (addr))
273 {
274 case REG:
275 fprintf (file, "[");
276 rx_print_operand (file, addr, 0);
277 fprintf (file, "]");
278 break;
279
280 case PRE_DEC:
281 fprintf (file, "[-");
282 rx_print_operand (file, XEXP (addr, 0), 0);
283 fprintf (file, "]");
284 break;
285
286 case POST_INC:
287 fprintf (file, "[");
288 rx_print_operand (file, XEXP (addr, 0), 0);
289 fprintf (file, "+]");
290 break;
291
292 case PLUS:
293 {
294 rtx arg1 = XEXP (addr, 0);
295 rtx arg2 = XEXP (addr, 1);
296 rtx base, index;
297
298 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
299 base = arg1, index = arg2;
300 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
301 base = arg2, index = arg1;
302 else
303 {
304 rx_print_operand (file, arg1, 0);
305 fprintf (file, " + ");
306 rx_print_operand (file, arg2, 0);
307 break;
308 }
309
310 if (REG_P (index) || GET_CODE (index) == MULT)
311 {
312 fprintf (file, "[");
313 rx_print_operand (file, index, 'A');
314 fprintf (file, ",");
315 }
316 else /* GET_CODE (index) == CONST_INT */
317 {
318 rx_print_operand (file, index, 'A');
319 fprintf (file, "[");
320 }
321 rx_print_operand (file, base, 0);
322 fprintf (file, "]");
323 break;
324 }
325
95272799 326 case CONST:
327 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
328 {
329 addr = XEXP (addr, 0);
330 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
331
332 addr = XVECEXP (addr, 0, 0);
333 gcc_assert (CONST_INT_P (addr));
334 }
335 /* Fall through. */
24833e1a 336 case LABEL_REF:
337 case SYMBOL_REF:
24833e1a 338 fprintf (file, "#");
95272799 339
24833e1a 340 default:
341 output_addr_const (file, addr);
342 break;
343 }
344}
345
346static void
347rx_print_integer (FILE * file, HOST_WIDE_INT val)
348{
349 if (IN_RANGE (val, -64, 64))
350 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
351 else
352 fprintf (file,
353 TARGET_AS100_SYNTAX
354 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
355 val);
356}
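/* For example: a value of 50 is printed as "50", while 100 falls outside the
   [-64, 64] window and so is printed as "0x64" for GAS, or as "064H" when
   TARGET_AS100_SYNTAX is in effect.  (Illustrative only.)  */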
357
358static bool
359rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
360{
361 const char * op = integer_asm_op (size, is_aligned);
362
363 if (! CONST_INT_P (x))
364 return default_assemble_integer (x, size, is_aligned);
365
366 if (op == NULL)
367 return false;
368 fputs (op, asm_out_file);
369
370 rx_print_integer (asm_out_file, INTVAL (x));
371 fputc ('\n', asm_out_file);
372 return true;
373}
374
375
24833e1a 376/* Handles the insertion of a single operand into the assembler output.
377 The %<letter> directives supported are:
378
379 %A Print an operand without a leading # character.
380 %B Print an integer comparison name.
381 %C Print a control register name.
382 %F Print a condition code flag name.
383 %H Print high part of a DImode register, integer or address.
384 %L Print low part of a DImode register, integer or address.
6bb30542 385 %N Print the negation of the immediate value.
24833e1a 386 %Q If the operand is a MEM, then correctly generate
387 register indirect or register relative addressing. */
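/* As an illustrative example (not taken from real compiler output): for a
   CONST_INT operand of 10, the %A directive prints just "10", whereas the
   default handling below prints "#10".  */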
388
6bb30542 389static void
24833e1a 390rx_print_operand (FILE * file, rtx op, int letter)
391{
392 switch (letter)
393 {
394 case 'A':
395 /* Print an operand without a leading #. */
396 if (MEM_P (op))
397 op = XEXP (op, 0);
398
399 switch (GET_CODE (op))
400 {
401 case LABEL_REF:
402 case SYMBOL_REF:
403 output_addr_const (file, op);
404 break;
405 case CONST_INT:
406 fprintf (file, "%ld", (long) INTVAL (op));
407 break;
408 default:
409 rx_print_operand (file, op, 0);
410 break;
411 }
412 break;
413
414 case 'B':
ccfccd66 415 {
416 enum rtx_code code = GET_CODE (op);
417 enum machine_mode mode = GET_MODE (XEXP (op, 0));
418 const char *ret;
419
420 if (mode == CC_Fmode)
421 {
422 /* C flag is undefined, and O flag carries unordered. None of the
423 branch combinations that include O use it helpfully. */
424 switch (code)
425 {
426 case ORDERED:
427 ret = "no";
428 break;
429 case UNORDERED:
430 ret = "o";
431 break;
432 case LT:
433 ret = "n";
434 break;
435 case GE:
436 ret = "pz";
437 break;
438 case EQ:
439 ret = "eq";
440 break;
441 case NE:
442 ret = "ne";
443 break;
444 default:
445 gcc_unreachable ();
446 }
447 }
448 else
449 {
450 switch (code)
451 {
452 case LT:
453 ret = "n";
454 break;
455 case GE:
456 ret = "pz";
457 break;
458 case GT:
459 ret = "gt";
460 break;
461 case LE:
462 ret = "le";
463 break;
464 case GEU:
465 ret = "geu";
466 break;
467 case LTU:
468 ret = "ltu";
469 break;
470 case GTU:
471 ret = "gtu";
472 break;
473 case LEU:
474 ret = "leu";
475 break;
476 case EQ:
477 ret = "eq";
478 break;
479 case NE:
480 ret = "ne";
481 break;
482 default:
483 gcc_unreachable ();
484 }
ccfccd66 485 gcc_checking_assert ((flags_from_code (code)
486 & ~flags_from_mode (mode)) == 0);
487 }
488 fputs (ret, file);
489 break;
490 }
24833e1a 491
492 case 'C':
493 gcc_assert (CONST_INT_P (op));
494 switch (INTVAL (op))
495 {
496 case 0: fprintf (file, "psw"); break;
497 case 2: fprintf (file, "usp"); break;
498 case 3: fprintf (file, "fpsw"); break;
499 case 4: fprintf (file, "cpen"); break;
500 case 8: fprintf (file, "bpsw"); break;
501 case 9: fprintf (file, "bpc"); break;
502 case 0xa: fprintf (file, "isp"); break;
503 case 0xb: fprintf (file, "fintv"); break;
504 case 0xc: fprintf (file, "intb"); break;
505 default:
98cb9b5b 506	  warning (0, "unrecognized control register number: %d - using 'psw'",
6bb30542 507 (int) INTVAL (op));
98cb9b5b 508 fprintf (file, "psw");
509 break;
24833e1a 510 }
511 break;
512
513 case 'F':
514 gcc_assert (CONST_INT_P (op));
515 switch (INTVAL (op))
516 {
517 case 0: case 'c': case 'C': fprintf (file, "C"); break;
518 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
519 case 2: case 's': case 'S': fprintf (file, "S"); break;
520 case 3: case 'o': case 'O': fprintf (file, "O"); break;
521 case 8: case 'i': case 'I': fprintf (file, "I"); break;
522 case 9: case 'u': case 'U': fprintf (file, "U"); break;
523 default:
524 gcc_unreachable ();
525 }
526 break;
527
528 case 'H':
6bb30542 529 switch (GET_CODE (op))
24833e1a 530 {
6bb30542 531 case REG:
532 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
533 break;
534 case CONST_INT:
535 {
536 HOST_WIDE_INT v = INTVAL (op);
67e66e16 537
6bb30542 538 fprintf (file, "#");
539 /* Trickery to avoid problems with shifting 32 bits at a time. */
540 v = v >> 16;
541 v = v >> 16;
542 rx_print_integer (file, v);
543 break;
544 }
545 case CONST_DOUBLE:
24833e1a 546 fprintf (file, "#");
6bb30542 547 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
548 break;
549 case MEM:
24833e1a 550 if (! WORDS_BIG_ENDIAN)
551 op = adjust_address (op, SImode, 4);
552 output_address (XEXP (op, 0));
6bb30542 553 break;
554 default:
555 gcc_unreachable ();
24833e1a 556 }
557 break;
558
559 case 'L':
6bb30542 560 switch (GET_CODE (op))
24833e1a 561 {
6bb30542 562 case REG:
563 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
564 break;
565 case CONST_INT:
24833e1a 566 fprintf (file, "#");
567 rx_print_integer (file, INTVAL (op) & 0xffffffff);
6bb30542 568 break;
569 case CONST_DOUBLE:
570 fprintf (file, "#");
571 rx_print_integer (file, CONST_DOUBLE_LOW (op));
572 break;
573 case MEM:
24833e1a 574 if (WORDS_BIG_ENDIAN)
575 op = adjust_address (op, SImode, 4);
576 output_address (XEXP (op, 0));
6bb30542 577 break;
578 default:
579 gcc_unreachable ();
24833e1a 580 }
581 break;
582
39349585 583 case 'N':
584 gcc_assert (CONST_INT_P (op));
585 fprintf (file, "#");
586 rx_print_integer (file, - INTVAL (op));
587 break;
588
24833e1a 589 case 'Q':
590 if (MEM_P (op))
591 {
592 HOST_WIDE_INT offset;
593
594 op = XEXP (op, 0);
595
596 if (REG_P (op))
597 offset = 0;
598 else if (GET_CODE (op) == PLUS)
599 {
600 rtx displacement;
601
602 if (REG_P (XEXP (op, 0)))
603 {
604 displacement = XEXP (op, 1);
605 op = XEXP (op, 0);
606 }
607 else
608 {
609 displacement = XEXP (op, 0);
610 op = XEXP (op, 1);
611 gcc_assert (REG_P (op));
612 }
613
614 gcc_assert (CONST_INT_P (displacement));
615 offset = INTVAL (displacement);
616 gcc_assert (offset >= 0);
617
618 fprintf (file, "%ld", offset);
619 }
620 else
621 gcc_unreachable ();
622
623 fprintf (file, "[");
624 rx_print_operand (file, op, 0);
625 fprintf (file, "].");
626
627 switch (GET_MODE_SIZE (GET_MODE (op)))
628 {
629 case 1:
630 gcc_assert (offset < 65535 * 1);
631 fprintf (file, "B");
632 break;
633 case 2:
634 gcc_assert (offset % 2 == 0);
635 gcc_assert (offset < 65535 * 2);
636 fprintf (file, "W");
637 break;
638 default:
639 gcc_assert (offset % 4 == 0);
640 gcc_assert (offset < 65535 * 4);
641 fprintf (file, "L");
642 break;
643 }
644 break;
645 }
646
647 /* Fall through. */
648
649 default:
650 switch (GET_CODE (op))
651 {
652 case MULT:
653 /* Should be the scaled part of an
654 indexed register indirect address. */
655 {
656 rtx base = XEXP (op, 0);
657 rtx index = XEXP (op, 1);
658
 659	  /* Check for a swapped index register and scaling factor.
660 Not sure if this can happen, but be prepared to handle it. */
661 if (CONST_INT_P (base) && REG_P (index))
662 {
663 rtx tmp = base;
664 base = index;
665 index = tmp;
666 }
667
668 gcc_assert (REG_P (base));
669 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
670 gcc_assert (CONST_INT_P (index));
 671	  /* Do not try to verify the value of the scale factor, as it
 672	     is based on the mode of the MEM, not the mode of the MULT
 673	     (which will always be SImode).  */
674 fprintf (file, "%s", reg_names [REGNO (base)]);
675 break;
676 }
677
678 case MEM:
679 output_address (XEXP (op, 0));
680 break;
681
682 case PLUS:
683 output_address (op);
684 break;
685
686 case REG:
687 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
688 fprintf (file, "%s", reg_names [REGNO (op)]);
689 break;
690
691 case SUBREG:
692 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
693 fprintf (file, "%s", reg_names [subreg_regno (op)]);
694 break;
695
696 /* This will only be single precision.... */
697 case CONST_DOUBLE:
698 {
699 unsigned long val;
700 REAL_VALUE_TYPE rv;
701
702 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
703 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
704 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
705 break;
706 }
707
708 case CONST_INT:
709 fprintf (file, "#");
710 rx_print_integer (file, INTVAL (op));
711 break;
712
713 case SYMBOL_REF:
714 case CONST:
715 case LABEL_REF:
716 case CODE_LABEL:
717 case UNSPEC:
718 rx_print_operand_address (file, op);
719 break;
720
721 default:
722 gcc_unreachable ();
723 }
724 break;
725 }
726}
727
728/* Returns an assembler template for a move instruction. */
729
730char *
731rx_gen_move_template (rtx * operands, bool is_movu)
732{
6bb30542 733 static char out_template [64];
24833e1a 734 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
735 const char * src_template;
736 const char * dst_template;
737 rtx dest = operands[0];
738 rtx src = operands[1];
739
740 /* Decide which extension, if any, should be given to the move instruction. */
741 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
742 {
743 case QImode:
744 /* The .B extension is not valid when
745 loading an immediate into a register. */
746 if (! REG_P (dest) || ! CONST_INT_P (src))
747 extension = ".B";
748 break;
749 case HImode:
750 if (! REG_P (dest) || ! CONST_INT_P (src))
751 /* The .W extension is not valid when
752 loading an immediate into a register. */
753 extension = ".W";
754 break;
755 case SFmode:
756 case SImode:
757 extension = ".L";
758 break;
759 case VOIDmode:
760 /* This mode is used by constants. */
761 break;
762 default:
763 debug_rtx (src);
764 gcc_unreachable ();
765 }
766
767 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
768 src_template = "%%gp(%A1)[r13]";
769 else
770 src_template = "%1";
771
772 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
773 dst_template = "%%gp(%A0)[r13]";
774 else
775 dst_template = "%0";
776
6bb30542 777 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
24833e1a 778 extension, src_template, dst_template);
6bb30542 779 return out_template;
24833e1a 780}
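/* For example (illustrative only): for a plain SImode move the template
   returned is "mov.L\t%1, %0"; if the source lies in the small data area the
   source operand template becomes "%%gp(%A1)[r13]", i.e. a reference made
   relative to the GP register (r13).  */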
24833e1a 781\f
782/* Return VALUE rounded up to the next ALIGNMENT boundary. */
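/* For example, rx_round_up (1, 4) yields 4 and rx_round_up (5, 4) yields 8;
   ALIGNMENT is assumed to be a power of two.  */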
783
784static inline unsigned int
785rx_round_up (unsigned int value, unsigned int alignment)
786{
787 alignment -= 1;
788 return (value + alignment) & (~ alignment);
789}
790
791/* Return the number of bytes in the argument registers
792 occupied by an argument of type TYPE and mode MODE. */
793
ee4e8428 794static unsigned int
24833e1a 795rx_function_arg_size (Mmode mode, const_tree type)
796{
797 unsigned int num_bytes;
798
799 num_bytes = (mode == BLKmode)
800 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
801 return rx_round_up (num_bytes, UNITS_PER_WORD);
802}
803
804#define NUM_ARG_REGS 4
805#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
806
807/* Return an RTL expression describing the register holding a function
808 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
809 be passed on the stack. CUM describes the previous parameters to the
810 function and NAMED is false if the parameter is part of a variable
811 parameter list, or the last named parameter before the start of a
812 variable parameter list. */
813
ee4e8428 814static rtx
24833e1a 815rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
816{
817 unsigned int next_reg;
818 unsigned int bytes_so_far = *cum;
819 unsigned int size;
820 unsigned int rounded_size;
821
822 /* An exploded version of rx_function_arg_size. */
823 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
6bb30542 824 /* If the size is not known it cannot be passed in registers. */
825 if (size < 1)
826 return NULL_RTX;
24833e1a 827
828 rounded_size = rx_round_up (size, UNITS_PER_WORD);
829
830 /* Don't pass this arg via registers if there
831 are insufficient registers to hold all of it. */
832 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
833 return NULL_RTX;
834
835 /* Unnamed arguments and the last named argument in a
836 variadic function are always passed on the stack. */
837 if (!named)
838 return NULL_RTX;
839
840 /* Structures must occupy an exact number of registers,
841 otherwise they are passed on the stack. */
842 if ((type == NULL || AGGREGATE_TYPE_P (type))
843 && (size % UNITS_PER_WORD) != 0)
844 return NULL_RTX;
845
846 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
847
848 return gen_rtx_REG (mode, next_reg);
849}
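/* For illustration: with UNITS_PER_WORD == 4, the first named SImode argument
   (CUM == 0) is placed in r1, the next in r2, and so on up to r4; a 6-byte
   aggregate goes on the stack instead because its size is not a whole number
   of words.  */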
850
ee4e8428 851static void
852rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type,
853 bool named ATTRIBUTE_UNUSED)
854{
855 *cum += rx_function_arg_size (mode, type);
856}
857
bd99ba64 858static unsigned int
859rx_function_arg_boundary (Mmode mode ATTRIBUTE_UNUSED,
860 const_tree type ATTRIBUTE_UNUSED)
861{
862 return 32;
863}
864
24833e1a 865/* Return an RTL describing where a function return value of type RET_TYPE
866 is held. */
867
868static rtx
869rx_function_value (const_tree ret_type,
870 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
871 bool outgoing ATTRIBUTE_UNUSED)
872{
bd7d2835 873 enum machine_mode mode = TYPE_MODE (ret_type);
874
875 /* RX ABI specifies that small integer types are
876 promoted to int when returned by a function. */
02f06d23 877 if (GET_MODE_SIZE (mode) > 0
878 && GET_MODE_SIZE (mode) < 4
879 && ! COMPLEX_MODE_P (mode)
880 )
bd7d2835 881 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
882
883 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
884}
885
886/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
887 regard to function returns as does TARGET_FUNCTION_VALUE. */
888
889static enum machine_mode
890rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
891 enum machine_mode mode,
0318c61a 892 int * punsignedp ATTRIBUTE_UNUSED,
bd7d2835 893 const_tree funtype ATTRIBUTE_UNUSED,
894 int for_return)
895{
896 if (for_return != 1
897 || GET_MODE_SIZE (mode) >= 4
02f06d23 898 || COMPLEX_MODE_P (mode)
bd7d2835 899 || GET_MODE_SIZE (mode) < 1)
900 return mode;
901
902 return SImode;
24833e1a 903}
904
905static bool
906rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
907{
908 HOST_WIDE_INT size;
909
910 if (TYPE_MODE (type) != BLKmode
911 && ! AGGREGATE_TYPE_P (type))
912 return false;
913
914 size = int_size_in_bytes (type);
915 /* Large structs and those whose size is not an
916 exact multiple of 4 are returned in memory. */
917 return size < 1
918 || size > 16
919 || (size % UNITS_PER_WORD) != 0;
920}
921
922static rtx
923rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
924 int incoming ATTRIBUTE_UNUSED)
925{
926 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
927}
928
929static bool
930rx_return_in_msb (const_tree valtype)
931{
932 return TARGET_BIG_ENDIAN_DATA
933 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
934}
935
936/* Returns true if the provided function has the specified attribute. */
937
938static inline bool
939has_func_attr (const_tree decl, const char * func_attr)
940{
941 if (decl == NULL_TREE)
942 decl = current_function_decl;
943
944 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
945}
946
67e66e16 947/* Returns true if the provided function has the "fast_interrupt" attribute. */
24833e1a 948
949static inline bool
950is_fast_interrupt_func (const_tree decl)
951{
67e66e16 952 return has_func_attr (decl, "fast_interrupt");
24833e1a 953}
954
67e66e16 955/* Returns true if the provided function has the "interrupt" attribute. */
24833e1a 956
957static inline bool
67e66e16 958is_interrupt_func (const_tree decl)
24833e1a 959{
67e66e16 960 return has_func_attr (decl, "interrupt");
24833e1a 961}
962
963/* Returns true if the provided function has the "naked" attribute. */
964
965static inline bool
966is_naked_func (const_tree decl)
967{
968 return has_func_attr (decl, "naked");
969}
970\f
971static bool use_fixed_regs = false;
972
b2d7ede1 973static void
24833e1a 974rx_conditional_register_usage (void)
975{
976 static bool using_fixed_regs = false;
977
978 if (rx_small_data_limit > 0)
979 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
980
981 if (use_fixed_regs != using_fixed_regs)
982 {
983 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
984 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
985
986 if (use_fixed_regs)
987 {
24833e1a 988 unsigned int r;
989
24833e1a 990 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
991 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
e4d9e8e5 992
993 /* This is for fast interrupt handlers. Any register in
994 the range r10 to r13 (inclusive) that is currently
995 marked as fixed is now a viable, call-used register. */
24833e1a 996 for (r = 10; r <= 13; r++)
997 if (fixed_regs[r])
998 {
999 fixed_regs[r] = 0;
1000 call_used_regs[r] = 1;
24833e1a 1001 }
1002
e4d9e8e5 1003 /* Mark r7 as fixed. This is just a hack to avoid
1004 altering the reg_alloc_order array so that the newly
1005 freed r10-r13 registers are the preferred registers. */
1006 fixed_regs[7] = call_used_regs[7] = 1;
24833e1a 1007 }
1008 else
1009 {
1010 /* Restore the normal register masks. */
1011 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1012 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1013 }
1014
1015 using_fixed_regs = use_fixed_regs;
1016 }
1017}
1018
1019/* Perform any actions necessary before starting to compile FNDECL.
1020 For the RX we use this to make sure that we have the correct
1021 set of register masks selected. If FNDECL is NULL then we are
1022 compiling top level things. */
1023
1024static void
1025rx_set_current_function (tree fndecl)
1026{
1027 /* Remember the last target of rx_set_current_function. */
1028 static tree rx_previous_fndecl;
67e66e16 1029 bool prev_was_fast_interrupt;
1030 bool current_is_fast_interrupt;
24833e1a 1031
1032 /* Only change the context if the function changes. This hook is called
1033 several times in the course of compiling a function, and we don't want
1034 to slow things down too much or call target_reinit when it isn't safe. */
1035 if (fndecl == rx_previous_fndecl)
1036 return;
1037
67e66e16 1038 prev_was_fast_interrupt
24833e1a 1039 = rx_previous_fndecl
1040 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
67e66e16 1041
1042 current_is_fast_interrupt
24833e1a 1043 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1044
67e66e16 1045 if (prev_was_fast_interrupt != current_is_fast_interrupt)
24833e1a 1046 {
67e66e16 1047 use_fixed_regs = current_is_fast_interrupt;
24833e1a 1048 target_reinit ();
1049 }
67e66e16 1050
24833e1a 1051 rx_previous_fndecl = fndecl;
1052}
1053\f
 1054/* Typical stack layout should look like this after the function's prologue:
 1055
 1056                             |    |
 1057                               --                        ^
 1058                             |    | \                    |
 1059                             |    |   arguments saved    | Increasing
 1060                             |    |   on the stack       |  addresses
 1061     PARENT   arg pointer -> |    | /
 1062   -------------------------- ---- -------------------
 1063     CHILD                   |ret |   return address
 1064                               --
 1065                             |    | \
 1066                             |    |   call saved
 1067                             |    |   registers
 1068                             |    | /
 1069                               --
 1070                             |    | \
 1071                             |    |   local
 1072                             |    |   variables
 1073     frame pointer ->        |    | /
 1074                               --
 1075                             |    | \
 1076                             |    |   outgoing           | Decreasing
 1077                             |    |   arguments           |  addresses
 1078    current stack pointer -> |    | /                     |
 1079   -------------------------- ---- ------------------     V
 1080               | |  */
1081
1082static unsigned int
1083bit_count (unsigned int x)
1084{
1085 const unsigned int m1 = 0x55555555;
1086 const unsigned int m2 = 0x33333333;
1087 const unsigned int m4 = 0x0f0f0f0f;
1088
1089 x -= (x >> 1) & m1;
1090 x = (x & m2) + ((x >> 2) & m2);
1091 x = (x + (x >> 4)) & m4;
1092 x += x >> 8;
1093
1094 return (x + (x >> 16)) & 0x3f;
1095}
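/* For example, bit_count (0x22) is 2 and bit_count (0xff) is 8.  It is used
   below to count the registers present in a save mask.  */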
1096
e4d9e8e5 1097#define MUST_SAVE_ACC_REGISTER \
1098 (TARGET_SAVE_ACC_REGISTER \
1099 && (is_interrupt_func (NULL_TREE) \
1100 || is_fast_interrupt_func (NULL_TREE)))
1101
24833e1a 1102/* Returns either the lowest numbered and highest numbered registers that
1103 occupy the call-saved area of the stack frame, if the registers are
1104 stored as a contiguous block, or else a bitmask of the individual
1105 registers if they are stored piecemeal.
1106
1107 Also computes the size of the frame and the size of the outgoing
1108 arguments block (in bytes). */
1109
1110static void
1111rx_get_stack_layout (unsigned int * lowest,
1112 unsigned int * highest,
1113 unsigned int * register_mask,
1114 unsigned int * frame_size,
1115 unsigned int * stack_size)
1116{
1117 unsigned int reg;
1118 unsigned int low;
1119 unsigned int high;
1120 unsigned int fixed_reg = 0;
1121 unsigned int save_mask;
1122 unsigned int pushed_mask;
1123 unsigned int unneeded_pushes;
1124
e4d9e8e5 1125 if (is_naked_func (NULL_TREE))
24833e1a 1126 {
1127 /* Naked functions do not create their own stack frame.
e4d9e8e5 1128 Instead the programmer must do that for us. */
24833e1a 1129 * lowest = 0;
1130 * highest = 0;
1131 * register_mask = 0;
1132 * frame_size = 0;
1133 * stack_size = 0;
1134 return;
1135 }
1136
9d2f1b03 1137 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
24833e1a 1138 {
21cde6ec 1139 if ((df_regs_ever_live_p (reg)
1140 /* Always save all call clobbered registers inside interrupt
1141 handlers, even if they are not live - they may be used in
1142 routines called from this one. */
1143 || (call_used_regs[reg] && is_interrupt_func (NULL_TREE)))
24833e1a 1144 && (! call_used_regs[reg]
 1145	     /* Even call clobbered registers must
67e66e16 1146 be pushed inside interrupt handlers. */
e4d9e8e5 1147 || is_interrupt_func (NULL_TREE)
1148 /* Likewise for fast interrupt handlers, except registers r10 -
1149 r13. These are normally call-saved, but may have been set
1150 to call-used by rx_conditional_register_usage. If so then
1151 they can be used in the fast interrupt handler without
1152 saving them on the stack. */
1153 || (is_fast_interrupt_func (NULL_TREE)
1154 && ! IN_RANGE (reg, 10, 13))))
24833e1a 1155 {
1156 if (low == 0)
1157 low = reg;
1158 high = reg;
1159
1160 save_mask |= 1 << reg;
1161 }
1162
1163 /* Remember if we see a fixed register
1164 after having found the low register. */
1165 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1166 fixed_reg = reg;
1167 }
1168
e4d9e8e5 1169 /* If we have to save the accumulator register, make sure
1170 that at least two registers are pushed into the frame. */
1171 if (MUST_SAVE_ACC_REGISTER
1172 && bit_count (save_mask) < 2)
1173 {
1174 save_mask |= (1 << 13) | (1 << 14);
1175 if (low == 0)
1176 low = 13;
bc9bb967 1177 if (high == 0 || low == high)
1178 high = low + 1;
e4d9e8e5 1179 }
1180
24833e1a 1181  /* Decide if it would be faster to fill in the call-saved area of the stack
1182 frame using multiple PUSH instructions instead of a single PUSHM
1183 instruction.
1184
1185 SAVE_MASK is a bitmask of the registers that must be stored in the
1186 call-save area. PUSHED_MASK is a bitmask of the registers that would
1187 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1188 is a bitmask of those registers in pushed_mask that are not in
1189 save_mask.
1190
1191 We use a simple heuristic that says that it is better to use
1192 multiple PUSH instructions if the number of unnecessary pushes is
1193 greater than the number of necessary pushes.
1194
1195 We also use multiple PUSH instructions if there are any fixed registers
1196 between LOW and HIGH. The only way that this can happen is if the user
 1197     has specified -ffixed-<reg-name> on the command line and in such
1198 circumstances we do not want to touch the fixed registers at all.
1199
1200 FIXME: Is it worth improving this heuristic ? */
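     /* Worked example of the heuristic (illustrative): if only r6 and r10 need
	saving then LOW is 6, HIGH is 10, PUSHED_MASK covers r6..r10 and
	UNNEEDED_PUSHES covers r7..r9; since 2 < 3, and assuming the function is
	being optimized for speed, individual PUSH instructions are used.  */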
1201 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1202 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1203
1204 if ((fixed_reg && fixed_reg <= high)
1205 || (optimize_function_for_speed_p (cfun)
1206 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1207 {
1208 /* Use multiple pushes. */
1209 * lowest = 0;
1210 * highest = 0;
1211 * register_mask = save_mask;
1212 }
1213 else
1214 {
1215 /* Use one push multiple instruction. */
1216 * lowest = low;
1217 * highest = high;
1218 * register_mask = 0;
1219 }
1220
1221 * frame_size = rx_round_up
1222 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1223
1224 if (crtl->args.size > 0)
1225 * frame_size += rx_round_up
1226 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1227
1228 * stack_size = rx_round_up
1229 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1230}
1231
1232/* Generate a PUSHM instruction that matches the given operands. */
1233
1234void
1235rx_emit_stack_pushm (rtx * operands)
1236{
1237 HOST_WIDE_INT last_reg;
1238 rtx first_push;
1239
1240 gcc_assert (CONST_INT_P (operands[0]));
1241 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1242
1243 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1244 first_push = XVECEXP (operands[1], 0, 1);
1245 gcc_assert (SET_P (first_push));
1246 first_push = SET_SRC (first_push);
1247 gcc_assert (REG_P (first_push));
1248
1249 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
67e66e16 1250 reg_names [REGNO (first_push) - last_reg],
1251 reg_names [REGNO (first_push)]);
24833e1a 1252}
1253
1254/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1255
1256static rtx
1257gen_rx_store_vector (unsigned int low, unsigned int high)
1258{
1259 unsigned int i;
1260 unsigned int count = (high - low) + 2;
1261 rtx vector;
1262
1263 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1264
1265 XVECEXP (vector, 0, 0) =
51e241f8 1266 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1267 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1268 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1269
1270 for (i = 0; i < count - 1; i++)
1271 XVECEXP (vector, 0, i + 1) =
51e241f8 1272 gen_rtx_SET (VOIDmode,
24833e1a 1273 gen_rtx_MEM (SImode,
67e66e16 1274 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1275 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1276 gen_rtx_REG (SImode, high - i));
24833e1a 1277 return vector;
1278}
1279
67e66e16 1280/* Mark INSN as being frame related. If it is a PARALLEL
1281 then mark each element as being frame related as well. */
1282
1283static void
1284mark_frame_related (rtx insn)
1285{
1286 RTX_FRAME_RELATED_P (insn) = 1;
1287 insn = PATTERN (insn);
1288
1289 if (GET_CODE (insn) == PARALLEL)
1290 {
1291 unsigned int i;
1292
61fc50a0 1293 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1294 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1295 }
1296}
1297
95272799 1298static bool
1299ok_for_max_constant (HOST_WIDE_INT val)
1300{
1301 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1302 /* If there is no constraint on the size of constants
1303 used as operands, then any value is legitimate. */
1304 return true;
1305
1306 /* rx_max_constant_size specifies the maximum number
1307 of bytes that can be used to hold a signed value. */
1308 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1309 ( 1 << (rx_max_constant_size * 8)));
1310}
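/* For example, with rx_max_constant_size set to 1 the range accepted above is
   [-256, 256]; with a setting of 0 or 4 every value is accepted.
   (Illustrative of the checks above only.)  */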
1311
1312/* Generate an ADD of SRC plus VAL into DEST.
1313 Handles the case where VAL is too big for max_constant_value.
1314 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1315
1316static void
1317gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1318{
1319 rtx insn;
1320
1321 if (val == NULL_RTX || INTVAL (val) == 0)
1322 {
1323 gcc_assert (dest != src);
1324
1325 insn = emit_move_insn (dest, src);
1326 }
1327 else if (ok_for_max_constant (INTVAL (val)))
1328 insn = emit_insn (gen_addsi3 (dest, src, val));
1329 else
1330 {
02f06d23 1331 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1332 will not reject it. */
1333 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1334 insn = emit_insn (gen_addsi3 (dest, src, val));
95272799 1335
1336 if (is_frame_related)
1337 /* We have to provide our own frame related note here
1338 as the dwarf2out code cannot be expected to grok
1339 our unspec. */
1340 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1341 gen_rtx_SET (SImode, dest,
1342 gen_rtx_PLUS (SImode, src, val)));
1343 return;
1344 }
1345
1346 if (is_frame_related)
1347 RTX_FRAME_RELATED_P (insn) = 1;
1348 return;
1349}
1350
24833e1a 1351void
1352rx_expand_prologue (void)
1353{
1354 unsigned int stack_size;
1355 unsigned int frame_size;
1356 unsigned int mask;
1357 unsigned int low;
1358 unsigned int high;
67e66e16 1359 unsigned int reg;
24833e1a 1360 rtx insn;
1361
1362 /* Naked functions use their own, programmer provided prologues. */
e4d9e8e5 1363 if (is_naked_func (NULL_TREE))
24833e1a 1364 return;
1365
1366 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1367
1368 /* If we use any of the callee-saved registers, save them now. */
1369 if (mask)
1370 {
24833e1a 1371 /* Push registers in reverse order. */
9d2f1b03 1372 for (reg = CC_REGNUM; reg --;)
24833e1a 1373 if (mask & (1 << reg))
1374 {
1375 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
67e66e16 1376 mark_frame_related (insn);
24833e1a 1377 }
1378 }
1379 else if (low)
1380 {
1381 if (high == low)
1382 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1383 else
1384 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1385 * UNITS_PER_WORD),
1386 gen_rx_store_vector (low, high)));
67e66e16 1387 mark_frame_related (insn);
1388 }
1389
e4d9e8e5 1390 if (MUST_SAVE_ACC_REGISTER)
67e66e16 1391 {
1392 unsigned int acc_high, acc_low;
1393
1394 /* Interrupt handlers have to preserve the accumulator
1395 register if so requested by the user. Use the first
e4d9e8e5 1396 two pushed registers as intermediaries. */
67e66e16 1397 if (mask)
1398 {
1399 acc_low = acc_high = 0;
1400
9d2f1b03 1401 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1402 if (mask & (1 << reg))
1403 {
1404 if (acc_low == 0)
1405 acc_low = reg;
1406 else
1407 {
1408 acc_high = reg;
1409 break;
1410 }
1411 }
1412
1413 /* We have assumed that there are at least two registers pushed... */
1414 gcc_assert (acc_high != 0);
1415
1416 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1417 We just assume that they are zero. */
1418 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1419 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1420 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1421 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1422 }
1423 else
1424 {
1425 acc_low = low;
1426 acc_high = low + 1;
1427
1428 /* We have assumed that there are at least two registers pushed... */
1429 gcc_assert (acc_high <= high);
1430
1431 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1432 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1433 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1434 gen_rx_store_vector (acc_low, acc_high)));
1435 }
24833e1a 1436 }
1437
1438 /* If needed, set up the frame pointer. */
1439 if (frame_pointer_needed)
95272799 1440 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1441 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1442
1443 /* Allocate space for the outgoing args.
1444 If the stack frame has not already been set up then handle this as well. */
1445 if (stack_size)
1446 {
1447 if (frame_size)
1448 {
1449 if (frame_pointer_needed)
95272799 1450 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1451 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1452 else
95272799 1453 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1454 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1455 true);
24833e1a 1456 }
1457 else
95272799 1458 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1459 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1460 }
1461 else if (frame_size)
1462 {
1463 if (! frame_pointer_needed)
95272799 1464 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1465 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1466 else
95272799 1467 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1468 true);
24833e1a 1469 }
24833e1a 1470}
1471
1472static void
1473rx_output_function_prologue (FILE * file,
1474 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1475{
1476 if (is_fast_interrupt_func (NULL_TREE))
1477 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1478
67e66e16 1479 if (is_interrupt_func (NULL_TREE))
1480 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
24833e1a 1481
1482 if (is_naked_func (NULL_TREE))
1483 asm_fprintf (file, "\t; Note: Naked Function\n");
1484
1485 if (cfun->static_chain_decl != NULL)
1486 asm_fprintf (file, "\t; Note: Nested function declared "
1487 "inside another function.\n");
1488
1489 if (crtl->calls_eh_return)
1490 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1491}
1492
1493/* Generate a POPM or RTSD instruction that matches the given operands. */
1494
1495void
1496rx_emit_stack_popm (rtx * operands, bool is_popm)
1497{
1498 HOST_WIDE_INT stack_adjust;
1499 HOST_WIDE_INT last_reg;
1500 rtx first_push;
1501
1502 gcc_assert (CONST_INT_P (operands[0]));
1503 stack_adjust = INTVAL (operands[0]);
1504
1505 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1506 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1507
1508 first_push = XVECEXP (operands[1], 0, 1);
1509 gcc_assert (SET_P (first_push));
1510 first_push = SET_DEST (first_push);
1511 gcc_assert (REG_P (first_push));
1512
1513 if (is_popm)
1514 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1515 reg_names [REGNO (first_push)],
1516 reg_names [REGNO (first_push) + last_reg]);
1517 else
1518 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1519 (int) stack_adjust,
1520 reg_names [REGNO (first_push)],
1521 reg_names [REGNO (first_push) + last_reg]);
1522}
1523
1524/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1525
1526static rtx
1527gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1528{
1529 unsigned int i;
1530 unsigned int bias = 3;
1531 unsigned int count = (high - low) + bias;
1532 rtx vector;
1533
1534 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1535
1536 XVECEXP (vector, 0, 0) =
51e241f8 1537 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1538 plus_constant (stack_pointer_rtx, adjust));
1539
1540 for (i = 0; i < count - 2; i++)
1541 XVECEXP (vector, 0, i + 1) =
51e241f8 1542 gen_rtx_SET (VOIDmode,
24833e1a 1543 gen_rtx_REG (SImode, low + i),
1544 gen_rtx_MEM (SImode,
1545 i == 0 ? stack_pointer_rtx
1546 : plus_constant (stack_pointer_rtx,
1547 i * UNITS_PER_WORD)));
1548
1549 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1550
1551 return vector;
1552}
1553
1554/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1555
1556static rtx
1557gen_rx_popm_vector (unsigned int low, unsigned int high)
1558{
1559 unsigned int i;
1560 unsigned int count = (high - low) + 2;
1561 rtx vector;
1562
1563 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1564
1565 XVECEXP (vector, 0, 0) =
51e241f8 1566 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1567 plus_constant (stack_pointer_rtx,
1568 (count - 1) * UNITS_PER_WORD));
1569
1570 for (i = 0; i < count - 1; i++)
1571 XVECEXP (vector, 0, i + 1) =
51e241f8 1572 gen_rtx_SET (VOIDmode,
24833e1a 1573 gen_rtx_REG (SImode, low + i),
1574 gen_rtx_MEM (SImode,
1575 i == 0 ? stack_pointer_rtx
1576 : plus_constant (stack_pointer_rtx,
1577 i * UNITS_PER_WORD)));
1578
1579 return vector;
1580}
1581
1582void
1583rx_expand_epilogue (bool is_sibcall)
1584{
1585 unsigned int low;
1586 unsigned int high;
1587 unsigned int frame_size;
1588 unsigned int stack_size;
1589 unsigned int register_mask;
1590 unsigned int regs_size;
67e66e16 1591 unsigned int reg;
24833e1a 1592 unsigned HOST_WIDE_INT total_size;
1593
61fc50a0 1594  /* FIXME: We do not support indirect sibcalls at the moment because we
1595 cannot guarantee that the register holding the function address is a
1596 call-used register. If it is a call-saved register then the stack
1597 pop instructions generated in the epilogue will corrupt the address
1598 before it is used.
1599
1600 Creating a new call-used-only register class works but then the
1601 reload pass gets stuck because it cannot always find a call-used
1602 register for spilling sibcalls.
1603
1604 The other possible solution is for this pass to scan forward for the
1605 sibcall instruction (if it has been generated) and work out if it
1606 is an indirect sibcall using a call-saved register. If it is then
 1607     the address can be copied into a call-used register in this epilogue
1608 code and the sibcall instruction modified to use that register. */
1609
24833e1a 1610 if (is_naked_func (NULL_TREE))
1611 {
61fc50a0 1612 gcc_assert (! is_sibcall);
1613
24833e1a 1614 /* Naked functions use their own, programmer provided epilogues.
1615 But, in order to keep gcc happy we have to generate some kind of
1616 epilogue RTL. */
1617 emit_jump_insn (gen_naked_return ());
1618 return;
1619 }
1620
1621 rx_get_stack_layout (& low, & high, & register_mask,
1622 & frame_size, & stack_size);
1623
1624 total_size = frame_size + stack_size;
1625 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1626
1627 /* See if we are unable to use the special stack frame deconstruct and
1628 return instructions. In most cases we can use them, but the exceptions
1629 are:
1630
1631 - Sibling calling functions deconstruct the frame but do not return to
1632 their caller. Instead they branch to their sibling and allow their
1633 return instruction to return to this function's parent.
1634
67e66e16 1635 - Fast and normal interrupt handling functions have to use special
24833e1a 1636 return instructions.
1637
1638 - Functions where we have pushed a fragmented set of registers into the
1639 call-save area must have the same set of registers popped. */
1640 if (is_sibcall
1641 || is_fast_interrupt_func (NULL_TREE)
67e66e16 1642 || is_interrupt_func (NULL_TREE)
24833e1a 1643 || register_mask)
1644 {
1645 /* Cannot use the special instructions - deconstruct by hand. */
1646 if (total_size)
95272799 1647 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1648 GEN_INT (total_size), false);
24833e1a 1649
e4d9e8e5 1650 if (MUST_SAVE_ACC_REGISTER)
24833e1a 1651 {
67e66e16 1652 unsigned int acc_low, acc_high;
1653
1654 /* Reverse the saving of the accumulator register onto the stack.
1655 Note we must adjust the saved "low" accumulator value as it
1656 is really the middle 32-bits of the accumulator. */
1657 if (register_mask)
1658 {
1659 acc_low = acc_high = 0;
9d2f1b03 1660
1661 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1662 if (register_mask & (1 << reg))
1663 {
1664 if (acc_low == 0)
1665 acc_low = reg;
1666 else
1667 {
1668 acc_high = reg;
1669 break;
1670 }
1671 }
1672 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1673 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1674 }
1675 else
1676 {
1677 acc_low = low;
1678 acc_high = low + 1;
1679 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1680 gen_rx_popm_vector (acc_low, acc_high)));
1681 }
1682
1683 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1684 gen_rtx_REG (SImode, acc_low),
1685 GEN_INT (16)));
1686 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1687 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1688 }
24833e1a 1689
67e66e16 1690 if (register_mask)
1691 {
9d2f1b03 1692 for (reg = 0; reg < CC_REGNUM; reg ++)
24833e1a 1693 if (register_mask & (1 << reg))
1694 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1695 }
1696 else if (low)
1697 {
1698 if (high == low)
1699 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1700 else
1701 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1702 gen_rx_popm_vector (low, high)));
1703 }
1704
1705 if (is_fast_interrupt_func (NULL_TREE))
61fc50a0 1706 {
1707 gcc_assert (! is_sibcall);
1708 emit_jump_insn (gen_fast_interrupt_return ());
1709 }
67e66e16 1710 else if (is_interrupt_func (NULL_TREE))
61fc50a0 1711 {
1712 gcc_assert (! is_sibcall);
1713 emit_jump_insn (gen_exception_return ());
1714 }
24833e1a 1715 else if (! is_sibcall)
1716 emit_jump_insn (gen_simple_return ());
1717
1718 return;
1719 }
1720
1721 /* If we allocated space on the stack, free it now. */
1722 if (total_size)
1723 {
1724 unsigned HOST_WIDE_INT rtsd_size;
1725
1726 /* See if we can use the RTSD instruction. */
1727 rtsd_size = total_size + regs_size;
1728 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1729 {
1730 if (low)
1731 emit_jump_insn (gen_pop_and_return
1732 (GEN_INT (rtsd_size),
1733 gen_rx_rtsd_vector (rtsd_size, low, high)));
1734 else
1735 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1736
1737 return;
1738 }
1739
95272799 1740 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1741 GEN_INT (total_size), false);
24833e1a 1742 }
1743
1744 if (low)
1745 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1746 gen_rx_rtsd_vector (regs_size,
1747 low, high)));
1748 else
1749 emit_jump_insn (gen_simple_return ());
1750}
1751
1752
1753/* Compute the offset (in words) between FROM (arg pointer
1754 or frame pointer) and TO (frame pointer or stack pointer).
1755 See ASCII art comment at the start of rx_expand_prologue
1756 for more information. */
1757
1758int
1759rx_initial_elimination_offset (int from, int to)
1760{
1761 unsigned int low;
1762 unsigned int high;
1763 unsigned int frame_size;
1764 unsigned int stack_size;
1765 unsigned int mask;
1766
1767 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1768
1769 if (from == ARG_POINTER_REGNUM)
1770 {
1771 /* Extend the computed size of the stack frame to
1772 include the registers pushed in the prologue. */
1773 if (low)
1774 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1775 else
1776 frame_size += bit_count (mask) * UNITS_PER_WORD;
1777
1778 /* Remember to include the return address. */
1779 frame_size += 1 * UNITS_PER_WORD;
1780
1781 if (to == FRAME_POINTER_REGNUM)
1782 return frame_size;
1783
1784 gcc_assert (to == STACK_POINTER_REGNUM);
1785 return frame_size + stack_size;
1786 }
1787
1788 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1789 return stack_size;
1790}
1791
24833e1a 1792/* Decide if a variable should go into one of the small data sections. */
1793
1794static bool
1795rx_in_small_data (const_tree decl)
1796{
1797 int size;
1798 const_tree section;
1799
1800 if (rx_small_data_limit == 0)
1801 return false;
1802
1803 if (TREE_CODE (decl) != VAR_DECL)
1804 return false;
1805
1806 /* We do not put read-only variables into a small data area because
1807 they would be placed with the other read-only sections, far away
1808 from the read-write data sections, and we only have one small
1809 data area pointer.
 1810     Similarly, commons are placed in the .bss section, which might be
 1811     far away from (and out of alignment with respect to) the .data section.  */
1812 if (TREE_READONLY (decl) || DECL_COMMON (decl))
1813 return false;
1814
1815 section = DECL_SECTION_NAME (decl);
1816 if (section)
1817 {
1818 const char * const name = TREE_STRING_POINTER (section);
1819
1820 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1821 }
1822
1823 size = int_size_in_bytes (TREE_TYPE (decl));
1824
1825 return (size > 0) && (size <= rx_small_data_limit);
1826}
1827
1828/* Return a section for X.
1829 The only special thing we do here is to honor small data. */
1830
1831static section *
1832rx_select_rtx_section (enum machine_mode mode,
1833 rtx x,
1834 unsigned HOST_WIDE_INT align)
1835{
1836 if (rx_small_data_limit > 0
1837 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1838 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1839 return sdata_section;
1840
1841 return default_elf_select_rtx_section (mode, x, align);
1842}
1843
1844static section *
1845rx_select_section (tree decl,
1846 int reloc,
1847 unsigned HOST_WIDE_INT align)
1848{
1849 if (rx_small_data_limit > 0)
1850 {
1851 switch (categorize_decl_for_section (decl, reloc))
1852 {
1853 case SECCAT_SDATA: return sdata_section;
1854 case SECCAT_SBSS: return sbss_section;
1855 case SECCAT_SRODATA:
 1856	     /* Fall through.  We do not put small, read-only
 1857		data into the C_2 section, because we do not
 1858		use the C_2 section at all.  It is located
 1859		with the other read-only data sections, far
 1860		away from the read-write data sections, and
 1861		we only have one small data
 1862		pointer (r13).  */
1863 default:
1864 break;
1865 }
1866 }
1867
1868 /* If we are supporting the Renesas assembler
1869 we cannot use mergeable sections. */
1870 if (TARGET_AS100_SYNTAX)
1871 switch (categorize_decl_for_section (decl, reloc))
1872 {
1873 case SECCAT_RODATA_MERGE_CONST:
1874 case SECCAT_RODATA_MERGE_STR_INIT:
1875 case SECCAT_RODATA_MERGE_STR:
1876 return readonly_data_section;
1877
1878 default:
1879 break;
1880 }
1881
1882 return default_elf_select_section (decl, reloc, align);
1883}
1884\f
1885enum rx_builtin
1886{
1887 RX_BUILTIN_BRK,
1888 RX_BUILTIN_CLRPSW,
1889 RX_BUILTIN_INT,
1890 RX_BUILTIN_MACHI,
1891 RX_BUILTIN_MACLO,
1892 RX_BUILTIN_MULHI,
1893 RX_BUILTIN_MULLO,
1894 RX_BUILTIN_MVFACHI,
1895 RX_BUILTIN_MVFACMI,
1896 RX_BUILTIN_MVFC,
1897 RX_BUILTIN_MVTACHI,
1898 RX_BUILTIN_MVTACLO,
1899 RX_BUILTIN_MVTC,
67e66e16 1900 RX_BUILTIN_MVTIPL,
24833e1a 1901 RX_BUILTIN_RACW,
1902 RX_BUILTIN_REVW,
1903 RX_BUILTIN_RMPA,
1904 RX_BUILTIN_ROUND,
24833e1a 1905 RX_BUILTIN_SETPSW,
1906 RX_BUILTIN_WAIT,
1907 RX_BUILTIN_max
1908};
1909
1910static void
1911rx_init_builtins (void)
1912{
1913#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1914 add_builtin_function ("__builtin_rx_" LC_NAME, \
1915 build_function_type_list (RET_TYPE##_type_node, \
1916 ARG_TYPE##_type_node, \
1917 NULL_TREE), \
1918 RX_BUILTIN_##UC_NAME, \
1919 BUILT_IN_MD, NULL, NULL_TREE)
1920
1921#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1922 add_builtin_function ("__builtin_rx_" LC_NAME, \
1923 build_function_type_list (RET_TYPE##_type_node, \
1924 ARG_TYPE1##_type_node,\
1925 ARG_TYPE2##_type_node,\
1926 NULL_TREE), \
1927 RX_BUILTIN_##UC_NAME, \
1928 BUILT_IN_MD, NULL, NULL_TREE)
1929
1930#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1931 add_builtin_function ("__builtin_rx_" LC_NAME, \
1932 build_function_type_list (RET_TYPE##_type_node, \
1933 ARG_TYPE1##_type_node,\
1934 ARG_TYPE2##_type_node,\
1935 ARG_TYPE3##_type_node,\
1936 NULL_TREE), \
1937 RX_BUILTIN_##UC_NAME, \
1938 BUILT_IN_MD, NULL, NULL_TREE)
1939
1940 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1941 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1942 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1943 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1944 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1945 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1946 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1947 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1948 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1949 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1950 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1951 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1952 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1953 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1954 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 1955 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 1956 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1957 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1958 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
24833e1a 1959 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
1960}
1961
24833e1a 1962static rtx
1963rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1964{
1965 if (reg && ! REG_P (arg))
1966 arg = force_reg (SImode, arg);
1967
1968 emit_insn (gen_func (arg));
1969
1970 return NULL_RTX;
1971}
1972
1973static rtx
1974rx_expand_builtin_mvtc (tree exp)
1975{
1976 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1977 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1978
1979 if (! CONST_INT_P (arg1))
1980 return NULL_RTX;
1981
1982 if (! REG_P (arg2))
1983 arg2 = force_reg (SImode, arg2);
1984
1985 emit_insn (gen_mvtc (arg1, arg2));
1986
1987 return NULL_RTX;
1988}
1989
1990static rtx
1991rx_expand_builtin_mvfc (tree t_arg, rtx target)
1992{
1993 rtx arg = expand_normal (t_arg);
1994
1995 if (! CONST_INT_P (arg))
1996 return NULL_RTX;
1997
e4d9e8e5 1998 if (target == NULL_RTX)
1999 return NULL_RTX;
2000
24833e1a 2001 if (! REG_P (target))
2002 target = force_reg (SImode, target);
2003
2004 emit_insn (gen_mvfc (target, arg));
2005
2006 return target;
2007}
2008
67e66e16 2009static rtx
2010rx_expand_builtin_mvtipl (rtx arg)
2011{
2012 /* The RX610 does not support the MVTIPL instruction. */
2013 if (rx_cpu_type == RX610)
2014 return NULL_RTX;
2015
e5743482 2016 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2017 return NULL_RTX;
2018
2019 emit_insn (gen_mvtipl (arg));
2020
2021 return NULL_RTX;
2022}
2023
24833e1a 2024static rtx
2025rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2026{
2027 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2028 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2029
2030 if (! REG_P (arg1))
2031 arg1 = force_reg (SImode, arg1);
2032
2033 if (! REG_P (arg2))
2034 arg2 = force_reg (SImode, arg2);
2035
2036 emit_insn (gen_func (arg1, arg2));
2037
2038 return NULL_RTX;
2039}
2040
2041static rtx
2042rx_expand_int_builtin_1_arg (rtx arg,
2043 rtx target,
2044 rtx (* gen_func)(rtx, rtx),
2045 bool mem_ok)
2046{
2047 if (! REG_P (arg))
2048 if (!mem_ok || ! MEM_P (arg))
2049 arg = force_reg (SImode, arg);
2050
2051 if (target == NULL_RTX || ! REG_P (target))
2052 target = gen_reg_rtx (SImode);
2053
2054 emit_insn (gen_func (target, arg));
2055
2056 return target;
2057}
2058
2059static rtx
2060rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2061{
2062 if (target == NULL_RTX || ! REG_P (target))
2063 target = gen_reg_rtx (SImode);
2064
2065 emit_insn (gen_func (target));
2066
2067 return target;
2068}
2069
2070static rtx
2071rx_expand_builtin_round (rtx arg, rtx target)
2072{
2073 if ((! REG_P (arg) && ! MEM_P (arg))
2074 || GET_MODE (arg) != SFmode)
2075 arg = force_reg (SFmode, arg);
2076
2077 if (target == NULL_RTX || ! REG_P (target))
2078 target = gen_reg_rtx (SImode);
2079
2080 emit_insn (gen_lrintsf2 (target, arg));
2081
2082 return target;
2083}
2084
e5743482 2085static int
0318c61a 2086valid_psw_flag (rtx op, const char *which)
e5743482 2087{
2088 static int mvtc_inform_done = 0;
2089
2090 if (GET_CODE (op) == CONST_INT)
2091 switch (INTVAL (op))
2092 {
2093 case 0: case 'c': case 'C':
2094 case 1: case 'z': case 'Z':
2095 case 2: case 's': case 'S':
2096 case 3: case 'o': case 'O':
2097 case 8: case 'i': case 'I':
2098 case 9: case 'u': case 'U':
2099 return 1;
2100 }
2101
2102 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2103 if (!mvtc_inform_done)
2104 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2105 mvtc_inform_done = 1;
2106
2107 return 0;
2108}
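
/* Illustrative sketch (hypothetical user code, not part of this file):
   the PSW builtins accept either the flag letter or the designator value
   checked for above; anything else is diagnosed.  */
#if 0
static void
critical_section_example (void)
{
  __builtin_rx_clrpsw ('I');	/* Clear the interrupt enable (I) flag.  */
  /* ... code that must not be interrupted ...  */
  __builtin_rx_setpsw ('I');	/* Set it again.  */

  /* __builtin_rx_setpsw (4) would be rejected by valid_psw_flag with the
     error above; arbitrary PSW values require __builtin_rx_mvtc.  */
}
#endif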
2109
24833e1a 2110static rtx
2111rx_expand_builtin (tree exp,
2112 rtx target,
2113 rtx subtarget ATTRIBUTE_UNUSED,
2114 enum machine_mode mode ATTRIBUTE_UNUSED,
2115 int ignore ATTRIBUTE_UNUSED)
2116{
2117 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 2118 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 2119 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2120 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2121
2122 switch (fcode)
2123 {
2124 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
e5743482 2125 case RX_BUILTIN_CLRPSW:
2126 if (!valid_psw_flag (op, "clrpsw"))
2127 return NULL_RTX;
2128 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2129 case RX_BUILTIN_SETPSW:
2130 if (!valid_psw_flag (op, "setpsw"))
2131 return NULL_RTX;
2132 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
24833e1a 2133 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2134 (op, gen_int, false);
2135 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2136 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2137 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2138 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2139 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2140 (target, gen_mvfachi);
2141 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2142 (target, gen_mvfacmi);
2143 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2144 (op, gen_mvtachi, true);
2145 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2146 (op, gen_mvtaclo, true);
2147 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2148 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2149 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2150 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2151 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2152 (op, gen_racw, false);
2153 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2154 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2155 (op, target, gen_revw, false);
24833e1a 2156 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2157
2158 default:
2159 internal_error ("bad builtin code");
2160 break;
2161 }
2162
2163 return NULL_RTX;
2164}
2165\f
2166/* Place an element into a constructor or destructor section.
2167 Like default_ctor_section_asm_out_constructor in varasm.c
2168 except that it uses .init_array (or .fini_array) and it
2169 handles constructor priorities. */
2170
2171static void
2172rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2173{
2174 section * s;
2175
2176 if (priority != DEFAULT_INIT_PRIORITY)
2177 {
2178 char buf[18];
2179
2180 sprintf (buf, "%s.%.5u",
2181 is_ctor ? ".init_array" : ".fini_array",
2182 priority);
2183 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2184 }
2185 else if (is_ctor)
2186 s = ctors_section;
2187 else
2188 s = dtors_section;
2189
2190 switch_to_section (s);
2191 assemble_align (POINTER_SIZE);
2192 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2193}
2194
2195static void
2196rx_elf_asm_constructor (rtx symbol, int priority)
2197{
2198 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2199}
2200
2201static void
2202rx_elf_asm_destructor (rtx symbol, int priority)
2203{
2204 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2205}
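
/* Illustrative sketch (hypothetical user code, not part of this file):
   given the "%s.%.5u" format used above, a prioritized constructor such
   as the one below is placed in the section ".init_array.00101", while
   default-priority constructors go to the plain ctors section.  */
#if 0
static void __attribute__ ((constructor (101)))
early_init_example (void)
{
  /* Runs according to its priority relative to other constructors.  */
}
#endif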
2206\f
67e66e16 2207/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2208
2209static tree
2210rx_handle_func_attribute (tree * node,
2211 tree name,
2212 tree args,
2213 int flags ATTRIBUTE_UNUSED,
2214 bool * no_add_attrs)
2215{
2216 gcc_assert (DECL_P (* node));
2217 gcc_assert (args == NULL_TREE);
2218
2219 if (TREE_CODE (* node) != FUNCTION_DECL)
2220 {
2221 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2222 name);
2223 * no_add_attrs = true;
2224 }
2225
2226 /* FIXME: We ought to check for conflicting attributes. */
2227
2228 /* FIXME: We ought to check that the interrupt and exception
2229 handler attributes have been applied to void functions. */
2230 return NULL_TREE;
2231}
2232
2233/* Table of RX specific attributes. */
2234const struct attribute_spec rx_attribute_table[] =
2235{
2236 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
24833e1a 2237 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
67e66e16 2238 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
24833e1a 2239 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2240 { NULL, 0, 0, false, false, false, NULL }
2241};
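
/* Illustrative sketch (hypothetical user code, not part of this file):
   the attributes registered above are applied to function declarations;
   applying them elsewhere triggers the warning issued in
   rx_handle_func_attribute.  */
#if 0
void __attribute__ ((interrupt))      timer_isr (void);
void __attribute__ ((fast_interrupt)) dma_isr (void);
void __attribute__ ((naked))          raw_entry (void);

int not_a_function __attribute__ ((interrupt));  /* warning: only applies to functions */
#endif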
2242
98cb9b5b 2243/* Extra processing for target specific command line options. */
2244
2245static bool
2246rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2247{
2248 switch (code)
2249 {
2250 case OPT_mint_register_:
2251 switch (value)
2252 {
2253 case 4:
2254 fixed_regs[10] = call_used_regs [10] = 1;
2255 /* Fall through. */
2256 case 3:
2257 fixed_regs[11] = call_used_regs [11] = 1;
2258 /* Fall through. */
2259 case 2:
2260 fixed_regs[12] = call_used_regs [12] = 1;
2261 /* Fall through. */
2262 case 1:
2263 fixed_regs[13] = call_used_regs [13] = 1;
2264 /* Fall through. */
2265 case 0:
2266 return true;
2267 default:
2268 return false;
2269 }
2270 break;
2271
2272 case OPT_mmax_constant_size_:
 2273	      /* Make sure that the -mmax-constant-size option is in range.  */
2274 return value >= 0 && value <= 4;
2275
2276 case OPT_mcpu_:
98cb9b5b 2277 if (strcasecmp (arg, "RX610") == 0)
2278 rx_cpu_type = RX610;
2279 else if (strcasecmp (arg, "RX200") == 0)
2280 {
2281 target_flags |= MASK_NO_USE_FPU;
2282 rx_cpu_type = RX200;
2283 }
2284 else if (strcasecmp (arg, "RX600") != 0)
2285 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
2286 break;
2287
2288 case OPT_fpu:
2289 if (rx_cpu_type == RX200)
bf776685 2290 error ("the RX200 cpu does not have FPU hardware");
98cb9b5b 2291 break;
2292
2293 default:
2294 break;
2295 }
2296
2297 return true;
2298}
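
/* Illustrative note (not part of the original source): because of the
   fall-through in the OPT_mint_register_ case above, "-mint-register=N"
   fixes the top N of registers r13 down to r10, reserving them for
   interrupt handlers:

     -mint-register=1   r13
     -mint-register=2   r12, r13
     -mint-register=3   r11, r12, r13
     -mint-register=4   r10, r11, r12, r13  */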
2299
42d89991 2300/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
02e53c17 2301
2302static void
42d89991 2303rx_override_options_after_change (void)
98cb9b5b 2304{
2305 static bool first_time = TRUE;
98cb9b5b 2306
2307 if (first_time)
2308 {
2309 /* If this is the first time through and the user has not disabled
42d89991 2310 the use of RX FPU hardware then enable -ffinite-math-only,
2311 since the FPU instructions do not support NaNs and infinities. */
98cb9b5b 2312 if (TARGET_USE_FPU)
42d89991 2313 flag_finite_math_only = 1;
98cb9b5b 2314
98cb9b5b 2315 first_time = FALSE;
2316 }
2317 else
2318 {
 2319	      /* Warn the user if they have enabled IEEE-compliant floating point
 2320	         arithmetic (NaNs and infinities) while still using the RX FPU insns.  */
2321 if (TARGET_USE_FPU
42d89991 2322 && !flag_finite_math_only)
2323 warning (0, "RX FPU instructions do not support NaNs and infinities");
98cb9b5b 2324 }
2325}
2326
1af17d44 2327static void
2328rx_option_override (void)
2329{
2330 /* This target defaults to strict volatile bitfields. */
2331 if (flag_strict_volatile_bitfields < 0)
2332 flag_strict_volatile_bitfields = 1;
42d89991 2333
2334 rx_override_options_after_change ();
1af17d44 2335}
2336
c17f64cc 2337/* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
2338static const struct default_options rx_option_optimization_table[] =
2339 {
2340 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
2341 { OPT_LEVELS_NONE, 0, NULL, 0 }
2342 };
2343
98cb9b5b 2344\f
24833e1a 2345static bool
2346rx_allocate_stack_slots_for_args (void)
2347{
2348 /* Naked functions should not allocate stack slots for arguments. */
2349 return ! is_naked_func (NULL_TREE);
2350}
2351
2352static bool
2353rx_func_attr_inlinable (const_tree decl)
2354{
2355 return ! is_fast_interrupt_func (decl)
67e66e16 2356 && ! is_interrupt_func (decl)
24833e1a 2357 && ! is_naked_func (decl);
2358}
2359
61fc50a0 2360	/* Return nonzero if it is OK to make a tail-call to DECL, which is either
 2361	   a function_decl or NULL if this is an indirect call made through EXP.  */
2362
2363static bool
e4d9e8e5 2364rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2365{
2366 /* Do not allow indirect tailcalls. The
2367 sibcall patterns do not support them. */
2368 if (decl == NULL)
2369 return false;
2370
2371 /* Never tailcall from inside interrupt handlers or naked functions. */
2372 if (is_fast_interrupt_func (NULL_TREE)
2373 || is_interrupt_func (NULL_TREE)
2374 || is_naked_func (NULL_TREE))
2375 return false;
2376
2377 return true;
2378}
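
/* Illustrative sketch (hypothetical user code, not part of this file):
   a naked function supplies its own prologue and epilogue, which is why
   it is excluded from inlining, argument stack slots and tail-calls by
   the hooks above.  */
#if 0
void __attribute__ ((naked))
context_switch_example (void)
{
  __asm__ volatile ("rts");
}
#endif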
2379
24833e1a 2380static void
2381rx_file_start (void)
2382{
2383 if (! TARGET_AS100_SYNTAX)
2384 default_file_start ();
2385}
2386
2387static bool
2388rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2389{
c6347c7a 2390 /* The packed attribute overrides the MS behaviour. */
2391 return ! TYPE_PACKED (record_type);
24833e1a 2392}
24833e1a 2393\f
 2394	/* Returns true if X is a legitimate constant for an immediate
 2395	   operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2396
2397bool
2398rx_is_legitimate_constant (rtx x)
2399{
24833e1a 2400 switch (GET_CODE (x))
2401 {
2402 case CONST:
2403 x = XEXP (x, 0);
2404
2405 if (GET_CODE (x) == PLUS)
2406 {
2407 if (! CONST_INT_P (XEXP (x, 1)))
2408 return false;
2409
2410 /* GCC would not pass us CONST_INT + CONST_INT so we
2411 know that we have {SYMBOL|LABEL} + CONST_INT. */
2412 x = XEXP (x, 0);
2413 gcc_assert (! CONST_INT_P (x));
2414 }
2415
2416 switch (GET_CODE (x))
2417 {
2418 case LABEL_REF:
2419 case SYMBOL_REF:
2420 return true;
2421
95272799 2422 case UNSPEC:
2423 return XINT (x, 1) == UNSPEC_CONST;
2424
24833e1a 2425 default:
 2426	  /* FIXME: Can this ever happen?  */
2427 abort ();
2428 return false;
2429 }
2430 break;
2431
2432 case LABEL_REF:
2433 case SYMBOL_REF:
2434 return true;
2435 case CONST_DOUBLE:
09bb92cc 2436 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2437 case CONST_VECTOR:
2438 return false;
2439 default:
2440 gcc_assert (CONST_INT_P (x));
2441 break;
2442 }
2443
95272799 2444 return ok_for_max_constant (INTVAL (x));
24833e1a 2445}
2446
24833e1a 2447static int
2448rx_address_cost (rtx addr, bool speed)
2449{
2450 rtx a, b;
2451
2452 if (GET_CODE (addr) != PLUS)
2453 return COSTS_N_INSNS (1);
2454
2455 a = XEXP (addr, 0);
2456 b = XEXP (addr, 1);
2457
2458 if (REG_P (a) && REG_P (b))
2459 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2460 return COSTS_N_INSNS (4);
2461
2462 if (speed)
2463 /* [REG+OFF] is just as fast as [REG]. */
2464 return COSTS_N_INSNS (1);
2465
2466 if (CONST_INT_P (b)
2467 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2468 /* Try to discourage REG + <large OFF> when optimizing for size. */
2469 return COSTS_N_INSNS (2);
2470
2471 return COSTS_N_INSNS (1);
2472}
2473
2474static bool
2475rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2476{
2477 /* We can always eliminate to the frame pointer.
2478 We can eliminate to the stack pointer unless a frame
2479 pointer is needed. */
2480
2481 return to == FRAME_POINTER_REGNUM
2482 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2483}
2484\f
2485
2486static void
2487rx_trampoline_template (FILE * file)
2488{
2489 /* Output assembler code for a block containing the constant
2490 part of a trampoline, leaving space for the variable parts.
2491
 2492	   On the RX (where r8 is the static chain regnum) the trampoline
2493 looks like:
2494
2495 mov #<static chain value>, r8
2496 mov #<function's address>, r9
2497 jmp r9
2498
 2499	   In big-endian-data mode, however, instructions are read into the CPU
 2500	   4 bytes at a time.  These bytes are then swapped around before being
 2501	   passed to the decoder.  So we must partition our trampoline into
 2502	   4-byte packets and swap these packets around so that the instruction
 2503	   reader will reverse the process.  But, in order to avoid splitting
 2504	   the 32-bit constants across these packet boundaries (which would make
 2505	   inserting them into the constructed trampoline very difficult), we have
 2506	   to pad the instruction sequence with NOP insns.  I.e.:
2507
2508 nop
2509 nop
2510 mov.l #<...>, r8
2511 nop
2512 nop
2513 mov.l #<...>, r9
2514 jmp r9
2515 nop
2516 nop */
2517
2518 if (! TARGET_BIG_ENDIAN_DATA)
2519 {
2520 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2521 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2522 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2523 }
2524 else
2525 {
2526 char r8 = '0' + STATIC_CHAIN_REGNUM;
2527 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2528
2529 if (TARGET_AS100_SYNTAX)
2530 {
2531 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2532 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2533 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2534 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2535 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2536 }
2537 else
2538 {
2539 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2540 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2541 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2542 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2543 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2544 }
2545 }
2546}
2547
2548static void
2549rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2550{
2551 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2552
2553 emit_block_move (tramp, assemble_trampoline_template (),
2554 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2555
2556 if (TARGET_BIG_ENDIAN_DATA)
2557 {
2558 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2559 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2560 }
2561 else
2562 {
2563 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2564 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2565 }
2566}
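
/* Illustrative sketch (hypothetical user code, not part of this file):
   a trampoline is only built when the address of a nested function (a
   GNU C extension) escapes, as below; the template above then loads the
   static chain into r8 and jumps through r9.  */
#if 0
static int
apply_example (int (* fn) (int), int x)
{
  return fn (x);
}

static int
outer_example (int bias)
{
  int add_bias (int v) { return v + bias; }	/* Nested function.  */

  return apply_example (add_bias, 42);
}
#endif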
2567\f
ccfccd66 2568static int
2569rx_memory_move_cost (enum machine_mode mode, reg_class_t regclass, bool in)
9d2f1b03 2570{
ccfccd66 2571 return 2 + memory_move_secondary_cost (mode, regclass, in);
9d2f1b03 2572}
2573
ccfccd66 2574/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 2575
2576static unsigned int
ccfccd66 2577flags_from_mode (enum machine_mode mode)
9d2f1b03 2578{
ccfccd66 2579 switch (mode)
9d2f1b03 2580 {
ccfccd66 2581 case CC_ZSmode:
2582 return CC_FLAG_S | CC_FLAG_Z;
2583 case CC_ZSOmode:
2584 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2585 case CC_ZSCmode:
2586 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2587 case CCmode:
2588 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2589 case CC_Fmode:
2590 return CC_FLAG_FP;
2591 default:
2592 gcc_unreachable ();
2593 }
2594}
9d2f1b03 2595
ccfccd66 2596/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 2597
ccfccd66 2598static enum machine_mode
2599mode_from_flags (unsigned int f)
2600{
2601 if (f & CC_FLAG_FP)
2602 return CC_Fmode;
2603 if (f & CC_FLAG_O)
2604 {
2605 if (f & CC_FLAG_C)
2606 return CCmode;
2607 else
2608 return CC_ZSOmode;
9d2f1b03 2609 }
ccfccd66 2610 else if (f & CC_FLAG_C)
2611 return CC_ZSCmode;
2612 else
2613 return CC_ZSmode;
9d2f1b03 2614}
2615
ccfccd66 2616/* Convert an RTX_CODE to the set of flags needed to implement it.
2617 This assumes an integer comparison. */
2618
9d2f1b03 2619static unsigned int
ccfccd66 2620flags_from_code (enum rtx_code code)
9d2f1b03 2621{
ccfccd66 2622 switch (code)
9d2f1b03 2623 {
ccfccd66 2624 case LT:
2625 case GE:
2626 return CC_FLAG_S;
2627 case GT:
2628 case LE:
2629 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2630 case GEU:
2631 case LTU:
2632 return CC_FLAG_C;
2633 case GTU:
2634 case LEU:
2635 return CC_FLAG_C | CC_FLAG_Z;
2636 case EQ:
2637 case NE:
2638 return CC_FLAG_Z;
2639 default:
2640 gcc_unreachable ();
9d2f1b03 2641 }
2642}
2643
ccfccd66 2644/* Return a CC_MODE of which both M1 and M2 are subsets. */
2645
2646static enum machine_mode
2647rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
9d2f1b03 2648{
ccfccd66 2649 unsigned f;
2650
2651 /* Early out for identical modes. */
2652 if (m1 == m2)
2653 return m1;
2654
2655 /* There's no valid combination for FP vs non-FP. */
2656 f = flags_from_mode (m1) | flags_from_mode (m2);
2657 if (f & CC_FLAG_FP)
2658 return VOIDmode;
2659
2660 /* Otherwise, see what mode can implement all the flags. */
2661 return mode_from_flags (f);
9d2f1b03 2662}
8b8777b9 2663
2664/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2665
2666enum machine_mode
2667rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y ATTRIBUTE_UNUSED)
2668{
2669 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2670 return CC_Fmode;
2671
ccfccd66 2672 return mode_from_flags (flags_from_code (cmp_code));
2673}
2674
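/* Worked example (illustrative, not part of the original source): a signed
   greater-than test needs the S, O and Z flags, and CC_ZSOmode is the
   smallest mode providing them; an EQ/NE user only needs Z, so combining
   the two still fits in CC_ZSOmode.  */
#if 0
static void
cc_mode_example (void)
{
  gcc_assert (flags_from_code (GT) == (CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z));
  gcc_assert (mode_from_flags (CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z) == CC_ZSOmode);
  gcc_assert (rx_cc_modes_compatible (CC_ZSOmode, CC_ZSmode) == CC_ZSOmode);
}
#endif
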
ccfccd66 2675	/* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
 2676	   CC_MODE, then emit a branch to LABEL that tests the result with CMP1.  */
2677
2678void
2679rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2680 rtx c1, rtx c2, rtx label)
2681{
2682 rtx flags, x;
2683
2684 flags = gen_rtx_REG (cc_mode, CC_REG);
2685 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2686 x = gen_rtx_SET (VOIDmode, flags, x);
2687 emit_insn (x);
2688
2689 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2690 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2691 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2692 emit_jump_insn (x);
8b8777b9 2693}
2694
fc3b02a9 2695/* A helper function for matching parallels that set the flags. */
2696
2697bool
2698rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2699{
2700 rtx op1, flags;
2701 enum machine_mode flags_mode;
2702
2703 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2704
2705 op1 = XVECEXP (PATTERN (insn), 0, 1);
2706 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2707
2708 flags = SET_DEST (op1);
2709 flags_mode = GET_MODE (flags);
2710
2711 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2712 return false;
2713 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2714 return false;
2715
2716 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
2717 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
2718 return false;
2719
2720 return true;
2721}
2722
9d2f1b03 2723\f
24833e1a 2724#undef TARGET_FUNCTION_VALUE
2725#define TARGET_FUNCTION_VALUE rx_function_value
2726
2727#undef TARGET_RETURN_IN_MSB
2728#define TARGET_RETURN_IN_MSB rx_return_in_msb
2729
2730#undef TARGET_IN_SMALL_DATA_P
2731#define TARGET_IN_SMALL_DATA_P rx_in_small_data
2732
2733#undef TARGET_RETURN_IN_MEMORY
2734#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2735
2736#undef TARGET_HAVE_SRODATA_SECTION
2737#define TARGET_HAVE_SRODATA_SECTION true
2738
2739#undef TARGET_ASM_SELECT_RTX_SECTION
2740#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2741
2742#undef TARGET_ASM_SELECT_SECTION
2743#define TARGET_ASM_SELECT_SECTION rx_select_section
2744
2745#undef TARGET_INIT_BUILTINS
2746#define TARGET_INIT_BUILTINS rx_init_builtins
2747
2748#undef TARGET_EXPAND_BUILTIN
2749#define TARGET_EXPAND_BUILTIN rx_expand_builtin
2750
2751#undef TARGET_ASM_CONSTRUCTOR
2752#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2753
2754#undef TARGET_ASM_DESTRUCTOR
2755#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2756
2757#undef TARGET_STRUCT_VALUE_RTX
2758#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2759
2760#undef TARGET_ATTRIBUTE_TABLE
2761#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2762
2763#undef TARGET_ASM_FILE_START
2764#define TARGET_ASM_FILE_START rx_file_start
2765
2766#undef TARGET_MS_BITFIELD_LAYOUT_P
2767#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2768
2769#undef TARGET_LEGITIMATE_ADDRESS_P
2770#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2771
2772#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2773#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2774
2775#undef TARGET_ASM_FUNCTION_PROLOGUE
2776#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2777
2778#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2779#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2780
61fc50a0 2781#undef TARGET_FUNCTION_OK_FOR_SIBCALL
2782#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2783
ee4e8428 2784#undef TARGET_FUNCTION_ARG
2785#define TARGET_FUNCTION_ARG rx_function_arg
2786
2787#undef TARGET_FUNCTION_ARG_ADVANCE
2788#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
2789
bd99ba64 2790#undef TARGET_FUNCTION_ARG_BOUNDARY
2791#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
2792
24833e1a 2793#undef TARGET_SET_CURRENT_FUNCTION
2794#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2795
2796#undef TARGET_HANDLE_OPTION
2797#define TARGET_HANDLE_OPTION rx_handle_option
2798
2799#undef TARGET_ASM_INTEGER
2800#define TARGET_ASM_INTEGER rx_assemble_integer
2801
2802#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2803#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2804
2805#undef TARGET_MAX_ANCHOR_OFFSET
2806#define TARGET_MAX_ANCHOR_OFFSET 32
2807
2808#undef TARGET_ADDRESS_COST
2809#define TARGET_ADDRESS_COST rx_address_cost
2810
2811#undef TARGET_CAN_ELIMINATE
2812#define TARGET_CAN_ELIMINATE rx_can_eliminate
2813
b2d7ede1 2814#undef TARGET_CONDITIONAL_REGISTER_USAGE
2815#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
2816
24833e1a 2817#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2818#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2819
2820#undef TARGET_TRAMPOLINE_INIT
2821#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2822
6bb30542 2823#undef TARGET_PRINT_OPERAND
2824#define TARGET_PRINT_OPERAND rx_print_operand
2825
2826#undef TARGET_PRINT_OPERAND_ADDRESS
2827#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
2828
9d2f1b03 2829#undef TARGET_CC_MODES_COMPATIBLE
2830#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
2831
2832#undef TARGET_MEMORY_MOVE_COST
2833#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
2834
1af17d44 2835#undef TARGET_OPTION_OVERRIDE
2836#define TARGET_OPTION_OVERRIDE rx_option_override
2837
c17f64cc 2838#undef TARGET_OPTION_OPTIMIZATION_TABLE
2839#define TARGET_OPTION_OPTIMIZATION_TABLE rx_option_optimization_table
2840
bd7d2835 2841#undef TARGET_PROMOTE_FUNCTION_MODE
2842#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
2843
42d89991 2844#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
2845#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
02e53c17 2846
f3274970 2847#undef TARGET_EXCEPT_UNWIND_INFO
2848#define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2849
77de4b78 2850#undef TARGET_FLAGS_REGNUM
2851#define TARGET_FLAGS_REGNUM CC_REG
2852
24833e1a 2853struct gcc_target targetm = TARGET_INITIALIZER;
2854
2855/* #include "gt-rx.h" */