1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008-2017 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "cfghooks.h"
35 #include "df.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "regs.h"
39 #include "emit-rtl.h"
40 #include "diagnostic-core.h"
41 #include "varasm.h"
42 #include "stor-layout.h"
43 #include "calls.h"
44 #include "output.h"
45 #include "flags.h"
46 #include "explow.h"
47 #include "expr.h"
48 #include "toplev.h"
49 #include "langhooks.h"
50 #include "opts.h"
51 #include "builtins.h"
52
53 /* This file should be included last. */
54 #include "target-def.h"
55
56 static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
57 static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
58 static unsigned int rx_num_interrupt_regs;
59 \f
60 static unsigned int
61 rx_gp_base_regnum (void)
62 {
63 if (rx_gp_base_regnum_val == INVALID_REGNUM)
64 gcc_unreachable ();
65 return rx_gp_base_regnum_val;
66 }
67
68 static unsigned int
69 rx_pid_base_regnum (void)
70 {
71 if (rx_pid_base_regnum_val == INVALID_REGNUM)
72 gcc_unreachable ();
73 return rx_pid_base_regnum_val;
74 }
75
76 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
77
78 static tree
79 rx_decl_for_addr (rtx op)
80 {
81 if (GET_CODE (op) == MEM)
82 op = XEXP (op, 0);
83 if (GET_CODE (op) == CONST)
84 op = XEXP (op, 0);
85 while (GET_CODE (op) == PLUS)
86 op = XEXP (op, 0);
87 if (GET_CODE (op) == SYMBOL_REF)
88 return SYMBOL_REF_DECL (op);
89 return NULL_TREE;
90 }
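/* Illustrative example (not part of the original source): for an address
   such as (mem (const (plus (symbol_ref "foo") (const_int 4)))) the
   wrappers are peeled in order - MEM, CONST, PLUS - leaving the
   SYMBOL_REF for "foo", whose decl is returned.  */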
91
92 static void rx_print_operand (FILE *, rtx, int);
93
94 #define CC_FLAG_S (1 << 0)
95 #define CC_FLAG_Z (1 << 1)
96 #define CC_FLAG_O (1 << 2)
97 #define CC_FLAG_C (1 << 3)
98 #define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
99
100 static unsigned int flags_from_mode (machine_mode mode);
101 static unsigned int flags_from_code (enum rtx_code code);
102 \f
103 /* Return true if OP is a reference to an object in a PID data area. */
104
105 enum pid_type
106 {
107 PID_NOT_PID = 0, /* The object is not in the PID data area. */
108 PID_ENCODED, /* The object is in the PID data area. */
109 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
110 };
111
112 static enum pid_type
113 rx_pid_data_operand (rtx op)
114 {
115 tree op_decl;
116
117 if (!TARGET_PID)
118 return PID_NOT_PID;
119
120 if (GET_CODE (op) == PLUS
121 && GET_CODE (XEXP (op, 0)) == REG
122 && GET_CODE (XEXP (op, 1)) == CONST
123 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
124 return PID_ENCODED;
125
126 op_decl = rx_decl_for_addr (op);
127
128 if (op_decl)
129 {
130 if (TREE_READONLY (op_decl))
131 return PID_UNENCODED;
132 }
133 else
134 {
135 /* Sigh, some special cases. */
136 if (GET_CODE (op) == SYMBOL_REF
137 || GET_CODE (op) == LABEL_REF)
138 return PID_UNENCODED;
139 }
140
141 return PID_NOT_PID;
142 }
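/* Illustrative example (not part of the original source): with -mpid, an
   address that has already been rewritten has the shape
   (plus (reg) (const (unspec ...))) and is classified as PID_ENCODED.
   A bare SYMBOL_REF whose decl is TREE_READONLY is PID_UNENCODED - it
   still needs the PID base register added to it - while a reference to
   writable data yields PID_NOT_PID.  */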
143
144 static rtx
145 rx_legitimize_address (rtx x,
146 rtx oldx ATTRIBUTE_UNUSED,
147 machine_mode mode ATTRIBUTE_UNUSED)
148 {
149 if (rx_pid_data_operand (x) == PID_UNENCODED)
150 {
151 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
152 return rv;
153 }
154
155 if (GET_CODE (x) == PLUS
156 && GET_CODE (XEXP (x, 0)) == PLUS
157 && REG_P (XEXP (XEXP (x, 0), 0))
158 && REG_P (XEXP (x, 1)))
159 return force_reg (SImode, x);
160
161 return x;
162 }
163
164 /* Return true if OP is a reference to an object in a small data area. */
165
166 static bool
167 rx_small_data_operand (rtx op)
168 {
169 if (rx_small_data_limit == 0)
170 return false;
171
172 if (GET_CODE (op) == SYMBOL_REF)
173 return SYMBOL_REF_SMALL_P (op);
174
175 return false;
176 }
177
178 static bool
179 rx_is_legitimate_address (machine_mode mode, rtx x,
180 bool strict ATTRIBUTE_UNUSED)
181 {
182 if (RTX_OK_FOR_BASE (x, strict))
183 /* Register Indirect. */
184 return true;
185
186 if ((GET_MODE_SIZE (mode) == 4
187 || GET_MODE_SIZE (mode) == 2
188 || GET_MODE_SIZE (mode) == 1)
189 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
190 /* Pre-decrement Register Indirect or
191 Post-increment Register Indirect. */
192 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
193
194 switch (rx_pid_data_operand (x))
195 {
196 case PID_UNENCODED:
197 return false;
198 case PID_ENCODED:
199 return true;
200 default:
201 break;
202 }
203
204 if (GET_CODE (x) == PLUS)
205 {
206 rtx arg1 = XEXP (x, 0);
207 rtx arg2 = XEXP (x, 1);
208 rtx index = NULL_RTX;
209
210 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
211 index = arg2;
212 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
213 index = arg1;
214 else
215 return false;
216
217 switch (GET_CODE (index))
218 {
219 case CONST_INT:
220 {
221 /* Register Relative: REG + INT.
222 Only positive, mode-aligned, mode-sized
223 displacements are allowed. */
224 HOST_WIDE_INT val = INTVAL (index);
225 int factor;
226
227 if (val < 0)
228 return false;
229
230 switch (GET_MODE_SIZE (mode))
231 {
232 default:
233 case 4: factor = 4; break;
234 case 2: factor = 2; break;
235 case 1: factor = 1; break;
236 }
237
238 if (val > (65535 * factor))
239 return false;
240 return (val % factor) == 0;
241 }
242
243 case REG:
244 /* Unscaled Indexed Register Indirect: REG + REG
245 Size has to be "QI", REG has to be valid. */
246 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
247
248 case MULT:
249 {
250 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
251 Factor has to equal the mode size, REG has to be valid. */
252 rtx factor;
253
254 factor = XEXP (index, 1);
255 index = XEXP (index, 0);
256
257 return REG_P (index)
258 && RTX_OK_FOR_BASE (index, strict)
259 && CONST_INT_P (factor)
260 && GET_MODE_SIZE (mode) == INTVAL (factor);
261 }
262
263 default:
264 return false;
265 }
266 }
267
268 /* Small data area accesses turn into register relative offsets. */
269 return rx_small_data_operand (x);
270 }
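/* Worked example (illustrative, not part of the original source): for an
   SImode access the factor is 4, so REG+INT addresses from 0[reg] up to
   262140[reg] (65535 * 4) are legitimate provided the displacement is a
   multiple of 4.  2[reg] fails the alignment check and -4[reg] fails
   the positivity check.  */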
271
272 /* Returns TRUE for simple memory addresses, i.e. ones
273 that do not involve indexed register indirect addressing
274 or pre/post increment/decrement. */
275
276 bool
277 rx_is_restricted_memory_address (rtx mem, machine_mode mode)
278 {
279 if (! rx_is_legitimate_address
280 (mode, mem, reload_in_progress || reload_completed))
281 return false;
282
283 switch (GET_CODE (mem))
284 {
285 case REG:
286 /* Simple memory addresses are OK. */
287 return true;
288
289 case PRE_DEC:
290 case POST_INC:
291 return false;
292
293 case PLUS:
294 {
295 rtx base, index;
296
297 /* Only allow REG+INT addressing. */
298 base = XEXP (mem, 0);
299 index = XEXP (mem, 1);
300
301 if (! RX_REG_P (base) || ! CONST_INT_P (index))
302 return false;
303
304 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
305 }
306
307 case SYMBOL_REF:
308 /* Can happen when small data is being supported.
309 Assume that it will be resolved into GP+INT. */
310 return true;
311
312 default:
313 gcc_unreachable ();
314 }
315 }
316
317 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
318
319 static bool
320 rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
321 {
322 if (GET_CODE (addr) == CONST)
323 addr = XEXP (addr, 0);
324
325 switch (GET_CODE (addr))
326 {
327 /* --REG and REG++ only work in SImode. */
328 case PRE_DEC:
329 case POST_INC:
330 return true;
331
332 case MINUS:
333 case PLUS:
334 if (! REG_P (XEXP (addr, 0)))
335 return true;
336
337 addr = XEXP (addr, 1);
338
339 switch (GET_CODE (addr))
340 {
341 case REG:
342 /* REG+REG only works in SImode. */
343 return true;
344
345 case CONST_INT:
346 /* REG+INT is only mode independent if INT is a
347 multiple of 4, positive, and fits into 16 bits. */
348 if (((INTVAL (addr) & 3) == 0)
349 && IN_RANGE (INTVAL (addr), 4, 0xfffc))
350 return false;
351 return true;
352
353 case SYMBOL_REF:
354 case LABEL_REF:
355 return true;
356
357 case MULT:
358 /* REG+REG*SCALE is always mode dependent. */
359 return true;
360
361 default:
362 /* Not recognized, so treat as mode dependent. */
363 return true;
364 }
365
366 case CONST_INT:
367 case SYMBOL_REF:
368 case LABEL_REF:
369 case REG:
370 /* These are all mode independent. */
371 return false;
372
373 default:
374 /* Everything else is unrecognized,
375 so treat as mode dependent. */
376 return true;
377 }
378 }
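/* Illustrative summary (not part of the original source): [reg] and bare
   symbolic addresses are mode independent.  Auto-modified addresses such
   as [reg+], indexed forms such as [reg,reg], and REG+INT displacements
   that are unaligned or outside the 4..0xfffc range all change meaning
   with the access mode and so are reported as mode dependent.  */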
379 \f
380 /* A C compound statement to output to stdio stream FILE the
381 assembler syntax for an instruction operand that is a memory
382 reference whose address is ADDR. */
383
384 static void
385 rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
386 {
387 switch (GET_CODE (addr))
388 {
389 case REG:
390 fprintf (file, "[");
391 rx_print_operand (file, addr, 0);
392 fprintf (file, "]");
393 break;
394
395 case PRE_DEC:
396 fprintf (file, "[-");
397 rx_print_operand (file, XEXP (addr, 0), 0);
398 fprintf (file, "]");
399 break;
400
401 case POST_INC:
402 fprintf (file, "[");
403 rx_print_operand (file, XEXP (addr, 0), 0);
404 fprintf (file, "+]");
405 break;
406
407 case PLUS:
408 {
409 rtx arg1 = XEXP (addr, 0);
410 rtx arg2 = XEXP (addr, 1);
411 rtx base, index;
412
413 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
414 base = arg1, index = arg2;
415 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
416 base = arg2, index = arg1;
417 else
418 {
419 rx_print_operand (file, arg1, 0);
420 fprintf (file, " + ");
421 rx_print_operand (file, arg2, 0);
422 break;
423 }
424
425 if (REG_P (index) || GET_CODE (index) == MULT)
426 {
427 fprintf (file, "[");
428 rx_print_operand (file, index, 'A');
429 fprintf (file, ",");
430 }
431 else /* GET_CODE (index) == CONST_INT */
432 {
433 rx_print_operand (file, index, 'A');
434 fprintf (file, "[");
435 }
436 rx_print_operand (file, base, 0);
437 fprintf (file, "]");
438 break;
439 }
440
441 case CONST:
442 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
443 {
444 addr = XEXP (addr, 0);
445 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
446
447 addr = XVECEXP (addr, 0, 0);
448 gcc_assert (CONST_INT_P (addr));
449 fprintf (file, "#");
450 output_addr_const (file, addr);
451 break;
452 }
453 fprintf (file, "#");
454 output_addr_const (file, XEXP (addr, 0));
455 break;
456
457 case UNSPEC:
458 addr = XVECEXP (addr, 0, 0);
459 /* Fall through. */
460 case LABEL_REF:
461 case SYMBOL_REF:
462 fprintf (file, "#");
463 /* Fall through. */
464 default:
465 output_addr_const (file, addr);
466 break;
467 }
468 }
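/* Example output (illustrative, not part of the original source): a plain
   register prints as "[r1]", pre-decrement as "[-r1]", post-increment as
   "[r1+]", a constant displacement as "4[r1]" and a scaled index as
   "[r2,r1]", matching the RX assembler's addressing syntax.  */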
469
470 static void
471 rx_print_integer (FILE * file, HOST_WIDE_INT val)
472 {
473 if (val < 64)
474 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
475 else
476 fprintf (file,
477 TARGET_AS100_SYNTAX
478 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
479 val);
480 }
481
482 static bool
483 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
484 {
485 const char * op = integer_asm_op (size, is_aligned);
486
487 if (! CONST_INT_P (x))
488 return default_assemble_integer (x, size, is_aligned);
489
490 if (op == NULL)
491 return false;
492 fputs (op, asm_out_file);
493
494 rx_print_integer (asm_out_file, INTVAL (x));
495 fputc ('\n', asm_out_file);
496 return true;
497 }
498
499
500 /* Handles the insertion of a single operand into the assembler output.
501 The %<letter> directives supported are:
502
503 %A Print an operand without a leading # character.
504 %B Print an integer comparison name.
505 %C Print a control register name.
506 %F Print a condition code flag name.
507 %G Register used for small-data-area addressing
508 %H Print high part of a DImode register, integer or address.
509 %L Print low part of a DImode register, integer or address.
510 %N Print the negation of the immediate value.
511 %P Register used for PID addressing
512 %Q If the operand is a MEM, then correctly generate
513 register indirect or register relative addressing.
514 %R Like %Q but for zero-extending loads. */
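/* Illustrative expansions (not part of the original source): given the
   operand (mem:HI (plus (reg r1) (const_int 8))), %Q prints "8[r1].W"
   and %R prints "8[r1].UW".  With no displacement the offset is simply
   omitted, giving "[r1].W".  */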
515
516 static void
517 rx_print_operand (FILE * file, rtx op, int letter)
518 {
519 bool unsigned_load = false;
520 bool print_hash = true;
521
522 if (letter == 'A'
523 && ((GET_CODE (op) == CONST
524 && GET_CODE (XEXP (op, 0)) == UNSPEC)
525 || GET_CODE (op) == UNSPEC))
526 {
527 print_hash = false;
528 letter = 0;
529 }
530
531 switch (letter)
532 {
533 case 'A':
534 /* Print an operand without a leading #. */
535 if (MEM_P (op))
536 op = XEXP (op, 0);
537
538 switch (GET_CODE (op))
539 {
540 case LABEL_REF:
541 case SYMBOL_REF:
542 output_addr_const (file, op);
543 break;
544 case CONST_INT:
545 fprintf (file, "%ld", (long) INTVAL (op));
546 break;
547 default:
548 rx_print_operand (file, op, 0);
549 break;
550 }
551 break;
552
553 case 'B':
554 {
555 enum rtx_code code = GET_CODE (op);
556 machine_mode mode = GET_MODE (XEXP (op, 0));
557 const char *ret;
558
559 if (mode == CC_Fmode)
560 {
561 /* C flag is undefined, and O flag carries unordered. None of the
562 branch combinations that include O use it helpfully. */
563 switch (code)
564 {
565 case ORDERED:
566 ret = "no";
567 break;
568 case UNORDERED:
569 ret = "o";
570 break;
571 case LT:
572 ret = "n";
573 break;
574 case GE:
575 ret = "pz";
576 break;
577 case EQ:
578 ret = "eq";
579 break;
580 case NE:
581 ret = "ne";
582 break;
583 default:
584 gcc_unreachable ();
585 }
586 }
587 else
588 {
589 unsigned int flags = flags_from_mode (mode);
590
591 switch (code)
592 {
593 case LT:
594 ret = (flags & CC_FLAG_O ? "lt" : "n");
595 break;
596 case GE:
597 ret = (flags & CC_FLAG_O ? "ge" : "pz");
598 break;
599 case GT:
600 ret = "gt";
601 break;
602 case LE:
603 ret = "le";
604 break;
605 case GEU:
606 ret = "geu";
607 break;
608 case LTU:
609 ret = "ltu";
610 break;
611 case GTU:
612 ret = "gtu";
613 break;
614 case LEU:
615 ret = "leu";
616 break;
617 case EQ:
618 ret = "eq";
619 break;
620 case NE:
621 ret = "ne";
622 break;
623 default:
624 gcc_unreachable ();
625 }
626 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
627 }
628 fputs (ret, file);
629 break;
630 }
631
632 case 'C':
633 gcc_assert (CONST_INT_P (op));
634 switch (INTVAL (op))
635 {
636 case CTRLREG_PSW: fprintf (file, "psw"); break;
637 case CTRLREG_USP: fprintf (file, "usp"); break;
638 case CTRLREG_FPSW: fprintf (file, "fpsw"); break;
639 case CTRLREG_CPEN: fprintf (file, "cpen"); break;
640 case CTRLREG_BPSW: fprintf (file, "bpsw"); break;
641 case CTRLREG_BPC: fprintf (file, "bpc"); break;
642 case CTRLREG_ISP: fprintf (file, "isp"); break;
643 case CTRLREG_FINTV: fprintf (file, "fintv"); break;
644 case CTRLREG_INTB: fprintf (file, "intb"); break;
645 default:
646 warning (0, "unrecognized control register number: %d - using 'psw'",
647 (int) INTVAL (op));
648 fprintf (file, "psw");
649 break;
650 }
651 break;
652
653 case 'F':
654 gcc_assert (CONST_INT_P (op));
655 switch (INTVAL (op))
656 {
657 case 0: case 'c': case 'C': fprintf (file, "C"); break;
658 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
659 case 2: case 's': case 'S': fprintf (file, "S"); break;
660 case 3: case 'o': case 'O': fprintf (file, "O"); break;
661 case 8: case 'i': case 'I': fprintf (file, "I"); break;
662 case 9: case 'u': case 'U': fprintf (file, "U"); break;
663 default:
664 gcc_unreachable ();
665 }
666 break;
667
668 case 'G':
669 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
670 break;
671
672 case 'H':
673 switch (GET_CODE (op))
674 {
675 case REG:
676 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
677 break;
678 case CONST_INT:
679 {
680 HOST_WIDE_INT v = INTVAL (op);
681
682 fprintf (file, "#");
683 /* Trickery to avoid problems with shifting 32 bits at a time. */
684 v = v >> 16;
685 v = v >> 16;
686 rx_print_integer (file, v);
687 break;
688 }
689 case CONST_DOUBLE:
690 fprintf (file, "#");
691 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
692 break;
693 case MEM:
694 if (! WORDS_BIG_ENDIAN)
695 op = adjust_address (op, SImode, 4);
696 output_address (GET_MODE (op), XEXP (op, 0));
697 break;
698 default:
699 gcc_unreachable ();
700 }
701 break;
702
703 case 'L':
704 switch (GET_CODE (op))
705 {
706 case REG:
707 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
708 break;
709 case CONST_INT:
710 fprintf (file, "#");
711 rx_print_integer (file, INTVAL (op) & 0xffffffff);
712 break;
713 case CONST_DOUBLE:
714 fprintf (file, "#");
715 rx_print_integer (file, CONST_DOUBLE_LOW (op));
716 break;
717 case MEM:
718 if (WORDS_BIG_ENDIAN)
719 op = adjust_address (op, SImode, 4);
720 output_address (GET_MODE (op), XEXP (op, 0));
721 break;
722 default:
723 gcc_unreachable ();
724 }
725 break;
726
727 case 'N':
728 gcc_assert (CONST_INT_P (op));
729 fprintf (file, "#");
730 rx_print_integer (file, - INTVAL (op));
731 break;
732
733 case 'P':
734 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
735 break;
736
737 case 'R':
738 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
739 unsigned_load = true;
740 /* Fall through. */
741 case 'Q':
742 if (MEM_P (op))
743 {
744 HOST_WIDE_INT offset;
745 rtx mem = op;
746
747 op = XEXP (op, 0);
748
749 if (REG_P (op))
750 offset = 0;
751 else if (GET_CODE (op) == PLUS)
752 {
753 rtx displacement;
754
755 if (REG_P (XEXP (op, 0)))
756 {
757 displacement = XEXP (op, 1);
758 op = XEXP (op, 0);
759 }
760 else
761 {
762 displacement = XEXP (op, 0);
763 op = XEXP (op, 1);
764 gcc_assert (REG_P (op));
765 }
766
767 gcc_assert (CONST_INT_P (displacement));
768 offset = INTVAL (displacement);
769 gcc_assert (offset >= 0);
770
771 fprintf (file, "%ld", offset);
772 }
773 else
774 gcc_unreachable ();
775
776 fprintf (file, "[");
777 rx_print_operand (file, op, 0);
778 fprintf (file, "].");
779
780 switch (GET_MODE_SIZE (GET_MODE (mem)))
781 {
782 case 1:
783 gcc_assert (offset <= 65535 * 1);
784 fprintf (file, unsigned_load ? "UB" : "B");
785 break;
786 case 2:
787 gcc_assert (offset % 2 == 0);
788 gcc_assert (offset <= 65535 * 2);
789 fprintf (file, unsigned_load ? "UW" : "W");
790 break;
791 case 4:
792 gcc_assert (offset % 4 == 0);
793 gcc_assert (offset <= 65535 * 4);
794 fprintf (file, "L");
795 break;
796 default:
797 gcc_unreachable ();
798 }
799 break;
800 }
801
802 /* Fall through. */
803
804 default:
805 if (GET_CODE (op) == CONST
806 && GET_CODE (XEXP (op, 0)) == UNSPEC)
807 op = XEXP (op, 0);
808 else if (GET_CODE (op) == CONST
809 && GET_CODE (XEXP (op, 0)) == PLUS
810 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
811 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
812 {
813 if (print_hash)
814 fprintf (file, "#");
815 fprintf (file, "(");
816 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
817 fprintf (file, " + ");
818 output_addr_const (file, XEXP (XEXP (op, 0), 1));
819 fprintf (file, ")");
820 return;
821 }
822
823 switch (GET_CODE (op))
824 {
825 case MULT:
826 /* Should be the scaled part of an
827 indexed register indirect address. */
828 {
829 rtx base = XEXP (op, 0);
830 rtx index = XEXP (op, 1);
831
832 /* Check for a swapped index register and scaling factor.
833 Not sure if this can happen, but be prepared to handle it. */
834 if (CONST_INT_P (base) && REG_P (index))
835 {
836 rtx tmp = base;
837 base = index;
838 index = tmp;
839 }
840
841 gcc_assert (REG_P (base));
842 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
843 gcc_assert (CONST_INT_P (index));
844 /* Do not try to verify the value of the scale factor as it is based
845 on the mode of the MEM, not the mode of the MULT (which
846 will always be SImode). */
847 fprintf (file, "%s", reg_names [REGNO (base)]);
848 break;
849 }
850
851 case MEM:
852 output_address (GET_MODE (op), XEXP (op, 0));
853 break;
854
855 case PLUS:
856 output_address (VOIDmode, op);
857 break;
858
859 case REG:
860 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
861 fprintf (file, "%s", reg_names [REGNO (op)]);
862 break;
863
864 case SUBREG:
865 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
866 fprintf (file, "%s", reg_names [subreg_regno (op)]);
867 break;
868
869 /* This will only be single precision.... */
870 case CONST_DOUBLE:
871 {
872 unsigned long val;
873
874 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
875 if (print_hash)
876 fprintf (file, "#");
877 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
878 break;
879 }
880
881 case CONST_INT:
882 if (print_hash)
883 fprintf (file, "#");
884 rx_print_integer (file, INTVAL (op));
885 break;
886
887 case UNSPEC:
888 switch (XINT (op, 1))
889 {
890 case UNSPEC_PID_ADDR:
891 {
892 rtx sym, add;
893
894 if (print_hash)
895 fprintf (file, "#");
896 sym = XVECEXP (op, 0, 0);
897 add = NULL_RTX;
898 fprintf (file, "(");
899 if (GET_CODE (sym) == PLUS)
900 {
901 add = XEXP (sym, 1);
902 sym = XEXP (sym, 0);
903 }
904 output_addr_const (file, sym);
905 if (add != NULL_RTX)
906 {
907 fprintf (file, "+");
908 output_addr_const (file, add);
909 }
910 fprintf (file, "-__pid_base");
911 fprintf (file, ")");
912 return;
913 }
914 }
915 /* Fall through */
916
917 case CONST:
918 case SYMBOL_REF:
919 case LABEL_REF:
920 case CODE_LABEL:
921 rx_print_operand_address (file, VOIDmode, op);
922 break;
923
924 default:
925 gcc_unreachable ();
926 }
927 break;
928 }
929 }
930
931 /* Maybe convert an operand into its PID format. */
932
933 rtx
934 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
935 {
936 if (rx_pid_data_operand (op) == PID_UNENCODED)
937 {
938 if (GET_CODE (op) == MEM)
939 {
940 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
941 op = replace_equiv_address (op, a);
942 }
943 else
944 {
945 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
946 }
947
948 if (copy_to_reg)
949 op = copy_to_mode_reg (GET_MODE (op), op);
950 }
951 return op;
952 }
953
954 /* Returns an assembler template for a move instruction. */
955
956 char *
957 rx_gen_move_template (rtx * operands, bool is_movu)
958 {
959 static char out_template [64];
960 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
961 const char * src_template;
962 const char * dst_template;
963 rtx dest = operands[0];
964 rtx src = operands[1];
965
966 /* Decide which extension, if any, should be given to the move instruction. */
967 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
968 {
969 case E_QImode:
970 /* The .B extension is not valid when
971 loading an immediate into a register. */
972 if (! REG_P (dest) || ! CONST_INT_P (src))
973 extension = ".B";
974 break;
975 case E_HImode:
976 if (! REG_P (dest) || ! CONST_INT_P (src))
977 /* The .W extension is not valid when
978 loading an immediate into a register. */
979 extension = ".W";
980 break;
981 case E_DFmode:
982 case E_DImode:
983 case E_SFmode:
984 case E_SImode:
985 extension = ".L";
986 break;
987 case E_VOIDmode:
988 /* This mode is used by constants. */
989 break;
990 default:
991 debug_rtx (src);
992 gcc_unreachable ();
993 }
994
995 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
996 {
997 gcc_assert (GET_MODE (src) != DImode);
998 gcc_assert (GET_MODE (src) != DFmode);
999
1000 src_template = "(%A1 - __pid_base)[%P1]";
1001 }
1002 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1003 {
1004 gcc_assert (GET_MODE (src) != DImode);
1005 gcc_assert (GET_MODE (src) != DFmode);
1006
1007 src_template = "%%gp(%A1)[%G1]";
1008 }
1009 else
1010 src_template = "%1";
1011
1012 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
1013 {
1014 gcc_assert (GET_MODE (dest) != DImode);
1015 gcc_assert (GET_MODE (dest) != DFmode);
1016
1017 dst_template = "%%gp(%A0)[%G0]";
1018 }
1019 else
1020 dst_template = "%0";
1021
1022 if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
1023 {
1024 gcc_assert (! is_movu);
1025
1026 if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
1027 sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
1028 else
1029 sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1030 }
1031 else
1032 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1033 extension, src_template, dst_template);
1034 return out_template;
1035 }
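/* Example templates (illustrative, not part of the original source): an
   SImode register-to-register move yields "mov.L\t%1, %0", a QImode load
   from memory yields "mov.B\t%1, %0" ("movu.B" for a zero-extending
   load), and a DImode move is split into two mov.L instructions ordered
   so that an overlapping source register is not clobbered.  */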
1036 \f
1037 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
1038
1039 static inline unsigned int
1040 rx_round_up (unsigned int value, unsigned int alignment)
1041 {
1042 alignment -= 1;
1043 return (value + alignment) & (~ alignment);
1044 }
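/* Worked example (illustrative, not part of the original source): with an
   alignment of 4, rx_round_up (13, 4) computes (13 + 3) & ~3 == 16,
   while already aligned values are returned unchanged, e.g.
   rx_round_up (16, 4) == 16.  */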
1045
1046 /* Return the number of bytes in the argument registers
1047 occupied by an argument of type TYPE and mode MODE. */
1048
1049 static unsigned int
1050 rx_function_arg_size (machine_mode mode, const_tree type)
1051 {
1052 unsigned int num_bytes;
1053
1054 num_bytes = (mode == BLKmode)
1055 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1056 return rx_round_up (num_bytes, UNITS_PER_WORD);
1057 }
1058
1059 #define NUM_ARG_REGS 4
1060 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1061
1062 /* Return an RTL expression describing the register holding a function
1063 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1064 be passed on the stack. CUM describes the previous parameters to the
1065 function and NAMED is false if the parameter is part of a variable
1066 parameter list, or the last named parameter before the start of a
1067 variable parameter list. */
1068
1069 static rtx
1070 rx_function_arg (cumulative_args_t cum, machine_mode mode,
1071 const_tree type, bool named)
1072 {
1073 unsigned int next_reg;
1074 unsigned int bytes_so_far = *get_cumulative_args (cum);
1075 unsigned int size;
1076 unsigned int rounded_size;
1077
1078 /* An exploded version of rx_function_arg_size. */
1079 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1080 /* If the size is not known it cannot be passed in registers. */
1081 if (size < 1)
1082 return NULL_RTX;
1083
1084 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1085
1086 /* Don't pass this arg via registers if there
1087 are insufficient registers to hold all of it. */
1088 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1089 return NULL_RTX;
1090
1091 /* Unnamed arguments and the last named argument in a
1092 variadic function are always passed on the stack. */
1093 if (!named)
1094 return NULL_RTX;
1095
1096 /* Structures must occupy an exact number of registers,
1097 otherwise they are passed on the stack. */
1098 if ((type == NULL || AGGREGATE_TYPE_P (type))
1099 && (size % UNITS_PER_WORD) != 0)
1100 return NULL_RTX;
1101
1102 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1103
1104 return gen_rtx_REG (mode, next_reg);
1105 }
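/* Illustrative example (not part of the original source): with
   UNITS_PER_WORD == 4 and NUM_ARG_REGS == 4, the first four word-sized
   named arguments land in r1..r4 (bytes_so_far 0 maps to r1, 4 to r2,
   and so on).  A fifth word would exceed MAX_NUM_ARG_BYTES and goes on
   the stack, as do all unnamed (variadic) arguments.  */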
1106
1107 static void
1108 rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
1109 const_tree type, bool named ATTRIBUTE_UNUSED)
1110 {
1111 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1112 }
1113
1114 static unsigned int
1115 rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1116 const_tree type ATTRIBUTE_UNUSED)
1117 {
1118 /* Older versions of the RX backend aligned all on-stack arguments
1119 to 32-bits. The RX C ABI however says that they should be
1120 aligned to their natural alignment. (See section 5.2.2 of the ABI). */
1121 if (TARGET_GCC_ABI)
1122 return STACK_BOUNDARY;
1123
1124 if (type)
1125 {
1126 if (DECL_P (type))
1127 return DECL_ALIGN (type);
1128 return TYPE_ALIGN (type);
1129 }
1130
1131 return PARM_BOUNDARY;
1132 }
1133
1134 /* Return an RTL describing where a function return value of type RET_TYPE
1135 is held. */
1136
1137 static rtx
1138 rx_function_value (const_tree ret_type,
1139 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1140 bool outgoing ATTRIBUTE_UNUSED)
1141 {
1142 machine_mode mode = TYPE_MODE (ret_type);
1143
1144 /* RX ABI specifies that small integer types are
1145 promoted to int when returned by a function. */
1146 if (GET_MODE_SIZE (mode) > 0
1147 && GET_MODE_SIZE (mode) < 4
1148 && ! COMPLEX_MODE_P (mode)
1149 && ! VECTOR_TYPE_P (ret_type)
1150 && ! VECTOR_MODE_P (mode)
1151 )
1152 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1153
1154 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1155 }
1156
1157 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1158 regard to function returns as does TARGET_FUNCTION_VALUE. */
1159
1160 static machine_mode
1161 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1162 machine_mode mode,
1163 int * punsignedp ATTRIBUTE_UNUSED,
1164 const_tree funtype ATTRIBUTE_UNUSED,
1165 int for_return)
1166 {
1167 if (for_return != 1
1168 || GET_MODE_SIZE (mode) >= 4
1169 || COMPLEX_MODE_P (mode)
1170 || VECTOR_MODE_P (mode)
1171 || VECTOR_TYPE_P (type)
1172 || GET_MODE_SIZE (mode) < 1)
1173 return mode;
1174
1175 return SImode;
1176 }
1177
1178 static bool
1179 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1180 {
1181 HOST_WIDE_INT size;
1182
1183 if (TYPE_MODE (type) != BLKmode
1184 && ! AGGREGATE_TYPE_P (type))
1185 return false;
1186
1187 size = int_size_in_bytes (type);
1188 /* Large structs and those whose size is not an
1189 exact multiple of 4 are returned in memory. */
1190 return size < 1
1191 || size > 16
1192 || (size % UNITS_PER_WORD) != 0;
1193 }
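/* Illustrative example (not part of the original source): a 12 byte
   struct (no larger than 16 bytes and a multiple of UNITS_PER_WORD) is
   returned in registers, whereas a 20 byte struct or a 6 byte struct is
   returned in memory.  */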
1194
1195 static rtx
1196 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1197 int incoming ATTRIBUTE_UNUSED)
1198 {
1199 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1200 }
1201
1202 static bool
1203 rx_return_in_msb (const_tree valtype)
1204 {
1205 return TARGET_BIG_ENDIAN_DATA
1206 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1207 }
1208
1209 /* Returns true if the provided function has the specified attribute. */
1210
1211 static inline bool
1212 has_func_attr (const_tree decl, const char * func_attr)
1213 {
1214 if (decl == NULL_TREE)
1215 decl = current_function_decl;
1216
1217 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1218 }
1219
1220 /* Returns true if the provided function has the "fast_interrupt" attribute. */
1221
1222 bool
1223 is_fast_interrupt_func (const_tree decl)
1224 {
1225 return has_func_attr (decl, "fast_interrupt");
1226 }
1227
1228 /* Returns true if the provided function has the "interrupt" attribute. */
1229
1230 bool
1231 is_interrupt_func (const_tree decl)
1232 {
1233 return has_func_attr (decl, "interrupt");
1234 }
1235
1236 /* Returns true if the provided function has the "naked" attribute. */
1237
1238 static inline bool
1239 is_naked_func (const_tree decl)
1240 {
1241 return has_func_attr (decl, "naked");
1242 }
1243 \f
1244 static bool use_fixed_regs = false;
1245
1246 static void
1247 rx_conditional_register_usage (void)
1248 {
1249 static bool using_fixed_regs = false;
1250
1251 if (TARGET_PID)
1252 {
1253 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1254 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1255 }
1256
1257 if (rx_small_data_limit > 0)
1258 {
1259 if (TARGET_PID)
1260 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1261 else
1262 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1263
1264 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1265 }
1266
1267 if (use_fixed_regs != using_fixed_regs)
1268 {
1269 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1270 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1271
1272 if (use_fixed_regs)
1273 {
1274 unsigned int r;
1275
1276 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1277 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1278
1279 /* This is for fast interrupt handlers. Any register in
1280 the range r10 to r13 (inclusive) that is currently
1281 marked as fixed is now a viable, call-used register. */
1282 for (r = 10; r <= 13; r++)
1283 if (fixed_regs[r])
1284 {
1285 fixed_regs[r] = 0;
1286 call_used_regs[r] = 1;
1287 }
1288
1289 /* Mark r7 as fixed. This is just a hack to avoid
1290 altering the reg_alloc_order array so that the newly
1291 freed r10-r13 registers are the preferred registers. */
1292 fixed_regs[7] = call_used_regs[7] = 1;
1293 }
1294 else
1295 {
1296 /* Restore the normal register masks. */
1297 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1298 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1299 }
1300
1301 using_fixed_regs = use_fixed_regs;
1302 }
1303 }
1304
1305 struct decl_chain
1306 {
1307 tree fndecl;
1308 struct decl_chain * next;
1309 };
1310
1311 /* Stack of decls for which we have issued warnings. */
1312 static struct decl_chain * warned_decls = NULL;
1313
1314 static void
1315 add_warned_decl (tree fndecl)
1316 {
1317 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1318
1319 warned->fndecl = fndecl;
1320 warned->next = warned_decls;
1321 warned_decls = warned;
1322 }
1323
1324 /* Returns TRUE if FNDECL is on our list of warned about decls. */
1325
1326 static bool
1327 already_warned (tree fndecl)
1328 {
1329 struct decl_chain * warned;
1330
1331 for (warned = warned_decls;
1332 warned != NULL;
1333 warned = warned->next)
1334 if (warned->fndecl == fndecl)
1335 return true;
1336
1337 return false;
1338 }
1339
1340 /* Perform any actions necessary before starting to compile FNDECL.
1341 For the RX we use this to make sure that we have the correct
1342 set of register masks selected. If FNDECL is NULL then we are
1343 compiling top level things. */
1344
1345 static void
1346 rx_set_current_function (tree fndecl)
1347 {
1348 /* Remember the last target of rx_set_current_function. */
1349 static tree rx_previous_fndecl;
1350 bool prev_was_fast_interrupt;
1351 bool current_is_fast_interrupt;
1352
1353 /* Only change the context if the function changes. This hook is called
1354 several times in the course of compiling a function, and we don't want
1355 to slow things down too much or call target_reinit when it isn't safe. */
1356 if (fndecl == rx_previous_fndecl)
1357 return;
1358
1359 prev_was_fast_interrupt
1360 = rx_previous_fndecl
1361 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1362
1363 current_is_fast_interrupt
1364 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1365
1366 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1367 {
1368 use_fixed_regs = current_is_fast_interrupt;
1369 target_reinit ();
1370 }
1371
1372 if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
1373 {
1374 /* We do not warn about the first fast interrupt routine that
1375 we see. Instead we just push it onto the stack. */
1376 if (warned_decls == NULL)
1377 add_warned_decl (fndecl);
1378
1379 /* Otherwise if this fast interrupt is one for which we have
1380 not already issued a warning, generate one and then push
1381 it onto the stack as well. */
1382 else if (! already_warned (fndecl))
1383 {
1384 warning (0, "multiple fast interrupt routines seen: %qE and %qE",
1385 fndecl, warned_decls->fndecl);
1386 add_warned_decl (fndecl);
1387 }
1388 }
1389
1390 rx_previous_fndecl = fndecl;
1391 }
1392 \f
1393 /* Typical stack layout should look like this after the function's prologue:
1394
1395 | |
1396 -- ^
1397 | | \ |
1398 | | arguments saved | Increasing
1399 | | on the stack | addresses
1400 PARENT arg pointer -> | | /
1401 -------------------------- ---- -------------------
1402 CHILD |ret | return address
1403 --
1404 | | \
1405 | | call saved
1406 | | registers
1407 | | /
1408 --
1409 | | \
1410 | | local
1411 | | variables
1412 frame pointer -> | | /
1413 --
1414 | | \
1415 | | outgoing | Decreasing
1416 | | arguments | addresses
1417 current stack pointer -> | | / |
1418 -------------------------- ---- ------------------ V
1419 | | */
1420
1421 static unsigned int
1422 bit_count (unsigned int x)
1423 {
1424 const unsigned int m1 = 0x55555555;
1425 const unsigned int m2 = 0x33333333;
1426 const unsigned int m4 = 0x0f0f0f0f;
1427
1428 x -= (x >> 1) & m1;
1429 x = (x & m2) + ((x >> 2) & m2);
1430 x = (x + (x >> 4)) & m4;
1431 x += x >> 8;
1432
1433 return (x + (x >> 16)) & 0x3f;
1434 }
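/* Worked example (illustrative, not part of the original source): this is
   the classic parallel popcount.  For x == 0xff the pairwise step leaves
   0xaa (each 2-bit field holds 2), the next step leaves 0x44 (each
   nibble holds 4), the nibble fold leaves 0x08 in the low byte, and the
   final folds return 8.  */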
1435
1436 #define MUST_SAVE_ACC_REGISTER \
1437 (TARGET_SAVE_ACC_REGISTER \
1438 && (is_interrupt_func (NULL_TREE) \
1439 || is_fast_interrupt_func (NULL_TREE)))
1440
1441 /* Returns either the lowest numbered and highest numbered registers that
1442 occupy the call-saved area of the stack frame, if the registers are
1443 stored as a contiguous block, or else a bitmask of the individual
1444 registers if they are stored piecemeal.
1445
1446 Also computes the size of the frame and the size of the outgoing
1447 arguments block (in bytes). */
1448
1449 static void
1450 rx_get_stack_layout (unsigned int * lowest,
1451 unsigned int * highest,
1452 unsigned int * register_mask,
1453 unsigned int * frame_size,
1454 unsigned int * stack_size)
1455 {
1456 unsigned int reg;
1457 unsigned int low;
1458 unsigned int high;
1459 unsigned int fixed_reg = 0;
1460 unsigned int save_mask;
1461 unsigned int pushed_mask;
1462 unsigned int unneeded_pushes;
1463
1464 if (is_naked_func (NULL_TREE))
1465 {
1466 /* Naked functions do not create their own stack frame.
1467 Instead the programmer must do that for us. */
1468 * lowest = 0;
1469 * highest = 0;
1470 * register_mask = 0;
1471 * frame_size = 0;
1472 * stack_size = 0;
1473 return;
1474 }
1475
1476 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1477 {
1478 if ((df_regs_ever_live_p (reg)
1479 /* Always save all call clobbered registers inside non-leaf
1480 interrupt handlers, even if they are not live - they may
1481 be used in (non-interrupt aware) routines called from this one. */
1482 || (call_used_regs[reg]
1483 && is_interrupt_func (NULL_TREE)
1484 && ! crtl->is_leaf))
1485 && (! call_used_regs[reg]
1486 /* Even call clobbered registers must
1487 be pushed inside interrupt handlers. */
1488 || is_interrupt_func (NULL_TREE)
1489 /* Likewise for fast interrupt handlers, except registers r10 -
1490 r13. These are normally call-saved, but may have been set
1491 to call-used by rx_conditional_register_usage. If so then
1492 they can be used in the fast interrupt handler without
1493 saving them on the stack. */
1494 || (is_fast_interrupt_func (NULL_TREE)
1495 && ! IN_RANGE (reg, 10, 13))))
1496 {
1497 if (low == 0)
1498 low = reg;
1499 high = reg;
1500
1501 save_mask |= 1 << reg;
1502 }
1503
1504 /* Remember if we see a fixed register
1505 after having found the low register. */
1506 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1507 fixed_reg = reg;
1508 }
1509
1510 /* If we have to save the accumulator register, make sure
1511 that at least two registers are pushed into the frame. */
1512 if (MUST_SAVE_ACC_REGISTER
1513 && bit_count (save_mask) < 2)
1514 {
1515 save_mask |= (1 << 13) | (1 << 14);
1516 if (low == 0)
1517 low = 13;
1518 if (high == 0 || low == high)
1519 high = low + 1;
1520 }
1521
1522 /* Decide if it would be faster to fill in the call-saved area of the stack
1523 frame using multiple PUSH instructions instead of a single PUSHM
1524 instruction.
1525
1526 SAVE_MASK is a bitmask of the registers that must be stored in the
1527 call-save area. PUSHED_MASK is a bitmask of the registers that would
1528 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1529 is a bitmask of those registers in pushed_mask that are not in
1530 save_mask.
1531
1532 We use a simple heuristic that says that it is better to use
1533 multiple PUSH instructions if the number of unnecessary pushes is
1534 greater than the number of necessary pushes.
1535
1536 We also use multiple PUSH instructions if there are any fixed registers
1537 between LOW and HIGH. The only way that this can happen is if the user
1538 has specified -ffixed-<reg-name> on the command line and in such
1539 circumstances we do not want to touch the fixed registers at all.
1540
1541 Note also that the code in the prologue/epilogue handlers will
1542 automatically merge multiple PUSHes of adjacent registers into a single
1543 PUSHM.
1544
1545 FIXME: Is it worth improving this heuristic? */
1546 pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
1547 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1548
1549 if ((fixed_reg && fixed_reg <= high)
1550 || (optimize_function_for_speed_p (cfun)
1551 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1552 {
1553 /* Use multiple pushes. */
1554 * lowest = 0;
1555 * highest = 0;
1556 * register_mask = save_mask;
1557 }
1558 else
1559 {
1560 /* Use one push multiple instruction. */
1561 * lowest = low;
1562 * highest = high;
1563 * register_mask = 0;
1564 }
1565
1566 * frame_size = rx_round_up
1567 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1568
1569 if (crtl->args.size > 0)
1570 * frame_size += rx_round_up
1571 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1572
1573 * stack_size = rx_round_up
1574 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1575 }
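/* Worked example (illustrative, not part of the original source): if only
   r6 and r8 need saving, SAVE_MASK has bits 6 and 8 set, PUSHED_MASK
   covers bits 6-8 and UNNEEDED_PUSHES contains just bit 7.  Two needed
   pushes against one unneeded push means a single "pushm r6-r8" is
   still preferred when optimizing for speed.  */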
1576
1577 /* Generate a PUSHM instruction that matches the given operands. */
1578
1579 void
1580 rx_emit_stack_pushm (rtx * operands)
1581 {
1582 HOST_WIDE_INT last_reg;
1583 rtx first_push;
1584
1585 gcc_assert (CONST_INT_P (operands[0]));
1586 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1587
1588 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1589 first_push = XVECEXP (operands[1], 0, 1);
1590 gcc_assert (SET_P (first_push));
1591 first_push = SET_SRC (first_push);
1592 gcc_assert (REG_P (first_push));
1593
1594 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1595 reg_names [REGNO (first_push) - last_reg],
1596 reg_names [REGNO (first_push)]);
1597 }
1598
1599 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1600
1601 static rtx
1602 gen_rx_store_vector (unsigned int low, unsigned int high)
1603 {
1604 unsigned int i;
1605 unsigned int count = (high - low) + 2;
1606 rtx vector;
1607
1608 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1609
1610 XVECEXP (vector, 0, 0) =
1611 gen_rtx_SET (stack_pointer_rtx,
1612 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1613 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1614
1615 for (i = 0; i < count - 1; i++)
1616 XVECEXP (vector, 0, i + 1) =
1617 gen_rtx_SET (gen_rtx_MEM (SImode,
1618 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1619 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1620 gen_rtx_REG (SImode, high - i));
1621 return vector;
1622 }
1623
1624 /* Mark INSN as being frame related. If it is a PARALLEL
1625 then mark each element as being frame related as well. */
1626
1627 static void
1628 mark_frame_related (rtx insn)
1629 {
1630 RTX_FRAME_RELATED_P (insn) = 1;
1631 insn = PATTERN (insn);
1632
1633 if (GET_CODE (insn) == PARALLEL)
1634 {
1635 unsigned int i;
1636
1637 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1638 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1639 }
1640 }
1641
1642 static bool
1643 ok_for_max_constant (HOST_WIDE_INT val)
1644 {
1645 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1646 /* If there is no constraint on the size of constants
1647 used as operands, then any value is legitimate. */
1648 return true;
1649
1650 /* rx_max_constant_size specifies the maximum number
1651 of bytes that can be used to hold a signed value. */
1652 return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
1653 ( 1 << (rx_max_constant_size * 8)));
1654 }
1655
1656 /* Generate an ADD of SRC plus VAL into DEST.
1657 Handles the case where VAL is too big for max_constant_value.
1658 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1659
1660 static void
1661 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1662 {
1663 rtx insn;
1664
1665 if (val == NULL_RTX || INTVAL (val) == 0)
1666 {
1667 gcc_assert (dest != src);
1668
1669 insn = emit_move_insn (dest, src);
1670 }
1671 else if (ok_for_max_constant (INTVAL (val)))
1672 insn = emit_insn (gen_addsi3 (dest, src, val));
1673 else
1674 {
1675 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1676 will not reject it. */
1677 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1678 insn = emit_insn (gen_addsi3 (dest, src, val));
1679
1680 if (is_frame_related)
1681 /* We have to provide our own frame related note here
1682 as the dwarf2out code cannot be expected to grok
1683 our unspec. */
1684 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1685 gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
1686 return;
1687 }
1688
1689 if (is_frame_related)
1690 RTX_FRAME_RELATED_P (insn) = 1;
1691 }
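/* Illustrative example (not part of the original source): with
   -mmax-constant-size=1 an adjustment of 512 is too wide for the
   permitted immediates, so it is wrapped as
   (const (unspec [(const_int 512)] UNSPEC_CONST)) before the add is
   emitted, and a REG_FRAME_RELATED_EXPR note describing the addition is
   attached by hand for the benefit of the DWARF machinery.  */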
1692
1693 static void
1694 push_regs (unsigned int high, unsigned int low)
1695 {
1696 rtx insn;
1697
1698 if (low == high)
1699 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1700 else
1701 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1702 gen_rx_store_vector (low, high)));
1703 mark_frame_related (insn);
1704 }
1705
1706 void
1707 rx_expand_prologue (void)
1708 {
1709 unsigned int stack_size;
1710 unsigned int frame_size;
1711 unsigned int mask;
1712 unsigned int low;
1713 unsigned int high;
1714 unsigned int reg;
1715
1716 /* Naked functions use their own, programmer provided prologues. */
1717 if (is_naked_func (NULL_TREE))
1718 return;
1719
1720 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1721
1722 if (flag_stack_usage_info)
1723 current_function_static_stack_size = frame_size + stack_size;
1724
1725 /* If we use any of the callee-saved registers, save them now. */
1726 if (mask)
1727 {
1728 /* Push registers in reverse order. */
1729 for (reg = CC_REGNUM; reg --;)
1730 if (mask & (1 << reg))
1731 {
1732 low = high = reg;
1733
1734 /* Look for a span of registers.
1735 Note - we do not have to worry about -Os and whether
1736 it is better to use a single, longer PUSHM as
1737 rx_get_stack_layout has already done that for us. */
1738 while (reg-- > 0)
1739 if ((mask & (1 << reg)) == 0)
1740 break;
1741 else
1742 --low;
1743
1744 push_regs (high, low);
1745 if (reg == (unsigned) -1)
1746 break;
1747 }
1748 }
1749 else if (low)
1750 push_regs (high, low);
1751
1752 if (MUST_SAVE_ACC_REGISTER)
1753 {
1754 unsigned int acc_high, acc_low;
1755
1756 /* Interrupt handlers have to preserve the accumulator
1757 register if so requested by the user. Use the first
1758 two pushed registers as intermediaries. */
1759 if (mask)
1760 {
1761 acc_low = acc_high = 0;
1762
1763 for (reg = 1; reg < CC_REGNUM; reg ++)
1764 if (mask & (1 << reg))
1765 {
1766 if (acc_low == 0)
1767 acc_low = reg;
1768 else
1769 {
1770 acc_high = reg;
1771 break;
1772 }
1773 }
1774
1775 /* We have assumed that there are at least two registers pushed... */
1776 gcc_assert (acc_high != 0);
1777
1778 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1779 We just assume that they are zero. */
1780 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1781 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1782 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1783 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1784 }
1785 else
1786 {
1787 acc_low = low;
1788 acc_high = low + 1;
1789
1790 /* We have assumed that there are at least two registers pushed... */
1791 gcc_assert (acc_high <= high);
1792
1793 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1794 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1795 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1796 gen_rx_store_vector (acc_low, acc_high)));
1797 }
1798 }
1799
1800 /* If needed, set up the frame pointer. */
1801 if (frame_pointer_needed)
1802 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1803 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1804
1805 /* Allocate space for the outgoing args.
1806 If the stack frame has not already been set up then handle this as well. */
1807 if (stack_size)
1808 {
1809 if (frame_size)
1810 {
1811 if (frame_pointer_needed)
1812 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1813 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1814 else
1815 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1816 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1817 true);
1818 }
1819 else
1820 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1821 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1822 }
1823 else if (frame_size)
1824 {
1825 if (! frame_pointer_needed)
1826 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1827 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1828 else
1829 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1830 false /* False because the epilogue will use the FP not the SP. */);
1831 }
1832 }
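/* Illustrative sequence (not part of the original source): a function
   that saves r6-r8 and needs a 16 byte frame plus 8 bytes of outgoing
   arguments would typically emit "pushm r6-r8" followed by a single
   stack pointer adjustment of -24 bytes; when a frame pointer is
   required the adjustment is instead split either side of setting up
   the frame pointer.  */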
1833
1834 static void
1835 add_vector_labels (FILE *file, const char *aname)
1836 {
1837 tree vec_attr;
1838 tree val_attr;
1839 const char *vname = "vect";
1840 const char *s;
1841 int vnum;
1842
1843 /* This node is for the vector/interrupt tag itself. */
1844 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1845 if (!vec_attr)
1846 return;
1847
1848 /* Now point it at the first argument. */
1849 vec_attr = TREE_VALUE (vec_attr);
1850
1851 /* Iterate through the arguments. */
1852 while (vec_attr)
1853 {
1854 val_attr = TREE_VALUE (vec_attr);
1855 switch (TREE_CODE (val_attr))
1856 {
1857 case STRING_CST:
1858 s = TREE_STRING_POINTER (val_attr);
1859 goto string_id_common;
1860
1861 case IDENTIFIER_NODE:
1862 s = IDENTIFIER_POINTER (val_attr);
1863
1864 string_id_common:
1865 if (strcmp (s, "$default") == 0)
1866 {
1867 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1868 fprintf (file, "$tableentry$default$%s:\n", vname);
1869 }
1870 else
1871 vname = s;
1872 break;
1873
1874 case INTEGER_CST:
1875 vnum = TREE_INT_CST_LOW (val_attr);
1876
1877 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1878 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1879 break;
1880
1881 default:
1882 ;
1883 }
1884
1885 vec_attr = TREE_CHAIN (vec_attr);
1886 }
1887
1888 }
1889
1890 static void
1891 rx_output_function_prologue (FILE * file)
1892 {
1893 add_vector_labels (file, "interrupt");
1894 add_vector_labels (file, "vector");
1895
1896 if (is_fast_interrupt_func (NULL_TREE))
1897 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1898
1899 if (is_interrupt_func (NULL_TREE))
1900 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1901
1902 if (is_naked_func (NULL_TREE))
1903 asm_fprintf (file, "\t; Note: Naked Function\n");
1904
1905 if (cfun->static_chain_decl != NULL)
1906 asm_fprintf (file, "\t; Note: Nested function declared "
1907 "inside another function.\n");
1908
1909 if (crtl->calls_eh_return)
1910 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1911 }
1912
1913 /* Generate a POPM or RTSD instruction that matches the given operands. */
1914
1915 void
1916 rx_emit_stack_popm (rtx * operands, bool is_popm)
1917 {
1918 HOST_WIDE_INT stack_adjust;
1919 HOST_WIDE_INT last_reg;
1920 rtx first_push;
1921
1922 gcc_assert (CONST_INT_P (operands[0]));
1923 stack_adjust = INTVAL (operands[0]);
1924
1925 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1926 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1927
1928 first_push = XVECEXP (operands[1], 0, 1);
1929 gcc_assert (SET_P (first_push));
1930 first_push = SET_DEST (first_push);
1931 gcc_assert (REG_P (first_push));
1932
1933 if (is_popm)
1934 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1935 reg_names [REGNO (first_push)],
1936 reg_names [REGNO (first_push) + last_reg]);
1937 else
1938 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1939 (int) stack_adjust,
1940 reg_names [REGNO (first_push)],
1941 reg_names [REGNO (first_push) + last_reg]);
1942 }
1943
1944 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1945
1946 static rtx
1947 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1948 {
1949 unsigned int i;
1950 unsigned int bias = 3;
1951 unsigned int count = (high - low) + bias;
1952 rtx vector;
1953
1954 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1955
1956 XVECEXP (vector, 0, 0) =
1957 gen_rtx_SET (stack_pointer_rtx,
1958 plus_constant (Pmode, stack_pointer_rtx, adjust));
1959
1960 for (i = 0; i < count - 2; i++)
1961 XVECEXP (vector, 0, i + 1) =
1962 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1963 gen_rtx_MEM (SImode,
1964 i == 0 ? stack_pointer_rtx
1965 : plus_constant (Pmode, stack_pointer_rtx,
1966 i * UNITS_PER_WORD)));
1967
1968 XVECEXP (vector, 0, count - 1) = ret_rtx;
1969
1970 return vector;
1971 }
1972
1973 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1974
1975 static rtx
1976 gen_rx_popm_vector (unsigned int low, unsigned int high)
1977 {
1978 unsigned int i;
1979 unsigned int count = (high - low) + 2;
1980 rtx vector;
1981
1982 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1983
1984 XVECEXP (vector, 0, 0) =
1985 gen_rtx_SET (stack_pointer_rtx,
1986 plus_constant (Pmode, stack_pointer_rtx,
1987 (count - 1) * UNITS_PER_WORD));
1988
1989 for (i = 0; i < count - 1; i++)
1990 XVECEXP (vector, 0, i + 1) =
1991 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1992 gen_rtx_MEM (SImode,
1993 i == 0 ? stack_pointer_rtx
1994 : plus_constant (Pmode, stack_pointer_rtx,
1995 i * UNITS_PER_WORD)));
1996
1997 return vector;
1998 }
1999
2000 /* Returns true if a simple return insn can be used. */
2001
2002 bool
2003 rx_can_use_simple_return (void)
2004 {
2005 unsigned int low;
2006 unsigned int high;
2007 unsigned int frame_size;
2008 unsigned int stack_size;
2009 unsigned int register_mask;
2010
2011 if (is_naked_func (NULL_TREE)
2012 || is_fast_interrupt_func (NULL_TREE)
2013 || is_interrupt_func (NULL_TREE))
2014 return false;
2015
2016 rx_get_stack_layout (& low, & high, & register_mask,
2017 & frame_size, & stack_size);
2018
2019 return (register_mask == 0
2020 && (frame_size + stack_size) == 0
2021 && low == 0);
2022 }
2023
2024 static void
2025 pop_regs (unsigned int high, unsigned int low)
2026 {
2027 if (high == low)
2028 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2029 else
2030 emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
2031 gen_rx_popm_vector (low, high)));
2032 }
2033
2034 void
2035 rx_expand_epilogue (bool is_sibcall)
2036 {
2037 unsigned int low;
2038 unsigned int high;
2039 unsigned int frame_size;
2040 unsigned int stack_size;
2041 unsigned int register_mask;
2042 unsigned int regs_size;
2043 unsigned int reg;
2044 unsigned HOST_WIDE_INT total_size;
2045
2046 /* FIXME: We do not support indirect sibcalls at the moment because we
2047 cannot guarantee that the register holding the function address is a
2048 call-used register. If it is a call-saved register then the stack
2049 pop instructions generated in the epilogue will corrupt the address
2050 before it is used.
2051
2052 Creating a new call-used-only register class works but then the
2053 reload pass gets stuck because it cannot always find a call-used
2054 register for spilling sibcalls.
2055
2056 The other possible solution is for this pass to scan forward for the
2057 sibcall instruction (if it has been generated) and work out if it
2058 is an indirect sibcall using a call-saved register. If it is then
2059 the address can be copied into a call-used register in this epilogue
2060 code and the sibcall instruction modified to use that register. */
2061
2062 if (is_naked_func (NULL_TREE))
2063 {
2064 gcc_assert (! is_sibcall);
2065
2066 /* Naked functions use their own, programmer provided epilogues.
2067 But, in order to keep gcc happy we have to generate some kind of
2068 epilogue RTL. */
2069 emit_jump_insn (gen_naked_return ());
2070 return;
2071 }
2072
2073 rx_get_stack_layout (& low, & high, & register_mask,
2074 & frame_size, & stack_size);
2075
2076 total_size = frame_size + stack_size;
2077 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2078
2079 /* See if we are unable to use the special stack frame deconstruct and
2080 return instructions. In most cases we can use them, but the exceptions
2081 are:
2082
2083 - Sibling calling functions deconstruct the frame but do not return to
2084 their caller. Instead they branch to their sibling and allow their
2085 return instruction to return to this function's parent.
2086
2087 - Fast and normal interrupt handling functions have to use special
2088 return instructions.
2089
2090 - Functions where we have pushed a fragmented set of registers into the
2091 call-save area must have the same set of registers popped. */
2092 if (is_sibcall
2093 || is_fast_interrupt_func (NULL_TREE)
2094 || is_interrupt_func (NULL_TREE)
2095 || register_mask)
2096 {
2097 /* Cannot use the special instructions - deconstruct by hand. */
2098 if (total_size)
2099 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2100 GEN_INT (total_size), false);
2101
2102 if (MUST_SAVE_ACC_REGISTER)
2103 {
2104 unsigned int acc_low, acc_high;
2105
2106 /* Reverse the saving of the accumulator register onto the stack.
2107 Note that we must adjust the saved "low" accumulator value, as it
2108 is really the middle 32 bits of the accumulator. */
2109 if (register_mask)
2110 {
2111 acc_low = acc_high = 0;
2112
2113 for (reg = 1; reg < CC_REGNUM; reg ++)
2114 if (register_mask & (1 << reg))
2115 {
2116 if (acc_low == 0)
2117 acc_low = reg;
2118 else
2119 {
2120 acc_high = reg;
2121 break;
2122 }
2123 }
2124 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2125 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2126 }
2127 else
2128 {
2129 acc_low = low;
2130 acc_high = low + 1;
2131 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2132 gen_rx_popm_vector (acc_low, acc_high)));
2133 }
2134
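/* The prologue saved the low part with MVFACMI, which reads the middle
   32 bits (bits 47:16) of the accumulator, so shift the popped value
   left by 16 before writing it back through MVTACLO; the bottom 16
   bits of the accumulator are not restored.  */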
2135 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2136 gen_rtx_REG (SImode, acc_low),
2137 GEN_INT (16)));
2138 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2139 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2140 }
2141
2142 if (register_mask)
2143 {
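/* For example, if register_mask has bits 4, 5 and 8 set, this loop
   pops r4-r5 with a single POPM and then pops r8 on its own,
   mirroring the fragmented pushes made by the prologue.  */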
2144 for (reg = 0; reg < CC_REGNUM; reg ++)
2145 if (register_mask & (1 << reg))
2146 {
2147 low = high = reg;
2148 while (register_mask & (1 << high))
2149 high ++;
2150 pop_regs (high - 1, low);
2151 reg = high;
2152 }
2153 }
2154 else if (low)
2155 pop_regs (high, low);
2156
2157 if (is_fast_interrupt_func (NULL_TREE))
2158 {
2159 gcc_assert (! is_sibcall);
2160 emit_jump_insn (gen_fast_interrupt_return ());
2161 }
2162 else if (is_interrupt_func (NULL_TREE))
2163 {
2164 gcc_assert (! is_sibcall);
2165 emit_jump_insn (gen_exception_return ());
2166 }
2167 else if (! is_sibcall)
2168 emit_jump_insn (gen_simple_return ());
2169
2170 return;
2171 }
2172
2173 /* If we allocated space on the stack, free it now. */
2174 if (total_size)
2175 {
2176 unsigned HOST_WIDE_INT rtsd_size;
2177
2178 /* See if we can use the RTSD instruction. */
2179 rtsd_size = total_size + regs_size;
2180 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2181 {
2182 if (low)
2183 emit_jump_insn (gen_pop_and_return
2184 (GEN_INT (rtsd_size),
2185 gen_rx_rtsd_vector (rtsd_size, low, high)));
2186 else
2187 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2188
2189 return;
2190 }
2191
2192 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2193 GEN_INT (total_size), false);
2194 }
2195
2196 if (low)
2197 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2198 gen_rx_rtsd_vector (regs_size,
2199 low, high)));
2200 else
2201 emit_jump_insn (gen_simple_return ());
2202 }
2203
2204
2205 /* Compute the offset (in bytes) between FROM (arg pointer
2206 or frame pointer) and TO (frame pointer or stack pointer).
2207 See ASCII art comment at the start of rx_expand_prologue
2208 for more information. */
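
/* Worked example (with made-up numbers): if registers r7..r9 were pushed
   (low == 7, high == 9, mask == 0), frame_size == 8 and stack_size == 12,
   then eliminating ARG_POINTER to FRAME_POINTER yields
   8 + 3 * UNITS_PER_WORD + UNITS_PER_WORD (return address) == 24 bytes,
   eliminating ARG_POINTER to STACK_POINTER adds stack_size for 36, and
   FRAME_POINTER to STACK_POINTER is just stack_size, 12.  */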
2209
2210 int
2211 rx_initial_elimination_offset (int from, int to)
2212 {
2213 unsigned int low;
2214 unsigned int high;
2215 unsigned int frame_size;
2216 unsigned int stack_size;
2217 unsigned int mask;
2218
2219 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2220
2221 if (from == ARG_POINTER_REGNUM)
2222 {
2223 /* Extend the computed size of the stack frame to
2224 include the registers pushed in the prologue. */
2225 if (low)
2226 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2227 else
2228 frame_size += bit_count (mask) * UNITS_PER_WORD;
2229
2230 /* Remember to include the return address. */
2231 frame_size += 1 * UNITS_PER_WORD;
2232
2233 if (to == FRAME_POINTER_REGNUM)
2234 return frame_size;
2235
2236 gcc_assert (to == STACK_POINTER_REGNUM);
2237 return frame_size + stack_size;
2238 }
2239
2240 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2241 return stack_size;
2242 }
2243
2244 /* Decide if a variable should go into one of the small data sections. */
2245
2246 static bool
2247 rx_in_small_data (const_tree decl)
2248 {
2249 int size;
2250 const char * section;
2251
2252 if (rx_small_data_limit == 0)
2253 return false;
2254
2255 if (TREE_CODE (decl) != VAR_DECL)
2256 return false;
2257
2258 /* We do not put read-only variables into a small data area because
2259 they would be placed with the other read-only sections, far away
2260 from the read-write data sections, and we only have one small
2261 data area pointer.
2262 Similarly, commons are placed in the .bss section, which might be far
2263 away from (and out of alignment with respect to) the .data section. */
2264 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2265 return false;
2266
2267 section = DECL_SECTION_NAME (decl);
2268 if (section)
2269 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2270
2271 size = int_size_in_bytes (TREE_TYPE (decl));
2272
2273 return (size > 0) && (size <= rx_small_data_limit);
2274 }
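
/* For example, with -msmall-data-limit=8 a writable, non-common
   "int x = 1;" (4 bytes) passes the checks above and is placed in the
   small data area, while "const int c = 2;" is rejected because it is
   read-only.  */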
2275
2276 /* Return a section for X.
2277 The only special thing we do here is to honor small data. */
2278
2279 static section *
2280 rx_select_rtx_section (machine_mode mode,
2281 rtx x,
2282 unsigned HOST_WIDE_INT align)
2283 {
2284 if (rx_small_data_limit > 0
2285 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2286 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2287 return sdata_section;
2288
2289 return default_elf_select_rtx_section (mode, x, align);
2290 }
2291
2292 static section *
2293 rx_select_section (tree decl,
2294 int reloc,
2295 unsigned HOST_WIDE_INT align)
2296 {
2297 if (rx_small_data_limit > 0)
2298 {
2299 switch (categorize_decl_for_section (decl, reloc))
2300 {
2301 case SECCAT_SDATA: return sdata_section;
2302 case SECCAT_SBSS: return sbss_section;
2303 case SECCAT_SRODATA:
2304 /* Fall through. We do not put small, read-only
2305 data into the C_2 section, because we do not
2306 use the C_2 section at all: it is located
2307 with the other read-only data sections, far
2308 away from the read-write data sections, and
2309 we only have one small data
2310 pointer (r13). */
2311 default:
2312 break;
2313 }
2314 }
2315
2316 /* If we are supporting the Renesas assembler
2317 we cannot use mergeable sections. */
2318 if (TARGET_AS100_SYNTAX)
2319 switch (categorize_decl_for_section (decl, reloc))
2320 {
2321 case SECCAT_RODATA_MERGE_CONST:
2322 case SECCAT_RODATA_MERGE_STR_INIT:
2323 case SECCAT_RODATA_MERGE_STR:
2324 return readonly_data_section;
2325
2326 default:
2327 break;
2328 }
2329
2330 return default_elf_select_section (decl, reloc, align);
2331 }
2332 \f
2333 enum rx_builtin
2334 {
2335 RX_BUILTIN_BRK,
2336 RX_BUILTIN_CLRPSW,
2337 RX_BUILTIN_INT,
2338 RX_BUILTIN_MACHI,
2339 RX_BUILTIN_MACLO,
2340 RX_BUILTIN_MULHI,
2341 RX_BUILTIN_MULLO,
2342 RX_BUILTIN_MVFACHI,
2343 RX_BUILTIN_MVFACMI,
2344 RX_BUILTIN_MVFC,
2345 RX_BUILTIN_MVTACHI,
2346 RX_BUILTIN_MVTACLO,
2347 RX_BUILTIN_MVTC,
2348 RX_BUILTIN_MVTIPL,
2349 RX_BUILTIN_RACW,
2350 RX_BUILTIN_REVW,
2351 RX_BUILTIN_RMPA,
2352 RX_BUILTIN_ROUND,
2353 RX_BUILTIN_SETPSW,
2354 RX_BUILTIN_WAIT,
2355 RX_BUILTIN_max
2356 };
2357
2358 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2359
2360 static void
2361 rx_init_builtins (void)
2362 {
2363 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2364 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2365 add_builtin_function ("__builtin_rx_" LC_NAME, \
2366 build_function_type_list (RET_TYPE##_type_node, \
2367 NULL_TREE), \
2368 RX_BUILTIN_##UC_NAME, \
2369 BUILT_IN_MD, NULL, NULL_TREE)
2370
2371 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2372 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2373 add_builtin_function ("__builtin_rx_" LC_NAME, \
2374 build_function_type_list (RET_TYPE##_type_node, \
2375 ARG_TYPE##_type_node, \
2376 NULL_TREE), \
2377 RX_BUILTIN_##UC_NAME, \
2378 BUILT_IN_MD, NULL, NULL_TREE)
2379
2380 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2381 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2382 add_builtin_function ("__builtin_rx_" LC_NAME, \
2383 build_function_type_list (RET_TYPE##_type_node, \
2384 ARG_TYPE1##_type_node,\
2385 ARG_TYPE2##_type_node,\
2386 NULL_TREE), \
2387 RX_BUILTIN_##UC_NAME, \
2388 BUILT_IN_MD, NULL, NULL_TREE)
2389
2390 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2391 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2392 add_builtin_function ("__builtin_rx_" LC_NAME, \
2393 build_function_type_list (RET_TYPE##_type_node, \
2394 ARG_TYPE1##_type_node,\
2395 ARG_TYPE2##_type_node,\
2396 ARG_TYPE3##_type_node,\
2397 NULL_TREE), \
2398 RX_BUILTIN_##UC_NAME, \
2399 BUILT_IN_MD, NULL, NULL_TREE)
2400
2401 ADD_RX_BUILTIN0 (BRK, "brk", void);
2402 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2403 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2404 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2405 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2406 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2407 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2408 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2409 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2410 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2411 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2412 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2413 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
2414 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2415 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2416 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2417 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2418 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2419 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2420 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2421 }
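
/* By way of illustration, the declarations above expose intrinsics such
   as the following to C code (shown schematically; see rx_expand_builtin
   for how each call is expanded):

     __builtin_rx_maclo (a, b);          multiply-and-accumulate, low halves
     int hi = __builtin_rx_mvfachi ();   fetch the top 32 bits of the accumulator
     __builtin_rx_wait ();               emit a WAIT instruction.  */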
2422
2423 /* Return the RX builtin for CODE. */
2424
2425 static tree
2426 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2427 {
2428 if (code >= RX_BUILTIN_max)
2429 return error_mark_node;
2430
2431 return rx_builtins[code];
2432 }
2433
2434 static rtx
2435 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2436 {
2437 if (reg && ! REG_P (arg))
2438 arg = force_reg (SImode, arg);
2439
2440 emit_insn (gen_func (arg));
2441
2442 return NULL_RTX;
2443 }
2444
2445 static rtx
2446 rx_expand_builtin_mvtc (tree exp)
2447 {
2448 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2449 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2450
2451 if (! CONST_INT_P (arg1))
2452 return NULL_RTX;
2453
2454 if (! REG_P (arg2))
2455 arg2 = force_reg (SImode, arg2);
2456
2457 emit_insn (gen_mvtc (arg1, arg2));
2458
2459 return NULL_RTX;
2460 }
2461
2462 static rtx
2463 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2464 {
2465 rtx arg = expand_normal (t_arg);
2466
2467 if (! CONST_INT_P (arg))
2468 return NULL_RTX;
2469
2470 if (target == NULL_RTX)
2471 return NULL_RTX;
2472
2473 if (! REG_P (target))
2474 target = force_reg (SImode, target);
2475
2476 emit_insn (gen_mvfc (target, arg));
2477
2478 return target;
2479 }
2480
2481 static rtx
2482 rx_expand_builtin_mvtipl (rtx arg)
2483 {
2484 /* The RX610 does not support the MVTIPL instruction. */
2485 if (rx_cpu_type == RX610)
2486 return NULL_RTX;
2487
2488 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2489 return NULL_RTX;
2490
2491 emit_insn (gen_mvtipl (arg));
2492
2493 return NULL_RTX;
2494 }
2495
2496 static rtx
2497 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2498 {
2499 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2500 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2501
2502 if (! REG_P (arg1))
2503 arg1 = force_reg (SImode, arg1);
2504
2505 if (! REG_P (arg2))
2506 arg2 = force_reg (SImode, arg2);
2507
2508 emit_insn (gen_func (arg1, arg2));
2509
2510 return NULL_RTX;
2511 }
2512
2513 static rtx
2514 rx_expand_int_builtin_1_arg (rtx arg,
2515 rtx target,
2516 rtx (* gen_func)(rtx, rtx),
2517 bool mem_ok)
2518 {
2519 if (! REG_P (arg))
2520 if (!mem_ok || ! MEM_P (arg))
2521 arg = force_reg (SImode, arg);
2522
2523 if (target == NULL_RTX || ! REG_P (target))
2524 target = gen_reg_rtx (SImode);
2525
2526 emit_insn (gen_func (target, arg));
2527
2528 return target;
2529 }
2530
2531 static rtx
2532 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2533 {
2534 if (target == NULL_RTX || ! REG_P (target))
2535 target = gen_reg_rtx (SImode);
2536
2537 emit_insn (gen_func (target));
2538
2539 return target;
2540 }
2541
2542 static rtx
2543 rx_expand_builtin_round (rtx arg, rtx target)
2544 {
2545 if ((! REG_P (arg) && ! MEM_P (arg))
2546 || GET_MODE (arg) != SFmode)
2547 arg = force_reg (SFmode, arg);
2548
2549 if (target == NULL_RTX || ! REG_P (target))
2550 target = gen_reg_rtx (SImode);
2551
2552 emit_insn (gen_lrintsf2 (target, arg));
2553
2554 return target;
2555 }
2556
2557 static int
2558 valid_psw_flag (rtx op, const char *which)
2559 {
2560 static int mvtc_inform_done = 0;
2561
2562 if (GET_CODE (op) == CONST_INT)
2563 switch (INTVAL (op))
2564 {
2565 case 0: case 'c': case 'C':
2566 case 1: case 'z': case 'Z':
2567 case 2: case 's': case 'S':
2568 case 3: case 'o': case 'O':
2569 case 8: case 'i': case 'I':
2570 case 9: case 'u': case 'U':
2571 return 1;
2572 }
2573
2574 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2575 if (!mvtc_inform_done)
2576 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2577 mvtc_inform_done = 1;
2578
2579 return 0;
2580 }
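
/* For example, __builtin_rx_clrpsw ('I') and __builtin_rx_clrpsw (8)
   are both accepted and clear the interrupt enable flag; any other
   argument triggers the error above.  */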
2581
2582 static rtx
2583 rx_expand_builtin (tree exp,
2584 rtx target,
2585 rtx subtarget ATTRIBUTE_UNUSED,
2586 machine_mode mode ATTRIBUTE_UNUSED,
2587 int ignore ATTRIBUTE_UNUSED)
2588 {
2589 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2590 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2591 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2592 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2593
2594 switch (fcode)
2595 {
2596 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2597 case RX_BUILTIN_CLRPSW:
2598 if (!valid_psw_flag (op, "clrpsw"))
2599 return NULL_RTX;
2600 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2601 case RX_BUILTIN_SETPSW:
2602 if (!valid_psw_flag (op, "setpsw"))
2603 return NULL_RTX;
2604 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2605 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2606 (op, gen_int, false);
2607 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2608 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2609 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2610 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2611 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2612 (target, gen_mvfachi);
2613 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2614 (target, gen_mvfacmi);
2615 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2616 (op, gen_mvtachi, true);
2617 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2618 (op, gen_mvtaclo, true);
2619 case RX_BUILTIN_RMPA:
2620 if (rx_allow_string_insns)
2621 emit_insn (gen_rmpa ());
2622 else
2623 error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2624 return NULL_RTX;
2625 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2626 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2627 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2628 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2629 (op, gen_racw, false);
2630 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2631 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2632 (op, target, gen_revw, false);
2633 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2634
2635 default:
2636 internal_error ("bad builtin code");
2637 break;
2638 }
2639
2640 return NULL_RTX;
2641 }
2642 \f
2643 /* Place an element into a constructor or destructor section.
2644 Like default_ctor_section_asm_out_constructor in varasm.c
2645 except that it uses .init_array (or .fini_array) and it
2646 handles constructor priorities. */
2647
2648 static void
2649 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2650 {
2651 section * s;
2652
2653 if (priority != DEFAULT_INIT_PRIORITY)
2654 {
2655 char buf[18];
2656
2657 sprintf (buf, "%s.%.5u",
2658 is_ctor ? ".init_array" : ".fini_array",
2659 priority);
2660 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2661 }
2662 else if (is_ctor)
2663 s = ctors_section;
2664 else
2665 s = dtors_section;
2666
2667 switch_to_section (s);
2668 assemble_align (POINTER_SIZE);
2669 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2670 }
2671
2672 static void
2673 rx_elf_asm_constructor (rtx symbol, int priority)
2674 {
2675 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2676 }
2677
2678 static void
2679 rx_elf_asm_destructor (rtx symbol, int priority)
2680 {
2681 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2682 }
2683 \f
2684 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2685
2686 static tree
2687 rx_handle_func_attribute (tree * node,
2688 tree name,
2689 tree args ATTRIBUTE_UNUSED,
2690 int flags ATTRIBUTE_UNUSED,
2691 bool * no_add_attrs)
2692 {
2693 gcc_assert (DECL_P (* node));
2694
2695 if (TREE_CODE (* node) != FUNCTION_DECL)
2696 {
2697 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2698 name);
2699 * no_add_attrs = true;
2700 }
2701
2702 /* FIXME: We ought to check for conflicting attributes. */
2703
2704 /* FIXME: We ought to check that the interrupt and exception
2705 handler attributes have been applied to void functions. */
2706 return NULL_TREE;
2707 }
2708
2709 /* Check "vector" attribute. */
2710
2711 static tree
2712 rx_handle_vector_attribute (tree * node,
2713 tree name,
2714 tree args,
2715 int flags ATTRIBUTE_UNUSED,
2716 bool * no_add_attrs)
2717 {
2718 gcc_assert (DECL_P (* node));
2719 gcc_assert (args != NULL_TREE);
2720
2721 if (TREE_CODE (* node) != FUNCTION_DECL)
2722 {
2723 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2724 name);
2725 * no_add_attrs = true;
2726 }
2727
2728 return NULL_TREE;
2729 }
2730
2731 /* Table of RX specific attributes. */
2732 const struct attribute_spec rx_attribute_table[] =
2733 {
2734 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2735 affects_type_identity. */
2736 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2737 false },
2738 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
2739 false },
2740 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2741 false },
2742 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2743 false },
2744 { NULL, 0, 0, false, false, false, NULL, false }
2745 };
2746
2747 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2748
2749 static void
2750 rx_override_options_after_change (void)
2751 {
2752 static bool first_time = TRUE;
2753
2754 if (first_time)
2755 {
2756 /* If this is the first time through and the user has not disabled
2757 the use of RX FPU hardware then enable -ffinite-math-only,
2758 since the FPU instructions do not support NaNs and infinities. */
2759 if (TARGET_USE_FPU)
2760 flag_finite_math_only = 1;
2761
2762 first_time = FALSE;
2763 }
2764 else
2765 {
2766 /* Alert the user if they are changing the optimization options
2767 to use IEEE-compliant floating-point arithmetic with RX FPU insns. */
2768 if (TARGET_USE_FPU
2769 && !flag_finite_math_only)
2770 warning (0, "RX FPU instructions do not support NaNs and infinities");
2771 }
2772 }
2773
2774 static void
2775 rx_option_override (void)
2776 {
2777 unsigned int i;
2778 cl_deferred_option *opt;
2779 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2780
2781 if (v)
2782 FOR_EACH_VEC_ELT (*v, i, opt)
2783 {
2784 switch (opt->opt_index)
2785 {
2786 case OPT_mint_register_:
2787 switch (opt->value)
2788 {
2789 case 4:
2790 fixed_regs[10] = call_used_regs [10] = 1;
2791 /* Fall through. */
2792 case 3:
2793 fixed_regs[11] = call_used_regs [11] = 1;
2794 /* Fall through. */
2795 case 2:
2796 fixed_regs[12] = call_used_regs [12] = 1;
2797 /* Fall through. */
2798 case 1:
2799 fixed_regs[13] = call_used_regs [13] = 1;
2800 /* Fall through. */
2801 case 0:
2802 rx_num_interrupt_regs = opt->value;
2803 break;
2804 default:
2805 rx_num_interrupt_regs = 0;
2806 /* Error message already given because rx_handle_option
2807 returned false. */
2808 break;
2809 }
2810 break;
2811
2812 default:
2813 gcc_unreachable ();
2814 }
2815 }
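
/* As an illustration, "-mint-register=2" executes the "case 2" and
   "case 1" arms above, fixing r12 and r13 so that fast interrupt
   handlers can use them without having to save them first.  */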
2816
2817 /* This target defaults to strict volatile bitfields. */
2818 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2819 flag_strict_volatile_bitfields = 1;
2820
2821 rx_override_options_after_change ();
2822
2823 /* These values are bytes, not log. */
2824 if (align_jumps == 0 && ! optimize_size)
2825 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2826 if (align_loops == 0 && ! optimize_size)
2827 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2828 if (align_labels == 0 && ! optimize_size)
2829 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2830 }
2831
2832 \f
2833 static bool
2834 rx_allocate_stack_slots_for_args (void)
2835 {
2836 /* Naked functions should not allocate stack slots for arguments. */
2837 return ! is_naked_func (NULL_TREE);
2838 }
2839
2840 static bool
2841 rx_func_attr_inlinable (const_tree decl)
2842 {
2843 return ! is_fast_interrupt_func (decl)
2844 && ! is_interrupt_func (decl)
2845 && ! is_naked_func (decl);
2846 }
2847
2848 static bool
2849 rx_warn_func_return (tree decl)
2850 {
2851 /* Naked functions are implemented entirely in assembly, including the
2852 return sequence, so suppress warnings about this. */
2853 return !is_naked_func (decl);
2854 }
2855
2856 /* Return nonzero if it is OK to make a tail-call to DECL, which is
2857 either a function_decl, or NULL for an indirect call made via EXP. */
2858
2859 static bool
2860 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2861 {
2862 if (TARGET_JSR)
2863 return false;
2864
2865 /* Do not allow indirect tailcalls. The
2866 sibcall patterns do not support them. */
2867 if (decl == NULL)
2868 return false;
2869
2870 /* Never tailcall from inside interrupt handlers or naked functions. */
2871 if (is_fast_interrupt_func (NULL_TREE)
2872 || is_interrupt_func (NULL_TREE)
2873 || is_naked_func (NULL_TREE))
2874 return false;
2875
2876 return true;
2877 }
2878
2879 static void
2880 rx_file_start (void)
2881 {
2882 if (! TARGET_AS100_SYNTAX)
2883 default_file_start ();
2884 }
2885
2886 static bool
2887 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2888 {
2889 /* The packed attribute overrides the MS behavior. */
2890 return ! TYPE_PACKED (record_type);
2891 }
2892 \f
2893 /* Returns true if X is a legitimate constant for an immediate
2894 operand on the RX. X is already known to satisfy CONSTANT_P. */
2895
2896 bool
2897 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2898 {
2899 switch (GET_CODE (x))
2900 {
2901 case CONST:
2902 x = XEXP (x, 0);
2903
2904 if (GET_CODE (x) == PLUS)
2905 {
2906 if (! CONST_INT_P (XEXP (x, 1)))
2907 return false;
2908
2909 /* GCC would not pass us CONST_INT + CONST_INT so we
2910 know that we have {SYMBOL|LABEL} + CONST_INT. */
2911 x = XEXP (x, 0);
2912 gcc_assert (! CONST_INT_P (x));
2913 }
2914
2915 switch (GET_CODE (x))
2916 {
2917 case LABEL_REF:
2918 case SYMBOL_REF:
2919 return true;
2920
2921 case UNSPEC:
2922 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2923
2924 default:
2925 /* FIXME: Can this ever happen? */
2926 gcc_unreachable ();
2927 }
2928 break;
2929
2930 case LABEL_REF:
2931 case SYMBOL_REF:
2932 return true;
2933 case CONST_DOUBLE:
2934 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2935 case CONST_VECTOR:
2936 return false;
2937 default:
2938 gcc_assert (CONST_INT_P (x));
2939 break;
2940 }
2941
2942 return ok_for_max_constant (INTVAL (x));
2943 }
2944
2945 static int
2946 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2947 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2948 {
2949 rtx a, b;
2950
2951 if (GET_CODE (addr) != PLUS)
2952 return COSTS_N_INSNS (1);
2953
2954 a = XEXP (addr, 0);
2955 b = XEXP (addr, 1);
2956
2957 if (REG_P (a) && REG_P (b))
2958 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2959 return COSTS_N_INSNS (4);
2960
2961 if (speed)
2962 /* [REG+OFF] is just as fast as [REG]. */
2963 return COSTS_N_INSNS (1);
2964
2965 if (CONST_INT_P (b)
2966 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2967 /* Try to discourage REG + <large OFF> when optimizing for size. */
2968 return COSTS_N_INSNS (2);
2969
2970 return COSTS_N_INSNS (1);
2971 }
2972
2973 static bool
2974 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2975 {
2976 /* We can always eliminate to the frame pointer.
2977 We can eliminate to the stack pointer unless a frame
2978 pointer is needed. */
2979
2980 return to == FRAME_POINTER_REGNUM
2981 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2982 }
2983 \f
2984
2985 static void
2986 rx_trampoline_template (FILE * file)
2987 {
2988 /* Output assembler code for a block containing the constant
2989 part of a trampoline, leaving space for the variable parts.
2990
2991 On the RX, (where r8 is the static chain regnum) the trampoline
2992 looks like:
2993
2994 mov #<static chain value>, r8
2995 mov #<function's address>, r9
2996 jmp r9
2997
2998 In big-endian-data-mode, however, instructions are read into the CPU
2999 4 bytes at a time. These bytes are then swapped around before being
3000 passed to the decoder. So we must partition our trampoline into
3001 4-byte packets and swap these packets around so that the instruction
3002 reader will reverse the process. But, in order to avoid splitting
3003 the 32-bit constants across these packet boundaries (which would make
3004 inserting them into the constructed trampoline very difficult), we
3005 have to pad the instruction sequence with NOP insns, i.e.:
3006
3007 nop
3008 nop
3009 mov.l #<...>, r8
3010 nop
3011 nop
3012 mov.l #<...>, r9
3013 jmp r9
3014 nop
3015 nop */
3016
3017 if (! TARGET_BIG_ENDIAN_DATA)
3018 {
3019 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3020 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3021 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3022 }
3023 else
3024 {
3025 char r8 = '0' + STATIC_CHAIN_REGNUM;
3026 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3027
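/* Each 4-byte packet below is emitted pre-swapped so that the
   big-endian instruction fetch restores the intended byte order;
   the "de ad be ef" packets are placeholders for the 32-bit
   immediates that rx_trampoline_init patches in later.  */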
3028 if (TARGET_AS100_SYNTAX)
3029 {
3030 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3031 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3032 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3033 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3034 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3035 }
3036 else
3037 {
3038 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3039 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3040 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3041 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3042 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3043 }
3044 }
3045 }
3046
3047 static void
3048 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3049 {
3050 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3051
3052 emit_block_move (tramp, assemble_trampoline_template (),
3053 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3054
3055 if (TARGET_BIG_ENDIAN_DATA)
3056 {
3057 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3058 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3059 }
3060 else
3061 {
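/* MOV.L #imm32, Rn is encoded as two opcode bytes followed by the
   32-bit immediate, so the static chain value lands at offset 2 and
   the function address at offset 6 + 2 within the template.  */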
3062 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3063 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3064 }
3065 }
3066 \f
3067 static int
3068 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3069 reg_class_t regclass ATTRIBUTE_UNUSED,
3070 bool in)
3071 {
3072 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3073 }
3074
3075 /* Convert a CC_MODE to the set of flags that it represents. */
3076
3077 static unsigned int
3078 flags_from_mode (machine_mode mode)
3079 {
3080 switch (mode)
3081 {
3082 case E_CC_ZSmode:
3083 return CC_FLAG_S | CC_FLAG_Z;
3084 case E_CC_ZSOmode:
3085 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3086 case E_CC_ZSCmode:
3087 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3088 case E_CCmode:
3089 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3090 case E_CC_Fmode:
3091 return CC_FLAG_FP;
3092 default:
3093 gcc_unreachable ();
3094 }
3095 }
3096
3097 /* Convert a set of flags to a CC_MODE that can implement it. */
3098
3099 static machine_mode
3100 mode_from_flags (unsigned int f)
3101 {
3102 if (f & CC_FLAG_FP)
3103 return CC_Fmode;
3104 if (f & CC_FLAG_O)
3105 {
3106 if (f & CC_FLAG_C)
3107 return CCmode;
3108 else
3109 return CC_ZSOmode;
3110 }
3111 else if (f & CC_FLAG_C)
3112 return CC_ZSCmode;
3113 else
3114 return CC_ZSmode;
3115 }
3116
3117 /* Convert an RTX_CODE to the set of flags needed to implement it.
3118 This assumes an integer comparison. */
3119
3120 static unsigned int
3121 flags_from_code (enum rtx_code code)
3122 {
3123 switch (code)
3124 {
3125 case LT:
3126 case GE:
3127 return CC_FLAG_S;
3128 case GT:
3129 case LE:
3130 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3131 case GEU:
3132 case LTU:
3133 return CC_FLAG_C;
3134 case GTU:
3135 case LEU:
3136 return CC_FLAG_C | CC_FLAG_Z;
3137 case EQ:
3138 case NE:
3139 return CC_FLAG_Z;
3140 default:
3141 gcc_unreachable ();
3142 }
3143 }
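
/* For example, an unsigned greater-than test (GTU) needs the carry and
   zero flags, so flags_from_code returns CC_FLAG_C | CC_FLAG_Z and
   mode_from_flags maps that onto CC_ZSCmode.  */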
3144
3145 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3146
3147 static machine_mode
3148 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3149 {
3150 unsigned f;
3151
3152 /* Early out for identical modes. */
3153 if (m1 == m2)
3154 return m1;
3155
3156 /* There's no valid combination for FP vs non-FP. */
3157 f = flags_from_mode (m1) | flags_from_mode (m2);
3158 if (f & CC_FLAG_FP)
3159 return VOIDmode;
3160
3161 /* Otherwise, see what mode can implement all the flags. */
3162 return mode_from_flags (f);
3163 }
3164
3165 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3166
3167 machine_mode
3168 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3169 {
3170 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3171 return CC_Fmode;
3172
3173 if (y != const0_rtx)
3174 return CCmode;
3175
3176 return mode_from_flags (flags_from_code (cmp_code));
3177 }
3178
3179 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3180 CC_MODE, and branch on the result of that comparison. */
3181
3182 void
3183 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3184 rtx c1, rtx c2, rtx label)
3185 {
3186 rtx flags, x;
3187
3188 flags = gen_rtx_REG (cc_mode, CC_REG);
3189 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3190 x = gen_rtx_SET (flags, x);
3191 emit_insn (x);
3192
3193 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3194 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3195 x = gen_rtx_SET (pc_rtx, x);
3196 emit_jump_insn (x);
3197 }
3198
3199 /* A helper function for matching parallels that set the flags. */
3200
3201 bool
3202 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3203 {
3204 rtx op1, flags;
3205 machine_mode flags_mode;
3206
3207 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3208
3209 op1 = XVECEXP (PATTERN (insn), 0, 0);
3210 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3211
3212 flags = SET_DEST (op1);
3213 flags_mode = GET_MODE (flags);
3214
3215 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3216 return false;
3217 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3218 return false;
3219
3220 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3221 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3222 return false;
3223
3224 return true;
3225 }
3226 \f
3227 int
3228 rx_align_for_label (rtx lab, int uses_threshold)
3229 {
3230 /* This is a simple heuristic to guess when an alignment would not be useful
3231 because the delay due to the inserted NOPs would be greater than the delay
3232 due to the misaligned branch. If uses_threshold is zero then the alignment
3233 is always useful. */
3234 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3235 return 0;
3236
3237 if (optimize_size)
3238 return 0;
3239 /* These values are log, not bytes. */
3240 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3241 return 2; /* 4 bytes */
3242 return 3; /* 8 bytes */
3243 }
3244
3245 static int
3246 rx_max_skip_for_label (rtx_insn *lab)
3247 {
3248 int opsize;
3249 rtx_insn *op;
3250
3251 if (optimize_size)
3252 return 0;
3253
3254 if (lab == NULL)
3255 return 0;
3256
3257 op = lab;
3258 do
3259 {
3260 op = next_nonnote_nondebug_insn (op);
3261 }
3262 while (op && (LABEL_P (op)
3263 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3264 if (!op)
3265 return 0;
3266
3267 opsize = get_attr_length (op);
3268 if (opsize >= 0 && opsize < 8)
3269 return opsize - 1;
3270 return 0;
3271 }
3272
3273 /* Compute the real length of the extending load-and-op instructions. */
3274
3275 int
3276 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3277 {
3278 rtx extend, mem, offset;
3279 bool zero;
3280 int factor;
3281
3282 if (!INSN_P (insn))
3283 return current_length;
3284
3285 switch (INSN_CODE (insn))
3286 {
3287 default:
3288 return current_length;
3289
3290 case CODE_FOR_plussi3_zero_extendhi:
3291 case CODE_FOR_andsi3_zero_extendhi:
3292 case CODE_FOR_iorsi3_zero_extendhi:
3293 case CODE_FOR_xorsi3_zero_extendhi:
3294 case CODE_FOR_divsi3_zero_extendhi:
3295 case CODE_FOR_udivsi3_zero_extendhi:
3296 case CODE_FOR_minussi3_zero_extendhi:
3297 case CODE_FOR_smaxsi3_zero_extendhi:
3298 case CODE_FOR_sminsi3_zero_extendhi:
3299 case CODE_FOR_multsi3_zero_extendhi:
3300 case CODE_FOR_comparesi3_zero_extendhi:
3301 zero = true;
3302 factor = 2;
3303 break;
3304
3305 case CODE_FOR_plussi3_sign_extendhi:
3306 case CODE_FOR_andsi3_sign_extendhi:
3307 case CODE_FOR_iorsi3_sign_extendhi:
3308 case CODE_FOR_xorsi3_sign_extendhi:
3309 case CODE_FOR_divsi3_sign_extendhi:
3310 case CODE_FOR_udivsi3_sign_extendhi:
3311 case CODE_FOR_minussi3_sign_extendhi:
3312 case CODE_FOR_smaxsi3_sign_extendhi:
3313 case CODE_FOR_sminsi3_sign_extendhi:
3314 case CODE_FOR_multsi3_sign_extendhi:
3315 case CODE_FOR_comparesi3_sign_extendhi:
3316 zero = false;
3317 factor = 2;
3318 break;
3319
3320 case CODE_FOR_plussi3_zero_extendqi:
3321 case CODE_FOR_andsi3_zero_extendqi:
3322 case CODE_FOR_iorsi3_zero_extendqi:
3323 case CODE_FOR_xorsi3_zero_extendqi:
3324 case CODE_FOR_divsi3_zero_extendqi:
3325 case CODE_FOR_udivsi3_zero_extendqi:
3326 case CODE_FOR_minussi3_zero_extendqi:
3327 case CODE_FOR_smaxsi3_zero_extendqi:
3328 case CODE_FOR_sminsi3_zero_extendqi:
3329 case CODE_FOR_multsi3_zero_extendqi:
3330 case CODE_FOR_comparesi3_zero_extendqi:
3331 zero = true;
3332 factor = 1;
3333 break;
3334
3335 case CODE_FOR_plussi3_sign_extendqi:
3336 case CODE_FOR_andsi3_sign_extendqi:
3337 case CODE_FOR_iorsi3_sign_extendqi:
3338 case CODE_FOR_xorsi3_sign_extendqi:
3339 case CODE_FOR_divsi3_sign_extendqi:
3340 case CODE_FOR_udivsi3_sign_extendqi:
3341 case CODE_FOR_minussi3_sign_extendqi:
3342 case CODE_FOR_smaxsi3_sign_extendqi:
3343 case CODE_FOR_sminsi3_sign_extendqi:
3344 case CODE_FOR_multsi3_sign_extendqi:
3345 case CODE_FOR_comparesi3_sign_extendqi:
3346 zero = false;
3347 factor = 1;
3348 break;
3349 }
3350
3351 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3352 extend = single_set (insn);
3353 gcc_assert (extend != NULL_RTX);
3354
3355 extend = SET_SRC (extend);
3356 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3357 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3358 extend = XEXP (extend, 0);
3359 else
3360 extend = XEXP (extend, 1);
3361
3362 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3363 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3364
3365 mem = XEXP (extend, 0);
3366 gcc_checking_assert (MEM_P (mem));
3367 if (REG_P (XEXP (mem, 0)))
3368 return (zero && factor == 1) ? 2 : 3;
3369
3370 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3371 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3372 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3373
3374 offset = XEXP (XEXP (mem, 0), 1);
3375 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3376
3377 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3378 return (zero && factor == 1) ? 3 : 4;
3379
3380 return (zero && factor == 1) ? 4 : 5;
3381 }
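
/* For instance, an AND with a zero-extended QImode load from
   (mem (plus (reg) (const_int 200))) has zero == true and factor == 1;
   the offset fits in the 0..255 range, so the real length is 3 bytes
   instead of the conservative default.  */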
3382
3383 static bool
3384 rx_narrow_volatile_bitfield (void)
3385 {
3386 return true;
3387 }
3388
3389 static bool
3390 rx_ok_to_inline (tree caller, tree callee)
3391 {
3392 /* Do not inline functions with local variables
3393 into a naked CALLER: naked functions have no stack frame and
3394 locals need a frame in order to have somewhere to live.
3395
3396 Unfortunately we have no way to determine the presence of
3397 local variables in CALLEE, so we have to be cautious and
3398 assume that there might be some there.
3399
3400 We do allow inlining when CALLEE has the "inline" type
3401 modifier or the "always_inline" or "gnu_inline" attributes. */
3402 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3403 || DECL_DECLARED_INLINE_P (callee)
3404 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3405 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3406 }
3407
3408 static bool
3409 rx_enable_lra (void)
3410 {
3411 return TARGET_ENABLE_LRA;
3412 }
3413
3414 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3415 {
3416 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3417 {
3418 /* If we are inside an interrupt handler, assume that interrupts are
3419 off -- which is the default hardware behavior. In this case, there
3420 is no need to disable the interrupts. */
3421 m_prev_psw_reg = NULL;
3422 }
3423 else
3424 {
3425 m_prev_psw_reg = gen_reg_rtx (SImode);
3426 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3427 emit_insn (gen_clrpsw (GEN_INT ('I')));
3428 }
3429 }
3430
3431 rx_atomic_sequence::~rx_atomic_sequence (void)
3432 {
3433 if (m_prev_psw_reg != NULL)
3434 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3435 }
3436
3437 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3438
3439 static bool
3440 rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3441 {
3442 return REGNO_REG_CLASS (regno) == GR_REGS;
3443 }
3444 \f
3445 #undef TARGET_NARROW_VOLATILE_BITFIELD
3446 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3447
3448 #undef TARGET_CAN_INLINE_P
3449 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3450
3451 #undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3452 #define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3453 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3454 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3455 #undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3456 #define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3457 #undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3458 #define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3459
3460 #undef TARGET_FUNCTION_VALUE
3461 #define TARGET_FUNCTION_VALUE rx_function_value
3462
3463 #undef TARGET_RETURN_IN_MSB
3464 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3465
3466 #undef TARGET_IN_SMALL_DATA_P
3467 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3468
3469 #undef TARGET_RETURN_IN_MEMORY
3470 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3471
3472 #undef TARGET_HAVE_SRODATA_SECTION
3473 #define TARGET_HAVE_SRODATA_SECTION true
3474
3475 #undef TARGET_ASM_SELECT_RTX_SECTION
3476 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3477
3478 #undef TARGET_ASM_SELECT_SECTION
3479 #define TARGET_ASM_SELECT_SECTION rx_select_section
3480
3481 #undef TARGET_INIT_BUILTINS
3482 #define TARGET_INIT_BUILTINS rx_init_builtins
3483
3484 #undef TARGET_BUILTIN_DECL
3485 #define TARGET_BUILTIN_DECL rx_builtin_decl
3486
3487 #undef TARGET_EXPAND_BUILTIN
3488 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3489
3490 #undef TARGET_ASM_CONSTRUCTOR
3491 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3492
3493 #undef TARGET_ASM_DESTRUCTOR
3494 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3495
3496 #undef TARGET_STRUCT_VALUE_RTX
3497 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3498
3499 #undef TARGET_ATTRIBUTE_TABLE
3500 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3501
3502 #undef TARGET_ASM_FILE_START
3503 #define TARGET_ASM_FILE_START rx_file_start
3504
3505 #undef TARGET_MS_BITFIELD_LAYOUT_P
3506 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3507
3508 #undef TARGET_LEGITIMATE_ADDRESS_P
3509 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3510
3511 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3512 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3513
3514 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3515 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3516
3517 #undef TARGET_ASM_FUNCTION_PROLOGUE
3518 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3519
3520 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3521 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3522
3523 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3524 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3525
3526 #undef TARGET_FUNCTION_ARG
3527 #define TARGET_FUNCTION_ARG rx_function_arg
3528
3529 #undef TARGET_FUNCTION_ARG_ADVANCE
3530 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3531
3532 #undef TARGET_FUNCTION_ARG_BOUNDARY
3533 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3534
3535 #undef TARGET_SET_CURRENT_FUNCTION
3536 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3537
3538 #undef TARGET_ASM_INTEGER
3539 #define TARGET_ASM_INTEGER rx_assemble_integer
3540
3541 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3542 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3543
3544 #undef TARGET_MAX_ANCHOR_OFFSET
3545 #define TARGET_MAX_ANCHOR_OFFSET 32
3546
3547 #undef TARGET_ADDRESS_COST
3548 #define TARGET_ADDRESS_COST rx_address_cost
3549
3550 #undef TARGET_CAN_ELIMINATE
3551 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3552
3553 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3554 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3555
3556 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3557 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3558
3559 #undef TARGET_TRAMPOLINE_INIT
3560 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3561
3562 #undef TARGET_PRINT_OPERAND
3563 #define TARGET_PRINT_OPERAND rx_print_operand
3564
3565 #undef TARGET_PRINT_OPERAND_ADDRESS
3566 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3567
3568 #undef TARGET_CC_MODES_COMPATIBLE
3569 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3570
3571 #undef TARGET_MEMORY_MOVE_COST
3572 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3573
3574 #undef TARGET_OPTION_OVERRIDE
3575 #define TARGET_OPTION_OVERRIDE rx_option_override
3576
3577 #undef TARGET_PROMOTE_FUNCTION_MODE
3578 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3579
3580 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3581 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3582
3583 #undef TARGET_FLAGS_REGNUM
3584 #define TARGET_FLAGS_REGNUM CC_REG
3585
3586 #undef TARGET_LEGITIMATE_CONSTANT_P
3587 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3588
3589 #undef TARGET_LEGITIMIZE_ADDRESS
3590 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3591
3592 #undef TARGET_WARN_FUNC_RETURN
3593 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3594
3595 #undef TARGET_LRA_P
3596 #define TARGET_LRA_P rx_enable_lra
3597
3598 #undef TARGET_HARD_REGNO_MODE_OK
3599 #define TARGET_HARD_REGNO_MODE_OK rx_hard_regno_mode_ok
3600
3601 struct gcc_target targetm = TARGET_INITIALIZER;
3602
3603 #include "gt-rx.h"