/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2019 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* To Do:

 * Re-enable memory-to-memory copies and fix up reload.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "toplev.h"
#include "langhooks.h"
#include "opts.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;

static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}

static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}

/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}
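
/* For example, given a MEM of the form
   (mem (const (plus (symbol_ref "foo") (const_int 4)))) the walk
   above peels off the MEM and CONST wrappers and the PLUS, reaching
   the SYMBOL_REF and returning the decl for "foo".  */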

static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S   (1 << 0)
#define CC_FLAG_Z   (1 << 1)
#define CC_FLAG_O   (1 << 2)
#define CC_FLAG_C   (1 << 3)
#define CC_FLAG_FP  (1 << 4)	/* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);

/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area,
			   but it has not been placed there yet.  */
};

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}

static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}

/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
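
/* Illustrative summary of the rules above (the code is authoritative):

     (reg r1)                           register indirect, any mode
     (post_inc (reg r1))                1, 2 or 4 byte modes only
     (plus (reg r1) (const_int 8))      positive, mode-aligned displacement
                                        no bigger than 65535 * mode size
     (plus (reg r1) (reg r2))           QImode only
     (plus (reg r1) (mult (reg r2) 4))  scale must equal the mode size  */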

/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      return RX_REG_P (SUBREG_REG (mem));

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base  = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
	 Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16 bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}

/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}

static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}
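
/* For example, rx_print_integer emits 42 as "42" but 100 as "0x64"
   (or as "064H" when AS100 syntax has been selected).  */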

static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}


/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing.
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing.
     %Q  If the operand is a MEM, then correctly generate
         register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	/* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through.  */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}

/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case E_QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case E_HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case E_DFmode:
    case E_DImode:
    case E_SFmode:
    case E_SImode:
      extension = ".L";
      break;
    case E_VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
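
/* As an illustration, a simple SImode register-to-register move comes
   out as "mov.L\t%1, %0", while a DImode (or DFmode) value is split
   into two mov.L instructions, ordered so that an overlapping
   register pair is not clobbered half way through the copy.  */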

/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
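
/* For example, rx_round_up (5, 4) == 8 and rx_round_up (8, 4) == 8.
   ALIGNMENT must be a power of two for the mask trick to work.  */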

/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
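
/* Worked example: with UNITS_PER_WORD == 4 the argument registers
   r1 to r4 hold MAX_NUM_ARG_BYTES == 16 bytes, so a function taking
   three named ints receives them in r1, r2 and r3, while a 20-byte
   BLKmode struct fails the MAX_NUM_ARG_BYTES test above and is
   passed on the stack instead.  */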

static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32 bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI.)  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}

/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* The RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode))
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}

/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}

static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}

static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}

/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}

/* Returns true if the provided function has the "fast_interrupt" attribute.  */

bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}

/* Returns true if the provided function has the "interrupt" attribute.  */

bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}

/* Returns true if the provided function has the "naked" attribute.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}

static bool use_fixed_regs = false;

static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}

/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}

/* Typical stack layout should look like this after the function's prologue:

                            |    |
                              --                       ^
                            |    | \                   |
                            |    |   arguments saved   | Increasing
                            |    |   on the stack      |  addresses
    PARENT   arg pointer -> |    | /
  -------------------------- ---- -------------------
    CHILD                   |ret |   return address
                              --
                            |    | \
                            |    |   call saved
                            |    |   registers
                            |    | /
                              --
                            |    | \
                            |    |   local
                            |    |   variables
        frame pointer ->    |    | /
                              --
                            |    | \
                            |    |   outgoing          | Decreasing
                            |    |   arguments         |  addresses
   current stack pointer -> |    | /
  -------------------------- ---- ------------------    V
                            |    |  */

static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
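
/* bit_count is the classic SWAR population count.  Worked example:
   for x = 0xF0 the pair step leaves 0xA0 (each of the top two bit
   pairs now holds the count 2), the nibble step gives 0x40, and the
   remaining folds reduce that to the answer 4.  */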

#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified -ffixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}

/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
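
/* For example, given operands[0] == GEN_INT (12) and a PARALLEL whose
   first SET stores r8, last_reg is (12 / 4) - 1 == 2 and the emitted
   instruction is "pushm r6-r8".  */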

/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
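
/* Illustrative shape: gen_rx_store_vector (6, 8) builds

     (parallel [(set sp (minus sp 12))
                (set (mem (minus sp 4))  (reg r8))
                (set (mem (minus sp 8))  (reg r7))
                (set (mem (minus sp 12)) (reg r6))])

   i.e. the highest numbered register is stored nearest the incoming
   stack pointer.  */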

/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}

/* Create CFI notes for register pops.  */
static void
add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
{
  rtx t = plus_constant (Pmode, stack_pointer_rtx,
			 (high - low + 1) * UNITS_PER_WORD);
  t = gen_rtx_SET (stack_pointer_rtx, t);
  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
  RTX_FRAME_RELATED_P (insn) = 1;
  for (unsigned int i = low; i <= high; i++)
    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
}


static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
		   (1 << (rx_max_constant_size * 8)));
}
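
/* For example, if rx_max_constant_size is 1 (set via the
   -mmax-constant-size option) the shifts above compute the accepted
   range as [-256, 256].  */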
1674
1675 /* Generate an ADD of SRC plus VAL into DEST.
1676 Handles the case where VAL is too big for max_constant_value.
1677 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1678
1679 static void
1680 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1681 {
1682 rtx insn;
1683
1684 if (val == NULL_RTX || INTVAL (val) == 0)
1685 {
1686 gcc_assert (dest != src);
1687
1688 insn = emit_move_insn (dest, src);
1689 }
1690 else if (ok_for_max_constant (INTVAL (val)))
1691 insn = emit_insn (gen_addsi3 (dest, src, val));
1692 else
1693 {
1694 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1695 will not reject it. */
1696 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1697 insn = emit_insn (gen_addsi3 (dest, src, val));
1698
1699 if (is_frame_related)
1700 /* We have to provide our own frame related note here
1701 as the dwarf2out code cannot be expected to grok
1702 our unspec. */
1703 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1704 gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
1705 return;
1706 }
1707
1708 if (is_frame_related)
1709 RTX_FRAME_RELATED_P (insn) = 1;
1710 }
1711
1712 static void
1713 push_regs (unsigned int high, unsigned int low)
1714 {
1715 rtx insn;
1716
1717 if (low == high)
1718 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1719 else
1720 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1721 gen_rx_store_vector (low, high)));
1722 mark_frame_related (insn);
1723 }
1724
1725 void
1726 rx_expand_prologue (void)
1727 {
1728 unsigned int stack_size;
1729 unsigned int frame_size;
1730 unsigned int mask;
1731 unsigned int low;
1732 unsigned int high;
1733 unsigned int reg;
1734
1735 /* Naked functions use their own, programmer provided prologues. */
1736 if (is_naked_func (NULL_TREE))
1737 return;
1738
1739 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1740
1741 if (flag_stack_usage_info)
1742 current_function_static_stack_size = frame_size + stack_size;
1743
1744 /* If we use any of the callee-saved registers, save them now. */
1745 if (mask)
1746 {
1747 /* Push registers in reverse order. */
1748 for (reg = CC_REGNUM; reg --;)
1749 if (mask & (1 << reg))
1750 {
1751 low = high = reg;
1752
1753 /* Look for a span of registers.
1754 Note - we do not have to worry about -Os and whether
1755 it is better to use a single, longer PUSHM as
1756 rx_get_stack_layout has already done that for us. */
1757 while (reg-- > 0)
1758 if ((mask & (1 << reg)) == 0)
1759 break;
1760 else
1761 --low;
1762
1763 push_regs (high, low);
1764 if (reg == (unsigned) -1)
1765 break;
1766 }
1767 }
1768 else if (low)
1769 push_regs (high, low);
1770
1771 if (MUST_SAVE_ACC_REGISTER)
1772 {
1773 unsigned int acc_high, acc_low;
1774
1775 /* Interrupt handlers have to preserve the accumulator
1776 register if so requested by the user. Use the first
1777 two pushed registers as intermediaries. */
1778 if (mask)
1779 {
1780 acc_low = acc_high = 0;
1781
1782 for (reg = 1; reg < CC_REGNUM; reg ++)
1783 if (mask & (1 << reg))
1784 {
1785 if (acc_low == 0)
1786 acc_low = reg;
1787 else
1788 {
1789 acc_high = reg;
1790 break;
1791 }
1792 }
1793
1794 /* We have assumed that there are at least two registers pushed... */
1795 gcc_assert (acc_high != 0);
1796
1797 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1798 We just assume that they are zero. */
1799 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1800 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1801 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1802 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1803 }
1804 else
1805 {
1806 acc_low = low;
1807 acc_high = low + 1;
1808
1809 /* We have assumed that there are at least two registers pushed... */
1810 gcc_assert (acc_high <= high);
1811
1812 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1813 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1814 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1815 gen_rx_store_vector (acc_low, acc_high)));
1816 }
1817 }
1818
1819 /* If needed, set up the frame pointer. */
1820 if (frame_pointer_needed)
1821 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1822 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1823
1824 /* Allocate space for the outgoing args.
1825 If the stack frame has not already been set up then handle this as well. */
1826 if (stack_size)
1827 {
1828 if (frame_size)
1829 {
1830 if (frame_pointer_needed)
1831 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1832 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1833 else
1834 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1835 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1836 true);
1837 }
1838 else
1839 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1840 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1841 }
1842 else if (frame_size)
1843 {
1844 if (! frame_pointer_needed)
1845 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1846 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1847 else
1848 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1849 false /* False because the epilogue will use the FP not the SP. */);
1850 }
1851 }
1852
1853 static void
1854 add_vector_labels (FILE *file, const char *aname)
1855 {
1856 tree vec_attr;
1857 tree val_attr;
1858 const char *vname = "vect";
1859 const char *s;
1860 int vnum;
1861
1862 /* This node is for the vector/interrupt tag itself */
1863 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1864 if (!vec_attr)
1865 return;
1866
1867 /* Now point it at the first argument */
1868 vec_attr = TREE_VALUE (vec_attr);
1869
1870 /* Iterate through the arguments. */
1871 while (vec_attr)
1872 {
1873 val_attr = TREE_VALUE (vec_attr);
1874 switch (TREE_CODE (val_attr))
1875 {
1876 case STRING_CST:
1877 s = TREE_STRING_POINTER (val_attr);
1878 goto string_id_common;
1879
1880 case IDENTIFIER_NODE:
1881 s = IDENTIFIER_POINTER (val_attr);
1882
1883 string_id_common:
1884 if (strcmp (s, "$default") == 0)
1885 {
1886 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1887 fprintf (file, "$tableentry$default$%s:\n", vname);
1888 }
1889 else
1890 vname = s;
1891 break;
1892
1893 case INTEGER_CST:
1894 vnum = TREE_INT_CST_LOW (val_attr);
1895
1896 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1897 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1898 break;
1899
1900 default:
1901 ;
1902 }
1903
1904 vec_attr = TREE_CHAIN (vec_attr);
1905 }
1906
1907 }
1908
1909 static void
1910 rx_output_function_prologue (FILE * file)
1911 {
1912 add_vector_labels (file, "interrupt");
1913 add_vector_labels (file, "vector");
1914
1915 if (is_fast_interrupt_func (NULL_TREE))
1916 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1917
1918 if (is_interrupt_func (NULL_TREE))
1919 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1920
1921 if (is_naked_func (NULL_TREE))
1922 asm_fprintf (file, "\t; Note: Naked Function\n");
1923
1924 if (cfun->static_chain_decl != NULL)
1925 asm_fprintf (file, "\t; Note: Nested function declared "
1926 "inside another function.\n");
1927
1928 if (crtl->calls_eh_return)
1929 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1930 }
1931
1932 /* Generate a POPM or RTSD instruction that matches the given operands. */
1933
1934 void
1935 rx_emit_stack_popm (rtx * operands, bool is_popm)
1936 {
1937 HOST_WIDE_INT stack_adjust;
1938 HOST_WIDE_INT last_reg;
1939 rtx first_push;
1940
1941 gcc_assert (CONST_INT_P (operands[0]));
1942 stack_adjust = INTVAL (operands[0]);
1943
1944 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1945 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1946
1947 first_push = XVECEXP (operands[1], 0, 1);
1948 gcc_assert (SET_P (first_push));
1949 first_push = SET_DEST (first_push);
1950 gcc_assert (REG_P (first_push));
1951
1952 if (is_popm)
1953 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1954 reg_names [REGNO (first_push)],
1955 reg_names [REGNO (first_push) + last_reg]);
1956 else
1957 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1958 (int) stack_adjust,
1959 reg_names [REGNO (first_push)],
1960 reg_names [REGNO (first_push) + last_reg]);
1961 }
1962
1963 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1964
1965 static rtx
1966 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1967 {
1968 unsigned int i;
1969 unsigned int bias = 3;
1970 unsigned int count = (high - low) + bias;
1971 rtx vector;
1972
1973 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1974
1975 XVECEXP (vector, 0, 0) =
1976 gen_rtx_SET (stack_pointer_rtx,
1977 plus_constant (Pmode, stack_pointer_rtx, adjust));
1978
1979 for (i = 0; i < count - 2; i++)
1980 XVECEXP (vector, 0, i + 1) =
1981 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1982 gen_rtx_MEM (SImode,
1983 i == 0 ? stack_pointer_rtx
1984 : plus_constant (Pmode, stack_pointer_rtx,
1985 i * UNITS_PER_WORD)));
1986
1987 XVECEXP (vector, 0, count - 1) = ret_rtx;
1988
1989 return vector;
1990 }
1991
1992 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1993
1994 static rtx
1995 gen_rx_popm_vector (unsigned int low, unsigned int high)
1996 {
1997 unsigned int i;
1998 unsigned int count = (high - low) + 2;
1999 rtx vector;
2000
2001 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2002
2003 XVECEXP (vector, 0, 0) =
2004 gen_rtx_SET (stack_pointer_rtx,
2005 plus_constant (Pmode, stack_pointer_rtx,
2006 (count - 1) * UNITS_PER_WORD));
2007
2008 for (i = 0; i < count - 1; i++)
2009 XVECEXP (vector, 0, i + 1) =
2010 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
2011 gen_rtx_MEM (SImode,
2012 i == 0 ? stack_pointer_rtx
2013 : plus_constant (Pmode, stack_pointer_rtx,
2014 i * UNITS_PER_WORD)));
2015
2016 return vector;
2017 }
2018
2019 /* Returns true if a simple return insn can be used. */
2020
2021 bool
2022 rx_can_use_simple_return (void)
2023 {
2024 unsigned int low;
2025 unsigned int high;
2026 unsigned int frame_size;
2027 unsigned int stack_size;
2028 unsigned int register_mask;
2029
2030 if (is_naked_func (NULL_TREE)
2031 || is_fast_interrupt_func (NULL_TREE)
2032 || is_interrupt_func (NULL_TREE))
2033 return false;
2034
2035 rx_get_stack_layout (& low, & high, & register_mask,
2036 & frame_size, & stack_size);
2037
2038 return (register_mask == 0
2039 && (frame_size + stack_size) == 0
2040 && low == 0);
2041 }
2042
2043 static void
2044 pop_regs (unsigned int high, unsigned int low)
2045 {
2046 rtx_insn *insn;
2047 if (high == low)
2048 insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2049 else
2050 insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
2051 * UNITS_PER_WORD),
2052 gen_rx_popm_vector (low, high)));
2053 add_pop_cfi_notes (insn, high, low);
2054 }
2055
2056 void
2057 rx_expand_epilogue (bool is_sibcall)
2058 {
2059 unsigned int low;
2060 unsigned int high;
2061 unsigned int frame_size;
2062 unsigned int stack_size;
2063 unsigned int register_mask;
2064 unsigned int regs_size;
2065 unsigned int reg;
2066 unsigned HOST_WIDE_INT total_size;
2067
2068 /* FIXME: We do not support indirect sibcalls at the moment becaause we
2069 cannot guarantee that the register holding the function address is a
2070 call-used register. If it is a call-saved register then the stack
2071 pop instructions generated in the epilogue will corrupt the address
2072 before it is used.
2073
2074 Creating a new call-used-only register class works but then the
2075 reload pass gets stuck because it cannot always find a call-used
2076 register for spilling sibcalls.
2077
2078 The other possible solution is for this pass to scan forward for the
2079 sibcall instruction (if it has been generated) and work out if it
2080 is an indirect sibcall using a call-saved register. If it is then
2081 the address can copied into a call-used register in this epilogue
2082 code and the sibcall instruction modified to use that register. */
2083
2084 if (is_naked_func (NULL_TREE))
2085 {
2086 gcc_assert (! is_sibcall);
2087
2088 /* Naked functions use their own, programmer-provided epilogues.
2089 But, in order to keep gcc happy we have to generate some kind of
2090 epilogue RTL. */
2091 emit_jump_insn (gen_naked_return ());
2092 return;
2093 }
2094
2095 rx_get_stack_layout (& low, & high, & register_mask,
2096 & frame_size, & stack_size);
2097
2098 total_size = frame_size + stack_size;
2099 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2100
2101 /* See if we are unable to use the special stack frame deconstruct and
2102 return instructions. In most cases we can use them, but the exceptions
2103 are:
2104
2105 - Sibling calling functions deconstruct the frame but do not return to
2106 their caller. Instead they branch to their sibling and allow their
2107 return instruction to return to this function's parent.
2108
2109 - Fast and normal interrupt handling functions have to use special
2110 return instructions.
2111
2112 - Functions where we have pushed a fragmented set of registers into the
2113 call-save area must have the same set of registers popped. */
2114 if (is_sibcall
2115 || is_fast_interrupt_func (NULL_TREE)
2116 || is_interrupt_func (NULL_TREE)
2117 || register_mask)
2118 {
2119 /* Cannot use the special instructions - deconstruct by hand. */
2120 if (total_size)
2121 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2122 GEN_INT (total_size), false);
2123
2124 if (MUST_SAVE_ACC_REGISTER)
2125 {
2126 unsigned int acc_low, acc_high;
2127
2128 /* Reverse the saving of the accumulator register onto the stack.
2129 Note we must adjust the saved "low" accumulator value as it
2130 is really the middle 32 bits of the accumulator. */
2131 if (register_mask)
2132 {
2133 acc_low = acc_high = 0;
2134
2135 for (reg = 1; reg < CC_REGNUM; reg ++)
2136 if (register_mask & (1 << reg))
2137 {
2138 if (acc_low == 0)
2139 acc_low = reg;
2140 else
2141 {
2142 acc_high = reg;
2143 break;
2144 }
2145 }
2146 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2147 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2148 }
2149 else
2150 {
2151 acc_low = low;
2152 acc_high = low + 1;
2153 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2154 gen_rx_popm_vector (acc_low, acc_high)));
2155 }
2156
2157 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2158 gen_rtx_REG (SImode, acc_low),
2159 GEN_INT (16)));
2160 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2161 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2162 }
2163
2164 if (register_mask)
2165 {
2166 for (reg = 0; reg < CC_REGNUM; reg ++)
2167 if (register_mask & (1 << reg))
2168 {
2169 low = high = reg;
2170 while (register_mask & (1 << high))
2171 high ++;
2172 pop_regs (high - 1, low);
2173 reg = high;
2174 }
2175 }
2176 else if (low)
2177 pop_regs (high, low);
2178
2179 if (is_fast_interrupt_func (NULL_TREE))
2180 {
2181 gcc_assert (! is_sibcall);
2182 emit_jump_insn (gen_fast_interrupt_return ());
2183 }
2184 else if (is_interrupt_func (NULL_TREE))
2185 {
2186 gcc_assert (! is_sibcall);
2187 emit_jump_insn (gen_exception_return ());
2188 }
2189 else if (! is_sibcall)
2190 emit_jump_insn (gen_simple_return ());
2191
2192 return;
2193 }
2194
2195 /* If we allocated space on the stack, free it now. */
2196 if (total_size)
2197 {
2198 unsigned HOST_WIDE_INT rtsd_size;
2199
2200 /* See if we can use the RTSD instruction. */
2201 rtsd_size = total_size + regs_size;
2202 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2203 {
2204 if (low)
2205 emit_jump_insn (gen_pop_and_return
2206 (GEN_INT (rtsd_size),
2207 gen_rx_rtsd_vector (rtsd_size, low, high)));
2208 else
2209 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2210
2211 return;
2212 }
2213
2214 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2215 GEN_INT (total_size), false);
2216 }
2217
2218 if (low)
2219 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2220 gen_rx_rtsd_vector (regs_size,
2221 low, high)));
2222 else
2223 emit_jump_insn (gen_simple_return ());
2224 }
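/* For illustration (hypothetical layout): a function that saved r6 and
   r7 contiguously (register_mask == 0, low = 6, high = 7) and has
   frame_size + stack_size == 8 gets regs_size == 8 and rtsd_size == 16.
   Since 16 < 1024 and is a multiple of 4, the fast path above emits a
   single pop_and_return insn - one RTSD instruction that deallocates
   the frame, restores r6-r7 and returns in one step.  */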
2225
2226
2227 /* Compute the offset (in bytes) between FROM (arg pointer
2228 or frame pointer) and TO (frame pointer or stack pointer).
2229 See ASCII art comment at the start of rx_expand_prologue
2230 for more information. */
2231
2232 int
2233 rx_initial_elimination_offset (int from, int to)
2234 {
2235 unsigned int low;
2236 unsigned int high;
2237 unsigned int frame_size;
2238 unsigned int stack_size;
2239 unsigned int mask;
2240
2241 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2242
2243 if (from == ARG_POINTER_REGNUM)
2244 {
2245 /* Extend the computed size of the stack frame to
2246 include the registers pushed in the prologue. */
2247 if (low)
2248 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2249 else
2250 frame_size += bit_count (mask) * UNITS_PER_WORD;
2251
2252 /* Remember to include the return address. */
2253 frame_size += 1 * UNITS_PER_WORD;
2254
2255 if (to == FRAME_POINTER_REGNUM)
2256 return frame_size;
2257
2258 gcc_assert (to == STACK_POINTER_REGNUM);
2259 return frame_size + stack_size;
2260 }
2261
2262 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2263 return stack_size;
2264 }
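/* For illustration (hypothetical numbers): with low = 6, high = 8
   (three saved registers), frame_size = 16 and stack_size = 8, the
   ARG_POINTER to FRAME_POINTER offset is 16 + 3 * 4 + 4 = 32 bytes
   (frame, saved registers, return address), and the ARG_POINTER to
   STACK_POINTER offset is 32 + 8 = 40 bytes.  */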
2265
2266 /* Decide if a variable should go into one of the small data sections. */
2267
2268 static bool
2269 rx_in_small_data (const_tree decl)
2270 {
2271 int size;
2272 const char * section;
2273
2274 if (rx_small_data_limit == 0)
2275 return false;
2276
2277 if (TREE_CODE (decl) != VAR_DECL)
2278 return false;
2279
2280 /* We do not put read-only variables into a small data area because
2281 they would be placed with the other read-only sections, far away
2282 from the read-write data sections, and we only have one small
2283 data area pointer.
2284 Similarly, commons are placed in the .bss section, which might be
2285 far away from (and out of alignment with respect to) the .data section. */
2286 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2287 return false;
2288
2289 section = DECL_SECTION_NAME (decl);
2290 if (section)
2291 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2292
2293 size = int_size_in_bytes (TREE_TYPE (decl));
2294
2295 return (size > 0) && (size <= rx_small_data_limit);
2296 }
2297
2298 /* Return a section for X.
2299 The only special thing we do here is to honor small data. */
2300
2301 static section *
2302 rx_select_rtx_section (machine_mode mode,
2303 rtx x,
2304 unsigned HOST_WIDE_INT align)
2305 {
2306 if (rx_small_data_limit > 0
2307 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2308 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2309 return sdata_section;
2310
2311 return default_elf_select_rtx_section (mode, x, align);
2312 }
2313
2314 static section *
2315 rx_select_section (tree decl,
2316 int reloc,
2317 unsigned HOST_WIDE_INT align)
2318 {
2319 if (rx_small_data_limit > 0)
2320 {
2321 switch (categorize_decl_for_section (decl, reloc))
2322 {
2323 case SECCAT_SDATA: return sdata_section;
2324 case SECCAT_SBSS: return sbss_section;
2325 case SECCAT_SRODATA:
2326 /* Fall through. We do not put small, read-only
2327 data into the C_2 section because we do not use
2328 the C_2 section at all: it is located with the
2329 other read-only data sections, far away from the
2330 read-write data sections, and we only have one
2331 small data pointer (r13). */
2333 default:
2334 break;
2335 }
2336 }
2337
2338 /* If we are supporting the Renesas assembler
2339 we cannot use mergeable sections. */
2340 if (TARGET_AS100_SYNTAX)
2341 switch (categorize_decl_for_section (decl, reloc))
2342 {
2343 case SECCAT_RODATA_MERGE_CONST:
2344 case SECCAT_RODATA_MERGE_STR_INIT:
2345 case SECCAT_RODATA_MERGE_STR:
2346 return readonly_data_section;
2347
2348 default:
2349 break;
2350 }
2351
2352 return default_elf_select_section (decl, reloc, align);
2353 }
2354 \f
2355 enum rx_builtin
2356 {
2357 RX_BUILTIN_BRK,
2358 RX_BUILTIN_CLRPSW,
2359 RX_BUILTIN_INT,
2360 RX_BUILTIN_MACHI,
2361 RX_BUILTIN_MACLO,
2362 RX_BUILTIN_MULHI,
2363 RX_BUILTIN_MULLO,
2364 RX_BUILTIN_MVFACHI,
2365 RX_BUILTIN_MVFACMI,
2366 RX_BUILTIN_MVFC,
2367 RX_BUILTIN_MVTACHI,
2368 RX_BUILTIN_MVTACLO,
2369 RX_BUILTIN_MVTC,
2370 RX_BUILTIN_MVTIPL,
2371 RX_BUILTIN_RACW,
2372 RX_BUILTIN_REVW,
2373 RX_BUILTIN_RMPA,
2374 RX_BUILTIN_ROUND,
2375 RX_BUILTIN_SETPSW,
2376 RX_BUILTIN_WAIT,
2377 RX_BUILTIN_max
2378 };
2379
2380 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2381
2382 static void
2383 rx_init_builtins (void)
2384 {
2385 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2386 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2387 add_builtin_function ("__builtin_rx_" LC_NAME, \
2388 build_function_type_list (RET_TYPE##_type_node, \
2389 NULL_TREE), \
2390 RX_BUILTIN_##UC_NAME, \
2391 BUILT_IN_MD, NULL, NULL_TREE)
2392
2393 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2394 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2395 add_builtin_function ("__builtin_rx_" LC_NAME, \
2396 build_function_type_list (RET_TYPE##_type_node, \
2397 ARG_TYPE##_type_node, \
2398 NULL_TREE), \
2399 RX_BUILTIN_##UC_NAME, \
2400 BUILT_IN_MD, NULL, NULL_TREE)
2401
2402 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2403 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2404 add_builtin_function ("__builtin_rx_" LC_NAME, \
2405 build_function_type_list (RET_TYPE##_type_node, \
2406 ARG_TYPE1##_type_node,\
2407 ARG_TYPE2##_type_node,\
2408 NULL_TREE), \
2409 RX_BUILTIN_##UC_NAME, \
2410 BUILT_IN_MD, NULL, NULL_TREE)
2411
2412 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2413 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2414 add_builtin_function ("__builtin_rx_" LC_NAME, \
2415 build_function_type_list (RET_TYPE##_type_node, \
2416 ARG_TYPE1##_type_node,\
2417 ARG_TYPE2##_type_node,\
2418 ARG_TYPE3##_type_node,\
2419 NULL_TREE), \
2420 RX_BUILTIN_##UC_NAME, \
2421 BUILT_IN_MD, NULL, NULL_TREE)
2422
2423 ADD_RX_BUILTIN0 (BRK, "brk", void);
2424 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2425 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2426 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2427 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2428 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2429 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2430 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2431 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2432 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2433 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2434 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2435 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
2436 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2437 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2438 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2439 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2440 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2441 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2442 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2443 }
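/* For illustration only: user code invokes the builtins defined above
   directly, e.g. (hypothetical example, not part of this file):

     int mac_example (int a, int b)
     {
       __builtin_rx_mvtaclo (0);
       __builtin_rx_mvtachi (0);
       __builtin_rx_maclo (a, b);
       return __builtin_rx_mvfachi ();
     }

   This clears the 64-bit accumulator, performs one low-halves
   multiply-accumulate, and reads back the top 32 bits of the result.
   Each call is expanded by rx_expand_builtin below into the matching
   machine insn (gen_mvtaclo, gen_maclo and so on).  */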
2444
2445 /* Return the RX builtin for CODE. */
2446
2447 static tree
2448 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2449 {
2450 if (code >= RX_BUILTIN_max)
2451 return error_mark_node;
2452
2453 return rx_builtins[code];
2454 }
2455
2456 static rtx
2457 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2458 {
2459 if (reg && ! REG_P (arg))
2460 arg = force_reg (SImode, arg);
2461
2462 emit_insn (gen_func (arg));
2463
2464 return NULL_RTX;
2465 }
2466
2467 static rtx
2468 rx_expand_builtin_mvtc (tree exp)
2469 {
2470 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2471 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2472
2473 if (! CONST_INT_P (arg1))
2474 return NULL_RTX;
2475
2476 if (! REG_P (arg2))
2477 arg2 = force_reg (SImode, arg2);
2478
2479 emit_insn (gen_mvtc (arg1, arg2));
2480
2481 return NULL_RTX;
2482 }
2483
2484 static rtx
2485 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2486 {
2487 rtx arg = expand_normal (t_arg);
2488
2489 if (! CONST_INT_P (arg))
2490 return NULL_RTX;
2491
2492 if (target == NULL_RTX)
2493 return NULL_RTX;
2494
2495 if (! REG_P (target))
2496 target = force_reg (SImode, target);
2497
2498 emit_insn (gen_mvfc (target, arg));
2499
2500 return target;
2501 }
2502
2503 static rtx
2504 rx_expand_builtin_mvtipl (rtx arg)
2505 {
2506 /* The RX610 does not support the MVTIPL instruction. */
2507 if (rx_cpu_type == RX610)
2508 return NULL_RTX;
2509
2510 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2511 return NULL_RTX;
2512
2513 emit_insn (gen_mvtipl (arg));
2514
2515 return NULL_RTX;
2516 }
2517
2518 static rtx
2519 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2520 {
2521 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2522 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2523
2524 if (! REG_P (arg1))
2525 arg1 = force_reg (SImode, arg1);
2526
2527 if (! REG_P (arg2))
2528 arg2 = force_reg (SImode, arg2);
2529
2530 emit_insn (gen_func (arg1, arg2));
2531
2532 return NULL_RTX;
2533 }
2534
2535 static rtx
2536 rx_expand_int_builtin_1_arg (rtx arg,
2537 rtx target,
2538 rtx (* gen_func)(rtx, rtx),
2539 bool mem_ok)
2540 {
2541 if (! REG_P (arg))
2542 if (!mem_ok || ! MEM_P (arg))
2543 arg = force_reg (SImode, arg);
2544
2545 if (target == NULL_RTX || ! REG_P (target))
2546 target = gen_reg_rtx (SImode);
2547
2548 emit_insn (gen_func (target, arg));
2549
2550 return target;
2551 }
2552
2553 static rtx
2554 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2555 {
2556 if (target == NULL_RTX || ! REG_P (target))
2557 target = gen_reg_rtx (SImode);
2558
2559 emit_insn (gen_func (target));
2560
2561 return target;
2562 }
2563
2564 static rtx
2565 rx_expand_builtin_round (rtx arg, rtx target)
2566 {
2567 if ((! REG_P (arg) && ! MEM_P (arg))
2568 || GET_MODE (arg) != SFmode)
2569 arg = force_reg (SFmode, arg);
2570
2571 if (target == NULL_RTX || ! REG_P (target))
2572 target = gen_reg_rtx (SImode);
2573
2574 emit_insn (gen_lrintsf2 (target, arg));
2575
2576 return target;
2577 }
2578
2579 static int
2580 valid_psw_flag (rtx op, const char *which)
2581 {
2582 static int mvtc_inform_done = 0;
2583
2584 if (GET_CODE (op) == CONST_INT)
2585 switch (INTVAL (op))
2586 {
2587 case 0: case 'c': case 'C':
2588 case 1: case 'z': case 'Z':
2589 case 2: case 's': case 'S':
2590 case 3: case 'o': case 'O':
2591 case 8: case 'i': case 'I':
2592 case 9: case 'u': case 'U':
2593 return 1;
2594 }
2595
2596 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2597 if (!mvtc_inform_done)
2598 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2599 mvtc_inform_done = 1;
2600
2601 return 0;
2602 }
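/* For illustration: __builtin_rx_clrpsw ('I') and __builtin_rx_clrpsw (8)
   both pass the check above and clear the interrupt-enable flag in the
   PSW, while an argument outside the accepted set is rejected with the
   error above and, the first time only, a hint to use __builtin_rx_mvtc
   instead.  */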
2603
2604 static rtx
2605 rx_expand_builtin (tree exp,
2606 rtx target,
2607 rtx subtarget ATTRIBUTE_UNUSED,
2608 machine_mode mode ATTRIBUTE_UNUSED,
2609 int ignore ATTRIBUTE_UNUSED)
2610 {
2611 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2612 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2613 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2614 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2615
2616 switch (fcode)
2617 {
2618 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2619 case RX_BUILTIN_CLRPSW:
2620 if (!valid_psw_flag (op, "clrpsw"))
2621 return NULL_RTX;
2622 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2623 case RX_BUILTIN_SETPSW:
2624 if (!valid_psw_flag (op, "setpsw"))
2625 return NULL_RTX;
2626 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2627 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2628 (op, gen_int, false);
2629 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2630 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2631 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2632 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2633 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2634 (target, gen_mvfachi);
2635 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2636 (target, gen_mvfacmi);
2637 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2638 (op, gen_mvtachi, true);
2639 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2640 (op, gen_mvtaclo, true);
2641 case RX_BUILTIN_RMPA:
2642 if (rx_allow_string_insns)
2643 emit_insn (gen_rmpa ());
2644 else
2645 error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2646 return NULL_RTX;
2647 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2648 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2649 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2650 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2651 (op, gen_racw, false);
2652 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2653 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2654 (op, target, gen_revw, false);
2655 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2656
2657 default:
2658 internal_error ("bad builtin code");
2659 break;
2660 }
2661
2662 return NULL_RTX;
2663 }
2664 \f
2665 /* Place an element into a constructor or destructor section.
2666 Like default_ctor_section_asm_out_constructor in varasm.c
2667 except that it uses .init_array (or .fini_array) and it
2668 handles constructor priorities. */
2669
2670 static void
2671 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2672 {
2673 section * s;
2674
2675 if (priority != DEFAULT_INIT_PRIORITY)
2676 {
2677 char buf[18];
2678
2679 sprintf (buf, "%s.%.5u",
2680 is_ctor ? ".init_array" : ".fini_array",
2681 priority);
2682 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2683 }
2684 else if (is_ctor)
2685 s = ctors_section;
2686 else
2687 s = dtors_section;
2688
2689 switch_to_section (s);
2690 assemble_align (POINTER_SIZE);
2691 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2692 }
2693
2694 static void
2695 rx_elf_asm_constructor (rtx symbol, int priority)
2696 {
2697 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2698 }
2699
2700 static void
2701 rx_elf_asm_destructor (rtx symbol, int priority)
2702 {
2703 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2704 }
2705 \f
2706 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2707
2708 static tree
2709 rx_handle_func_attribute (tree * node,
2710 tree name,
2711 tree args ATTRIBUTE_UNUSED,
2712 int flags ATTRIBUTE_UNUSED,
2713 bool * no_add_attrs)
2714 {
2715 gcc_assert (DECL_P (* node));
2716
2717 if (TREE_CODE (* node) != FUNCTION_DECL)
2718 {
2719 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2720 name);
2721 * no_add_attrs = true;
2722 }
2723
2724 /* FIXME: We ought to check for conflicting attributes. */
2725
2726 /* FIXME: We ought to check that the interrupt and exception
2727 handler attributes have been applied to void functions. */
2728 return NULL_TREE;
2729 }
2730
2731 /* Check "vector" attribute. */
2732
2733 static tree
2734 rx_handle_vector_attribute (tree * node,
2735 tree name,
2736 tree args,
2737 int flags ATTRIBUTE_UNUSED,
2738 bool * no_add_attrs)
2739 {
2740 gcc_assert (DECL_P (* node));
2741 gcc_assert (args != NULL_TREE);
2742
2743 if (TREE_CODE (* node) != FUNCTION_DECL)
2744 {
2745 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2746 name);
2747 * no_add_attrs = true;
2748 }
2749
2750 return NULL_TREE;
2751 }
2752
2753 /* Table of RX specific attributes. */
2754 const struct attribute_spec rx_attribute_table[] =
2755 {
2756 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
2757 affects_type_identity, handler, exclude. */
2758 { "fast_interrupt", 0, 0, true, false, false, false,
2759 rx_handle_func_attribute, NULL },
2760 { "interrupt", 0, -1, true, false, false, false,
2761 rx_handle_func_attribute, NULL },
2762 { "naked", 0, 0, true, false, false, false,
2763 rx_handle_func_attribute, NULL },
2764 { "vector", 1, -1, true, false, false, false,
2765 rx_handle_vector_attribute, NULL },
2766 { NULL, 0, 0, false, false, false, false, NULL, NULL }
2767 };
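/* For illustration, user code applies the attributes above using the
   usual GNU syntax (hypothetical declarations, not part of this file):

     void handler (void) __attribute__ ((interrupt));
     void quick (void) __attribute__ ((fast_interrupt));
     void bare (void) __attribute__ ((naked));

   The handlers registered in the table only verify that each attribute
   is attached to a function declaration.  */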
2768
2769 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2770
2771 static void
2772 rx_override_options_after_change (void)
2773 {
2774 static bool first_time = true;
2775
2776 if (first_time)
2777 {
2778 /* If this is the first time through and the user has not disabled
2779 the use of RX FPU hardware then enable -ffinite-math-only,
2780 since the FPU instructions do not support NaNs and infinities. */
2781 if (TARGET_USE_FPU)
2782 flag_finite_math_only = 1;
2783
2784 first_time = false;
2785 }
2786 else
2787 {
2788 /* Alert the user if they are changing the optimization options
2789 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2790 if (TARGET_USE_FPU
2791 && !flag_finite_math_only)
2792 warning (0, "RX FPU instructions do not support NaNs and infinities");
2793 }
2794 }
2795
2796 static void
2797 rx_option_override (void)
2798 {
2799 unsigned int i;
2800 cl_deferred_option *opt;
2801 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2802
2803 if (v)
2804 FOR_EACH_VEC_ELT (*v, i, opt)
2805 {
2806 switch (opt->opt_index)
2807 {
2808 case OPT_mint_register_:
2809 switch (opt->value)
2810 {
2811 case 4:
2812 fixed_regs[10] = call_used_regs [10] = 1;
2813 /* Fall through. */
2814 case 3:
2815 fixed_regs[11] = call_used_regs [11] = 1;
2816 /* Fall through. */
2817 case 2:
2818 fixed_regs[12] = call_used_regs [12] = 1;
2819 /* Fall through. */
2820 case 1:
2821 fixed_regs[13] = call_used_regs [13] = 1;
2822 /* Fall through. */
2823 case 0:
2824 rx_num_interrupt_regs = opt->value;
2825 break;
2826 default:
2827 rx_num_interrupt_regs = 0;
2828 /* Error message already given because rx_handle_option
2829 returned false. */
2830 break;
2831 }
2832 break;
2833
2834 default:
2835 gcc_unreachable ();
2836 }
2837 }
2838
2839 /* This target defaults to strict volatile bitfields. */
2840 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2841 flag_strict_volatile_bitfields = 1;
2842
2843 rx_override_options_after_change ();
2844
2845 /* These values are bytes, not log. */
2846 if (! optimize_size)
2847 {
2848 if (flag_align_jumps && !str_align_jumps)
2849 str_align_jumps = ((rx_cpu_type == RX100
2850 || rx_cpu_type == RX200) ? "4" : "8");
2851 if (flag_align_loops && !str_align_loops)
2852 str_align_loops = ((rx_cpu_type == RX100
2853 || rx_cpu_type == RX200) ? "4" : "8");
2854 if (flag_align_labels && !str_align_labels)
2855 str_align_labels = ((rx_cpu_type == RX100
2856 || rx_cpu_type == RX200) ? "4" : "8");
2857 }
2858 }
2859
2860 \f
2861 static bool
2862 rx_allocate_stack_slots_for_args (void)
2863 {
2864 /* Naked functions should not allocate stack slots for arguments. */
2865 return ! is_naked_func (NULL_TREE);
2866 }
2867
2868 static bool
2869 rx_func_attr_inlinable (const_tree decl)
2870 {
2871 return ! is_fast_interrupt_func (decl)
2872 && ! is_interrupt_func (decl)
2873 && ! is_naked_func (decl);
2874 }
2875
2876 static bool
2877 rx_warn_func_return (tree decl)
2878 {
2879 /* Naked functions are implemented entirely in assembly, including the
2880 return sequence, so suppress warnings about this. */
2881 return !is_naked_func (decl);
2882 }
2883
2884 /* Return nonzero if it is OK to make a tail call to DECL, which is
2885 either a FUNCTION_DECL or, for an indirect call, NULL. EXP is unused. */
2886
2887 static bool
2888 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2889 {
2890 if (TARGET_JSR)
2891 return false;
2892
2893 /* Do not allow indirect tailcalls. The
2894 sibcall patterns do not support them. */
2895 if (decl == NULL)
2896 return false;
2897
2898 /* Never tailcall from inside interrupt handlers or naked functions. */
2899 if (is_fast_interrupt_func (NULL_TREE)
2900 || is_interrupt_func (NULL_TREE)
2901 || is_naked_func (NULL_TREE))
2902 return false;
2903
2904 return true;
2905 }
2906
2907 static void
2908 rx_file_start (void)
2909 {
2910 if (! TARGET_AS100_SYNTAX)
2911 default_file_start ();
2912 }
2913
2914 static bool
2915 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2916 {
2917 /* The packed attribute overrides the MS behavior. */
2918 return ! TYPE_PACKED (record_type);
2919 }
2920 \f
2921 /* Returns true if X is a legitimate constant for an immediate
2922 operand on the RX. X is already known to satisfy CONSTANT_P. */
2923
2924 bool
2925 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2926 {
2927 switch (GET_CODE (x))
2928 {
2929 case CONST:
2930 x = XEXP (x, 0);
2931
2932 if (GET_CODE (x) == PLUS)
2933 {
2934 if (! CONST_INT_P (XEXP (x, 1)))
2935 return false;
2936
2937 /* GCC would not pass us CONST_INT + CONST_INT so we
2938 know that we have {SYMBOL|LABEL} + CONST_INT. */
2939 x = XEXP (x, 0);
2940 gcc_assert (! CONST_INT_P (x));
2941 }
2942
2943 switch (GET_CODE (x))
2944 {
2945 case LABEL_REF:
2946 case SYMBOL_REF:
2947 return true;
2948
2949 case UNSPEC:
2950 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2951
2952 default:
2953 /* FIXME: Can this ever happen? */
2954 gcc_unreachable ();
2955 }
2956 break;
2957
2958 case LABEL_REF:
2959 case SYMBOL_REF:
2960 return true;
2961 case CONST_DOUBLE:
2962 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2963 case CONST_VECTOR:
2964 return false;
2965 default:
2966 gcc_assert (CONST_INT_P (x));
2967 break;
2968 }
2969
2970 return ok_for_max_constant (INTVAL (x));
2971 }
2972
2973 static int
2974 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2975 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2976 {
2977 rtx a, b;
2978
2979 if (GET_CODE (addr) != PLUS)
2980 return COSTS_N_INSNS (1);
2981
2982 a = XEXP (addr, 0);
2983 b = XEXP (addr, 1);
2984
2985 if (REG_P (a) && REG_P (b))
2986 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2987 return COSTS_N_INSNS (4);
2988
2989 if (speed)
2990 /* [REG+OFF] is just as fast as [REG]. */
2991 return COSTS_N_INSNS (1);
2992
2993 if (CONST_INT_P (b)
2994 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2995 /* Try to discourage REG + <large OFF> when optimizing for size. */
2996 return COSTS_N_INSNS (2);
2997
2998 return COSTS_N_INSNS (1);
2999 }
3000
3001 static bool
3002 rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
3003 int opno ATTRIBUTE_UNUSED, int* total, bool speed)
3004 {
3005 if (x == const0_rtx)
3006 {
3007 *total = 0;
3008 return true;
3009 }
3010
3011 switch (GET_CODE (x))
3012 {
3013 case MULT:
3014 if (mode == DImode)
3015 {
3016 *total = COSTS_N_INSNS (2);
3017 return true;
3018 }
3019 /* fall through */
3020
3021 case PLUS:
3022 case MINUS:
3023 case AND:
3024 case COMPARE:
3025 case IOR:
3026 case XOR:
3027 *total = COSTS_N_INSNS (1);
3028 return true;
3029
3030 case DIV:
3031 if (speed)
3032 /* This is the worst case for a division. Pessimize divisions when
3033 not optimizing for size and allow reciprocal optimizations which
3034 produce bigger code. */
3035 *total = COSTS_N_INSNS (20);
3036 else
3037 *total = COSTS_N_INSNS (3);
3038 return true;
3039
3040 case UDIV:
3041 if (speed)
3042 /* This is the worst case for a division. Pessimize divisions when
3043 not optimizing for size and allow reciprocal optimizations which
3044 produce bigger code. */
3045 *total = COSTS_N_INSNS (18);
3046 else
3047 *total = COSTS_N_INSNS (3);
3048 return true;
3049
3050 default:
3051 break;
3052 }
3053
3054 return false;
3055 }
3056
3057 static bool
3058 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3059 {
3060 /* We can always eliminate to the frame pointer.
3061 We can eliminate to the stack pointer unless a frame
3062 pointer is needed. */
3063
3064 return to == FRAME_POINTER_REGNUM
3065 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
3066 }
3067 \f
3068
3069 static void
3070 rx_trampoline_template (FILE * file)
3071 {
3072 /* Output assembler code for a block containing the constant
3073 part of a trampoline, leaving space for the variable parts.
3074
3075 On the RX, (where r8 is the static chain regnum) the trampoline
3076 looks like:
3077
3078 mov #<static chain value>, r8
3079 mov #<function's address>, r9
3080 jmp r9
3081
3082 In big-endian-data-mode, however, instructions are read into the CPU
3083 4 bytes at a time. These bytes are then swapped around before being
3084 passed to the decoder. So...we must partition our trampoline into
3085 4 byte packets and swap these packets around so that the instruction
3086 reader will reverse the process. But, in order to avoid splitting
3087 the 32-bit constants across these packet boundaries (which would make
3088 inserting them into the constructed trampoline very difficult), we have
3089 to pad the instruction sequence with NOP insns, i.e.:
3090
3091 nop
3092 nop
3093 mov.l #<...>, r8
3094 nop
3095 nop
3096 mov.l #<...>, r9
3097 jmp r9
3098 nop
3099 nop */
3100
3101 if (! TARGET_BIG_ENDIAN_DATA)
3102 {
3103 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3104 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3105 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3106 }
3107 else
3108 {
3109 char r8 = '0' + STATIC_CHAIN_REGNUM;
3110 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3111
3112 if (TARGET_AS100_SYNTAX)
3113 {
3114 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3115 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3116 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3117 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3118 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3119 }
3120 else
3121 {
3122 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3123 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3124 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3125 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3126 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3127 }
3128 }
3129 }
3130
3131 static void
3132 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3133 {
3134 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3135
3136 emit_block_move (tramp, assemble_trampoline_template (),
3137 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3138
3139 if (TARGET_BIG_ENDIAN_DATA)
3140 {
3141 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3142 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3143 }
3144 else
3145 {
3146 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3147 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3148 }
3149 }
3150 \f
3151 static int
3152 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3153 reg_class_t regclass ATTRIBUTE_UNUSED,
3154 bool in)
3155 {
3156 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3157 }
3158
3159 /* Convert a CC_MODE to the set of flags that it represents. */
3160
3161 static unsigned int
3162 flags_from_mode (machine_mode mode)
3163 {
3164 switch (mode)
3165 {
3166 case E_CC_ZSmode:
3167 return CC_FLAG_S | CC_FLAG_Z;
3168 case E_CC_ZSOmode:
3169 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3170 case E_CC_ZSCmode:
3171 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3172 case E_CCmode:
3173 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3174 case E_CC_Fmode:
3175 return CC_FLAG_FP;
3176 default:
3177 gcc_unreachable ();
3178 }
3179 }
3180
3181 /* Convert a set of flags to a CC_MODE that can implement it. */
3182
3183 static machine_mode
3184 mode_from_flags (unsigned int f)
3185 {
3186 if (f & CC_FLAG_FP)
3187 return CC_Fmode;
3188 if (f & CC_FLAG_O)
3189 {
3190 if (f & CC_FLAG_C)
3191 return CCmode;
3192 else
3193 return CC_ZSOmode;
3194 }
3195 else if (f & CC_FLAG_C)
3196 return CC_ZSCmode;
3197 else
3198 return CC_ZSmode;
3199 }
3200
3201 /* Convert an RTX_CODE to the set of flags needed to implement it.
3202 This assumes an integer comparison. */
3203
3204 static unsigned int
3205 flags_from_code (enum rtx_code code)
3206 {
3207 switch (code)
3208 {
3209 case LT:
3210 case GE:
3211 return CC_FLAG_S;
3212 case GT:
3213 case LE:
3214 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3215 case GEU:
3216 case LTU:
3217 return CC_FLAG_C;
3218 case GTU:
3219 case LEU:
3220 return CC_FLAG_C | CC_FLAG_Z;
3221 case EQ:
3222 case NE:
3223 return CC_FLAG_Z;
3224 default:
3225 gcc_unreachable ();
3226 }
3227 }
3228
3229 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3230
3231 static machine_mode
3232 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3233 {
3234 unsigned f;
3235
3236 /* Early out for identical modes. */
3237 if (m1 == m2)
3238 return m1;
3239
3240 /* There's no valid combination for FP vs non-FP. */
3241 f = flags_from_mode (m1) | flags_from_mode (m2);
3242 if (f & CC_FLAG_FP)
3243 return VOIDmode;
3244
3245 /* Otherwise, see what mode can implement all the flags. */
3246 return mode_from_flags (f);
3247 }
3248
3249 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3250
3251 machine_mode
3252 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3253 {
3254 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3255 return CC_Fmode;
3256
3257 if (y != const0_rtx)
3258 return CCmode;
3259
3260 return mode_from_flags (flags_from_code (cmp_code));
3261 }
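/* For illustration: for (GTU x (const_int 0)), flags_from_code returns
   CC_FLAG_C | CC_FLAG_Z, which mode_from_flags maps to CC_ZSCmode.  A
   comparison against anything other than zero always gets the full
   CCmode.  */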
3262
3263 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3264 CC_MODE, and use that in branches based on that compare. */
3265
3266 void
3267 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3268 rtx c1, rtx c2, rtx label)
3269 {
3270 rtx flags, x;
3271
3272 flags = gen_rtx_REG (cc_mode, CC_REG);
3273 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3274 x = gen_rtx_SET (flags, x);
3275 emit_insn (x);
3276
3277 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3278 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3279 x = gen_rtx_SET (pc_rtx, x);
3280 emit_jump_insn (x);
3281 }
3282
3283 /* A helper function for matching parallels that set the flags. */
3284
3285 bool
3286 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3287 {
3288 rtx op1, flags;
3289 machine_mode flags_mode;
3290
3291 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3292
3293 op1 = XVECEXP (PATTERN (insn), 0, 0);
3294 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3295
3296 flags = SET_DEST (op1);
3297 flags_mode = GET_MODE (flags);
3298
3299 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3300 return false;
3301 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3302 return false;
3303
3304 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3305 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3306 return false;
3307
3308 return true;
3309 }
3310 \f
3311
3312 static int
3313 rx_max_skip_for_label (rtx_insn *lab)
3314 {
3315 int opsize;
3316 rtx_insn *op;
3317
3318 if (optimize_size)
3319 return 0;
3320
3321 if (lab == NULL)
3322 return 0;
3323
3324 op = lab;
3325 do
3326 {
3327 op = next_nonnote_nondebug_insn (op);
3328 }
3329 while (op && (LABEL_P (op)
3330 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3331 if (!op)
3332 return 0;
3333
3334 opsize = get_attr_length (op);
3335 if (opsize >= 0 && opsize < 8)
3336 return MAX (0, opsize - 1);
3337 return 0;
3338 }
3339
3340 static int
3341 rx_align_log_for_label (rtx_insn *lab, int uses_threshold)
3342 {
3343 /* This is a simple heuristic to guess when an alignment would not be useful
3344 because the delay due to the inserted NOPs would be greater than the delay
3345 due to the misaligned branch. If uses_threshold is zero then the alignment
3346 is always useful. */
3347 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3348 return 0;
3349
3350 if (optimize_size)
3351 return 0;
3352
3353 /* Return zero if max_skip is not a positive number. */
3354 int max_skip = rx_max_skip_for_label (lab);
3355 if (max_skip <= 0)
3356 return 0;
3357
3358 /* These values are log, not bytes. */
3359 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3360 return 2; /* 4 bytes */
3361 return 3; /* 8 bytes */
3362 }
3363
3364 align_flags
3365 rx_align_for_label (rtx_insn *lab, int uses_threshold)
3366 {
3367 return align_flags (rx_align_log_for_label (lab, uses_threshold),
3368 rx_max_skip_for_label (lab));
3369 }
3370
3371 /* Compute the real length of the extending load-and-op instructions. */
3372
3373 int
3374 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3375 {
3376 rtx extend, mem, offset;
3377 bool zero;
3378 int factor;
3379
3380 if (!INSN_P (insn))
3381 return current_length;
3382
3383 switch (INSN_CODE (insn))
3384 {
3385 default:
3386 return current_length;
3387
3388 case CODE_FOR_plussi3_zero_extendhi:
3389 case CODE_FOR_andsi3_zero_extendhi:
3390 case CODE_FOR_iorsi3_zero_extendhi:
3391 case CODE_FOR_xorsi3_zero_extendhi:
3392 case CODE_FOR_divsi3_zero_extendhi:
3393 case CODE_FOR_udivsi3_zero_extendhi:
3394 case CODE_FOR_minussi3_zero_extendhi:
3395 case CODE_FOR_smaxsi3_zero_extendhi:
3396 case CODE_FOR_sminsi3_zero_extendhi:
3397 case CODE_FOR_multsi3_zero_extendhi:
3398 case CODE_FOR_comparesi3_zero_extendhi:
3399 zero = true;
3400 factor = 2;
3401 break;
3402
3403 case CODE_FOR_plussi3_sign_extendhi:
3404 case CODE_FOR_andsi3_sign_extendhi:
3405 case CODE_FOR_iorsi3_sign_extendhi:
3406 case CODE_FOR_xorsi3_sign_extendhi:
3407 case CODE_FOR_divsi3_sign_extendhi:
3408 case CODE_FOR_udivsi3_sign_extendhi:
3409 case CODE_FOR_minussi3_sign_extendhi:
3410 case CODE_FOR_smaxsi3_sign_extendhi:
3411 case CODE_FOR_sminsi3_sign_extendhi:
3412 case CODE_FOR_multsi3_sign_extendhi:
3413 case CODE_FOR_comparesi3_sign_extendhi:
3414 zero = false;
3415 factor = 2;
3416 break;
3417
3418 case CODE_FOR_plussi3_zero_extendqi:
3419 case CODE_FOR_andsi3_zero_extendqi:
3420 case CODE_FOR_iorsi3_zero_extendqi:
3421 case CODE_FOR_xorsi3_zero_extendqi:
3422 case CODE_FOR_divsi3_zero_extendqi:
3423 case CODE_FOR_udivsi3_zero_extendqi:
3424 case CODE_FOR_minussi3_zero_extendqi:
3425 case CODE_FOR_smaxsi3_zero_extendqi:
3426 case CODE_FOR_sminsi3_zero_extendqi:
3427 case CODE_FOR_multsi3_zero_extendqi:
3428 case CODE_FOR_comparesi3_zero_extendqi:
3429 zero = true;
3430 factor = 1;
3431 break;
3432
3433 case CODE_FOR_plussi3_sign_extendqi:
3434 case CODE_FOR_andsi3_sign_extendqi:
3435 case CODE_FOR_iorsi3_sign_extendqi:
3436 case CODE_FOR_xorsi3_sign_extendqi:
3437 case CODE_FOR_divsi3_sign_extendqi:
3438 case CODE_FOR_udivsi3_sign_extendqi:
3439 case CODE_FOR_minussi3_sign_extendqi:
3440 case CODE_FOR_smaxsi3_sign_extendqi:
3441 case CODE_FOR_sminsi3_sign_extendqi:
3442 case CODE_FOR_multsi3_sign_extendqi:
3443 case CODE_FOR_comparesi3_sign_extendqi:
3444 zero = false;
3445 factor = 1;
3446 break;
3447 }
3448
3449 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3450 extend = single_set (insn);
3451 gcc_assert (extend != NULL_RTX);
3452
3453 extend = SET_SRC (extend);
3454 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3455 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3456 extend = XEXP (extend, 0);
3457 else
3458 extend = XEXP (extend, 1);
3459
3460 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3461 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3462
3463 mem = XEXP (extend, 0);
3464 gcc_checking_assert (MEM_P (mem));
3465 if (REG_P (XEXP (mem, 0)))
3466 return (zero && factor == 1) ? 2 : 3;
3467
3468 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3469 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3470 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3471
3472 offset = XEXP (XEXP (mem, 0), 1);
3473 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3474
3475 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3476 return (zero && factor == 1) ? 3 : 4;
3477
3478 return (zero && factor == 1) ? 4 : 5;
3479 }
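/* For illustration: for CODE_FOR_plussi3_zero_extendqi (zero = true,
   factor = 1) the computed length is 2 bytes for a plain register
   address, 3 bytes when the offset fits in 0..255 and 4 bytes
   otherwise; the sign-extending and HImode variants are each one byte
   longer.  */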
3480
3481 static bool
3482 rx_narrow_volatile_bitfield (void)
3483 {
3484 return true;
3485 }
3486
3487 static bool
3488 rx_ok_to_inline (tree caller, tree callee)
3489 {
3490 /* Do not inline functions with local variables
3491 into a naked CALLER - naked functions have no stack frame and
3492 locals need a frame in order to have somewhere to live.
3493
3494 Unfortunately we have no way to determine the presence of
3495 local variables in CALLEE, so we have to be cautious and
3496 assume that there might be some there.
3497
3498 We do allow inlining when CALLEE has the "inline" type
3499 modifier or the "always_inline" or "gnu_inline" attributes. */
3500 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3501 || DECL_DECLARED_INLINE_P (callee)
3502 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3503 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3504 }
3505
3506 static bool
3507 rx_enable_lra (void)
3508 {
3509 return TARGET_ENABLE_LRA;
3510 }
3511
3512 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3513 {
3514 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3515 {
3516 /* If we are inside an interrupt handler, assume that interrupts are
3517 off -- which is the default hardware behavior. In this case, there
3518 is no need to disable the interrupts. */
3519 m_prev_psw_reg = NULL;
3520 }
3521 else
3522 {
3523 m_prev_psw_reg = gen_reg_rtx (SImode);
3524 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3525 emit_insn (gen_clrpsw (GEN_INT ('I')));
3526 }
3527 }
3528
3529 rx_atomic_sequence::~rx_atomic_sequence (void)
3530 {
3531 if (m_prev_psw_reg != NULL)
3532 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3533 }
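/* For illustration, an atomic expander uses this class as an RAII
   guard (hypothetical sketch):

     {
       rx_atomic_sequence seq (current_function_decl);
       ... emit the load, modify and store insns here ...
     }

   The constructor saves the PSW and clears its I bit, so interrupts
   are disabled around the emitted sequence; the destructor emits the
   MVTC insn that restores the saved PSW.  */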
3534
3535 /* Given an insn and a reg number, tell whether the reg dies or is unused
3536 after the insn. */
3537 bool
3538 rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
3539 {
3540 return find_regno_note (i, REG_DEAD, regno) != NULL
3541 || find_regno_note (i, REG_UNUSED, regno) != NULL;
3542 }
3543
3544 /* Copy dead and unused notes from SRC to DST for the specified REGNO. */
3545 void
3546 rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src, rtx_insn* dst)
3547 {
3548 int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);
3549
3550 if (rtx note = find_regno_note (src, REG_DEAD, regno))
3551 add_shallow_copy_of_reg_note (dst, note);
3552
3553 if (rtx note = find_regno_note (src, REG_UNUSED, regno))
3554 add_shallow_copy_of_reg_note (dst, note);
3555 }
3556
3557 /* Try to fuse the current bit-operation insn with the surrounding memory load
3558 and store. */
3559 bool
3560 rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
3561 rtx (*gen_insn)(rtx, rtx))
3562 {
3563 rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];
3564
3565 set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
3566 prev_nonnote_nondebug_insn_bb);
3567 if (op2_def.set_src == NULL_RTX
3568 || !MEM_P (op2_def.set_src)
3569 || GET_MODE (op2_def.set_src) != QImode
3570 || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
3571 GET_MODE (op2_def.set_src))
3572 || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
3573 || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg))
3574 )
3575 return false;
3576
3577 /* The register operand originates from a memory load and the memory load
3578 could be fused with the bitop insn.
3579 Look for the following memory store with the same memory operand. */
3580 rtx mem = op2_def.set_src;
3581
3582 /* If the memory is an auto-mod address, it can't be fused. */
3583 if (GET_CODE (XEXP (mem, 0)) == POST_INC
3584 || GET_CODE (XEXP (mem, 0)) == PRE_INC
3585 || GET_CODE (XEXP (mem, 0)) == POST_DEC
3586 || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
3587 return false;
3588
3589 rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
3590 next_nonnote_nondebug_insn_bb);
3591 if (op0_use == NULL
3592 || !(GET_CODE (PATTERN (op0_use)) == SET
3593 && RX_REG_P (XEXP (PATTERN (op0_use), 1))
3594 && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
3595 && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
3596 || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
3597 || reg_set_between_p (operands[2], curr_insn, op0_use))
3598 return false;
3599
3600 /* If the load-modify-store operation is fused it could potentially modify
3601 load/store ordering if there are other memory accesses between the load
3602 and the store for this insn. If there are volatile mems between the load
3603 and store it's better not to change the ordering. If there is a call
3604 between the load and store, it's also not safe to fuse it. */
3605 for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
3606 i != NULL && i != op0_use;
3607 i = next_nonnote_nondebug_insn_bb (i))
3608 if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
3609 return false;
3610
3611 emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
3612 set_insn_deleted (op2_def.insn);
3613 set_insn_deleted (op0_use);
3614 return true;
3615 }
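/* For illustration (simplified RTL, hypothetical registers): the
   fusion above turns a sequence such as

     (set (reg r5) (mem:QI addr))             ; op2_def.insn, the load
     (set (reg r5) (ior (reg r4) (reg r5)))   ; curr_insn, the bitop
     (set (mem:QI addr) (reg r5))             ; op0_use, the store

   into a single bit operation on the QImode memory operand, deleting
   the separate load and store insns.  */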
3616
3617 /* Implement TARGET_HARD_REGNO_NREGS. */
3618
3619 static unsigned int
3620 rx_hard_regno_nregs (unsigned int, machine_mode mode)
3621 {
3622 return CLASS_MAX_NREGS (0, mode);
3623 }
3624
3625 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3626
3627 static bool
3628 rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3629 {
3630 return REGNO_REG_CLASS (regno) == GR_REGS;
3631 }
3632
3633 /* Implement TARGET_MODES_TIEABLE_P. */
3634
3635 static bool
3636 rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3637 {
3638 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
3639 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
3640 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
3641 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
3642 }
3643 \f
3644 #undef TARGET_NARROW_VOLATILE_BITFIELD
3645 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3646
3647 #undef TARGET_CAN_INLINE_P
3648 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3649
3650 #undef TARGET_FUNCTION_VALUE
3651 #define TARGET_FUNCTION_VALUE rx_function_value
3652
3653 #undef TARGET_RETURN_IN_MSB
3654 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3655
3656 #undef TARGET_IN_SMALL_DATA_P
3657 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3658
3659 #undef TARGET_RETURN_IN_MEMORY
3660 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3661
3662 #undef TARGET_HAVE_SRODATA_SECTION
3663 #define TARGET_HAVE_SRODATA_SECTION true
3664
3665 #undef TARGET_ASM_SELECT_RTX_SECTION
3666 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3667
3668 #undef TARGET_ASM_SELECT_SECTION
3669 #define TARGET_ASM_SELECT_SECTION rx_select_section
3670
3671 #undef TARGET_INIT_BUILTINS
3672 #define TARGET_INIT_BUILTINS rx_init_builtins
3673
3674 #undef TARGET_BUILTIN_DECL
3675 #define TARGET_BUILTIN_DECL rx_builtin_decl
3676
3677 #undef TARGET_EXPAND_BUILTIN
3678 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3679
3680 #undef TARGET_ASM_CONSTRUCTOR
3681 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3682
3683 #undef TARGET_ASM_DESTRUCTOR
3684 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3685
3686 #undef TARGET_STRUCT_VALUE_RTX
3687 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3688
3689 #undef TARGET_ATTRIBUTE_TABLE
3690 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3691
3692 #undef TARGET_ASM_FILE_START
3693 #define TARGET_ASM_FILE_START rx_file_start
3694
3695 #undef TARGET_MS_BITFIELD_LAYOUT_P
3696 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3697
3698 #undef TARGET_LEGITIMATE_ADDRESS_P
3699 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3700
3701 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3702 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3703
3704 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3705 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3706
3707 #undef TARGET_ASM_FUNCTION_PROLOGUE
3708 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3709
3710 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3711 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3712
3713 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3714 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3715
3716 #undef TARGET_FUNCTION_ARG
3717 #define TARGET_FUNCTION_ARG rx_function_arg
3718
3719 #undef TARGET_FUNCTION_ARG_ADVANCE
3720 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3721
3722 #undef TARGET_FUNCTION_ARG_BOUNDARY
3723 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3724
3725 #undef TARGET_SET_CURRENT_FUNCTION
3726 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3727
3728 #undef TARGET_ASM_INTEGER
3729 #define TARGET_ASM_INTEGER rx_assemble_integer
3730
3731 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3732 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3733
3734 #undef TARGET_MAX_ANCHOR_OFFSET
3735 #define TARGET_MAX_ANCHOR_OFFSET 32
3736
3737 #undef TARGET_ADDRESS_COST
3738 #define TARGET_ADDRESS_COST rx_address_cost
3739
3740 #undef TARGET_CAN_ELIMINATE
3741 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3742
3743 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3744 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3745
3746 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3747 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3748
3749 #undef TARGET_TRAMPOLINE_INIT
3750 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3751
3752 #undef TARGET_PRINT_OPERAND
3753 #define TARGET_PRINT_OPERAND rx_print_operand
3754
3755 #undef TARGET_PRINT_OPERAND_ADDRESS
3756 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3757
3758 #undef TARGET_CC_MODES_COMPATIBLE
3759 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3760
3761 #undef TARGET_MEMORY_MOVE_COST
3762 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3763
3764 #undef TARGET_OPTION_OVERRIDE
3765 #define TARGET_OPTION_OVERRIDE rx_option_override
3766
3767 #undef TARGET_PROMOTE_FUNCTION_MODE
3768 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3769
3770 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3771 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3772
3773 #undef TARGET_FLAGS_REGNUM
3774 #define TARGET_FLAGS_REGNUM CC_REG
3775
3776 #undef TARGET_LEGITIMATE_CONSTANT_P
3777 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3778
3779 #undef TARGET_LEGITIMIZE_ADDRESS
3780 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3781
3782 #undef TARGET_WARN_FUNC_RETURN
3783 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3784
3785 #undef TARGET_LRA_P
3786 #define TARGET_LRA_P rx_enable_lra
3787
3788 #undef TARGET_HARD_REGNO_NREGS
3789 #define TARGET_HARD_REGNO_NREGS rx_hard_regno_nregs
3790 #undef TARGET_HARD_REGNO_MODE_OK
3791 #define TARGET_HARD_REGNO_MODE_OK rx_hard_regno_mode_ok
3792
3793 #undef TARGET_MODES_TIEABLE_P
3794 #define TARGET_MODES_TIEABLE_P rx_modes_tieable_p
3795
3796 #undef TARGET_RTX_COSTS
3797 #define TARGET_RTX_COSTS rx_rtx_costs
3798
3799 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
3800 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
3801
3802 struct gcc_target targetm = TARGET_INITIALIZER;
3803
3804 #include "gt-rx.h"