/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2024 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* To Do:

 * Re-enable memory-to-memory copies and fix up reload.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "toplev.h"
#include "langhooks.h"
#include "opts.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;

static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}

static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}

/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}

static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S   (1 << 0)
#define CC_FLAG_Z   (1 << 1)
#define CC_FLAG_O   (1 << 2)
#define CC_FLAG_C   (1 << 3)
#define CC_FLAG_FP  (1 << 4)  /* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);

/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,  /* The object is not in the PID data area.  */
  PID_ENCODED,      /* The object is in the PID data area.  */
  PID_UNENCODED     /* The object will be placed in the PID data area,
                       but it has not been placed there yet.  */
};

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}
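
/* Editor's illustration (not from the original source): with -mpid, a
   read-only object such as "const int tbl[4]" is addressed relative to
   the PID base register, so a fully legitimized reference has the shape
   (plus (reg <pid-base>) (const (unspec [...] UNSPEC_PID_ADDR))), which
   the first test above classifies as PID_ENCODED; a bare SYMBOL_REF to
   the same object is still PID_UNENCODED.  */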

static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}

/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED,
			  code_helper = ERROR_MARK)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
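
/* Illustrative summary (editor's note, not from the original source) of
   the forms accepted above, shown in RX assembler syntax for an SImode
   access:
     [r1]           register indirect
     [-r1], [r1+]   pre-decrement / post-increment
     8[r1]          register relative; the displacement must be a
                    non-negative multiple of 4, at most 65535 * 4 = 262140
     [r2, r1]       unscaled indexed, QImode only
     [r2*4, r1]     scaled indexed, scale factor equal to the mode size.  */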

/* Returns TRUE for simple memory addresses, i.e. ones that do not
   involve indexed addressing or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      return RX_REG_P (SUBREG_REG (mem));

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base  = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
	 Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
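
/* For example (editor's illustration): 8[r1] is mode independent, since
   8 is a positive multiple of 4 inside the 4..0xfffc window, whereas
   2[r1] is mode dependent - it is a valid QImode or HImode displacement
   but cannot encode an aligned SImode access.  */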

/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
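
/* Editor's illustration (not from the original source) of the output
   produced by the cases above:
     (reg r1)                       -> [r1]
     (pre_dec (reg r1))             -> [-r1]
     (post_inc (reg r1))            -> [r1+]
     (plus (reg r1) (const_int 4))  -> 4[r1]
     (plus (reg r1) (reg r2))       -> [r2,r1]  */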

static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}
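
/* For example (editor's note): a value of 40 prints as decimal "40",
   while 100 prints as "0x64" in GAS syntax or as "064H" when
   -mas100-syntax is in effect, matching the AS100 assembler's
   hexadecimal convention.  */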

static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}


/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Print the register used for small-data-area addressing.
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Print the register used for PID addressing.
     %Q  If the operand is a MEM, then correctly generate
	 register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_PC:    fprintf (file, "pc"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d"
		   " - using %<psw%>", (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scale factor, as it
	       is based on the mode of the MEM, not the mode of the MULT.
	       (Which will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through.  */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()),
				XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}

/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case E_QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case E_HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case E_DFmode:
    case E_DImode:
    case E_SFmode:
    case E_SImode:
      extension = ".L";
      break;
    case E_VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
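
/* For example (editor's note): an SImode register-to-register move
   yields the template "mov.L\t%1, %0", while a QImode zero-extending
   load from a small-data object yields "movu.B\t%%gp(%A1)[%G1], %0",
   which the operand printer later expands using the %A/%G directives
   documented above.  */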

/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
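
/* Editor's illustration: ALIGNMENT must be a power of two; for example
   rx_round_up (5, 4) == 8 and rx_round_up (8, 4) == 8, since adding
   ALIGNMENT - 1 and then masking clears the low-order bits.  */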

/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding function
   argument ARG or NULL_RTX if the parameter should be passed on the
   stack.  CUM describes the previous parameters to the function.  */

static rtx
rx_function_arg (cumulative_args_t cum, const function_arg_info &arg)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  size = arg.promoted_size_in_bytes ();
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!arg.named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((arg.type == NULL || AGGREGATE_TYPE_P (arg.type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (arg.mode, next_reg);
}
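
/* Worked example (editor's note): with UNITS_PER_WORD == 4, the first
   SImode argument of "int f (int, int)" sees bytes_so_far == 0 and
   lands in r1 (next_reg == 1), the second in r2; a 20-byte struct is
   rejected because its rounded size exceeds the 16 bytes available in
   r1-r4 and so it is passed on the stack.  */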

static void
rx_function_arg_advance (cumulative_args_t cum,
			 const function_arg_info &arg)
{
  *get_cumulative_args (cum) += rx_function_arg_size (arg.mode, arg.type);
}

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}

/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode))
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}

/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}

static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}

static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}

/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}

/* Returns true if the provided function has the "fast_interrupt" attribute.  */

bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}

/* Returns true if the provided function has the "interrupt" attribute.  */

bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}

/* Returns true if the provided function has the "naked" attribute.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}

static bool use_fixed_regs = false;

static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}

/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}

/* Typical stack layout should look like this after the function's prologue:

                            |    |
                              --                       ^
                            |    | \                   |
                            |    |   arguments saved   | Increasing
                            |    |   on the stack      |  addresses
    PARENT   arg pointer -> |    | /
  --------------------------- ---- -------------------
    CHILD                   |ret |   return address
                              --
                            |    | \
                            |    |   call saved
                            |    |   registers
                            |    | /
                              --
                            |    | \
                            |    |   local
                            |    |   variables
        frame pointer ->    |    | /
                              --
                            |    | \
                            |    |   outgoing          | Decreasing
                            |    |   arguments         |  addresses
   current stack pointer -> |    | /                   |
  --------------------------- ---- ------------------     V
                            |    |  */

static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
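
/* Editor's illustration: this is the classic SWAR population count -
   pair sums, then nibble sums, then byte and halfword folds - so for
   example bit_count (0x000000f1) == 5 and bit_count (0xffffffff) == 32.  */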

#if defined(TARGET_SAVE_ACC_REGISTER)
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
#else
#define MUST_SAVE_ACC_REGISTER 0
#endif

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_or_fixed_reg_p (reg)
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_or_fixed_reg_p (reg)
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the
     stack frame using multiple PUSH instructions instead of a single
     PUSHM instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified -ffixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
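
/* Worked example (editor's note): if only r6 and r8 are live in an
   ordinary function, SAVE_MASK has two bits set while a PUSHM of r6-r8
   would push three registers; the single unneeded push of r7 is not
   greater than the two needed ones, so the PUSHM form is kept and
   *lowest/*highest are set to 6 and 8.  */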

/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
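
/* For instance (editor's note): for a 12-byte adjustment whose first SET
   stores r8 (the PARALLELs built by gen_rx_store_vector store the highest
   register first), last_reg is (12 / 4) - 1 == 2 and the emitted
   instruction is "pushm r6-r8".  */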

/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}

/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}

/* Create CFI notes for register pops.  */

static void
add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
{
  rtx t = plus_constant (Pmode, stack_pointer_rtx,
			 (high - low + 1) * UNITS_PER_WORD);
  t = gen_rtx_SET (stack_pointer_rtx, t);
  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
  RTX_FRAME_RELATED_P (insn) = 1;
  for (unsigned int i = low; i <= high; i++)
    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
}


static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
		   (1 << (rx_max_constant_size * 8)));
}
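
/* For example (editor's note): with -mmax-constant-size=1 the accepted
   range works out as -256 to 256, since HOST_WIDE_INT_M1U << 8 is -256
   when read as a signed value and 1 << 8 is 256; anything larger is
   wrapped in an UNSPEC by gen_safe_add below.  */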
1674
1675 /* Generate an ADD of SRC plus VAL into DEST.
1676 Handles the case where VAL is too big for max_constant_value.
1677 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1678
1679 static void
1680 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1681 {
1682 rtx insn;
1683
1684 if (val == NULL_RTX || INTVAL (val) == 0)
1685 {
1686 gcc_assert (dest != src);
1687
1688 insn = emit_move_insn (dest, src);
1689 }
1690 else if (ok_for_max_constant (INTVAL (val)))
1691 insn = emit_insn (gen_addsi3 (dest, src, val));
1692 else
1693 {
1694 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1695 will not reject it. */
1696 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1697 insn = emit_insn (gen_addsi3 (dest, src, val));
1698
1699 if (is_frame_related)
1700 /* We have to provide our own frame related note here
1701 as the dwarf2out code cannot be expected to grok
1702 our unspec. */
1703 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1704 gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
1705 return;
1706 }
1707
1708 if (is_frame_related)
1709 RTX_FRAME_RELATED_P (insn) = 1;
1710 }
1711
1712 static void
1713 push_regs (unsigned int high, unsigned int low)
1714 {
1715 rtx insn;
1716
1717 if (low == high)
1718 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1719 else
1720 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1721 gen_rx_store_vector (low, high)));
1722 mark_frame_related (insn);
1723 }
1724
1725 void
1726 rx_expand_prologue (void)
1727 {
1728 unsigned int stack_size;
1729 unsigned int frame_size;
1730 unsigned int mask;
1731 unsigned int low;
1732 unsigned int high;
1733 unsigned int reg;
1734
1735 /* Naked functions use their own, programmer provided prologues. */
1736 if (is_naked_func (NULL_TREE))
1737 return;
1738
1739 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1740
1741 if (flag_stack_usage_info)
1742 current_function_static_stack_size = frame_size + stack_size;
1743
1744 /* If we use any of the callee-saved registers, save them now. */
1745 if (mask)
1746 {
1747 /* Push registers in reverse order. */
1748 for (reg = CC_REGNUM; reg --;)
1749 if (mask & (1 << reg))
1750 {
1751 low = high = reg;
1752
1753 /* Look for a span of registers.
1754 Note - we do not have to worry about -Os and whether
1755 it is better to use a single, longer PUSHM as
1756 rx_get_stack_layout has already done that for us. */
1757 while (reg-- > 0)
1758 if ((mask & (1 << reg)) == 0)
1759 break;
1760 else
1761 --low;
1762
1763 push_regs (high, low);
1764 if (reg == (unsigned) -1)
1765 break;
1766 }
1767 }
1768 else if (low)
1769 push_regs (high, low);
1770
1771 if (MUST_SAVE_ACC_REGISTER)
1772 {
1773 unsigned int acc_high, acc_low;
1774
1775 /* Interrupt handlers have to preserve the accumulator
1776 register if so requested by the user. Use the first
1777 two pushed registers as intermediaries. */
1778 if (mask)
1779 {
1780 acc_low = acc_high = 0;
1781
1782 for (reg = 1; reg < CC_REGNUM; reg ++)
1783 if (mask & (1 << reg))
1784 {
1785 if (acc_low == 0)
1786 acc_low = reg;
1787 else
1788 {
1789 acc_high = reg;
1790 break;
1791 }
1792 }
1793
1794 /* We have assumed that there are at least two registers pushed... */
1795 gcc_assert (acc_high != 0);
1796
1797 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1798 We just assume that they are zero. */
1799 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1800 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1801 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1802 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1803 }
1804 else
1805 {
1806 acc_low = low;
1807 acc_high = low + 1;
1808
1809 /* We have assumed that there are at least two registers pushed... */
1810 gcc_assert (acc_high <= high);
1811
1812 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1813 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1814 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1815 gen_rx_store_vector (acc_low, acc_high)));
1816 }
1817 }
1818
1819 /* If needed, set up the frame pointer. */
1820 if (frame_pointer_needed)
1821 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1822 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1823
1824 /* Allocate space for the outgoing args.
1825 If the stack frame has not already been set up then handle this as well. */
1826 if (stack_size)
1827 {
1828 if (frame_size)
1829 {
1830 if (frame_pointer_needed)
1831 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1832 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1833 else
1834 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1835 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1836 true);
1837 }
1838 else
1839 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1840 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1841 }
1842 else if (frame_size)
1843 {
1844 if (! frame_pointer_needed)
1845 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1846 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1847 else
1848 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1849 false /* False because the epilogue will use the FP not the SP. */);
1850 }
1851 }
1852
1853 static void
1854 add_vector_labels (FILE *file, const char *aname)
1855 {
1856 tree vec_attr;
1857 tree val_attr;
1858 const char *vname = "vect";
1859 const char *s;
1860 int vnum;
1861
1862 /* This node is for the vector/interrupt tag itself */
1863 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1864 if (!vec_attr)
1865 return;
1866
1867 /* Now point it at the first argument */
1868 vec_attr = TREE_VALUE (vec_attr);
1869
1870 /* Iterate through the arguments. */
1871 while (vec_attr)
1872 {
1873 val_attr = TREE_VALUE (vec_attr);
1874 switch (TREE_CODE (val_attr))
1875 {
1876 case STRING_CST:
1877 s = TREE_STRING_POINTER (val_attr);
1878 goto string_id_common;
1879
1880 case IDENTIFIER_NODE:
1881 s = IDENTIFIER_POINTER (val_attr);
1882
1883 string_id_common:
1884 if (strcmp (s, "$default") == 0)
1885 {
1886 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1887 fprintf (file, "$tableentry$default$%s:\n", vname);
1888 }
1889 else
1890 vname = s;
1891 break;
1892
1893 case INTEGER_CST:
1894 vnum = TREE_INT_CST_LOW (val_attr);
1895
1896 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1897 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1898 break;
1899
1900 default:
1901 ;
1902 }
1903
1904 vec_attr = TREE_CHAIN (vec_attr);
1905 }
1906
1907 }
1908
1909 static void
1910 rx_output_function_prologue (FILE * file)
1911 {
1912 add_vector_labels (file, "interrupt");
1913 add_vector_labels (file, "vector");
1914
1915 if (is_fast_interrupt_func (NULL_TREE))
1916 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1917
1918 if (is_interrupt_func (NULL_TREE))
1919 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1920
1921 if (is_naked_func (NULL_TREE))
1922 asm_fprintf (file, "\t; Note: Naked Function\n");
1923
1924 if (cfun->static_chain_decl != NULL)
1925 asm_fprintf (file, "\t; Note: Nested function declared "
1926 "inside another function.\n");
1927
1928 if (crtl->calls_eh_return)
1929 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1930 }
1931
1932 /* Generate a POPM or RTSD instruction that matches the given operands. */
1933
1934 void
1935 rx_emit_stack_popm (rtx * operands, bool is_popm)
1936 {
1937 HOST_WIDE_INT stack_adjust;
1938 HOST_WIDE_INT last_reg;
1939 rtx first_push;
1940
1941 gcc_assert (CONST_INT_P (operands[0]));
1942 stack_adjust = INTVAL (operands[0]);
1943
1944 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1945 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1946
1947 first_push = XVECEXP (operands[1], 0, 1);
1948 gcc_assert (SET_P (first_push));
1949 first_push = SET_DEST (first_push);
1950 gcc_assert (REG_P (first_push));
1951
1952 if (is_popm)
1953 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1954 reg_names [REGNO (first_push)],
1955 reg_names [REGNO (first_push) + last_reg]);
1956 else
1957 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1958 (int) stack_adjust,
1959 reg_names [REGNO (first_push)],
1960 reg_names [REGNO (first_push) + last_reg]);
1961 }
1962
1963 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1964
1965 static rtx
1966 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1967 {
1968 unsigned int i;
1969 unsigned int bias = 3;
1970 unsigned int count = (high - low) + bias;
1971 rtx vector;
1972
1973 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1974
1975 XVECEXP (vector, 0, 0) =
1976 gen_rtx_SET (stack_pointer_rtx,
1977 plus_constant (Pmode, stack_pointer_rtx, adjust));
1978
1979 for (i = 0; i < count - 2; i++)
1980 XVECEXP (vector, 0, i + 1) =
1981 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1982 gen_rtx_MEM (SImode,
1983 i == 0 ? stack_pointer_rtx
1984 : plus_constant (Pmode, stack_pointer_rtx,
1985 i * UNITS_PER_WORD)));
1986
1987 XVECEXP (vector, 0, count - 1) = ret_rtx;
1988
1989 return vector;
1990 }
1991
1992 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1993
1994 static rtx
1995 gen_rx_popm_vector (unsigned int low, unsigned int high)
1996 {
1997 unsigned int i;
1998 unsigned int count = (high - low) + 2;
1999 rtx vector;
2000
2001 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2002
2003 XVECEXP (vector, 0, 0) =
2004 gen_rtx_SET (stack_pointer_rtx,
2005 plus_constant (Pmode, stack_pointer_rtx,
2006 (count - 1) * UNITS_PER_WORD));
2007
2008 for (i = 0; i < count - 1; i++)
2009 XVECEXP (vector, 0, i + 1) =
2010 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
2011 gen_rtx_MEM (SImode,
2012 i == 0 ? stack_pointer_rtx
2013 : plus_constant (Pmode, stack_pointer_rtx,
2014 i * UNITS_PER_WORD)));
2015
2016 return vector;
2017 }
2018
2019 /* Returns true if a simple return insn can be used. */
2020
2021 bool
2022 rx_can_use_simple_return (void)
2023 {
2024 unsigned int low;
2025 unsigned int high;
2026 unsigned int frame_size;
2027 unsigned int stack_size;
2028 unsigned int register_mask;
2029
2030 if (is_naked_func (NULL_TREE)
2031 || is_fast_interrupt_func (NULL_TREE)
2032 || is_interrupt_func (NULL_TREE))
2033 return false;
2034
2035 rx_get_stack_layout (& low, & high, & register_mask,
2036 & frame_size, & stack_size);
2037
2038 return (register_mask == 0
2039 && (frame_size + stack_size) == 0
2040 && low == 0);
2041 }
2042
2043 static void
2044 pop_regs (unsigned int high, unsigned int low)
2045 {
2046 rtx_insn *insn;
2047 if (high == low)
2048 insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2049 else
2050 insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
2051 * UNITS_PER_WORD),
2052 gen_rx_popm_vector (low, high)));
2053 add_pop_cfi_notes (insn, high, low);
2054 }
2055
2056 void
2057 rx_expand_epilogue (bool is_sibcall)
2058 {
2059 unsigned int low;
2060 unsigned int high;
2061 unsigned int frame_size;
2062 unsigned int stack_size;
2063 unsigned int register_mask;
2064 unsigned int regs_size;
2065 unsigned int reg;
2066 unsigned HOST_WIDE_INT total_size;
2067
2068 /* FIXME: We do not support indirect sibcalls at the moment becaause we
2069 cannot guarantee that the register holding the function address is a
2070 call-used register. If it is a call-saved register then the stack
2071 pop instructions generated in the epilogue will corrupt the address
2072 before it is used.
2073
2074 Creating a new call-used-only register class works but then the
2075 reload pass gets stuck because it cannot always find a call-used
2076 register for spilling sibcalls.
2077
2078 The other possible solution is for this pass to scan forward for the
2079 sibcall instruction (if it has been generated) and work out if it
2080 is an indirect sibcall using a call-saved register. If it is then
2081 the address can copied into a call-used register in this epilogue
2082 code and the sibcall instruction modified to use that register. */
2083
2084 if (is_naked_func (NULL_TREE))
2085 {
2086 gcc_assert (! is_sibcall);
2087
2088 /* Naked functions use their own, programmer provided epilogues.
2089 But, in order to keep gcc happy we have to generate some kind of
2090 epilogue RTL. */
2091 emit_jump_insn (gen_naked_return ());
2092 return;
2093 }
2094
2095 rx_get_stack_layout (& low, & high, & register_mask,
2096 & frame_size, & stack_size);
2097
2098 total_size = frame_size + stack_size;
2099 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2100
2101 /* See if we are unable to use the special stack frame deconstruct and
2102 return instructions. In most cases we can use them, but the exceptions
2103 are:
2104
2105 - Sibling calling functions deconstruct the frame but do not return to
2106 their caller. Instead they branch to their sibling and allow their
2107 return instruction to return to this function's parent.
2108
2109 - Fast and normal interrupt handling functions have to use special
2110 return instructions.
2111
2112 - Functions where we have pushed a fragmented set of registers into the
2113 call-save area must have the same set of registers popped. */
2114 if (is_sibcall
2115 || is_fast_interrupt_func (NULL_TREE)
2116 || is_interrupt_func (NULL_TREE)
2117 || register_mask)
2118 {
2119 /* Cannot use the special instructions - deconstruct by hand. */
2120 if (total_size)
2121 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2122 GEN_INT (total_size), false);
2123
2124 if (MUST_SAVE_ACC_REGISTER)
2125 {
2126 unsigned int acc_low, acc_high;
2127
2128 /* Reverse the saving of the accumulator register onto the stack.
2129 Note we must adjust the saved "low" accumulator value as it
2130 is really the middle 32 bits of the accumulator. */
2131 if (register_mask)
2132 {
2133 acc_low = acc_high = 0;
2134
2135 for (reg = 1; reg < CC_REGNUM; reg ++)
2136 if (register_mask & (1 << reg))
2137 {
2138 if (acc_low == 0)
2139 acc_low = reg;
2140 else
2141 {
2142 acc_high = reg;
2143 break;
2144 }
2145 }
2146 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2147 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2148 }
2149 else
2150 {
2151 acc_low = low;
2152 acc_high = low + 1;
2153 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2154 gen_rx_popm_vector (acc_low, acc_high)));
2155 }
2156
2157 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2158 gen_rtx_REG (SImode, acc_low),
2159 GEN_INT (16)));
2160 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2161 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2162 }
2163
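/* Illustrative sketch (editorial): the prologue saved the 64-bit
   accumulator with MVFACHI (bits 63..32) and MVFACMI (bits 47..16), so
   the restore sequence just above rebuilds

     ACC[31:0]  = saved_mid << 16   (MVTACLO; bits 15..0 become zero)
     ACC[63:32] = saved_hi          (MVTACHI)

   which is why the shift by 16 is needed; the bottom 16 bits of the
   accumulator are not preserved across the call.  */
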
2164 if (register_mask)
2165 {
2166 for (reg = 0; reg < CC_REGNUM; reg ++)
2167 if (register_mask & (1 << reg))
2168 {
2169 low = high = reg;
2170 while (register_mask & (1 << high))
2171 high ++;
2172 pop_regs (high - 1, low);
2173 reg = high;
2174 }
2175 }
2176 else if (low)
2177 pop_regs (high, low);
2178
2179 if (is_fast_interrupt_func (NULL_TREE))
2180 {
2181 gcc_assert (! is_sibcall);
2182 emit_jump_insn (gen_fast_interrupt_return ());
2183 }
2184 else if (is_interrupt_func (NULL_TREE))
2185 {
2186 gcc_assert (! is_sibcall);
2187 emit_jump_insn (gen_exception_return ());
2188 }
2189 else if (! is_sibcall)
2190 emit_jump_insn (gen_simple_return ());
2191
2192 return;
2193 }
2194
2195 /* If we allocated space on the stack, free it now. */
2196 if (total_size)
2197 {
2198 unsigned HOST_WIDE_INT rtsd_size;
2199
2200 /* See if we can use the RTSD instruction. */
2201 rtsd_size = total_size + regs_size;
2202 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2203 {
2204 if (low)
2205 emit_jump_insn (gen_pop_and_return
2206 (GEN_INT (rtsd_size),
2207 gen_rx_rtsd_vector (rtsd_size, low, high)));
2208 else
2209 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2210
2211 return;
2212 }
2213
2214 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2215 GEN_INT (total_size), false);
2216 }
2217
2218 if (low)
2219 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2220 gen_rx_rtsd_vector (regs_size,
2221 low, high)));
2222 else
2223 emit_jump_insn (gen_simple_return ());
2224 }
2225
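/* Illustrative worked example (editorial): for a function that saved
   r6..r8 and has 16 bytes of frame plus outgoing stack, total_size = 16
   and regs_size = 12, so rtsd_size = 28 and the fast path above emits a
   single

     rtsd #28, r6-r8

   which deallocates the frame, pops the saved registers and returns in
   one instruction.  RTSD's size operand must be a multiple of four and
   less than 1024 bytes, hence the guard on rtsd_size.  */
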
2226
2227 /* Compute the offset (in bytes) between FROM (arg pointer
2228 or frame pointer) and TO (frame pointer or stack pointer).
2229 See ASCII art comment at the start of rx_expand_prologue
2230 for more information. */
2231
2232 int
2233 rx_initial_elimination_offset (int from, int to)
2234 {
2235 unsigned int low;
2236 unsigned int high;
2237 unsigned int frame_size;
2238 unsigned int stack_size;
2239 unsigned int mask;
2240
2241 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2242
2243 if (from == ARG_POINTER_REGNUM)
2244 {
2245 /* Extend the computed size of the stack frame to
2246 include the registers pushed in the prologue. */
2247 if (low)
2248 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2249 else
2250 frame_size += bit_count (mask) * UNITS_PER_WORD;
2251
2252 /* Remember to include the return address. */
2253 frame_size += 1 * UNITS_PER_WORD;
2254
2255 if (to == FRAME_POINTER_REGNUM)
2256 return frame_size;
2257
2258 gcc_assert (to == STACK_POINTER_REGNUM);
2259 return frame_size + stack_size;
2260 }
2261
2262 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2263 return stack_size;
2264 }
2265
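/* Illustrative worked example (editorial): with r6..r8 saved (3 words),
   a 4-byte frame and 8 bytes of outgoing stack, and UNITS_PER_WORD == 4:

     ARG_POINTER   -> FRAME_POINTER : 4 + 3*4 + 4 (return address) = 20
     ARG_POINTER   -> STACK_POINTER : 20 + 8 = 28
     FRAME_POINTER -> STACK_POINTER : 8  */
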
2266 /* Decide if a variable should go into one of the small data sections. */
2267
2268 static bool
2269 rx_in_small_data (const_tree decl)
2270 {
2271 int size;
2272 const char * section;
2273
2274 if (rx_small_data_limit == 0)
2275 return false;
2276
2277 if (TREE_CODE (decl) != VAR_DECL)
2278 return false;
2279
2280 /* We do not put read-only variables into a small data area because
2281 they would be placed with the other read-only sections, far away
2282 from the read-write data sections, and we only have one small
2283 data area pointer.
2284 Similarly, commons are placed in the .bss section, which might be
2285 far away from (and out of alignment with respect to) the .data section. */
2286 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2287 return false;
2288
2289 section = DECL_SECTION_NAME (decl);
2290 if (section)
2291 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2292
2293 size = int_size_in_bytes (TREE_TYPE (decl));
2294
2295 return (size > 0) && (size <= rx_small_data_limit);
2296 }
2297
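/* Illustrative sketch (editorial, assuming -msmall-data-limit=8):

     int counter = 1;       // 4 bytes, writable         -> small data (D_2)
     static int flag;       // 4 bytes, zero-initialised -> small data (B_2)
     const int limit = 10;  // read-only                 -> excluded
     char buf[64];          // larger than the limit     -> excluded

   Only objects that pass the tests above end up addressed through the
   single small data base pointer.  */
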
2298 /* Return a section for X.
2299 The only special thing we do here is to honor small data. */
2300
2301 static section *
2302 rx_select_rtx_section (machine_mode mode,
2303 rtx x,
2304 unsigned HOST_WIDE_INT align)
2305 {
2306 if (rx_small_data_limit > 0
2307 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2308 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2309 return sdata_section;
2310
2311 return default_elf_select_rtx_section (mode, x, align);
2312 }
2313
2314 static section *
2315 rx_select_section (tree decl,
2316 int reloc,
2317 unsigned HOST_WIDE_INT align)
2318 {
2319 if (rx_small_data_limit > 0)
2320 {
2321 switch (categorize_decl_for_section (decl, reloc))
2322 {
2323 case SECCAT_SDATA: return sdata_section;
2324 case SECCAT_SBSS: return sbss_section;
2325 case SECCAT_SRODATA:
2326 /* Fall through. We do not put small, read-only
2327 data into the C_2 section, because we are
2328 not using the C_2 section at all: it is
2329 located with the other read-only data
2330 sections, far away from the read-write data
2331 sections, and we only have one small data
2332 pointer (r13). */
2333 default:
2334 break;
2335 }
2336 }
2337
2338 /* If we are supporting the Renesas assembler
2339 we cannot use mergeable sections. */
2340 if (TARGET_AS100_SYNTAX)
2341 switch (categorize_decl_for_section (decl, reloc))
2342 {
2343 case SECCAT_RODATA_MERGE_CONST:
2344 case SECCAT_RODATA_MERGE_STR_INIT:
2345 case SECCAT_RODATA_MERGE_STR:
2346 return readonly_data_section;
2347
2348 default:
2349 break;
2350 }
2351
2352 return default_elf_select_section (decl, reloc, align);
2353 }
2354 \f
2355 enum rx_builtin
2356 {
2357 RX_BUILTIN_BRK,
2358 RX_BUILTIN_CLRPSW,
2359 RX_BUILTIN_INT,
2360 RX_BUILTIN_MACHI,
2361 RX_BUILTIN_MACLO,
2362 RX_BUILTIN_MULHI,
2363 RX_BUILTIN_MULLO,
2364 RX_BUILTIN_MVFACHI,
2365 RX_BUILTIN_MVFACMI,
2366 RX_BUILTIN_MVFC,
2367 RX_BUILTIN_MVTACHI,
2368 RX_BUILTIN_MVTACLO,
2369 RX_BUILTIN_MVTC,
2370 RX_BUILTIN_MVTIPL,
2371 RX_BUILTIN_RACW,
2372 RX_BUILTIN_REVW,
2373 RX_BUILTIN_RMPA,
2374 RX_BUILTIN_ROUND,
2375 RX_BUILTIN_SETPSW,
2376 RX_BUILTIN_WAIT,
2377 RX_BUILTIN_max
2378 };
2379
2380 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2381
2382 static void
2383 rx_init_builtins (void)
2384 {
2385 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2386 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2387 add_builtin_function ("__builtin_rx_" LC_NAME, \
2388 build_function_type_list (RET_TYPE##_type_node, \
2389 NULL_TREE), \
2390 RX_BUILTIN_##UC_NAME, \
2391 BUILT_IN_MD, NULL, NULL_TREE)
2392
2393 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2394 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2395 add_builtin_function ("__builtin_rx_" LC_NAME, \
2396 build_function_type_list (RET_TYPE##_type_node, \
2397 ARG_TYPE##_type_node, \
2398 NULL_TREE), \
2399 RX_BUILTIN_##UC_NAME, \
2400 BUILT_IN_MD, NULL, NULL_TREE)
2401
2402 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2403 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2404 add_builtin_function ("__builtin_rx_" LC_NAME, \
2405 build_function_type_list (RET_TYPE##_type_node, \
2406 ARG_TYPE1##_type_node,\
2407 ARG_TYPE2##_type_node,\
2408 NULL_TREE), \
2409 RX_BUILTIN_##UC_NAME, \
2410 BUILT_IN_MD, NULL, NULL_TREE)
2411
2412 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2413 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2414 add_builtin_function ("__builtin_rx_" LC_NAME, \
2415 build_function_type_list (RET_TYPE##_type_node, \
2416 ARG_TYPE1##_type_node,\
2417 ARG_TYPE2##_type_node,\
2418 ARG_TYPE3##_type_node,\
2419 NULL_TREE), \
2420 RX_BUILTIN_##UC_NAME, \
2421 BUILT_IN_MD, NULL, NULL_TREE)
2422
2423 ADD_RX_BUILTIN0 (BRK, "brk", void);
2424 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2425 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2426 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2427 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2428 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2429 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2430 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2431 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2432 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2433 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2434 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2435 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
2436 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2437 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2438 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2439 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2440 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2441 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2442 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2443 }
2444
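/* Illustrative sketch (editorial): user code calls these like ordinary
   functions, e.g.

     int hi = __builtin_rx_mvfachi ();    // read accumulator bits 63..32
     __builtin_rx_mvtachi (hi);           // write them back
     int r  = __builtin_rx_round (1.5f);  // SFmode -> SImode via ROUND
     __builtin_rx_wait ();                // emit a WAIT instruction

   each of which is expanded to the matching machine instruction by
   rx_expand_builtin below.  */
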
2445 /* Return the RX builtin for CODE. */
2446
2447 static tree
2448 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2449 {
2450 if (code >= RX_BUILTIN_max)
2451 return error_mark_node;
2452
2453 return rx_builtins[code];
2454 }
2455
2456 static rtx
2457 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2458 {
2459 if (reg && ! REG_P (arg))
2460 arg = force_reg (SImode, arg);
2461
2462 emit_insn (gen_func (arg));
2463
2464 return NULL_RTX;
2465 }
2466
2467 static rtx
2468 rx_expand_builtin_mvtc (tree exp)
2469 {
2470 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2471 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2472
2473 if (! CONST_INT_P (arg1))
2474 return NULL_RTX;
2475
2476 if (! REG_P (arg2))
2477 arg2 = force_reg (SImode, arg2);
2478
2479 if (INTVAL (arg1) == 1)
2480 {
2481 warning (0, "invalid control register %d for mvtc; using %<psw%>",
2482 (int) INTVAL (arg1));
2483 arg1 = const0_rtx;
2484 }
2485
2486 emit_insn (gen_mvtc (arg1, arg2));
2487
2488 return NULL_RTX;
2489 }
2490
2491 static rtx
2492 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2493 {
2494 rtx arg = expand_normal (t_arg);
2495
2496 if (! CONST_INT_P (arg))
2497 return NULL_RTX;
2498
2499 if (target == NULL_RTX)
2500 return NULL_RTX;
2501
2502 if (! REG_P (target))
2503 target = force_reg (SImode, target);
2504
2505 emit_insn (gen_mvfc (target, arg));
2506
2507 return target;
2508 }
2509
2510 static rtx
2511 rx_expand_builtin_mvtipl (rtx arg)
2512 {
2513 /* The RX610 does not support the MVTIPL instruction. */
2514 if (rx_cpu_type == RX610)
2515 return NULL_RTX;
2516
2517 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2518 return NULL_RTX;
2519
2520 emit_insn (gen_mvtipl (arg));
2521
2522 return NULL_RTX;
2523 }
2524
2525 static rtx
2526 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2527 {
2528 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2529 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2530
2531 if (! REG_P (arg1))
2532 arg1 = force_reg (SImode, arg1);
2533
2534 if (! REG_P (arg2))
2535 arg2 = force_reg (SImode, arg2);
2536
2537 emit_insn (gen_func (arg1, arg2));
2538
2539 return NULL_RTX;
2540 }
2541
2542 static rtx
2543 rx_expand_int_builtin_1_arg (rtx arg,
2544 rtx target,
2545 rtx (* gen_func)(rtx, rtx),
2546 bool mem_ok)
2547 {
2548 if (! REG_P (arg))
2549 if (!mem_ok || ! MEM_P (arg))
2550 arg = force_reg (SImode, arg);
2551
2552 if (target == NULL_RTX || ! REG_P (target))
2553 target = gen_reg_rtx (SImode);
2554
2555 emit_insn (gen_func (target, arg));
2556
2557 return target;
2558 }
2559
2560 static rtx
2561 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2562 {
2563 if (target == NULL_RTX || ! REG_P (target))
2564 target = gen_reg_rtx (SImode);
2565
2566 emit_insn (gen_func (target));
2567
2568 return target;
2569 }
2570
2571 static rtx
2572 rx_expand_builtin_round (rtx arg, rtx target)
2573 {
2574 if ((! REG_P (arg) && ! MEM_P (arg))
2575 || GET_MODE (arg) != SFmode)
2576 arg = force_reg (SFmode, arg);
2577
2578 if (target == NULL_RTX || ! REG_P (target))
2579 target = gen_reg_rtx (SImode);
2580
2581 emit_insn (gen_lrintsf2 (target, arg));
2582
2583 return target;
2584 }
2585
2586 static int
2587 valid_psw_flag (rtx op, const char *which)
2588 {
2589 static int mvtc_inform_done = 0;
2590
2591 if (GET_CODE (op) == CONST_INT)
2592 switch (INTVAL (op))
2593 {
2594 case 0: case 'c': case 'C':
2595 case 1: case 'z': case 'Z':
2596 case 2: case 's': case 'S':
2597 case 3: case 'o': case 'O':
2598 case 8: case 'i': case 'I':
2599 case 9: case 'u': case 'U':
2600 return 1;
2601 }
2602
2603 error ("%<__builtin_rx_%s%> takes %<C%>, %<Z%>, %<S%>, %<O%>, %<I%>, "
2604 "or %<U%>", which);
2605 if (!mvtc_inform_done)
2606 error ("use %<__builtin_rx_mvtc (0, ... )%> to write arbitrary values to PSW");
2607 mvtc_inform_done = 1;
2608
2609 return 0;
2610 }
2611
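/* Illustrative sketch (editorial): the numeric and character forms name
   the same PSW bits, so

     __builtin_rx_clrpsw ('I');   // clear the I flag: disable interrupts
     __builtin_rx_clrpsw (8);     // identical, numeric form
     __builtin_rx_setpsw ('U');   // select the user stack pointer

   all pass the check above, while any other value is diagnosed.  */
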
2612 static rtx
2613 rx_expand_builtin (tree exp,
2614 rtx target,
2615 rtx subtarget ATTRIBUTE_UNUSED,
2616 machine_mode mode ATTRIBUTE_UNUSED,
2617 int ignore ATTRIBUTE_UNUSED)
2618 {
2619 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2620 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2621 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2622 unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
2623
2624 switch (fcode)
2625 {
2626 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2627 case RX_BUILTIN_CLRPSW:
2628 if (!valid_psw_flag (op, "clrpsw"))
2629 return NULL_RTX;
2630 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2631 case RX_BUILTIN_SETPSW:
2632 if (!valid_psw_flag (op, "setpsw"))
2633 return NULL_RTX;
2634 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2635 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2636 (op, gen_int, false);
2637 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2638 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2639 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2640 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2641 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2642 (target, gen_mvfachi);
2643 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2644 (target, gen_mvfacmi);
2645 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2646 (op, gen_mvtachi, true);
2647 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2648 (op, gen_mvtaclo, true);
2649 case RX_BUILTIN_RMPA:
2650 if (rx_allow_string_insns)
2651 emit_insn (gen_rmpa ());
2652 else
2653 error ("%<-mno-allow-string-insns%> forbids the generation "
2654 "of the RMPA instruction");
2655 return NULL_RTX;
2656 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2657 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2658 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2659 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2660 (op, gen_racw, false);
2661 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2662 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2663 (op, target, gen_revw, false);
2664 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2665
2666 default:
2667 internal_error ("bad builtin code");
2668 break;
2669 }
2670
2671 return NULL_RTX;
2672 }
2673 \f
2674 /* Place an element into a constructor or destructor section.
2675 Like default_ctor_section_asm_out_constructor in varasm.cc
2676 except that it uses .init_array (or .fini_array) and it
2677 handles constructor priorities. */
2678
2679 static void
2680 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2681 {
2682 section * s;
2683
2684 if (priority != DEFAULT_INIT_PRIORITY)
2685 {
2686 char buf[18];
2687
2688 sprintf (buf, "%s.%.5u",
2689 is_ctor ? ".init_array" : ".fini_array",
2690 priority);
2691 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2692 }
2693 else if (is_ctor)
2694 s = ctors_section;
2695 else
2696 s = dtors_section;
2697
2698 switch_to_section (s);
2699 assemble_align (POINTER_SIZE);
2700 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2701 }
2702
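/* Illustrative sketch (editorial): a function marked
   __attribute__((constructor (101))) would be placed in a section named
   ".init_array.00101" (4-byte aligned, holding one pointer to the
   hypothetical symbol _my_ctor), whereas default-priority constructors
   go straight to ctors_section.  */
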
2703 static void
2704 rx_elf_asm_constructor (rtx symbol, int priority)
2705 {
2706 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2707 }
2708
2709 static void
2710 rx_elf_asm_destructor (rtx symbol, int priority)
2711 {
2712 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2713 }
2714 \f
2715 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2716
2717 static tree
2718 rx_handle_func_attribute (tree * node,
2719 tree name,
2720 tree args ATTRIBUTE_UNUSED,
2721 int flags ATTRIBUTE_UNUSED,
2722 bool * no_add_attrs)
2723 {
2724 gcc_assert (DECL_P (* node));
2725
2726 if (TREE_CODE (* node) != FUNCTION_DECL)
2727 {
2728 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2729 name);
2730 * no_add_attrs = true;
2731 }
2732
2733 /* FIXME: We ought to check for conflicting attributes. */
2734
2735 /* FIXME: We ought to check that the interrupt and exception
2736 handler attributes have been applied to void functions. */
2737 return NULL_TREE;
2738 }
2739
2740 /* Check "vector" attribute. */
2741
2742 static tree
2743 rx_handle_vector_attribute (tree * node,
2744 tree name,
2745 tree args,
2746 int flags ATTRIBUTE_UNUSED,
2747 bool * no_add_attrs)
2748 {
2749 gcc_assert (DECL_P (* node));
2750 gcc_assert (args != NULL_TREE);
2751
2752 if (TREE_CODE (* node) != FUNCTION_DECL)
2753 {
2754 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2755 name);
2756 * no_add_attrs = true;
2757 }
2758
2759 return NULL_TREE;
2760 }
2761
2762 /* Table of RX specific attributes. */
2763 TARGET_GNU_ATTRIBUTES (rx_attribute_table,
2764 {
2765 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
2766 affects_type_identity, handler, exclude. */
2767 { "fast_interrupt", 0, 0, true, false, false, false,
2768 rx_handle_func_attribute, NULL },
2769 { "interrupt", 0, -1, true, false, false, false,
2770 rx_handle_func_attribute, NULL },
2771 { "naked", 0, 0, true, false, false, false,
2772 rx_handle_func_attribute, NULL },
2773 { "vector", 1, -1, true, false, false, false,
2774 rx_handle_vector_attribute, NULL }
2775 });
2776
2777 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2778
2779 static void
2780 rx_override_options_after_change (void)
2781 {
2782 static bool first_time = TRUE;
2783
2784 if (first_time)
2785 {
2786 /* If this is the first time through and the user has not disabled
2787 the use of RX FPU hardware then enable -ffinite-math-only,
2788 since the FPU instructions do not support NaNs and infinities. */
2789 if (TARGET_USE_FPU)
2790 flag_finite_math_only = 1;
2791
2792 first_time = FALSE;
2793 }
2794 else
2795 {
2796 /* Alert the user if they are changing the optimization options
2797 to use IEEE-compliant floating point arithmetic with RX FPU insns. */
2798 if (TARGET_USE_FPU
2799 && !flag_finite_math_only)
2800 warning (0, "RX FPU instructions do not support NaNs and infinities");
2801 }
2802 }
2803
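/* Illustrative sketch (editorial, assuming an FPU-enabled build): the
   warning above fires when a later option change re-enables IEEE
   semantics, e.g.

     __attribute__((optimize ("no-finite-math-only")))
     float f (float a, float b) { return a + b; }

   because the RX FPU instructions do not support NaNs and infinities.  */
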
2804 static void
2805 rx_option_override (void)
2806 {
2807 unsigned int i;
2808 cl_deferred_option *opt;
2809 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2810
2811 if (v)
2812 FOR_EACH_VEC_ELT (*v, i, opt)
2813 {
2814 switch (opt->opt_index)
2815 {
2816 case OPT_mint_register_:
2817 switch (opt->value)
2818 {
2819 case 4:
2820 fixed_regs[10] = call_used_regs [10] = 1;
2821 /* Fall through. */
2822 case 3:
2823 fixed_regs[11] = call_used_regs [11] = 1;
2824 /* Fall through. */
2825 case 2:
2826 fixed_regs[12] = call_used_regs [12] = 1;
2827 /* Fall through. */
2828 case 1:
2829 fixed_regs[13] = call_used_regs [13] = 1;
2830 /* Fall through. */
2831 case 0:
2832 rx_num_interrupt_regs = opt->value;
2833 break;
2834 default:
2835 rx_num_interrupt_regs = 0;
2836 /* Error message already given because rx_handle_option
2837 returned false. */
2838 break;
2839 }
2840 break;
2841
2842 default:
2843 gcc_unreachable ();
2844 }
2845 }
2846
2847 /* This target defaults to strict volatile bitfields. */
2848 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2849 flag_strict_volatile_bitfields = 1;
2850
2851 rx_override_options_after_change ();
2852
2853 /* These values are bytes, not log. */
2854 if (! optimize_size)
2855 {
2856 if (flag_align_jumps && !str_align_jumps)
2857 str_align_jumps = ((rx_cpu_type == RX100
2858 || rx_cpu_type == RX200) ? "4" : "8");
2859 if (flag_align_loops && !str_align_loops)
2860 str_align_loops = ((rx_cpu_type == RX100
2861 || rx_cpu_type == RX200) ? "4" : "8");
2862 if (flag_align_labels && !str_align_labels)
2863 str_align_labels = ((rx_cpu_type == RX100
2864 || rx_cpu_type == RX200) ? "4" : "8");
2865 }
2866 }
2867
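/* Illustrative worked example (editorial): -mint-register=2 enters the
   switch above at "case 2" and falls through to "case 1", fixing r12 and
   r13 and setting rx_num_interrupt_regs to 2; -mint-register=4 reserves
   r10 through r13.  */
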
2868 \f
2869 static bool
2870 rx_allocate_stack_slots_for_args (void)
2871 {
2872 /* Naked functions should not allocate stack slots for arguments. */
2873 return ! is_naked_func (NULL_TREE);
2874 }
2875
2876 static bool
2877 rx_func_attr_inlinable (const_tree decl)
2878 {
2879 return ! is_fast_interrupt_func (decl)
2880 && ! is_interrupt_func (decl)
2881 && ! is_naked_func (decl);
2882 }
2883
2884 static bool
2885 rx_warn_func_return (tree decl)
2886 {
2887 /* Naked functions are implemented entirely in assembly, including the
2888 return sequence, so suppress warnings about this. */
2889 return !is_naked_func (decl);
2890 }
2891
2892 /* Return nonzero if it is OK to make a tail call to DECL (a
2893 function_decl, or NULL if this is an indirect call), using EXP. */
2894
2895 static bool
2896 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2897 {
2898 if (TARGET_JSR)
2899 return false;
2900
2901 /* Do not allow indirect tailcalls. The
2902 sibcall patterns do not support them. */
2903 if (decl == NULL)
2904 return false;
2905
2906 /* Never tailcall from inside interrupt handlers or naked functions. */
2907 if (is_fast_interrupt_func (NULL_TREE)
2908 || is_interrupt_func (NULL_TREE)
2909 || is_naked_func (NULL_TREE))
2910 return false;
2911
2912 return true;
2913 }
2914
2915 static void
2916 rx_file_start (void)
2917 {
2918 if (! TARGET_AS100_SYNTAX)
2919 default_file_start ();
2920 }
2921
2922 static bool
2923 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2924 {
2925 /* The packed attribute overrides the MS behavior. */
2926 return ! TYPE_PACKED (record_type);
2927 }
2928 \f
2929 /* Returns true if X is a legitimate constant for an immediate
2930 operand on the RX. X is already known to satisfy CONSTANT_P. */
2931
2932 bool
2933 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2934 {
2935 switch (GET_CODE (x))
2936 {
2937 case CONST:
2938 x = XEXP (x, 0);
2939
2940 if (GET_CODE (x) == PLUS)
2941 {
2942 if (! CONST_INT_P (XEXP (x, 1)))
2943 return false;
2944
2945 /* GCC would not pass us CONST_INT + CONST_INT so we
2946 know that we have {SYMBOL|LABEL} + CONST_INT. */
2947 x = XEXP (x, 0);
2948 gcc_assert (! CONST_INT_P (x));
2949 }
2950
2951 switch (GET_CODE (x))
2952 {
2953 case LABEL_REF:
2954 case SYMBOL_REF:
2955 return true;
2956
2957 case UNSPEC:
2958 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2959
2960 default:
2961 /* FIXME: Can this ever happen? */
2962 gcc_unreachable ();
2963 }
2964 break;
2965
2966 case LABEL_REF:
2967 case SYMBOL_REF:
2968 return true;
2969 case CONST_DOUBLE:
2970 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2971 case CONST_VECTOR:
2972 return false;
2973 default:
2974 gcc_assert (CONST_INT_P (x));
2975 break;
2976 }
2977
2978 return ok_for_max_constant (INTVAL (x));
2979 }
2980
2981 static int
2982 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2983 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2984 {
2985 rtx a, b;
2986
2987 if (GET_CODE (addr) != PLUS)
2988 return COSTS_N_INSNS (1);
2989
2990 a = XEXP (addr, 0);
2991 b = XEXP (addr, 1);
2992
2993 if (REG_P (a) && REG_P (b))
2994 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2995 return COSTS_N_INSNS (4);
2996
2997 if (speed)
2998 /* [REG+OFF] is just as fast as [REG]. */
2999 return COSTS_N_INSNS (1);
3000
3001 if (CONST_INT_P (b)
3002 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
3003 /* Try to discourage REG + <large OFF> when optimizing for size. */
3004 return COSTS_N_INSNS (2);
3005
3006 return COSTS_N_INSNS (1);
3007 }
3008
3009 static bool
3010 rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
3011 int opno ATTRIBUTE_UNUSED, int* total, bool speed)
3012 {
3013 if (x == const0_rtx)
3014 {
3015 *total = 0;
3016 return true;
3017 }
3018
3019 switch (GET_CODE (x))
3020 {
3021 case MULT:
3022 if (mode == DImode)
3023 {
3024 *total = COSTS_N_INSNS (2);
3025 return true;
3026 }
3027 /* fall through */
3028
3029 case PLUS:
3030 case MINUS:
3031 case AND:
3032 case COMPARE:
3033 case IOR:
3034 case XOR:
3035 *total = COSTS_N_INSNS (1);
3036 return true;
3037
3038 case DIV:
3039 if (speed)
3040 /* This is the worst case for a division. Pessimize divisions when
3041 not optimizing for size and allow reciprocal optimizations which
3042 produce bigger code. */
3043 *total = COSTS_N_INSNS (20);
3044 else
3045 *total = COSTS_N_INSNS (3);
3046 return true;
3047
3048 case UDIV:
3049 if (speed)
3050 /* This is the worst case for a division. Pessimize divisions when
3051 not optimizing for size and allow reciprocal optimizations which
3052 produce bigger code. */
3053 *total = COSTS_N_INSNS (18);
3054 else
3055 *total = COSTS_N_INSNS (3);
3056 return true;
3057
3058 default:
3059 break;
3060 }
3061
3062 return false;
3063 }
3064
3065 static bool
3066 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3067 {
3068 /* We can always eliminate to the frame pointer.
3069 We can eliminate to the stack pointer unless a frame
3070 pointer is needed. */
3071
3072 return to == FRAME_POINTER_REGNUM
3073 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
3074 }
3075 \f
3076
3077 static void
3078 rx_trampoline_template (FILE * file)
3079 {
3080 /* Output assembler code for a block containing the constant
3081 part of a trampoline, leaving space for the variable parts.
3082
3083 On the RX, (where r8 is the static chain regnum) the trampoline
3084 looks like:
3085
3086 mov #<static chain value>, r8
3087 mov #<function's address>, r9
3088 jmp r9
3089
3090 In big-endian-data mode, however, instructions are read into the CPU
3091 4 bytes at a time. These bytes are then swapped around before being
3092 passed to the decoder. So we must partition our trampoline into
3093 4-byte packets and swap these packets around so that the instruction
3094 reader will reverse the process. But, in order to avoid splitting
3095 the 32-bit constants across these packet boundaries (which would make
3096 inserting them into the constructed trampoline very difficult) we have
3097 to pad the instruction sequence with NOP insns, i.e.:
3098
3099 nop
3100 nop
3101 mov.l #<...>, r8
3102 nop
3103 nop
3104 mov.l #<...>, r9
3105 jmp r9
3106 nop
3107 nop */
3108
3109 if (! TARGET_BIG_ENDIAN_DATA)
3110 {
3111 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3112 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3113 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3114 }
3115 else
3116 {
3117 char r8 = '0' + STATIC_CHAIN_REGNUM;
3118 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3119
3120 if (TARGET_AS100_SYNTAX)
3121 {
3122 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3123 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3124 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3125 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3126 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3127 }
3128 else
3129 {
3130 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3131 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3132 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3133 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3134 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3135 }
3136 }
3137 }
3138
3139 static void
3140 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3141 {
3142 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3143
3144 emit_block_move (tramp, assemble_trampoline_template (),
3145 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3146
3147 if (TARGET_BIG_ENDIAN_DATA)
3148 {
3149 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3150 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3151 }
3152 else
3153 {
3154 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3155 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3156 }
3157 }
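/* Illustrative sketch (editorial): in little-endian mode the template
   laid out above is roughly

     offset  0: mov.L #<chain>,  r8   ; 32-bit immediate at offset 2
     offset  6: mov.L #<fnaddr>, r9   ; 32-bit immediate at offset 8
     offset 12: jmp   r9

   so the two emit_move_insn calls patch the 0xdeadbeef placeholders at
   byte offsets 2 and 8; in big-endian-data mode the NOP padding shifts
   the immediates to the 4-byte-aligned offsets 4 and 12.  */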
3158 \f
3159 static int
3160 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3161 reg_class_t regclass ATTRIBUTE_UNUSED,
3162 bool in)
3163 {
3164 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3165 }
3166
3167 /* Convert a CC_MODE to the set of flags that it represents. */
3168
3169 static unsigned int
3170 flags_from_mode (machine_mode mode)
3171 {
3172 switch (mode)
3173 {
3174 case E_CC_ZSmode:
3175 return CC_FLAG_S | CC_FLAG_Z;
3176 case E_CC_ZSOmode:
3177 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3178 case E_CC_ZSCmode:
3179 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3180 case E_CCmode:
3181 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3182 case E_CC_Fmode:
3183 return CC_FLAG_FP;
3184 default:
3185 gcc_unreachable ();
3186 }
3187 }
3188
3189 /* Convert a set of flags to a CC_MODE that can implement it. */
3190
3191 static machine_mode
3192 mode_from_flags (unsigned int f)
3193 {
3194 if (f & CC_FLAG_FP)
3195 return CC_Fmode;
3196 if (f & CC_FLAG_O)
3197 {
3198 if (f & CC_FLAG_C)
3199 return CCmode;
3200 else
3201 return CC_ZSOmode;
3202 }
3203 else if (f & CC_FLAG_C)
3204 return CC_ZSCmode;
3205 else
3206 return CC_ZSmode;
3207 }
3208
3209 /* Convert an RTX_CODE to the set of flags needed to implement it.
3210 This assumes an integer comparison. */
3211
3212 static unsigned int
3213 flags_from_code (enum rtx_code code)
3214 {
3215 switch (code)
3216 {
3217 case LT:
3218 case GE:
3219 return CC_FLAG_S;
3220 case GT:
3221 case LE:
3222 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3223 case GEU:
3224 case LTU:
3225 return CC_FLAG_C;
3226 case GTU:
3227 case LEU:
3228 return CC_FLAG_C | CC_FLAG_Z;
3229 case EQ:
3230 case NE:
3231 return CC_FLAG_Z;
3232 default:
3233 gcc_unreachable ();
3234 }
3235 }
3236
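/* Illustrative worked example (editorial): a signed "a > b" uses GT,
   which needs S, O and Z per the table above, so
   mode_from_flags (CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z) picks CC_ZSOmode;
   the unsigned GTU needs only C and Z and gets CC_ZSCmode.  */
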
3237 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3238
3239 static machine_mode
3240 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3241 {
3242 unsigned f;
3243
3244 /* Early out for identical modes. */
3245 if (m1 == m2)
3246 return m1;
3247
3248 /* There's no valid combination for FP vs non-FP. */
3249 f = flags_from_mode (m1) | flags_from_mode (m2);
3250 if (f & CC_FLAG_FP)
3251 return VOIDmode;
3252
3253 /* Otherwise, see what mode can implement all the flags. */
3254 return mode_from_flags (f);
3255 }
3256
3257 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3258
3259 machine_mode
3260 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3261 {
3262 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3263 return CC_Fmode;
3264
3265 if (y != const0_rtx)
3266 return CCmode;
3267
3268 return mode_from_flags (flags_from_code (cmp_code));
3269 }
3270
3271 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3272 CC_MODE, and use that in branches based on that compare. */
3273
3274 void
3275 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3276 rtx c1, rtx c2, rtx label)
3277 {
3278 rtx flags, x;
3279
3280 flags = gen_rtx_REG (cc_mode, CC_REG);
3281 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3282 x = gen_rtx_SET (flags, x);
3283 emit_insn (x);
3284
3285 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3286 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3287 x = gen_rtx_SET (pc_rtx, x);
3288 emit_jump_insn (x);
3289 }
3290
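/* Illustrative sketch (editorial): rx_split_cbranch (CC_ZSmode, EQ, a, b,
   label) emits the pair

     (set (reg:CC_ZS cc) (compare:CC_ZS a b))
     (set (pc) (if_then_else (eq (reg:CC_ZS cc) (const_int 0))
                             (label_ref label) (pc)))

   i.e. the compare writes the flags register and the jump tests it.  */
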
3291 /* A helper function for matching parallels that set the flags. */
3292
3293 bool
3294 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3295 {
3296 rtx op1, flags;
3297 machine_mode flags_mode;
3298
3299 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3300
3301 op1 = XVECEXP (PATTERN (insn), 0, 0);
3302 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3303
3304 flags = SET_DEST (op1);
3305 flags_mode = GET_MODE (flags);
3306
3307 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3308 return false;
3309 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3310 return false;
3311
3312 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3313 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3314 return false;
3315
3316 return true;
3317 }
3318 \f
3319
3320 static int
3321 rx_max_skip_for_label (rtx_insn *lab)
3322 {
3323 int opsize;
3324 rtx_insn *op;
3325
3326 if (optimize_size)
3327 return 0;
3328
3329 if (lab == NULL)
3330 return 0;
3331
3332 op = lab;
3333 do
3334 {
3335 op = next_nonnote_nondebug_insn (op);
3336 }
3337 while (op && (LABEL_P (op)
3338 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3339 if (!op)
3340 return 0;
3341
3342 opsize = get_attr_length (op);
3343 if (opsize >= 0 && opsize < 8)
3344 return MAX (0, opsize - 1);
3345 return 0;
3346 }
3347
3348 static int
3349 rx_align_log_for_label (rtx_insn *lab, int uses_threshold)
3350 {
3351 /* This is a simple heuristic to guess when an alignment would not be useful
3352 because the delay due to the inserted NOPs would be greater than the delay
3353 due to the misaligned branch. If uses_threshold is zero then the alignment
3354 is always useful. */
3355 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3356 return 0;
3357
3358 if (optimize_size)
3359 return 0;
3360
3361 /* Return zero if max_skip is not a positive number. */
3362 int max_skip = rx_max_skip_for_label (lab);
3363 if (max_skip <= 0)
3364 return 0;
3365
3366 /* These values are log, not bytes. */
3367 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3368 return 2; /* 4 bytes */
3369 return 3; /* 8 bytes */
3370 }
3371
3372 align_flags
3373 rx_align_for_label (rtx_insn *lab, int uses_threshold)
3374 {
3375 return align_flags (rx_align_log_for_label (lab, uses_threshold),
3376 rx_max_skip_for_label (lab));
3377 }
3378
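/* Illustrative worked example (editorial): on RX100/RX200 cores a
   sufficiently used label is aligned to 2^2 = 4 bytes, on other cores to
   2^3 = 8 bytes, and at most (length of the next insn - 1) padding bytes
   may be inserted; if the following insn is already 8+ bytes long,
   rx_max_skip_for_label returns 0 and no alignment is emitted.  */
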
3379 /* Compute the real length of the extending load-and-op instructions. */
3380
3381 int
3382 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3383 {
3384 rtx extend, mem, offset;
3385 bool zero;
3386 int factor;
3387
3388 if (!INSN_P (insn))
3389 return current_length;
3390
3391 switch (INSN_CODE (insn))
3392 {
3393 default:
3394 return current_length;
3395
3396 case CODE_FOR_plussi3_zero_extendhi:
3397 case CODE_FOR_andsi3_zero_extendhi:
3398 case CODE_FOR_iorsi3_zero_extendhi:
3399 case CODE_FOR_xorsi3_zero_extendhi:
3400 case CODE_FOR_divsi3_zero_extendhi:
3401 case CODE_FOR_udivsi3_zero_extendhi:
3402 case CODE_FOR_minussi3_zero_extendhi:
3403 case CODE_FOR_smaxsi3_zero_extendhi:
3404 case CODE_FOR_sminsi3_zero_extendhi:
3405 case CODE_FOR_multsi3_zero_extendhi:
3406 case CODE_FOR_comparesi3_zero_extendhi:
3407 zero = true;
3408 factor = 2;
3409 break;
3410
3411 case CODE_FOR_plussi3_sign_extendhi:
3412 case CODE_FOR_andsi3_sign_extendhi:
3413 case CODE_FOR_iorsi3_sign_extendhi:
3414 case CODE_FOR_xorsi3_sign_extendhi:
3415 case CODE_FOR_divsi3_sign_extendhi:
3416 case CODE_FOR_udivsi3_sign_extendhi:
3417 case CODE_FOR_minussi3_sign_extendhi:
3418 case CODE_FOR_smaxsi3_sign_extendhi:
3419 case CODE_FOR_sminsi3_sign_extendhi:
3420 case CODE_FOR_multsi3_sign_extendhi:
3421 case CODE_FOR_comparesi3_sign_extendhi:
3422 zero = false;
3423 factor = 2;
3424 break;
3425
3426 case CODE_FOR_plussi3_zero_extendqi:
3427 case CODE_FOR_andsi3_zero_extendqi:
3428 case CODE_FOR_iorsi3_zero_extendqi:
3429 case CODE_FOR_xorsi3_zero_extendqi:
3430 case CODE_FOR_divsi3_zero_extendqi:
3431 case CODE_FOR_udivsi3_zero_extendqi:
3432 case CODE_FOR_minussi3_zero_extendqi:
3433 case CODE_FOR_smaxsi3_zero_extendqi:
3434 case CODE_FOR_sminsi3_zero_extendqi:
3435 case CODE_FOR_multsi3_zero_extendqi:
3436 case CODE_FOR_comparesi3_zero_extendqi:
3437 zero = true;
3438 factor = 1;
3439 break;
3440
3441 case CODE_FOR_plussi3_sign_extendqi:
3442 case CODE_FOR_andsi3_sign_extendqi:
3443 case CODE_FOR_iorsi3_sign_extendqi:
3444 case CODE_FOR_xorsi3_sign_extendqi:
3445 case CODE_FOR_divsi3_sign_extendqi:
3446 case CODE_FOR_udivsi3_sign_extendqi:
3447 case CODE_FOR_minussi3_sign_extendqi:
3448 case CODE_FOR_smaxsi3_sign_extendqi:
3449 case CODE_FOR_sminsi3_sign_extendqi:
3450 case CODE_FOR_multsi3_sign_extendqi:
3451 case CODE_FOR_comparesi3_sign_extendqi:
3452 zero = false;
3453 factor = 1;
3454 break;
3455 }
3456
3457 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3458 extend = single_set (insn);
3459 gcc_assert (extend != NULL_RTX);
3460
3461 extend = SET_SRC (extend);
3462 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3463 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3464 extend = XEXP (extend, 0);
3465 else
3466 extend = XEXP (extend, 1);
3467
3468 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3469 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3470
3471 mem = XEXP (extend, 0);
3472 gcc_checking_assert (MEM_P (mem));
3473 if (REG_P (XEXP (mem, 0)))
3474 return (zero && factor == 1) ? 2 : 3;
3475
3476 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3477 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3478 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3479
3480 offset = XEXP (XEXP (mem, 0), 1);
3481 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3482
3483 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3484 return (zero && factor == 1) ? 3 : 4;
3485
3486 return (zero && factor == 1) ? 4 : 5;
3487 }
3488
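/* Illustrative worked example (editorial): for

     (set (reg) (plus (reg) (zero_extend:SI (mem:QI (reg)))))

   zero == true and factor == 1, so the insn is 2 bytes with a plain
   register address, 3 bytes with an offset in 0..255 and 4 bytes
   otherwise; the sign-extending and HImode variants are one byte longer
   in each case.  */
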
3489 static bool
3490 rx_narrow_volatile_bitfield (void)
3491 {
3492 return true;
3493 }
3494
3495 static bool
3496 rx_ok_to_inline (tree caller, tree callee)
3497 {
3498 /* Do not inline functions with local variables
3499 into a naked CALLER - naked functions have no stack frame and
3500 locals need a frame in order to have somewhere to live.
3501
3502 Unfortunately we have no way to determine the presence of
3503 local variables in CALLEE, so we have to be cautious and
3504 assume that there might be some there.
3505
3506 We do allow inlining when CALLEE has the "inline" type
3507 modifier or the "always_inline" or "gnu_inline" attributes. */
3508 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3509 || DECL_DECLARED_INLINE_P (callee)
3510 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3511 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3512 }
3513
3514 static bool
3515 rx_enable_lra (void)
3516 {
3517 return TARGET_ENABLE_LRA;
3518 }
3519
3520 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3521 {
3522 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3523 {
3524 /* If we are inside an interrupt handler, assume that interrupts are
3525 off -- which is the default hardware behavior. In this case, there
3526 is no need to disable the interrupts. */
3527 m_prev_psw_reg = NULL;
3528 }
3529 else
3530 {
3531 m_prev_psw_reg = gen_reg_rtx (SImode);
3532 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3533 emit_insn (gen_clrpsw (GEN_INT ('I')));
3534 }
3535 }
3536
3537 rx_atomic_sequence::~rx_atomic_sequence (void)
3538 {
3539 if (m_prev_psw_reg != NULL)
3540 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3541 }
3542
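/* Illustrative sketch (editorial): the class is used RAII-style when
   expanding atomic sequences, e.g.

     {
       rx_atomic_sequence seq (current_function_decl);
       ... emit the load/modify/store insns ...
     }   // destructor restores the saved PSW, re-enabling interrupts

   Inside interrupt handlers the constructor emits nothing, since
   interrupts are already off.  */
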
3543 /* Given an insn and a reg number, tell whether the reg dies or is unused
3544 after the insn. */
3545 bool
3546 rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
3547 {
3548 return find_regno_note (i, REG_DEAD, regno) != NULL
3549 || find_regno_note (i, REG_UNUSED, regno) != NULL;
3550 }
3551
3552 /* Copy dead and unused notes from SRC to DST for the specified REGNO. */
3553 void
3554 rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src, rtx_insn* dst)
3555 {
3556 int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);
3557
3558 if (rtx note = find_regno_note (src, REG_DEAD, regno))
3559 add_shallow_copy_of_reg_note (dst, note);
3560
3561 if (rtx note = find_regno_note (src, REG_UNUSED, regno))
3562 add_shallow_copy_of_reg_note (dst, note);
3563 }
3564
3565 /* Try to fuse the current bit-operation insn with the surrounding memory load
3566 and store. */
3567 bool
3568 rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
3569 rtx (*gen_insn)(rtx, rtx))
3570 {
3571 rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];
3572
3573 set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
3574 prev_nonnote_nondebug_insn_bb);
3575 if (op2_def.set_src == NULL_RTX
3576 || !MEM_P (op2_def.set_src)
3577 || GET_MODE (op2_def.set_src) != QImode
3578 || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
3579 GET_MODE (op2_def.set_src))
3580 || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
3581 || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg))
3582 )
3583 return false;
3584
3585 /* The register operand originates from a memory load and the memory load
3586 could be fused with the bitop insn.
3587 Look for the following memory store with the same memory operand. */
3588 rtx mem = op2_def.set_src;
3589
3590 /* If the memory is an auto-mod address, it can't be fused. */
3591 if (GET_CODE (XEXP (mem, 0)) == POST_INC
3592 || GET_CODE (XEXP (mem, 0)) == PRE_INC
3593 || GET_CODE (XEXP (mem, 0)) == POST_DEC
3594 || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
3595 return false;
3596
3597 rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
3598 next_nonnote_nondebug_insn_bb);
3599 if (op0_use == NULL
3600 || !(GET_CODE (PATTERN (op0_use)) == SET
3601 && RX_REG_P (XEXP (PATTERN (op0_use), 1))
3602 && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
3603 && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
3604 || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
3605 || reg_set_between_p (operands[2], curr_insn, op0_use))
3606 return false;
3607
3608 /* If the load-modify-store operation is fused it could potentially modify
3609 load/store ordering if there are other memory accesses between the load
3610 and the store for this insn. If there are volatile mems between the load
3611 and store it's better not to change the ordering. If there is a call
3612 between the load and store, it's also not safe to fuse it. */
3613 for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
3614 i != NULL && i != op0_use;
3615 i = next_nonnote_nondebug_insn_bb (i))
3616 if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
3617 return false;
3618
3619 emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
3620 set_insn_deleted (op2_def.insn);
3621 set_insn_deleted (op0_use);
3622 return true;
3623 }
3624
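/* Illustrative sketch (editorial, simplified assembly): the fusion above
   rewrites a load / bit-op / store triple such as

     mov.b  [r1], r2       ; op2_def.insn
     bset   r3, r2         ; curr_insn
     mov.b  r2, [r1]       ; op0_use

   into the single read-modify-write form

     bset   r3, [r1].b

   provided r2 is dead afterwards and no call or volatile access sits
   between the load and the store.  */
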
3625 /* Implement TARGET_HARD_REGNO_NREGS. */
3626
3627 static unsigned int
3628 rx_hard_regno_nregs (unsigned int, machine_mode mode)
3629 {
3630 return CLASS_MAX_NREGS (0, mode);
3631 }
3632
3633 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3634
3635 static bool
3636 rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3637 {
3638 return REGNO_REG_CLASS (regno) == GR_REGS;
3639 }
3640
3641 /* Implement TARGET_MODES_TIEABLE_P. */
3642
3643 static bool
3644 rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3645 {
3646 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
3647 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
3648 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
3649 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
3650 }
3651 \f
3652 #undef TARGET_NARROW_VOLATILE_BITFIELD
3653 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3654
3655 #undef TARGET_CAN_INLINE_P
3656 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3657
3658 #undef TARGET_FUNCTION_VALUE
3659 #define TARGET_FUNCTION_VALUE rx_function_value
3660
3661 #undef TARGET_RETURN_IN_MSB
3662 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3663
3664 #undef TARGET_IN_SMALL_DATA_P
3665 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3666
3667 #undef TARGET_RETURN_IN_MEMORY
3668 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3669
3670 #undef TARGET_HAVE_SRODATA_SECTION
3671 #define TARGET_HAVE_SRODATA_SECTION true
3672
3673 #undef TARGET_ASM_SELECT_RTX_SECTION
3674 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3675
3676 #undef TARGET_ASM_SELECT_SECTION
3677 #define TARGET_ASM_SELECT_SECTION rx_select_section
3678
3679 #undef TARGET_INIT_BUILTINS
3680 #define TARGET_INIT_BUILTINS rx_init_builtins
3681
3682 #undef TARGET_BUILTIN_DECL
3683 #define TARGET_BUILTIN_DECL rx_builtin_decl
3684
3685 #undef TARGET_EXPAND_BUILTIN
3686 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3687
3688 #undef TARGET_ASM_CONSTRUCTOR
3689 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3690
3691 #undef TARGET_ASM_DESTRUCTOR
3692 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3693
3694 #undef TARGET_STRUCT_VALUE_RTX
3695 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3696
3697 #undef TARGET_ATTRIBUTE_TABLE
3698 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3699
3700 #undef TARGET_ASM_FILE_START
3701 #define TARGET_ASM_FILE_START rx_file_start
3702
3703 #undef TARGET_MS_BITFIELD_LAYOUT_P
3704 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3705
3706 #undef TARGET_LEGITIMATE_ADDRESS_P
3707 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3708
3709 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3710 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3711
3712 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3713 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3714
3715 #undef TARGET_ASM_FUNCTION_PROLOGUE
3716 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3717
3718 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3719 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3720
3721 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3722 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3723
3724 #undef TARGET_FUNCTION_ARG
3725 #define TARGET_FUNCTION_ARG rx_function_arg
3726
3727 #undef TARGET_FUNCTION_ARG_ADVANCE
3728 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3729
3730 #undef TARGET_FUNCTION_ARG_BOUNDARY
3731 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3732
3733 #undef TARGET_SET_CURRENT_FUNCTION
3734 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3735
3736 #undef TARGET_ASM_INTEGER
3737 #define TARGET_ASM_INTEGER rx_assemble_integer
3738
3739 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3740 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3741
3742 #undef TARGET_MAX_ANCHOR_OFFSET
3743 #define TARGET_MAX_ANCHOR_OFFSET 32
3744
3745 #undef TARGET_ADDRESS_COST
3746 #define TARGET_ADDRESS_COST rx_address_cost
3747
3748 #undef TARGET_CAN_ELIMINATE
3749 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3750
3751 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3752 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3753
3754 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3755 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3756
3757 #undef TARGET_TRAMPOLINE_INIT
3758 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3759
3760 #undef TARGET_PRINT_OPERAND
3761 #define TARGET_PRINT_OPERAND rx_print_operand
3762
3763 #undef TARGET_PRINT_OPERAND_ADDRESS
3764 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3765
3766 #undef TARGET_CC_MODES_COMPATIBLE
3767 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3768
3769 #undef TARGET_MEMORY_MOVE_COST
3770 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3771
3772 #undef TARGET_OPTION_OVERRIDE
3773 #define TARGET_OPTION_OVERRIDE rx_option_override
3774
3775 #undef TARGET_PROMOTE_FUNCTION_MODE
3776 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3777
3778 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3779 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3780
3781 #undef TARGET_FLAGS_REGNUM
3782 #define TARGET_FLAGS_REGNUM CC_REG
3783
3784 #undef TARGET_LEGITIMATE_CONSTANT_P
3785 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3786
3787 #undef TARGET_LEGITIMIZE_ADDRESS
3788 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3789
3790 #undef TARGET_WARN_FUNC_RETURN
3791 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3792
3793 #undef TARGET_LRA_P
3794 #define TARGET_LRA_P rx_enable_lra
3795
3796 #undef TARGET_HARD_REGNO_NREGS
3797 #define TARGET_HARD_REGNO_NREGS rx_hard_regno_nregs
3798 #undef TARGET_HARD_REGNO_MODE_OK
3799 #define TARGET_HARD_REGNO_MODE_OK rx_hard_regno_mode_ok
3800
3801 #undef TARGET_MODES_TIEABLE_P
3802 #define TARGET_MODES_TIEABLE_P rx_modes_tieable_p
3803
3804 #undef TARGET_RTX_COSTS
3805 #define TARGET_RTX_COSTS rx_rtx_costs
3806
3807 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
3808 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
3809
3810 struct gcc_target targetm = TARGET_INITIALIZER;
3811
3812 #include "gt-rx.h"