1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "toplev.h"
45 #include "reload.h"
46 #include "df.h"
47 #include "ggc.h"
48 #include "tm_p.h"
49 #include "debug.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "langhooks.h"
53 #include "opts.h"
54 \f
55 static void rx_print_operand (FILE *, rtx, int);
56
57 #define CC_FLAG_S (1 << 0)
58 #define CC_FLAG_Z (1 << 1)
59 #define CC_FLAG_O (1 << 2)
60 #define CC_FLAG_C (1 << 3)
61 #define CC_FLAG_FP (1 << 4) /* fake, to differentiate CC_Fmode */
62
63 static unsigned int flags_from_mode (enum machine_mode mode);
64 static unsigned int flags_from_code (enum rtx_code code);
65 \f
66 /* Return true if OP is a reference to an object in a small data area. */
67
68 static bool
69 rx_small_data_operand (rtx op)
70 {
71 if (rx_small_data_limit == 0)
72 return false;
73
74 if (GET_CODE (op) == SYMBOL_REF)
75 return SYMBOL_REF_SMALL_P (op);
76
77 return false;
78 }
79
80 static bool
81 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
82 {
83 if (RTX_OK_FOR_BASE (x, strict))
84 /* Register Indirect. */
85 return true;
86
87 if (GET_MODE_SIZE (mode) <= 4
88 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
89 /* Pre-decrement Register Indirect or
90 Post-increment Register Indirect. */
91 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
92
93 if (GET_CODE (x) == PLUS)
94 {
95 rtx arg1 = XEXP (x, 0);
96 rtx arg2 = XEXP (x, 1);
97 rtx index = NULL_RTX;
98
99 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
100 index = arg2;
101 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
102 index = arg1;
103 else
104 return false;
105
106 switch (GET_CODE (index))
107 {
108 case CONST_INT:
109 {
110 /* Register Relative: REG + INT.
111 Only positive, mode-aligned, mode-sized
112 displacements are allowed. */
113 HOST_WIDE_INT val = INTVAL (index);
114 int factor;
115
116 if (val < 0)
117 return false;
118
119 switch (GET_MODE_SIZE (mode))
120 {
121 default:
122 case 4: factor = 4; break;
123 case 2: factor = 2; break;
124 case 1: factor = 1; break;
125 }
126
127 if (val >= (0x10000 * factor))
128 return false;
129 return (val % factor) == 0;
130 }
131
132 case REG:
133 /* Unscaled Indexed Register Indirect: REG + REG
134 Size has to be "QI", REG has to be valid. */
135 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
136
137 case MULT:
138 {
139 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
140 Factor has to equal the mode size, REG has to be valid. */
141 rtx factor;
142
143 factor = XEXP (index, 1);
144 index = XEXP (index, 0);
145
146 return REG_P (index)
147 && RTX_OK_FOR_BASE (index, strict)
148 && CONST_INT_P (factor)
149 && GET_MODE_SIZE (mode) == INTVAL (factor);
150 }
151
152 default:
153 return false;
154 }
155 }
156
157 /* Small data area accesses turn into register relative offsets. */
158 return rx_small_data_operand (x);
159 }
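/* [Editorial example]  A minimal standalone sketch of the REG + INT
   displacement rule above, assuming only that displacements scale with
   the access size: the offset must be non-negative, a multiple of the
   mode size, and fit in 16 bits once scaled.  Guarded out so it does
   not affect the build.  */
#if 0
static int
example_valid_displacement (long val, unsigned int mode_size)
{
  /* Sizes other than 1 and 2 share the 4-byte scaling, mirroring the
     switch in rx_is_legitimate_address.  */
  unsigned int factor = (mode_size == 1) ? 1 : (mode_size == 2) ? 2 : 4;

  if (val < 0 || val >= 0x10000L * factor)
    return 0;
  return (val % factor) == 0;	/* e.g. 8[r1].L is OK, 6[r1].L is not.  */
}
#endif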
160
161 /* Returns TRUE for simple memory addresses, i.e. ones
162    that do not involve register indexed addressing
163 or pre/post increment/decrement. */
164
165 bool
166 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
167 {
168 if (! rx_is_legitimate_address
169 (mode, mem, reload_in_progress || reload_completed))
170 return false;
171
172 switch (GET_CODE (mem))
173 {
174 case REG:
175 /* Simple memory addresses are OK. */
176 return true;
177
178 case PRE_DEC:
179 case POST_INC:
180 return false;
181
182 case PLUS:
183 {
184 rtx base, index;
185
186 /* Only allow REG+INT addressing. */
187 base = XEXP (mem, 0);
188 index = XEXP (mem, 1);
189
190 if (! RX_REG_P (base) || ! CONST_INT_P (index))
191 return false;
192
193 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
194 }
195
196 case SYMBOL_REF:
197 /* Can happen when small data is being supported.
198 Assume that it will be resolved into GP+INT. */
199 return true;
200
201 default:
202 gcc_unreachable ();
203 }
204 }
205
206 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
207
208 static bool
209 rx_mode_dependent_address_p (const_rtx addr)
210 {
211 if (GET_CODE (addr) == CONST)
212 addr = XEXP (addr, 0);
213
214 switch (GET_CODE (addr))
215 {
216 /* --REG and REG++ only work in SImode. */
217 case PRE_DEC:
218 case POST_INC:
219 return true;
220
221 case MINUS:
222 case PLUS:
223 if (! REG_P (XEXP (addr, 0)))
224 return true;
225
226 addr = XEXP (addr, 1);
227
228 switch (GET_CODE (addr))
229 {
230 case REG:
231 /* REG+REG only works in SImode. */
232 return true;
233
234 case CONST_INT:
235 /* REG+INT is only mode independent if INT is a
236 multiple of 4, positive and will fit into 8-bits. */
237 if (((INTVAL (addr) & 3) == 0)
238 && IN_RANGE (INTVAL (addr), 4, 252))
239 return false;
240 return true;
241
242 case SYMBOL_REF:
243 case LABEL_REF:
244 return true;
245
246 case MULT:
247 gcc_assert (REG_P (XEXP (addr, 0)));
248 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
249 /* REG+REG*SCALE is always mode dependent. */
250 return true;
251
252 default:
253 /* Not recognized, so treat as mode dependent. */
254 return true;
255 }
256
257 case CONST_INT:
258 case SYMBOL_REF:
259 case LABEL_REF:
260 case REG:
261 /* These are all mode independent. */
262 return false;
263
264 default:
265 /* Everything else is unrecognized,
266 so treat as mode dependent. */
267 return true;
268 }
269 }
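/* [Editorial example]  The REG + INT case above reduces to a numeric
   test: only positive multiples of 4 up to 252 survive a mode change.
   A standalone sketch (not built):  */
#if 0
static int
example_offset_is_mode_independent (long val)
{
  /* Multiple of 4, and within the short displacement window [4, 252].  */
  return (val & 3) == 0 && val >= 4 && val <= 252;
}
#endif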
270 \f
271 /* A C compound statement to output to stdio stream FILE the
272 assembler syntax for an instruction operand that is a memory
273 reference whose address is ADDR. */
274
275 static void
276 rx_print_operand_address (FILE * file, rtx addr)
277 {
278 switch (GET_CODE (addr))
279 {
280 case REG:
281 fprintf (file, "[");
282 rx_print_operand (file, addr, 0);
283 fprintf (file, "]");
284 break;
285
286 case PRE_DEC:
287 fprintf (file, "[-");
288 rx_print_operand (file, XEXP (addr, 0), 0);
289 fprintf (file, "]");
290 break;
291
292 case POST_INC:
293 fprintf (file, "[");
294 rx_print_operand (file, XEXP (addr, 0), 0);
295 fprintf (file, "+]");
296 break;
297
298 case PLUS:
299 {
300 rtx arg1 = XEXP (addr, 0);
301 rtx arg2 = XEXP (addr, 1);
302 rtx base, index;
303
304 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
305 base = arg1, index = arg2;
306 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
307 base = arg2, index = arg1;
308 else
309 {
310 rx_print_operand (file, arg1, 0);
311 fprintf (file, " + ");
312 rx_print_operand (file, arg2, 0);
313 break;
314 }
315
316 if (REG_P (index) || GET_CODE (index) == MULT)
317 {
318 fprintf (file, "[");
319 rx_print_operand (file, index, 'A');
320 fprintf (file, ",");
321 }
322 else /* GET_CODE (index) == CONST_INT */
323 {
324 rx_print_operand (file, index, 'A');
325 fprintf (file, "[");
326 }
327 rx_print_operand (file, base, 0);
328 fprintf (file, "]");
329 break;
330 }
331
332 case CONST:
333 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
334 {
335 addr = XEXP (addr, 0);
336 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
337
338 addr = XVECEXP (addr, 0, 0);
339 gcc_assert (CONST_INT_P (addr));
340 }
341 /* Fall through. */
342 case LABEL_REF:
343 case SYMBOL_REF:
344 fprintf (file, "#");
345
346 default:
347 output_addr_const (file, addr);
348 break;
349 }
350 }
351
352 static void
353 rx_print_integer (FILE * file, HOST_WIDE_INT val)
354 {
355 if (IN_RANGE (val, -64, 64))
356 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
357 else
358 fprintf (file,
359 TARGET_AS100_SYNTAX
360 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
361 val);
362 }
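/* [Editorial note]  Sample renderings from the function above: 42 and
   -7 fall inside the [-64, 64] window and print in decimal, while 1000
   prints as "0x3e8" for GAS, or "03e8H" under the Renesas AS100
   syntax.  */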
363
364 static bool
365 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
366 {
367 const char * op = integer_asm_op (size, is_aligned);
368
369 if (! CONST_INT_P (x))
370 return default_assemble_integer (x, size, is_aligned);
371
372 if (op == NULL)
373 return false;
374 fputs (op, asm_out_file);
375
376 rx_print_integer (asm_out_file, INTVAL (x));
377 fputc ('\n', asm_out_file);
378 return true;
379 }
380
381
382 /* Handles the insertion of a single operand into the assembler output.
383 The %<letter> directives supported are:
384
385 %A Print an operand without a leading # character.
386 %B Print an integer comparison name.
387 %C Print a control register name.
388 %F Print a condition code flag name.
389 %H Print high part of a DImode register, integer or address.
390 %L Print low part of a DImode register, integer or address.
391 %N Print the negation of the immediate value.
392 %Q If the operand is a MEM, then correctly generate
393 register indirect or register relative addressing.
394 %R Like %Q but for zero-extending loads. */
395
396 static void
397 rx_print_operand (FILE * file, rtx op, int letter)
398 {
399 bool unsigned_load = false;
400
401 switch (letter)
402 {
403 case 'A':
404 /* Print an operand without a leading #. */
405 if (MEM_P (op))
406 op = XEXP (op, 0);
407
408 switch (GET_CODE (op))
409 {
410 case LABEL_REF:
411 case SYMBOL_REF:
412 output_addr_const (file, op);
413 break;
414 case CONST_INT:
415 fprintf (file, "%ld", (long) INTVAL (op));
416 break;
417 default:
418 rx_print_operand (file, op, 0);
419 break;
420 }
421 break;
422
423 case 'B':
424 {
425 enum rtx_code code = GET_CODE (op);
426 enum machine_mode mode = GET_MODE (XEXP (op, 0));
427 const char *ret;
428
429 if (mode == CC_Fmode)
430 {
431 /* C flag is undefined, and O flag carries unordered. None of the
432 branch combinations that include O use it helpfully. */
433 switch (code)
434 {
435 case ORDERED:
436 ret = "no";
437 break;
438 case UNORDERED:
439 ret = "o";
440 break;
441 case LT:
442 ret = "n";
443 break;
444 case GE:
445 ret = "pz";
446 break;
447 case EQ:
448 ret = "eq";
449 break;
450 case NE:
451 ret = "ne";
452 break;
453 default:
454 gcc_unreachable ();
455 }
456 }
457 else
458 {
459 unsigned int flags = flags_from_mode (mode);
460
461 switch (code)
462 {
463 case LT:
464 ret = (flags & CC_FLAG_O ? "lt" : "n");
465 break;
466 case GE:
467 ret = (flags & CC_FLAG_O ? "ge" : "pz");
468 break;
469 case GT:
470 ret = "gt";
471 break;
472 case LE:
473 ret = "le";
474 break;
475 case GEU:
476 ret = "geu";
477 break;
478 case LTU:
479 ret = "ltu";
480 break;
481 case GTU:
482 ret = "gtu";
483 break;
484 case LEU:
485 ret = "leu";
486 break;
487 case EQ:
488 ret = "eq";
489 break;
490 case NE:
491 ret = "ne";
492 break;
493 default:
494 gcc_unreachable ();
495 }
496 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
497 }
498 fputs (ret, file);
499 break;
500 }
501
502 case 'C':
503 gcc_assert (CONST_INT_P (op));
504 switch (INTVAL (op))
505 {
506 case 0: fprintf (file, "psw"); break;
507 case 2: fprintf (file, "usp"); break;
508 case 3: fprintf (file, "fpsw"); break;
509 case 4: fprintf (file, "cpen"); break;
510 case 8: fprintf (file, "bpsw"); break;
511 case 9: fprintf (file, "bpc"); break;
512 case 0xa: fprintf (file, "isp"); break;
513 case 0xb: fprintf (file, "fintv"); break;
514 case 0xc: fprintf (file, "intb"); break;
515 default:
516 warning (0, "unreocgnized control register number: %d - using 'psw'",
517 (int) INTVAL (op));
518 fprintf (file, "psw");
519 break;
520 }
521 break;
522
523 case 'F':
524 gcc_assert (CONST_INT_P (op));
525 switch (INTVAL (op))
526 {
527 case 0: case 'c': case 'C': fprintf (file, "C"); break;
528 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
529 case 2: case 's': case 'S': fprintf (file, "S"); break;
530 case 3: case 'o': case 'O': fprintf (file, "O"); break;
531 case 8: case 'i': case 'I': fprintf (file, "I"); break;
532 case 9: case 'u': case 'U': fprintf (file, "U"); break;
533 default:
534 gcc_unreachable ();
535 }
536 break;
537
538 case 'H':
539 switch (GET_CODE (op))
540 {
541 case REG:
542 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
543 break;
544 case CONST_INT:
545 {
546 HOST_WIDE_INT v = INTVAL (op);
547
548 fprintf (file, "#");
549 /* Trickery to avoid problems with shifting 32 bits at a time. */
550 v = v >> 16;
551 v = v >> 16;
552 rx_print_integer (file, v);
553 break;
554 }
555 case CONST_DOUBLE:
556 fprintf (file, "#");
557 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
558 break;
559 case MEM:
560 if (! WORDS_BIG_ENDIAN)
561 op = adjust_address (op, SImode, 4);
562 output_address (XEXP (op, 0));
563 break;
564 default:
565 gcc_unreachable ();
566 }
567 break;
568
569 case 'L':
570 switch (GET_CODE (op))
571 {
572 case REG:
573 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
574 break;
575 case CONST_INT:
576 fprintf (file, "#");
577 rx_print_integer (file, INTVAL (op) & 0xffffffff);
578 break;
579 case CONST_DOUBLE:
580 fprintf (file, "#");
581 rx_print_integer (file, CONST_DOUBLE_LOW (op));
582 break;
583 case MEM:
584 if (WORDS_BIG_ENDIAN)
585 op = adjust_address (op, SImode, 4);
586 output_address (XEXP (op, 0));
587 break;
588 default:
589 gcc_unreachable ();
590 }
591 break;
592
593 case 'N':
594 gcc_assert (CONST_INT_P (op));
595 fprintf (file, "#");
596 rx_print_integer (file, - INTVAL (op));
597 break;
598
599 case 'R':
600 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
601 unsigned_load = true;
602 /* Fall through. */
603 case 'Q':
604 if (MEM_P (op))
605 {
606 HOST_WIDE_INT offset;
607 rtx mem = op;
608
609 op = XEXP (op, 0);
610
611 if (REG_P (op))
612 offset = 0;
613 else if (GET_CODE (op) == PLUS)
614 {
615 rtx displacement;
616
617 if (REG_P (XEXP (op, 0)))
618 {
619 displacement = XEXP (op, 1);
620 op = XEXP (op, 0);
621 }
622 else
623 {
624 displacement = XEXP (op, 0);
625 op = XEXP (op, 1);
626 gcc_assert (REG_P (op));
627 }
628
629 gcc_assert (CONST_INT_P (displacement));
630 offset = INTVAL (displacement);
631 gcc_assert (offset >= 0);
632
633 fprintf (file, "%ld", offset);
634 }
635 else
636 gcc_unreachable ();
637
638 fprintf (file, "[");
639 rx_print_operand (file, op, 0);
640 fprintf (file, "].");
641
642 switch (GET_MODE_SIZE (GET_MODE (mem)))
643 {
644 case 1:
645 gcc_assert (offset <= 65535 * 1);
646 fprintf (file, unsigned_load ? "UB" : "B");
647 break;
648 case 2:
649 gcc_assert (offset % 2 == 0);
650 gcc_assert (offset <= 65535 * 2);
651 fprintf (file, unsigned_load ? "UW" : "W");
652 break;
653 case 4:
654 gcc_assert (offset % 4 == 0);
655 gcc_assert (offset <= 65535 * 4);
656 fprintf (file, "L");
657 break;
658 default:
659 gcc_unreachable ();
660 }
661 break;
662 }
663
664 /* Fall through. */
665
666 default:
667 switch (GET_CODE (op))
668 {
669 case MULT:
670 /* Should be the scaled part of an
671 indexed register indirect address. */
672 {
673 rtx base = XEXP (op, 0);
674 rtx index = XEXP (op, 1);
675
676 /* Check for a swapped index register and scaling factor.
677 Not sure if this can happen, but be prepared to handle it. */
678 if (CONST_INT_P (base) && REG_P (index))
679 {
680 rtx tmp = base;
681 base = index;
682 index = tmp;
683 }
684
685 gcc_assert (REG_P (base));
686 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
687 gcc_assert (CONST_INT_P (index));
688 /* Do not try to verify the value of the scalar as it is based
689 on the mode of the MEM not the mode of the MULT. (Which
690 will always be SImode). */
691 fprintf (file, "%s", reg_names [REGNO (base)]);
692 break;
693 }
694
695 case MEM:
696 output_address (XEXP (op, 0));
697 break;
698
699 case PLUS:
700 output_address (op);
701 break;
702
703 case REG:
704 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
705 fprintf (file, "%s", reg_names [REGNO (op)]);
706 break;
707
708 case SUBREG:
709 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
710 fprintf (file, "%s", reg_names [subreg_regno (op)]);
711 break;
712
713 /* This will only be single precision.... */
714 case CONST_DOUBLE:
715 {
716 unsigned long val;
717 REAL_VALUE_TYPE rv;
718
719 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
720 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
721 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
722 break;
723 }
724
725 case CONST_INT:
726 fprintf (file, "#");
727 rx_print_integer (file, INTVAL (op));
728 break;
729
730 case SYMBOL_REF:
731 case CONST:
732 case LABEL_REF:
733 case CODE_LABEL:
734 case UNSPEC:
735 rx_print_operand_address (file, op);
736 break;
737
738 default:
739 gcc_unreachable ();
740 }
741 break;
742 }
743 }
744
745 /* Returns an assembler template for a move instruction. */
746
747 char *
748 rx_gen_move_template (rtx * operands, bool is_movu)
749 {
750 static char out_template [64];
751 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
752 const char * src_template;
753 const char * dst_template;
754 rtx dest = operands[0];
755 rtx src = operands[1];
756
757 /* Decide which extension, if any, should be given to the move instruction. */
758 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
759 {
760 case QImode:
761 /* The .B extension is not valid when
762 loading an immediate into a register. */
763 if (! REG_P (dest) || ! CONST_INT_P (src))
764 extension = ".B";
765 break;
766 case HImode:
767 if (! REG_P (dest) || ! CONST_INT_P (src))
768 /* The .W extension is not valid when
769 loading an immediate into a register. */
770 extension = ".W";
771 break;
772 case SFmode:
773 case SImode:
774 extension = ".L";
775 break;
776 case VOIDmode:
777 /* This mode is used by constants. */
778 break;
779 default:
780 debug_rtx (src);
781 gcc_unreachable ();
782 }
783
784 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
785 src_template = "%%gp(%A1)[r13]";
786 else
787 src_template = "%1";
788
789 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
790 dst_template = "%%gp(%A0)[r13]";
791 else
792 dst_template = "%0";
793
794 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
795 extension, src_template, dst_template);
796 return out_template;
797 }
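/* [Editorial note]  Example templates produced above (GAS syntax): an
   SImode register-to-register move yields "mov.L\t%1, %0", a QImode
   memory load yields "mov.B\t%1, %0", and a small-data source becomes
   "mov.L\t%gp(%A1)[r13], %0" after the sprintf collapses the "%%".
   Byte and word immediates loaded into a register get no extension.  */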
798 \f
799 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
800
801 static inline unsigned int
802 rx_round_up (unsigned int value, unsigned int alignment)
803 {
804 alignment -= 1;
805 return (value + alignment) & (~ alignment);
806 }
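/* [Editorial note]  The mask trick above requires ALIGNMENT to be a
   power of two: adding (alignment - 1) and clearing the low bits gives,
   e.g., rx_round_up (13, 4) == 16 and rx_round_up (16, 4) == 16.  */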
807
808 /* Return the number of bytes in the argument registers
809 occupied by an argument of type TYPE and mode MODE. */
810
811 static unsigned int
812 rx_function_arg_size (Mmode mode, const_tree type)
813 {
814 unsigned int num_bytes;
815
816 num_bytes = (mode == BLKmode)
817 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
818 return rx_round_up (num_bytes, UNITS_PER_WORD);
819 }
820
821 #define NUM_ARG_REGS 4
822 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
823
824 /* Return an RTL expression describing the register holding a function
825 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
826 be passed on the stack. CUM describes the previous parameters to the
827 function and NAMED is false if the parameter is part of a variable
828 parameter list, or the last named parameter before the start of a
829 variable parameter list. */
830
831 static rtx
832 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
833 {
834 unsigned int next_reg;
835 unsigned int bytes_so_far = *cum;
836 unsigned int size;
837 unsigned int rounded_size;
838
839 /* An exploded version of rx_function_arg_size. */
840 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
841 /* If the size is not known it cannot be passed in registers. */
842 if (size < 1)
843 return NULL_RTX;
844
845 rounded_size = rx_round_up (size, UNITS_PER_WORD);
846
847 /* Don't pass this arg via registers if there
848 are insufficient registers to hold all of it. */
849 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
850 return NULL_RTX;
851
852 /* Unnamed arguments and the last named argument in a
853 variadic function are always passed on the stack. */
854 if (!named)
855 return NULL_RTX;
856
857 /* Structures must occupy an exact number of registers,
858 otherwise they are passed on the stack. */
859 if ((type == NULL || AGGREGATE_TYPE_P (type))
860 && (size % UNITS_PER_WORD) != 0)
861 return NULL_RTX;
862
863 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
864
865 return gen_rtx_REG (mode, next_reg);
866 }
867
868 static void
869 rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type,
870 bool named ATTRIBUTE_UNUSED)
871 {
872 *cum += rx_function_arg_size (mode, type);
873 }
874
875 static unsigned int
876 rx_function_arg_boundary (Mmode mode ATTRIBUTE_UNUSED,
877 const_tree type ATTRIBUTE_UNUSED)
878 {
879 return 32;
880 }
881
882 /* Return an RTL describing where a function return value of type RET_TYPE
883 is held. */
884
885 static rtx
886 rx_function_value (const_tree ret_type,
887 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
888 bool outgoing ATTRIBUTE_UNUSED)
889 {
890 enum machine_mode mode = TYPE_MODE (ret_type);
891
892 /* RX ABI specifies that small integer types are
893 promoted to int when returned by a function. */
894 if (GET_MODE_SIZE (mode) > 0
895 && GET_MODE_SIZE (mode) < 4
896 && ! COMPLEX_MODE_P (mode)
897 )
898 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
899
900 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
901 }
902
903 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
904 regard to function returns as does TARGET_FUNCTION_VALUE. */
905
906 static enum machine_mode
907 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
908 enum machine_mode mode,
909 int * punsignedp ATTRIBUTE_UNUSED,
910 const_tree funtype ATTRIBUTE_UNUSED,
911 int for_return)
912 {
913 if (for_return != 1
914 || GET_MODE_SIZE (mode) >= 4
915 || COMPLEX_MODE_P (mode)
916 || GET_MODE_SIZE (mode) < 1)
917 return mode;
918
919 return SImode;
920 }
921
922 static bool
923 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
924 {
925 HOST_WIDE_INT size;
926
927 if (TYPE_MODE (type) != BLKmode
928 && ! AGGREGATE_TYPE_P (type))
929 return false;
930
931 size = int_size_in_bytes (type);
932 /* Large structs and those whose size is not an
933 exact multiple of 4 are returned in memory. */
934 return size < 1
935 || size > 16
936 || (size % UNITS_PER_WORD) != 0;
937 }
938
939 static rtx
940 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
941 int incoming ATTRIBUTE_UNUSED)
942 {
943 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
944 }
945
946 static bool
947 rx_return_in_msb (const_tree valtype)
948 {
949 return TARGET_BIG_ENDIAN_DATA
950 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
951 }
952
953 /* Returns true if the provided function has the specified attribute. */
954
955 static inline bool
956 has_func_attr (const_tree decl, const char * func_attr)
957 {
958 if (decl == NULL_TREE)
959 decl = current_function_decl;
960
961 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
962 }
963
964 /* Returns true if the provided function has the "fast_interrupt" attribute. */
965
966 static inline bool
967 is_fast_interrupt_func (const_tree decl)
968 {
969 return has_func_attr (decl, "fast_interrupt");
970 }
971
972 /* Returns true if the provided function has the "interrupt" attribute. */
973
974 static inline bool
975 is_interrupt_func (const_tree decl)
976 {
977 return has_func_attr (decl, "interrupt");
978 }
979
980 /* Returns true if the provided function has the "naked" attribute. */
981
982 static inline bool
983 is_naked_func (const_tree decl)
984 {
985 return has_func_attr (decl, "naked");
986 }
987 \f
988 static bool use_fixed_regs = false;
989
990 static void
991 rx_conditional_register_usage (void)
992 {
993 static bool using_fixed_regs = false;
994
995 if (rx_small_data_limit > 0)
996 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
997
998 if (use_fixed_regs != using_fixed_regs)
999 {
1000 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1001 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1002
1003 if (use_fixed_regs)
1004 {
1005 unsigned int r;
1006
1007 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1008 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1009
1010 /* This is for fast interrupt handlers. Any register in
1011 the range r10 to r13 (inclusive) that is currently
1012 marked as fixed is now a viable, call-used register. */
1013 for (r = 10; r <= 13; r++)
1014 if (fixed_regs[r])
1015 {
1016 fixed_regs[r] = 0;
1017 call_used_regs[r] = 1;
1018 }
1019
1020 /* Mark r7 as fixed. This is just a hack to avoid
1021 altering the reg_alloc_order array so that the newly
1022 freed r10-r13 registers are the preferred registers. */
1023 fixed_regs[7] = call_used_regs[7] = 1;
1024 }
1025 else
1026 {
1027 /* Restore the normal register masks. */
1028 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1029 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1030 }
1031
1032 using_fixed_regs = use_fixed_regs;
1033 }
1034 }
1035
1036 /* Perform any actions necessary before starting to compile FNDECL.
1037 For the RX we use this to make sure that we have the correct
1038 set of register masks selected. If FNDECL is NULL then we are
1039 compiling top level things. */
1040
1041 static void
1042 rx_set_current_function (tree fndecl)
1043 {
1044 /* Remember the last target of rx_set_current_function. */
1045 static tree rx_previous_fndecl;
1046 bool prev_was_fast_interrupt;
1047 bool current_is_fast_interrupt;
1048
1049 /* Only change the context if the function changes. This hook is called
1050 several times in the course of compiling a function, and we don't want
1051 to slow things down too much or call target_reinit when it isn't safe. */
1052 if (fndecl == rx_previous_fndecl)
1053 return;
1054
1055 prev_was_fast_interrupt
1056 = rx_previous_fndecl
1057 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1058
1059 current_is_fast_interrupt
1060 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1061
1062 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1063 {
1064 use_fixed_regs = current_is_fast_interrupt;
1065 target_reinit ();
1066 }
1067
1068 rx_previous_fndecl = fndecl;
1069 }
1070 \f
1071 /* Typical stack layout should look like this after the function's prologue:
1072
1073 | |
1074 -- ^
1075 | | \ |
1076 | | arguments saved | Increasing
1077 | | on the stack | addresses
1078 PARENT arg pointer -> | | /
1079 -------------------------- ---- -------------------
1080 CHILD |ret | return address
1081 --
1082 | | \
1083 | | call saved
1084 | | registers
1085 | | /
1086 --
1087 | | \
1088 | | local
1089 | | variables
1090 frame pointer -> | | /
1091 --
1092 | | \
1093 | | outgoing | Decreasing
1094 | | arguments | addresses
1095 current stack pointer -> | | / |
1096 -------------------------- ---- ------------------ V
1097 | | */
1098
1099 static unsigned int
1100 bit_count (unsigned int x)
1101 {
1102 const unsigned int m1 = 0x55555555;
1103 const unsigned int m2 = 0x33333333;
1104 const unsigned int m4 = 0x0f0f0f0f;
1105
1106 x -= (x >> 1) & m1;
1107 x = (x & m2) + ((x >> 2) & m2);
1108 x = (x + (x >> 4)) & m4;
1109 x += x >> 8;
1110
1111 return (x + (x >> 16)) & 0x3f;
1112 }
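/* [Editorial example]  bit_count is a classic SWAR population count:
   it sums bits in pairs, then nibbles, then bytes.  A naive reference
   implementation that must agree with it, kept out of the build:  */
#if 0
static unsigned int
example_bit_count_ref (unsigned int x)
{
  unsigned int n = 0;
  while (x)
    {
      n += x & 1;		/* count the low bit...  */
      x >>= 1;			/* ...then discard it.  */
    }
  return n;			/* both give 3 for x == 0x29.  */
}
#endif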
1113
1114 #define MUST_SAVE_ACC_REGISTER \
1115 (TARGET_SAVE_ACC_REGISTER \
1116 && (is_interrupt_func (NULL_TREE) \
1117 || is_fast_interrupt_func (NULL_TREE)))
1118
1119 /* Returns either the lowest numbered and highest numbered registers that
1120 occupy the call-saved area of the stack frame, if the registers are
1121 stored as a contiguous block, or else a bitmask of the individual
1122 registers if they are stored piecemeal.
1123
1124 Also computes the size of the frame and the size of the outgoing
1125 arguments block (in bytes). */
1126
1127 static void
1128 rx_get_stack_layout (unsigned int * lowest,
1129 unsigned int * highest,
1130 unsigned int * register_mask,
1131 unsigned int * frame_size,
1132 unsigned int * stack_size)
1133 {
1134 unsigned int reg;
1135 unsigned int low;
1136 unsigned int high;
1137 unsigned int fixed_reg = 0;
1138 unsigned int save_mask;
1139 unsigned int pushed_mask;
1140 unsigned int unneeded_pushes;
1141
1142 if (is_naked_func (NULL_TREE))
1143 {
1144 /* Naked functions do not create their own stack frame.
1145 Instead the programmer must do that for us. */
1146 * lowest = 0;
1147 * highest = 0;
1148 * register_mask = 0;
1149 * frame_size = 0;
1150 * stack_size = 0;
1151 return;
1152 }
1153
1154 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1155 {
1156 if ((df_regs_ever_live_p (reg)
1157 /* Always save all call clobbered registers inside non-leaf
1158 interrupt handlers, even if they are not live - they may
1159 be used in (non-interrupt aware) routines called from this one. */
1160 || (call_used_regs[reg]
1161 && is_interrupt_func (NULL_TREE)
1162 && ! current_function_is_leaf))
1163 && (! call_used_regs[reg]
1164 /* Even call clobbered registers must
1165 be pushed inside interrupt handlers. */
1166 || is_interrupt_func (NULL_TREE)
1167 /* Likewise for fast interrupt handlers, except registers r10 -
1168 r13. These are normally call-saved, but may have been set
1169 to call-used by rx_conditional_register_usage. If so then
1170 they can be used in the fast interrupt handler without
1171 saving them on the stack. */
1172 || (is_fast_interrupt_func (NULL_TREE)
1173 && ! IN_RANGE (reg, 10, 13))))
1174 {
1175 if (low == 0)
1176 low = reg;
1177 high = reg;
1178
1179 save_mask |= 1 << reg;
1180 }
1181
1182 /* Remember if we see a fixed register
1183 after having found the low register. */
1184 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1185 fixed_reg = reg;
1186 }
1187
1188 /* If we have to save the accumulator register, make sure
1189 that at least two registers are pushed into the frame. */
1190 if (MUST_SAVE_ACC_REGISTER
1191 && bit_count (save_mask) < 2)
1192 {
1193 save_mask |= (1 << 13) | (1 << 14);
1194 if (low == 0)
1195 low = 13;
1196 if (high == 0 || low == high)
1197 high = low + 1;
1198 }
1199
1200 /* Decide if it would be faster to fill in the call-saved area of the stack
1201 frame using multiple PUSH instructions instead of a single PUSHM
1202 instruction.
1203
1204 SAVE_MASK is a bitmask of the registers that must be stored in the
1205 call-save area. PUSHED_MASK is a bitmask of the registers that would
1206 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1207 is a bitmask of those registers in pushed_mask that are not in
1208 save_mask.
1209
1210 We use a simple heuristic that says that it is better to use
1211 multiple PUSH instructions if the number of unnecessary pushes is
1212 greater than the number of necessary pushes.
1213
1214 We also use multiple PUSH instructions if there are any fixed registers
1215 between LOW and HIGH. The only way that this can happen is if the user
1216 has specified -ffixed-<reg-name> on the command line and in such
1217 circumstances we do not want to touch the fixed registers at all.
1218
1219 FIXME: Is it worth improving this heuristic ? */
1220 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1221 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1222
1223 if ((fixed_reg && fixed_reg <= high)
1224 || (optimize_function_for_speed_p (cfun)
1225 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1226 {
1227 /* Use multiple pushes. */
1228 * lowest = 0;
1229 * highest = 0;
1230 * register_mask = save_mask;
1231 }
1232 else
1233 {
1234 /* Use one push multiple instruction. */
1235 * lowest = low;
1236 * highest = high;
1237 * register_mask = 0;
1238 }
1239
1240 * frame_size = rx_round_up
1241 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1242
1243 if (crtl->args.size > 0)
1244 * frame_size += rx_round_up
1245 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1246
1247 * stack_size = rx_round_up
1248 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1249 }
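/* [Editorial note]  Worked example for the PUSH/PUSHM heuristic above,
   assuming no fixed registers in the range: with LOW = 6 and HIGH = 9,
   PUSHED_MASK = (-1 << 6) & ~(-1 << 10) = 0x3c0 (r6-r9).  If SAVE_MASK
   has only bits 6 and 9 set (0x240), UNNEEDED_PUSHES = 0x180 (r7 and
   r8); two needed saves versus two wasted pushes is not "greater
   than", so a single PUSHM is still chosen.  */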
1250
1251 /* Generate a PUSHM instruction that matches the given operands. */
1252
1253 void
1254 rx_emit_stack_pushm (rtx * operands)
1255 {
1256 HOST_WIDE_INT last_reg;
1257 rtx first_push;
1258
1259 gcc_assert (CONST_INT_P (operands[0]));
1260 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1261
1262 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1263 first_push = XVECEXP (operands[1], 0, 1);
1264 gcc_assert (SET_P (first_push));
1265 first_push = SET_SRC (first_push);
1266 gcc_assert (REG_P (first_push));
1267
1268 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1269 reg_names [REGNO (first_push) - last_reg],
1270 reg_names [REGNO (first_push)]);
1271 }
1272
1273 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1274
1275 static rtx
1276 gen_rx_store_vector (unsigned int low, unsigned int high)
1277 {
1278 unsigned int i;
1279 unsigned int count = (high - low) + 2;
1280 rtx vector;
1281
1282 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1283
1284 XVECEXP (vector, 0, 0) =
1285 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1286 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1287 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1288
1289 for (i = 0; i < count - 1; i++)
1290 XVECEXP (vector, 0, i + 1) =
1291 gen_rtx_SET (VOIDmode,
1292 gen_rtx_MEM (SImode,
1293 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1294 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1295 gen_rtx_REG (SImode, high - i));
1296 return vector;
1297 }
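/* [Editorial note]  For LOW = 6, HIGH = 8 the vector built above has
   four elements: SP = SP - 12, then (mem (SP - 4)) = r8,
   (mem (SP - 8)) = r7 and (mem (SP - 12)) = r6, matching what
   "pushm r6-r8" stores.  */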
1298
1299 /* Mark INSN as being frame related. If it is a PARALLEL
1300 then mark each element as being frame related as well. */
1301
1302 static void
1303 mark_frame_related (rtx insn)
1304 {
1305 RTX_FRAME_RELATED_P (insn) = 1;
1306 insn = PATTERN (insn);
1307
1308 if (GET_CODE (insn) == PARALLEL)
1309 {
1310 unsigned int i;
1311
1312 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1313 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1314 }
1315 }
1316
1317 static bool
1318 ok_for_max_constant (HOST_WIDE_INT val)
1319 {
1320 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1321 /* If there is no constraint on the size of constants
1322 used as operands, then any value is legitimate. */
1323 return true;
1324
1325 /* rx_max_constant_size specifies the maximum number
1326 of bytes that can be used to hold a signed value. */
1327 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1328 ( 1 << (rx_max_constant_size * 8)));
1329 }
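/* [Editorial note]  For -mmax-constant-size=1 the bounds above
   evaluate to (-1 << 8) = -256 and (1 << 8) = 256, so the inclusive
   range is [-256, 256]; note this is wider than the exact signed
   one-byte range of [-128, 127].  */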
1330
1331 /* Generate an ADD of SRC plus VAL into DEST.
1332 Handles the case where VAL is too big for max_constant_value.
1333 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1334
1335 static void
1336 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1337 {
1338 rtx insn;
1339
1340 if (val == NULL_RTX || INTVAL (val) == 0)
1341 {
1342 gcc_assert (dest != src);
1343
1344 insn = emit_move_insn (dest, src);
1345 }
1346 else if (ok_for_max_constant (INTVAL (val)))
1347 insn = emit_insn (gen_addsi3 (dest, src, val));
1348 else
1349 {
1350 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1351 will not reject it. */
1352 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1353 insn = emit_insn (gen_addsi3 (dest, src, val));
1354
1355 if (is_frame_related)
1356 /* We have to provide our own frame related note here
1357 as the dwarf2out code cannot be expected to grok
1358 our unspec. */
1359 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1360 gen_rtx_SET (SImode, dest,
1361 gen_rtx_PLUS (SImode, src, val)));
1362 return;
1363 }
1364
1365 if (is_frame_related)
1366 RTX_FRAME_RELATED_P (insn) = 1;
1367 return;
1368 }
1369
1370 void
1371 rx_expand_prologue (void)
1372 {
1373 unsigned int stack_size;
1374 unsigned int frame_size;
1375 unsigned int mask;
1376 unsigned int low;
1377 unsigned int high;
1378 unsigned int reg;
1379 rtx insn;
1380
1381 /* Naked functions use their own, programmer provided prologues. */
1382 if (is_naked_func (NULL_TREE))
1383 return;
1384
1385 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1386
1387 /* If we use any of the callee-saved registers, save them now. */
1388 if (mask)
1389 {
1390 /* Push registers in reverse order. */
1391 for (reg = CC_REGNUM; reg --;)
1392 if (mask & (1 << reg))
1393 {
1394 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1395 mark_frame_related (insn);
1396 }
1397 }
1398 else if (low)
1399 {
1400 if (high == low)
1401 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1402 else
1403 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1404 * UNITS_PER_WORD),
1405 gen_rx_store_vector (low, high)));
1406 mark_frame_related (insn);
1407 }
1408
1409 if (MUST_SAVE_ACC_REGISTER)
1410 {
1411 unsigned int acc_high, acc_low;
1412
1413 /* Interrupt handlers have to preserve the accumulator
1414 register if so requested by the user. Use the first
1415 two pushed registers as intermediaries. */
1416 if (mask)
1417 {
1418 acc_low = acc_high = 0;
1419
1420 for (reg = 1; reg < CC_REGNUM; reg ++)
1421 if (mask & (1 << reg))
1422 {
1423 if (acc_low == 0)
1424 acc_low = reg;
1425 else
1426 {
1427 acc_high = reg;
1428 break;
1429 }
1430 }
1431
1432 /* We have assumed that there are at least two registers pushed... */
1433 gcc_assert (acc_high != 0);
1434
1435 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1436 We just assume that they are zero. */
1437 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1438 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1439 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1440 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1441 }
1442 else
1443 {
1444 acc_low = low;
1445 acc_high = low + 1;
1446
1447 /* We have assumed that there are at least two registers pushed... */
1448 gcc_assert (acc_high <= high);
1449
1450 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1451 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1452 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1453 gen_rx_store_vector (acc_low, acc_high)));
1454 }
1455 }
1456
1457 /* If needed, set up the frame pointer. */
1458 if (frame_pointer_needed)
1459 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1460 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1461
1462 /* Allocate space for the outgoing args.
1463 If the stack frame has not already been set up then handle this as well. */
1464 if (stack_size)
1465 {
1466 if (frame_size)
1467 {
1468 if (frame_pointer_needed)
1469 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1470 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1471 else
1472 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1473 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1474 true);
1475 }
1476 else
1477 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1478 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1479 }
1480 else if (frame_size)
1481 {
1482 if (! frame_pointer_needed)
1483 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1484 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1485 else
1486 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1487 true);
1488 }
1489 }
1490
1491 static void
1492 rx_output_function_prologue (FILE * file,
1493 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1494 {
1495 if (is_fast_interrupt_func (NULL_TREE))
1496 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1497
1498 if (is_interrupt_func (NULL_TREE))
1499 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1500
1501 if (is_naked_func (NULL_TREE))
1502 asm_fprintf (file, "\t; Note: Naked Function\n");
1503
1504 if (cfun->static_chain_decl != NULL)
1505 asm_fprintf (file, "\t; Note: Nested function declared "
1506 "inside another function.\n");
1507
1508 if (crtl->calls_eh_return)
1509 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1510 }
1511
1512 /* Generate a POPM or RTSD instruction that matches the given operands. */
1513
1514 void
1515 rx_emit_stack_popm (rtx * operands, bool is_popm)
1516 {
1517 HOST_WIDE_INT stack_adjust;
1518 HOST_WIDE_INT last_reg;
1519 rtx first_push;
1520
1521 gcc_assert (CONST_INT_P (operands[0]));
1522 stack_adjust = INTVAL (operands[0]);
1523
1524 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1525 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1526
1527 first_push = XVECEXP (operands[1], 0, 1);
1528 gcc_assert (SET_P (first_push));
1529 first_push = SET_DEST (first_push);
1530 gcc_assert (REG_P (first_push));
1531
1532 if (is_popm)
1533 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1534 reg_names [REGNO (first_push)],
1535 reg_names [REGNO (first_push) + last_reg]);
1536 else
1537 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1538 (int) stack_adjust,
1539 reg_names [REGNO (first_push)],
1540 reg_names [REGNO (first_push) + last_reg]);
1541 }
1542
1543 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1544
1545 static rtx
1546 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1547 {
1548 unsigned int i;
1549 unsigned int bias = 3;
1550 unsigned int count = (high - low) + bias;
1551 rtx vector;
1552
1553 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1554
1555 XVECEXP (vector, 0, 0) =
1556 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1557 plus_constant (stack_pointer_rtx, adjust));
1558
1559 for (i = 0; i < count - 2; i++)
1560 XVECEXP (vector, 0, i + 1) =
1561 gen_rtx_SET (VOIDmode,
1562 gen_rtx_REG (SImode, low + i),
1563 gen_rtx_MEM (SImode,
1564 i == 0 ? stack_pointer_rtx
1565 : plus_constant (stack_pointer_rtx,
1566 i * UNITS_PER_WORD)));
1567
1568 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1569
1570 return vector;
1571 }
1572
1573 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1574
1575 static rtx
1576 gen_rx_popm_vector (unsigned int low, unsigned int high)
1577 {
1578 unsigned int i;
1579 unsigned int count = (high - low) + 2;
1580 rtx vector;
1581
1582 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1583
1584 XVECEXP (vector, 0, 0) =
1585 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1586 plus_constant (stack_pointer_rtx,
1587 (count - 1) * UNITS_PER_WORD));
1588
1589 for (i = 0; i < count - 1; i++)
1590 XVECEXP (vector, 0, i + 1) =
1591 gen_rtx_SET (VOIDmode,
1592 gen_rtx_REG (SImode, low + i),
1593 gen_rtx_MEM (SImode,
1594 i == 0 ? stack_pointer_rtx
1595 : plus_constant (stack_pointer_rtx,
1596 i * UNITS_PER_WORD)));
1597
1598 return vector;
1599 }
1600
1601 void
1602 rx_expand_epilogue (bool is_sibcall)
1603 {
1604 unsigned int low;
1605 unsigned int high;
1606 unsigned int frame_size;
1607 unsigned int stack_size;
1608 unsigned int register_mask;
1609 unsigned int regs_size;
1610 unsigned int reg;
1611 unsigned HOST_WIDE_INT total_size;
1612
1613 /* FIXME: We do not support indirect sibcalls at the moment because we
1614 cannot guarantee that the register holding the function address is a
1615 call-used register. If it is a call-saved register then the stack
1616 pop instructions generated in the epilogue will corrupt the address
1617 before it is used.
1618
1619 Creating a new call-used-only register class works but then the
1620 reload pass gets stuck because it cannot always find a call-used
1621 register for spilling sibcalls.
1622
1623 The other possible solution is for this pass to scan forward for the
1624 sibcall instruction (if it has been generated) and work out if it
1625 is an indirect sibcall using a call-saved register. If it is then
1626 the address can be copied into a call-used register in this epilogue
1627 code and the sibcall instruction modified to use that register. */
1628
1629 if (is_naked_func (NULL_TREE))
1630 {
1631 gcc_assert (! is_sibcall);
1632
1633 /* Naked functions use their own, programmer provided epilogues.
1634 But, in order to keep gcc happy we have to generate some kind of
1635 epilogue RTL. */
1636 emit_jump_insn (gen_naked_return ());
1637 return;
1638 }
1639
1640 rx_get_stack_layout (& low, & high, & register_mask,
1641 & frame_size, & stack_size);
1642
1643 total_size = frame_size + stack_size;
1644 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1645
1646 /* See if we are unable to use the special stack frame deconstruct and
1647 return instructions. In most cases we can use them, but the exceptions
1648 are:
1649
1650 - Sibling calling functions deconstruct the frame but do not return to
1651 their caller. Instead they branch to their sibling and allow their
1652 return instruction to return to this function's parent.
1653
1654 - Fast and normal interrupt handling functions have to use special
1655 return instructions.
1656
1657 - Functions where we have pushed a fragmented set of registers into the
1658 call-save area must have the same set of registers popped. */
1659 if (is_sibcall
1660 || is_fast_interrupt_func (NULL_TREE)
1661 || is_interrupt_func (NULL_TREE)
1662 || register_mask)
1663 {
1664 /* Cannot use the special instructions - deconstruct by hand. */
1665 if (total_size)
1666 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1667 GEN_INT (total_size), false);
1668
1669 if (MUST_SAVE_ACC_REGISTER)
1670 {
1671 unsigned int acc_low, acc_high;
1672
1673 /* Reverse the saving of the accumulator register onto the stack.
1674 Note we must adjust the saved "low" accumulator value as it
1675 is really the middle 32-bits of the accumulator. */
1676 if (register_mask)
1677 {
1678 acc_low = acc_high = 0;
1679
1680 for (reg = 1; reg < CC_REGNUM; reg ++)
1681 if (register_mask & (1 << reg))
1682 {
1683 if (acc_low == 0)
1684 acc_low = reg;
1685 else
1686 {
1687 acc_high = reg;
1688 break;
1689 }
1690 }
1691 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1692 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1693 }
1694 else
1695 {
1696 acc_low = low;
1697 acc_high = low + 1;
1698 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1699 gen_rx_popm_vector (acc_low, acc_high)));
1700 }
1701
1702 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1703 gen_rtx_REG (SImode, acc_low),
1704 GEN_INT (16)));
1705 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1706 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1707 }
1708
1709 if (register_mask)
1710 {
1711 for (reg = 0; reg < CC_REGNUM; reg ++)
1712 if (register_mask & (1 << reg))
1713 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1714 }
1715 else if (low)
1716 {
1717 if (high == low)
1718 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1719 else
1720 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1721 gen_rx_popm_vector (low, high)));
1722 }
1723
1724 if (is_fast_interrupt_func (NULL_TREE))
1725 {
1726 gcc_assert (! is_sibcall);
1727 emit_jump_insn (gen_fast_interrupt_return ());
1728 }
1729 else if (is_interrupt_func (NULL_TREE))
1730 {
1731 gcc_assert (! is_sibcall);
1732 emit_jump_insn (gen_exception_return ());
1733 }
1734 else if (! is_sibcall)
1735 emit_jump_insn (gen_simple_return ());
1736
1737 return;
1738 }
1739
1740 /* If we allocated space on the stack, free it now. */
1741 if (total_size)
1742 {
1743 unsigned HOST_WIDE_INT rtsd_size;
1744
1745 /* See if we can use the RTSD instruction. */
1746 rtsd_size = total_size + regs_size;
1747 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1748 {
1749 if (low)
1750 emit_jump_insn (gen_pop_and_return
1751 (GEN_INT (rtsd_size),
1752 gen_rx_rtsd_vector (rtsd_size, low, high)));
1753 else
1754 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1755
1756 return;
1757 }
1758
1759 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1760 GEN_INT (total_size), false);
1761 }
1762
1763 if (low)
1764 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1765 gen_rx_rtsd_vector (regs_size,
1766 low, high)));
1767 else
1768 emit_jump_insn (gen_simple_return ());
1769 }
1770
1771
1772 /* Compute the offset (in bytes) between FROM (arg pointer
1773 or frame pointer) and TO (frame pointer or stack pointer).
1774 See ASCII art comment at the start of rx_expand_prologue
1775 for more information. */
1776
1777 int
1778 rx_initial_elimination_offset (int from, int to)
1779 {
1780 unsigned int low;
1781 unsigned int high;
1782 unsigned int frame_size;
1783 unsigned int stack_size;
1784 unsigned int mask;
1785
1786 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1787
1788 if (from == ARG_POINTER_REGNUM)
1789 {
1790 /* Extend the computed size of the stack frame to
1791 include the registers pushed in the prologue. */
1792 if (low)
1793 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1794 else
1795 frame_size += bit_count (mask) * UNITS_PER_WORD;
1796
1797 /* Remember to include the return address. */
1798 frame_size += 1 * UNITS_PER_WORD;
1799
1800 if (to == FRAME_POINTER_REGNUM)
1801 return frame_size;
1802
1803 gcc_assert (to == STACK_POINTER_REGNUM);
1804 return frame_size + stack_size;
1805 }
1806
1807 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1808 return stack_size;
1809 }
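/* [Editorial note]  Worked example, assuming no stack-passed incoming
   arguments: pushing r6-r8 (12 bytes) in front of a 16 byte frame with
   8 bytes of outgoing arguments gives an arg-pointer to frame-pointer
   offset of 12 + 4 + 16 = 32 bytes (saved registers, return address,
   frame), and 32 + 8 = 40 bytes down to the stack pointer.  */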
1810
1811 /* Decide if a variable should go into one of the small data sections. */
1812
1813 static bool
1814 rx_in_small_data (const_tree decl)
1815 {
1816 int size;
1817 const_tree section;
1818
1819 if (rx_small_data_limit == 0)
1820 return false;
1821
1822 if (TREE_CODE (decl) != VAR_DECL)
1823 return false;
1824
1825 /* We do not put read-only variables into a small data area because
1826 they would be placed with the other read-only sections, far away
1827 from the read-write data sections, and we only have one small
1828 data area pointer.
1829 Similarly commons are placed in the .bss section which might be
1830    far away from (and out of alignment with) the .data section.  */
1831 if (TREE_READONLY (decl) || DECL_COMMON (decl))
1832 return false;
1833
1834 section = DECL_SECTION_NAME (decl);
1835 if (section)
1836 {
1837 const char * const name = TREE_STRING_POINTER (section);
1838
1839 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1840 }
1841
1842 size = int_size_in_bytes (TREE_TYPE (decl));
1843
1844 return (size > 0) && (size <= rx_small_data_limit);
1845 }
1846
1847 /* Return a section for X.
1848 The only special thing we do here is to honor small data. */
1849
1850 static section *
1851 rx_select_rtx_section (enum machine_mode mode,
1852 rtx x,
1853 unsigned HOST_WIDE_INT align)
1854 {
1855 if (rx_small_data_limit > 0
1856 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1857 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1858 return sdata_section;
1859
1860 return default_elf_select_rtx_section (mode, x, align);
1861 }
1862
1863 static section *
1864 rx_select_section (tree decl,
1865 int reloc,
1866 unsigned HOST_WIDE_INT align)
1867 {
1868 if (rx_small_data_limit > 0)
1869 {
1870 switch (categorize_decl_for_section (decl, reloc))
1871 {
1872 case SECCAT_SDATA: return sdata_section;
1873 case SECCAT_SBSS: return sbss_section;
1874 case SECCAT_SRODATA:
1875 /* Fall through.  We do not put small, read-only
1876 data into the C_2 section because we are not
1877 using the C_2 section. We do not use the C_2
1878 section because it is located with the other
1879 read-only data sections, far away from the read-write
1880 data sections and we only have one small data
1881 pointer (r13). */
1882 default:
1883 break;
1884 }
1885 }
1886
1887 /* If we are supporting the Renesas assembler
1888 we cannot use mergeable sections. */
1889 if (TARGET_AS100_SYNTAX)
1890 switch (categorize_decl_for_section (decl, reloc))
1891 {
1892 case SECCAT_RODATA_MERGE_CONST:
1893 case SECCAT_RODATA_MERGE_STR_INIT:
1894 case SECCAT_RODATA_MERGE_STR:
1895 return readonly_data_section;
1896
1897 default:
1898 break;
1899 }
1900
1901 return default_elf_select_section (decl, reloc, align);
1902 }
1903 \f
1904 enum rx_builtin
1905 {
1906 RX_BUILTIN_BRK,
1907 RX_BUILTIN_CLRPSW,
1908 RX_BUILTIN_INT,
1909 RX_BUILTIN_MACHI,
1910 RX_BUILTIN_MACLO,
1911 RX_BUILTIN_MULHI,
1912 RX_BUILTIN_MULLO,
1913 RX_BUILTIN_MVFACHI,
1914 RX_BUILTIN_MVFACMI,
1915 RX_BUILTIN_MVFC,
1916 RX_BUILTIN_MVTACHI,
1917 RX_BUILTIN_MVTACLO,
1918 RX_BUILTIN_MVTC,
1919 RX_BUILTIN_MVTIPL,
1920 RX_BUILTIN_RACW,
1921 RX_BUILTIN_REVW,
1922 RX_BUILTIN_RMPA,
1923 RX_BUILTIN_ROUND,
1924 RX_BUILTIN_SETPSW,
1925 RX_BUILTIN_WAIT,
1926 RX_BUILTIN_max
1927 };
1928
1929 static void
1930 rx_init_builtins (void)
1931 {
1932 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1933 add_builtin_function ("__builtin_rx_" LC_NAME, \
1934 build_function_type_list (RET_TYPE##_type_node, \
1935 ARG_TYPE##_type_node, \
1936 NULL_TREE), \
1937 RX_BUILTIN_##UC_NAME, \
1938 BUILT_IN_MD, NULL, NULL_TREE)
1939
1940 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1941 add_builtin_function ("__builtin_rx_" LC_NAME, \
1942 build_function_type_list (RET_TYPE##_type_node, \
1943 ARG_TYPE1##_type_node,\
1944 ARG_TYPE2##_type_node,\
1945 NULL_TREE), \
1946 RX_BUILTIN_##UC_NAME, \
1947 BUILT_IN_MD, NULL, NULL_TREE)
1948
1949 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1950 add_builtin_function ("__builtin_rx_" LC_NAME, \
1951 build_function_type_list (RET_TYPE##_type_node, \
1952 ARG_TYPE1##_type_node,\
1953 ARG_TYPE2##_type_node,\
1954 ARG_TYPE3##_type_node,\
1955 NULL_TREE), \
1956 RX_BUILTIN_##UC_NAME, \
1957 BUILT_IN_MD, NULL, NULL_TREE)
1958
1959 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1960 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1961 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1962 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1963 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1964 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1965 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1966 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1967 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1968 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1969 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1970 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1971 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1972 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1973 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1974 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1975 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1976 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1977 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1978 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
1979 }
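/* [Editorial example]  From user code the builtins registered above
   are spelled __builtin_rx_<name>.  A hypothetical usage sketch, which
   only compiles with an RX-targeted GCC (not built here):  */
#if 0
void
example_builtin_usage (void)
{
  int hi = __builtin_rx_mvfachi ();	/* read accumulator bits 63:32.  */
  __builtin_rx_mvtachi (hi);		/* ...and write them back.  */
  __builtin_rx_setpsw ('I');		/* set the PSW interrupt flag.  */
}
#endif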
1980
1981 static rtx
1982 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1983 {
1984 if (reg && ! REG_P (arg))
1985 arg = force_reg (SImode, arg);
1986
1987 emit_insn (gen_func (arg));
1988
1989 return NULL_RTX;
1990 }
1991
1992 static rtx
1993 rx_expand_builtin_mvtc (tree exp)
1994 {
1995 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1996 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1997
1998 if (! CONST_INT_P (arg1))
1999 return NULL_RTX;
2000
2001 if (! REG_P (arg2))
2002 arg2 = force_reg (SImode, arg2);
2003
2004 emit_insn (gen_mvtc (arg1, arg2));
2005
2006 return NULL_RTX;
2007 }
2008
2009 static rtx
2010 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2011 {
2012 rtx arg = expand_normal (t_arg);
2013
2014 if (! CONST_INT_P (arg))
2015 return NULL_RTX;
2016
2017 if (target == NULL_RTX)
2018 return NULL_RTX;
2019
2020 if (! REG_P (target))
2021 target = force_reg (SImode, target);
2022
2023 emit_insn (gen_mvfc (target, arg));
2024
2025 return target;
2026 }
2027
2028 static rtx
2029 rx_expand_builtin_mvtipl (rtx arg)
2030 {
2031 /* The RX610 does not support the MVTIPL instruction. */
2032 if (rx_cpu_type == RX610)
2033 return NULL_RTX;
2034
2035 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2036 return NULL_RTX;
2037
2038 emit_insn (gen_mvtipl (arg));
2039
2040 return NULL_RTX;
2041 }
2042
2043 static rtx
2044 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2045 {
2046 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2047 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2048
2049 if (! REG_P (arg1))
2050 arg1 = force_reg (SImode, arg1);
2051
2052 if (! REG_P (arg2))
2053 arg2 = force_reg (SImode, arg2);
2054
2055 emit_insn (gen_func (arg1, arg2));
2056
2057 return NULL_RTX;
2058 }
2059
2060 static rtx
2061 rx_expand_int_builtin_1_arg (rtx arg,
2062 rtx target,
2063 rtx (* gen_func)(rtx, rtx),
2064 bool mem_ok)
2065 {
2066 if (! REG_P (arg))
2067 if (!mem_ok || ! MEM_P (arg))
2068 arg = force_reg (SImode, arg);
2069
2070 if (target == NULL_RTX || ! REG_P (target))
2071 target = gen_reg_rtx (SImode);
2072
2073 emit_insn (gen_func (target, arg));
2074
2075 return target;
2076 }
2077
2078 static rtx
2079 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2080 {
2081 if (target == NULL_RTX || ! REG_P (target))
2082 target = gen_reg_rtx (SImode);
2083
2084 emit_insn (gen_func (target));
2085
2086 return target;
2087 }
2088
2089 static rtx
2090 rx_expand_builtin_round (rtx arg, rtx target)
2091 {
2092 if ((! REG_P (arg) && ! MEM_P (arg))
2093 || GET_MODE (arg) != SFmode)
2094 arg = force_reg (SFmode, arg);
2095
2096 if (target == NULL_RTX || ! REG_P (target))
2097 target = gen_reg_rtx (SImode);
2098
2099 emit_insn (gen_lrintsf2 (target, arg));
2100
2101 return target;
2102 }
2103
2104 static int
2105 valid_psw_flag (rtx op, const char *which)
2106 {
2107 static int mvtc_inform_done = 0;
2108
2109 if (GET_CODE (op) == CONST_INT)
2110 switch (INTVAL (op))
2111 {
2112 case 0: case 'c': case 'C':
2113 case 1: case 'z': case 'Z':
2114 case 2: case 's': case 'S':
2115 case 3: case 'o': case 'O':
2116 case 8: case 'i': case 'I':
2117 case 9: case 'u': case 'U':
2118 return 1;
2119 }
2120
2121 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2122 if (!mvtc_inform_done)
2123 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2124 mvtc_inform_done = 1;
2125
2126 return 0;
2127 }
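/* As an example of the accepted spellings above, both
   __builtin_rx_setpsw ('U') and __builtin_rx_setpsw (9) name the same
   flag (a letter or its numeric designator), whereas an argument such
   as 4 is rejected with the error above.  */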
2128
2129 static rtx
2130 rx_expand_builtin (tree exp,
2131 rtx target,
2132 rtx subtarget ATTRIBUTE_UNUSED,
2133 enum machine_mode mode ATTRIBUTE_UNUSED,
2134 int ignore ATTRIBUTE_UNUSED)
2135 {
2136 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2137 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2138 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2139 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2140
2141 switch (fcode)
2142 {
2143 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2144 case RX_BUILTIN_CLRPSW:
2145 if (!valid_psw_flag (op, "clrpsw"))
2146 return NULL_RTX;
2147 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2148 case RX_BUILTIN_SETPSW:
2149 if (!valid_psw_flag (op, "setpsw"))
2150 return NULL_RTX;
2151 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2152 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2153 (op, gen_int, false);
2154 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2155 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2156 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2157 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2158 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2159 (target, gen_mvfachi);
2160 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2161 (target, gen_mvfacmi);
2162 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2163 (op, gen_mvtachi, true);
2164 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2165 (op, gen_mvtaclo, true);
2166 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2167 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2168 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2169 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2170 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2171 (op, gen_racw, false);
2172 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2173 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2174 (op, target, gen_revw, false);
2175 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2176
2177 default:
2178 internal_error ("bad builtin code");
2179 break;
2180 }
2181
2182 return NULL_RTX;
2183 }
2184 \f
2185 /* Place an element into a constructor or destructor section.
2186 Like default_ctor_section_asm_out_constructor in varasm.c
2187 except that it uses .init_array (or .fini_array) and it
2188 handles constructor priorities. */
2189
2190 static void
2191 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2192 {
2193 section * s;
2194
2195 if (priority != DEFAULT_INIT_PRIORITY)
2196 {
2197 char buf[18];
2198
2199 sprintf (buf, "%s.%.5u",
2200 is_ctor ? ".init_array" : ".fini_array",
2201 priority);
2202 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2203 }
2204 else if (is_ctor)
2205 s = ctors_section;
2206 else
2207 s = dtors_section;
2208
2209 switch_to_section (s);
2210 assemble_align (POINTER_SIZE);
2211 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2212 }
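/* Sketch of the effect: a constructor with priority 101 is placed in a
   section named ".init_array.00101" (the "%.5u" format above zero-pads
   the priority to five digits), while default-priority constructors
   and destructors go into the plain ctors/dtors sections.  */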
2213
2214 static void
2215 rx_elf_asm_constructor (rtx symbol, int priority)
2216 {
2217 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2218 }
2219
2220 static void
2221 rx_elf_asm_destructor (rtx symbol, int priority)
2222 {
2223 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2224 }
2225 \f
2226 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2227
2228 static tree
2229 rx_handle_func_attribute (tree * node,
2230 tree name,
2231 tree args,
2232 int flags ATTRIBUTE_UNUSED,
2233 bool * no_add_attrs)
2234 {
2235 gcc_assert (DECL_P (* node));
2236 gcc_assert (args == NULL_TREE);
2237
2238 if (TREE_CODE (* node) != FUNCTION_DECL)
2239 {
2240 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2241 name);
2242 * no_add_attrs = true;
2243 }
2244
2245 /* FIXME: We ought to check for conflicting attributes. */
2246
2247 /* FIXME: We ought to check that the interrupt and exception
2248 handler attributes have been applied to void functions. */
2249 return NULL_TREE;
2250 }
2251
2252 /* Table of RX specific attributes. */
2253 const struct attribute_spec rx_attribute_table[] =
2254 {
2255 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2256 affects_type_identity. */
2257 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2258 false },
2259 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2260 false },
2261 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2262 false },
2263 { NULL, 0, 0, false, false, false, NULL, false }
2264 };
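/* A hypothetical user-level example of the attributes in the table
   above (all are declaration attributes taking no arguments):

     void isr (void) __attribute__ ((interrupt));
     void fast_isr (void) __attribute__ ((fast_interrupt));
     void stub (void) __attribute__ ((naked));
*/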
2265
2266 /* Extra processing for target specific command line options. */
2267
2268 static bool
2269 rx_handle_option (struct gcc_options *opts,
2270 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
2271 const struct cl_decoded_option *decoded,
2272 location_t loc)
2273 {
2274 size_t code = decoded->opt_index;
2275 int value = decoded->value;
2276
2277 switch (code)
2278 {
2279 case OPT_mint_register_:
2280 /* Make sure that the -mint-register option is in range.  The rest
2281 of the handling is done in rx_option_override.  */
2282 return value >= 0 && value <= 4;
2284
2285 case OPT_mmax_constant_size_:
2286 /* Make sure that the -mmax-constant-size option is in range. */
2287 return value >= 0 && value <= 4;
2288
2289 case OPT_mcpu_:
2290 if ((enum rx_cpu_types) value == RX200)
2291 opts->x_target_flags |= MASK_NO_USE_FPU;
2292 break;
2293
2294 case OPT_fpu:
2295 if (opts->x_rx_cpu_type == RX200)
2296 error_at (loc, "the RX200 cpu does not have FPU hardware");
2297 break;
2298
2299 default:
2300 break;
2301 }
2302
2303 return true;
2304 }
2305
2306 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2307
2308 static void
2309 rx_override_options_after_change (void)
2310 {
2311 static bool first_time = TRUE;
2312
2313 if (first_time)
2314 {
2315 /* If this is the first time through and the user has not disabled
2316 the use of RX FPU hardware then enable -ffinite-math-only,
2317 since the FPU instructions do not support NaNs and infinities. */
2318 if (TARGET_USE_FPU)
2319 flag_finite_math_only = 1;
2320
2321 first_time = FALSE;
2322 }
2323 else
2324 {
2325 /* Alert the user if they are changing the optimization options
2326 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2327 if (TARGET_USE_FPU
2328 && !flag_finite_math_only)
2329 warning (0, "RX FPU instructions do not support NaNs and infinities");
2330 }
2331 }
2332
2333 static void
2334 rx_option_override (void)
2335 {
2336 unsigned int i;
2337 cl_deferred_option *opt;
2338 VEC(cl_deferred_option,heap) *vec
2339 = (VEC(cl_deferred_option,heap) *) rx_deferred_options;
2340
2341 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
2342 {
2343 switch (opt->opt_index)
2344 {
2345 case OPT_mint_register_:
2346 switch (opt->value)
2347 {
2348 case 4:
2349 fixed_regs[10] = call_used_regs [10] = 1;
2350 /* Fall through. */
2351 case 3:
2352 fixed_regs[11] = call_used_regs [11] = 1;
2353 /* Fall through. */
2354 case 2:
2355 fixed_regs[12] = call_used_regs [12] = 1;
2356 /* Fall through. */
2357 case 1:
2358 fixed_regs[13] = call_used_regs [13] = 1;
2359 /* Fall through. */
2360 case 0:
2361 break;
2362 default:
2363 /* Error message already given because rx_handle_option
2364 returned false. */
2365 break;
2366 }
2367 break;
2368
2369 default:
2370 gcc_unreachable ();
2371 }
2372 }
2373
2374 /* This target defaults to strict volatile bitfields. */
2375 if (flag_strict_volatile_bitfields < 0)
2376 flag_strict_volatile_bitfields = 1;
2377
2378 rx_override_options_after_change ();
2379
2380 if (align_jumps == 0 && ! optimize_size)
2381 align_jumps = 3;
2382 if (align_loops == 0 && ! optimize_size)
2383 align_loops = 3;
2384 if (align_labels == 0 && ! optimize_size)
2385 align_labels = 3;
2386 }
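/* For instance, -mint-register=2 runs through the fall-through cases
   above and marks r12 and r13 as fixed and call-used, reserving them
   for use by fast interrupt handlers.  */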
2387
2388 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
2389 static const struct default_options rx_option_optimization_table[] =
2390 {
2391 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
2392 { OPT_LEVELS_NONE, 0, NULL, 0 }
2393 };
2394
2395 \f
2396 static bool
2397 rx_allocate_stack_slots_for_args (void)
2398 {
2399 /* Naked functions should not allocate stack slots for arguments. */
2400 return ! is_naked_func (NULL_TREE);
2401 }
2402
2403 static bool
2404 rx_func_attr_inlinable (const_tree decl)
2405 {
2406 return ! is_fast_interrupt_func (decl)
2407 && ! is_interrupt_func (decl)
2408 && ! is_naked_func (decl);
2409 }
2410
2411 /* Return true if it is OK to make a tail-call to DECL, a
2412 FUNCTION_DECL, or NULL if this is an indirect call using EXP.  */
2413
2414 static bool
2415 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2416 {
2417 /* Do not allow indirect tailcalls. The
2418 sibcall patterns do not support them. */
2419 if (decl == NULL)
2420 return false;
2421
2422 /* Never tailcall from inside interrupt handlers or naked functions. */
2423 if (is_fast_interrupt_func (NULL_TREE)
2424 || is_interrupt_func (NULL_TREE)
2425 || is_naked_func (NULL_TREE))
2426 return false;
2427
2428 return true;
2429 }
2430
2431 static void
2432 rx_file_start (void)
2433 {
2434 if (! TARGET_AS100_SYNTAX)
2435 default_file_start ();
2436 }
2437
2438 static bool
2439 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2440 {
2441 /* The packed attribute overrides the MS behaviour. */
2442 return ! TYPE_PACKED (record_type);
2443 }
2444 \f
2445 /* Returns true if X is a legitimate constant for an immediate
2446 operand on the RX. X is already known to satisfy CONSTANT_P. */
2447
2448 bool
2449 rx_is_legitimate_constant (rtx x)
2450 {
2451 switch (GET_CODE (x))
2452 {
2453 case CONST:
2454 x = XEXP (x, 0);
2455
2456 if (GET_CODE (x) == PLUS)
2457 {
2458 if (! CONST_INT_P (XEXP (x, 1)))
2459 return false;
2460
2461 /* GCC would not pass us CONST_INT + CONST_INT, so we
2462 know that we have {SYMBOL|LABEL} + CONST_INT. */
2463 x = XEXP (x, 0);
2464 gcc_assert (! CONST_INT_P (x));
2465 }
2466
2467 switch (GET_CODE (x))
2468 {
2469 case LABEL_REF:
2470 case SYMBOL_REF:
2471 return true;
2472
2473 case UNSPEC:
2474 return XINT (x, 1) == UNSPEC_CONST;
2475
2476 default:
2477 /* FIXME: Can this ever happen?  */
2478 gcc_unreachable ();
2479 }
2480 break;
2481
2482 case LABEL_REF:
2483 case SYMBOL_REF:
2484 return true;
2485 case CONST_DOUBLE:
2486 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2487 case CONST_VECTOR:
2488 return false;
2489 default:
2490 gcc_assert (CONST_INT_P (x));
2491 break;
2492 }
2493
2494 return ok_for_max_constant (INTVAL (x));
2495 }
2496
2497 static int
2498 rx_address_cost (rtx addr, bool speed)
2499 {
2500 rtx a, b;
2501
2502 if (GET_CODE (addr) != PLUS)
2503 return COSTS_N_INSNS (1);
2504
2505 a = XEXP (addr, 0);
2506 b = XEXP (addr, 1);
2507
2508 if (REG_P (a) && REG_P (b))
2509 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2510 return COSTS_N_INSNS (4);
2511
2512 if (speed)
2513 /* [REG+OFF] is just as fast as [REG]. */
2514 return COSTS_N_INSNS (1);
2515
2516 if (CONST_INT_P (b)
2517 && (INTVAL (b) > 128 || INTVAL (b) < -127))
2518 /* Try to discourage REG + <large OFF> when optimizing for size. */
2519 return COSTS_N_INSNS (2);
2520
2521 return COSTS_N_INSNS (1);
2522 }
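/* Under this costing, [r1] and [r1 + 4] both cost COSTS_N_INSNS (1),
   [r1 + r2] costs COSTS_N_INSNS (4) to discourage keeping two
   registers live, and, when optimizing for size, a large displacement
   such as [r1 + 1000] costs COSTS_N_INSNS (2).  */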
2523
2524 static bool
2525 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2526 {
2527 /* We can always eliminate to the frame pointer.
2528 We can eliminate to the stack pointer unless a frame
2529 pointer is needed. */
2530
2531 return to == FRAME_POINTER_REGNUM
2532 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2533 }
2534 \f
2535
2536 static void
2537 rx_trampoline_template (FILE * file)
2538 {
2539 /* Output assembler code for a block containing the constant
2540 part of a trampoline, leaving space for the variable parts.
2541
2542 On the RX (where r8 is the static chain regnum), the trampoline
2543 looks like:
2544
2545 mov #<static chain value>, r8
2546 mov #<function's address>, r9
2547 jmp r9
2548
2549 In big-endian-data mode, however, instructions are read into the CPU
2550 4 bytes at a time. These bytes are then swapped around before being
2551 passed to the decoder. So we must partition our trampoline into
2552 4 byte packets and swap these packets around so that the instruction
2553 reader will reverse the process. But, in order to avoid splitting
2554 the 32-bit constants across these packet boundaries (which would make
2555 inserting them into the constructed trampoline very difficult), we
2556 have to pad the instruction sequence with NOP insns, i.e.:
2557
2558 nop
2559 nop
2560 mov.l #<...>, r8
2561 nop
2562 nop
2563 mov.l #<...>, r9
2564 jmp r9
2565 nop
2566 nop */
2567
2568 if (! TARGET_BIG_ENDIAN_DATA)
2569 {
2570 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2571 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2572 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2573 }
2574 else
2575 {
2576 char r8 = '0' + STATIC_CHAIN_REGNUM;
2577 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2578
2579 if (TARGET_AS100_SYNTAX)
2580 {
2581 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2582 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2583 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2584 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2585 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2586 }
2587 else
2588 {
2589 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2590 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2591 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2592 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2593 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2594 }
2595 }
2596 }
2597
2598 static void
2599 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2600 {
2601 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2602
2603 emit_block_move (tramp, assemble_trampoline_template (),
2604 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2605
2606 if (TARGET_BIG_ENDIAN_DATA)
2607 {
2608 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2609 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2610 }
2611 else
2612 {
2613 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2614 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2615 }
2616 }
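/* A sketch of the resulting little-endian trampoline layout implied by
   the offsets used above (assuming each mov.L is a 2-byte opcode
   followed by a 32-bit immediate):

     offset  0: mov.L opcode         (2 bytes)
     offset  2: static chain value   (4 bytes)
     offset  6: mov.L opcode         (2 bytes)
     offset  8: function address     (4 bytes)
     offset 12: jmp r9

   In big-endian-data mode the NOP padding in the template shifts the
   chain to offset 4 and the function address to offset 12.  */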
2617 \f
2618 static int
2619 rx_memory_move_cost (enum machine_mode mode, reg_class_t regclass, bool in)
2620 {
2621 return (in ? 2 : 0) + memory_move_secondary_cost (mode, regclass, in);
2622 }
2623
2624 /* Convert a CC_MODE to the set of flags that it represents. */
2625
2626 static unsigned int
2627 flags_from_mode (enum machine_mode mode)
2628 {
2629 switch (mode)
2630 {
2631 case CC_ZSmode:
2632 return CC_FLAG_S | CC_FLAG_Z;
2633 case CC_ZSOmode:
2634 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2635 case CC_ZSCmode:
2636 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2637 case CCmode:
2638 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2639 case CC_Fmode:
2640 return CC_FLAG_FP;
2641 default:
2642 gcc_unreachable ();
2643 }
2644 }
2645
2646 /* Convert a set of flags to a CC_MODE that can implement it. */
2647
2648 static enum machine_mode
2649 mode_from_flags (unsigned int f)
2650 {
2651 if (f & CC_FLAG_FP)
2652 return CC_Fmode;
2653 if (f & CC_FLAG_O)
2654 {
2655 if (f & CC_FLAG_C)
2656 return CCmode;
2657 else
2658 return CC_ZSOmode;
2659 }
2660 else if (f & CC_FLAG_C)
2661 return CC_ZSCmode;
2662 else
2663 return CC_ZSmode;
2664 }
2665
2666 /* Convert an RTX_CODE to the set of flags needed to implement it.
2667 This assumes an integer comparison. */
2668
2669 static unsigned int
2670 flags_from_code (enum rtx_code code)
2671 {
2672 switch (code)
2673 {
2674 case LT:
2675 case GE:
2676 return CC_FLAG_S;
2677 case GT:
2678 case LE:
2679 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2680 case GEU:
2681 case LTU:
2682 return CC_FLAG_C;
2683 case GTU:
2684 case LEU:
2685 return CC_FLAG_C | CC_FLAG_Z;
2686 case EQ:
2687 case NE:
2688 return CC_FLAG_Z;
2689 default:
2690 gcc_unreachable ();
2691 }
2692 }
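/* Worked example combining the two mappings above: a signed GT
   comparison needs CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z, and
   mode_from_flags of that set selects CC_ZSOmode, since O is required
   but C is not.  */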
2693
2694 /* Return a CC_MODE of which both M1 and M2 are subsets. */
2695
2696 static enum machine_mode
2697 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2698 {
2699 unsigned f;
2700
2701 /* Early out for identical modes. */
2702 if (m1 == m2)
2703 return m1;
2704
2705 /* There's no valid combination for FP vs non-FP. */
2706 f = flags_from_mode (m1) | flags_from_mode (m2);
2707 if (f & CC_FLAG_FP)
2708 return VOIDmode;
2709
2710 /* Otherwise, see what mode can implement all the flags. */
2711 return mode_from_flags (f);
2712 }
2713
2714 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2715
2716 enum machine_mode
2717 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
2718 {
2719 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2720 return CC_Fmode;
2721
2722 if (y != const0_rtx)
2723 return CCmode;
2724
2725 return mode_from_flags (flags_from_code (cmp_code));
2726 }
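/* So, for example, (GT x 0) on integer operands yields CC_ZSOmode via
   the helpers above, any comparison against a non-zero value falls
   back to full CCmode, and floating point comparisons always use
   CC_Fmode.  */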
2727
2728 /* Split a conditional branch: emit (COMPARE C1 C2) into CC_REG with
2729 mode CC_MODE, then emit a jump to LABEL conditional on CMP1.  */
2730
2731 void
2732 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2733 rtx c1, rtx c2, rtx label)
2734 {
2735 rtx flags, x;
2736
2737 flags = gen_rtx_REG (cc_mode, CC_REG);
2738 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2739 x = gen_rtx_SET (VOIDmode, flags, x);
2740 emit_insn (x);
2741
2742 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2743 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2744 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2745 emit_jump_insn (x);
2746 }
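/* A sketch of the RTL emitted by, say,
   rx_split_cbranch (CC_ZSOmode, GT, a, b, label):

     (set (reg:CC_ZSO CC_REG) (compare:CC_ZSO a b))
     (set (pc) (if_then_else (gt (reg:CC_ZSO CC_REG) (const_int 0))
                             label (pc)))
*/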
2747
2748 /* A helper function for matching parallels that set the flags. */
2749
2750 bool
2751 rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2752 {
2753 rtx op1, flags;
2754 enum machine_mode flags_mode;
2755
2756 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2757
2758 op1 = XVECEXP (PATTERN (insn), 0, 1);
2759 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2760
2761 flags = SET_DEST (op1);
2762 flags_mode = GET_MODE (flags);
2763
2764 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2765 return false;
2766 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2767 return false;
2768
2769 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
2770 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
2771 return false;
2772
2773 return true;
2774 }
2775 \f
2776 int
2777 rx_align_for_label (void)
2778 {
2779 return optimize_size ? 1 : 3;
2780 }
2781
2782 static int
2783 rx_max_skip_for_label (rtx lab)
2784 {
2785 int opsize;
2786 rtx op;
2787
2788 if (lab == NULL_RTX)
2789 return 0;
2790
2791 op = lab;
2792 do
2793 {
2794 op = next_nonnote_nondebug_insn (op);
2795 }
2796 while (op && (LABEL_P (op)
2797 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
2798 if (!op)
2799 return 0;
2800
2801 opsize = get_attr_length (op);
2802 if (opsize >= 0 && opsize < 8)
2803 return opsize - 1;
2804 return 0;
2805 }
2806
2807 /* Compute the real length of the extending load-and-op instructions. */
2808
2809 int
2810 rx_adjust_insn_length (rtx insn, int current_length)
2811 {
2812 rtx extend, mem, offset;
2813 bool zero;
2814 int factor;
2815
2816 switch (INSN_CODE (insn))
2817 {
2818 default:
2819 return current_length;
2820
2821 case CODE_FOR_plussi3_zero_extendhi:
2822 case CODE_FOR_andsi3_zero_extendhi:
2823 case CODE_FOR_iorsi3_zero_extendhi:
2824 case CODE_FOR_xorsi3_zero_extendhi:
2825 case CODE_FOR_divsi3_zero_extendhi:
2826 case CODE_FOR_udivsi3_zero_extendhi:
2827 case CODE_FOR_minussi3_zero_extendhi:
2828 case CODE_FOR_smaxsi3_zero_extendhi:
2829 case CODE_FOR_sminsi3_zero_extendhi:
2830 case CODE_FOR_multsi3_zero_extendhi:
2831 case CODE_FOR_comparesi3_zero_extendqi:
2832 zero = true;
2833 factor = 2;
2834 break;
2835
2836 case CODE_FOR_plussi3_sign_extendhi:
2837 case CODE_FOR_andsi3_sign_extendhi:
2838 case CODE_FOR_iorsi3_sign_extendhi:
2839 case CODE_FOR_xorsi3_sign_extendhi:
2840 case CODE_FOR_divsi3_sign_extendhi:
2841 case CODE_FOR_udivsi3_sign_extendhi:
2842 case CODE_FOR_minussi3_sign_extendhi:
2843 case CODE_FOR_smaxsi3_sign_extendhi:
2844 case CODE_FOR_sminsi3_sign_extendhi:
2845 case CODE_FOR_multsi3_sign_extendhi:
2846 case CODE_FOR_comparesi3_zero_extendhi:
2847 zero = false;
2848 factor = 2;
2849 break;
2850
2851 case CODE_FOR_plussi3_zero_extendqi:
2852 case CODE_FOR_andsi3_zero_extendqi:
2853 case CODE_FOR_iorsi3_zero_extendqi:
2854 case CODE_FOR_xorsi3_zero_extendqi:
2855 case CODE_FOR_divsi3_zero_extendqi:
2856 case CODE_FOR_udivsi3_zero_extendqi:
2857 case CODE_FOR_minussi3_zero_extendqi:
2858 case CODE_FOR_smaxsi3_zero_extendqi:
2859 case CODE_FOR_sminsi3_zero_extendqi:
2860 case CODE_FOR_multsi3_zero_extendqi:
2861 case CODE_FOR_comparesi3_sign_extendqi:
2862 zero = true;
2863 factor = 1;
2864 break;
2865
2866 case CODE_FOR_plussi3_sign_extendqi:
2867 case CODE_FOR_andsi3_sign_extendqi:
2868 case CODE_FOR_iorsi3_sign_extendqi:
2869 case CODE_FOR_xorsi3_sign_extendqi:
2870 case CODE_FOR_divsi3_sign_extendqi:
2871 case CODE_FOR_udivsi3_sign_extendqi:
2872 case CODE_FOR_minussi3_sign_extendqi:
2873 case CODE_FOR_smaxsi3_sign_extendqi:
2874 case CODE_FOR_sminsi3_sign_extendqi:
2875 case CODE_FOR_multsi3_sign_extendqi:
2876 case CODE_FOR_comparesi3_sign_extendhi:
2877 zero = false;
2878 factor = 1;
2879 break;
2880 }
2881
2882 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
2883 extend = single_set (insn);
2884 gcc_assert (extend != NULL_RTX);
2885
2886 extend = SET_SRC (extend);
2887 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
2888 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
2889 extend = XEXP (extend, 0);
2890 else
2891 extend = XEXP (extend, 1);
2892
2893 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
2894 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
2895
2896 mem = XEXP (extend, 0);
2897 gcc_checking_assert (MEM_P (mem));
2898 if (REG_P (XEXP (mem, 0)))
2899 return (zero && factor == 1) ? 2 : 3;
2900
2901 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
2902 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
2903 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
2904
2905 offset = XEXP (XEXP (mem, 0), 1);
2906 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
2907
2908 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
2909 return (zero && factor == 1) ? 3 : 4;
2910
2911 return (zero && factor == 1) ? 4 : 5;
2912 }
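/* Worked example: for a zero-extending QImode operation (ZERO == true,
   FACTOR == 1) the computed length is 2 bytes with a register-indirect
   memory operand, 3 bytes when the displacement is in 0..255, and 4
   bytes otherwise; the other variants are one byte longer in each
   case.  */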
2913 \f
2914 #undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
2915 #define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
2916 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
2917 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
2918 #undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
2919 #define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
2920 #undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
2921 #define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
2922
2923 #undef TARGET_FUNCTION_VALUE
2924 #define TARGET_FUNCTION_VALUE rx_function_value
2925
2926 #undef TARGET_RETURN_IN_MSB
2927 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2928
2929 #undef TARGET_IN_SMALL_DATA_P
2930 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2931
2932 #undef TARGET_RETURN_IN_MEMORY
2933 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2934
2935 #undef TARGET_HAVE_SRODATA_SECTION
2936 #define TARGET_HAVE_SRODATA_SECTION true
2937
2938 #undef TARGET_ASM_SELECT_RTX_SECTION
2939 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2940
2941 #undef TARGET_ASM_SELECT_SECTION
2942 #define TARGET_ASM_SELECT_SECTION rx_select_section
2943
2944 #undef TARGET_INIT_BUILTINS
2945 #define TARGET_INIT_BUILTINS rx_init_builtins
2946
2947 #undef TARGET_EXPAND_BUILTIN
2948 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2949
2950 #undef TARGET_ASM_CONSTRUCTOR
2951 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2952
2953 #undef TARGET_ASM_DESTRUCTOR
2954 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2955
2956 #undef TARGET_STRUCT_VALUE_RTX
2957 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2958
2959 #undef TARGET_ATTRIBUTE_TABLE
2960 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2961
2962 #undef TARGET_ASM_FILE_START
2963 #define TARGET_ASM_FILE_START rx_file_start
2964
2965 #undef TARGET_MS_BITFIELD_LAYOUT_P
2966 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2967
2968 #undef TARGET_LEGITIMATE_ADDRESS_P
2969 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2970
2971 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2972 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
2973
2974 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2975 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2976
2977 #undef TARGET_ASM_FUNCTION_PROLOGUE
2978 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2979
2980 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2981 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2982
2983 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2984 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2985
2986 #undef TARGET_FUNCTION_ARG
2987 #define TARGET_FUNCTION_ARG rx_function_arg
2988
2989 #undef TARGET_FUNCTION_ARG_ADVANCE
2990 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
2991
2992 #undef TARGET_FUNCTION_ARG_BOUNDARY
2993 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
2994
2995 #undef TARGET_SET_CURRENT_FUNCTION
2996 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2997
2998 #undef TARGET_HANDLE_OPTION
2999 #define TARGET_HANDLE_OPTION rx_handle_option
3000
3001 #undef TARGET_ASM_INTEGER
3002 #define TARGET_ASM_INTEGER rx_assemble_integer
3003
3004 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3005 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3006
3007 #undef TARGET_MAX_ANCHOR_OFFSET
3008 #define TARGET_MAX_ANCHOR_OFFSET 32
3009
3010 #undef TARGET_ADDRESS_COST
3011 #define TARGET_ADDRESS_COST rx_address_cost
3012
3013 #undef TARGET_CAN_ELIMINATE
3014 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3015
3016 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3017 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3018
3019 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3020 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3021
3022 #undef TARGET_TRAMPOLINE_INIT
3023 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3024
3025 #undef TARGET_PRINT_OPERAND
3026 #define TARGET_PRINT_OPERAND rx_print_operand
3027
3028 #undef TARGET_PRINT_OPERAND_ADDRESS
3029 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3030
3031 #undef TARGET_CC_MODES_COMPATIBLE
3032 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3033
3034 #undef TARGET_MEMORY_MOVE_COST
3035 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3036
3037 #undef TARGET_OPTION_OVERRIDE
3038 #define TARGET_OPTION_OVERRIDE rx_option_override
3039
3040 #undef TARGET_OPTION_OPTIMIZATION_TABLE
3041 #define TARGET_OPTION_OPTIMIZATION_TABLE rx_option_optimization_table
3042
3043 #undef TARGET_PROMOTE_FUNCTION_MODE
3044 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3045
3046 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3047 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3048
3049 #undef TARGET_EXCEPT_UNWIND_INFO
3050 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
3051
3052 #undef TARGET_FLAGS_REGNUM
3053 #define TARGET_FLAGS_REGNUM CC_REG
3054
3055 struct gcc_target targetm = TARGET_INITIALIZER;
3056
3057 /* #include "gt-rx.h" */