]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rx/rx.c
Tweak date last change.
[thirdparty/gcc.git] / gcc / config / rx / rx.c
CommitLineData
24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
3aea1f79 2 Copyright (C) 2008-2014 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "tree.h"
9ed99284 30#include "varasm.h"
31#include "stor-layout.h"
32#include "calls.h"
24833e1a 33#include "rtl.h"
34#include "regs.h"
35#include "hard-reg-set.h"
24833e1a 36#include "insn-config.h"
37#include "conditions.h"
38#include "output.h"
39#include "insn-attr.h"
40#include "flags.h"
41#include "function.h"
42#include "expr.h"
43#include "optabs.h"
44#include "libfuncs.h"
45#include "recog.h"
0b205f4c 46#include "diagnostic-core.h"
24833e1a 47#include "toplev.h"
48#include "reload.h"
49#include "df.h"
50#include "ggc.h"
51#include "tm_p.h"
52#include "debug.h"
53#include "target.h"
54#include "target-def.h"
55#include "langhooks.h"
fba5dd52 56#include "opts.h"
367b1459 57#include "cgraph.h"
f7715905 58#include "builtins.h"
6e507301 59
/* Hard register numbers chosen for small-data-area (GP) and PID base
   addressing.  They remain INVALID_REGNUM until assigned by
   rx_conditional_register_usage.  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* Number of registers reserved for interrupt handlers; presumably set
   from a command-line option elsewhere — TODO confirm.  */
static unsigned int rx_num_interrupt_regs;
24833e1a 63\f
/* Return the register used for small-data-area (GP) addressing.
   It is a fatal internal error to call this before the register
   has been assigned by rx_conditional_register_usage.  */

static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}
71
/* Return the register used as the PID base.  It is a fatal internal
   error to call this before the register has been assigned by
   rx_conditional_register_usage.  */

static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}
79
80/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
81
82static tree
83rx_decl_for_addr (rtx op)
84{
85 if (GET_CODE (op) == MEM)
86 op = XEXP (op, 0);
87 if (GET_CODE (op) == CONST)
88 op = XEXP (op, 0);
89 while (GET_CODE (op) == PLUS)
90 op = XEXP (op, 0);
91 if (GET_CODE (op) == SYMBOL_REF)
92 return SYMBOL_REF_DECL (op);
93 return NULL_TREE;
94}
95
6bb30542 96static void rx_print_operand (FILE *, rtx, int);
97
ccfccd66 98#define CC_FLAG_S (1 << 0)
99#define CC_FLAG_Z (1 << 1)
100#define CC_FLAG_O (1 << 2)
101#define CC_FLAG_C (1 << 3)
f7fcec1a 102#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
ccfccd66 103
104static unsigned int flags_from_mode (enum machine_mode mode);
105static unsigned int flags_from_code (enum rtx_code code);
67e66e16 106\f
6e507301 107/* Return true if OP is a reference to an object in a PID data area. */
108
/* Classification of an operand with respect to the PID data area.  */
enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data
			   area, but it has not been placed there yet.  */
};
115
116static enum pid_type
117rx_pid_data_operand (rtx op)
118{
119 tree op_decl;
120
121 if (!TARGET_PID)
122 return PID_NOT_PID;
123
124 if (GET_CODE (op) == PLUS
125 && GET_CODE (XEXP (op, 0)) == REG
126 && GET_CODE (XEXP (op, 1)) == CONST
127 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
128 return PID_ENCODED;
129
130 op_decl = rx_decl_for_addr (op);
131
132 if (op_decl)
133 {
134 if (TREE_READONLY (op_decl))
135 return PID_UNENCODED;
136 }
137 else
138 {
139 /* Sigh, some special cases. */
140 if (GET_CODE (op) == SYMBOL_REF
141 || GET_CODE (op) == LABEL_REF)
142 return PID_UNENCODED;
143 }
144
145 return PID_NOT_PID;
146}
147
148static rtx
149rx_legitimize_address (rtx x,
150 rtx oldx ATTRIBUTE_UNUSED,
151 enum machine_mode mode ATTRIBUTE_UNUSED)
152{
153 if (rx_pid_data_operand (x) == PID_UNENCODED)
154 {
155 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
156 return rv;
157 }
158
159 if (GET_CODE (x) == PLUS
160 && GET_CODE (XEXP (x, 0)) == PLUS
161 && REG_P (XEXP (XEXP (x, 0), 0))
162 && REG_P (XEXP (x, 1)))
163 return force_reg (SImode, x);
164
165 return x;
166}
167
24833e1a 168/* Return true if OP is a reference to an object in a small data area. */
169
170static bool
171rx_small_data_operand (rtx op)
172{
173 if (rx_small_data_limit == 0)
174 return false;
175
176 if (GET_CODE (op) == SYMBOL_REF)
177 return SYMBOL_REF_SMALL_P (op);
178
179 return false;
180}
181
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Return true if X is a valid
   address for a memory access of MODE.  STRICT is forwarded to
   RTX_OK_FOR_BASE (it is only "unused" directly in this body).  */

static bool
rx_is_legitimate_address (enum machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  /* PID operands that are already encoded are valid as they stand;
     ones still awaiting encoding are not.  */
  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    /* NB: the "default:" label deliberately falls through to
	       case 4, so any mode larger than 4 bytes uses factor 4.  */
	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
275
/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */
279
bool
rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
{
  /* NOTE(review): despite the parameter name, MEM is switched on as if
     it were the address itself (REG/PLUS/SYMBOL_REF) — confirm callers
     pass an address rtx here.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* Displacement limit scales with the access size.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}
320
5afe50d9 321/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
322
static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Strip a CONST wrapper so we classify the expression inside it.  */
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  gcc_assert (REG_P (XEXP (addr, 0)));
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
385\f
24833e1a 386/* A C compound statement to output to stdio stream FILE the
387 assembler syntax for an instruction operand that is a memory
388 reference whose address is ADDR. */
389
/* Output to FILE the assembler syntax for the memory address ADDR.
   Handles register indirect, pre-decrement, post-increment, indexed
   and displacement forms, plus PID/UNSPEC constants.  */

static void
rx_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	/* Either operand may be the base register.  */
	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    /* Indexed form: [index, base].  */
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    /* Displacement form: disp[base].  */
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  /* FIXME: Putting this case label here is an appalling abuse
	     of the C language — a bare UNSPEC jumps straight into the
	     body of the if above.  */
	case UNSPEC:
	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	}
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
468
469static void
470rx_print_integer (FILE * file, HOST_WIDE_INT val)
471{
472 if (IN_RANGE (val, -64, 64))
473 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
474 else
475 fprintf (file,
476 TARGET_AS100_SYNTAX
477 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
478 val);
479}
480
481static bool
482rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
483{
484 const char * op = integer_asm_op (size, is_aligned);
485
486 if (! CONST_INT_P (x))
487 return default_assemble_integer (x, size, is_aligned);
488
489 if (op == NULL)
490 return false;
491 fputs (op, asm_out_file);
492
493 rx_print_integer (asm_out_file, INTVAL (x));
494 fputc ('\n', asm_out_file);
495 return true;
496}
497
498
24833e1a 499/* Handles the insertion of a single operand into the assembler output.
500 The %<letter> directives supported are:
501
502 %A Print an operand without a leading # character.
503 %B Print an integer comparison name.
504 %C Print a control register name.
505 %F Print a condition code flag name.
6e507301 506 %G Register used for small-data-area addressing
24833e1a 507 %H Print high part of a DImode register, integer or address.
508 %L Print low part of a DImode register, integer or address.
6bb30542 509 %N Print the negation of the immediate value.
6e507301 510 %P Register used for PID addressing
24833e1a 511 %Q If the operand is a MEM, then correctly generate
776f1390 512 register indirect or register relative addressing.
513 %R Like %Q but for zero-extending loads. */
24833e1a 514
/* Implement the operand-printing hook: emit OP to FILE according to
   the %<LETTER> directive documented in the comment above.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* A %A applied to an UNSPEC (possibly wrapped in CONST) is handled
     by the default case, but without the leading '#'.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print an integer comparison name, chosen from the comparison
	 code and the flags available in the comparison's mode.  */
      {
	enum rtx_code code = GET_CODE (op);
	enum machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		/* Use the sign flag alone when overflow is unavailable.  */
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Print a control register name.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0:   fprintf (file, "psw"); break;
	case 2:   fprintf (file, "usp"); break;
	case 3:   fprintf (file, "fpsw"); break;
	case 4:   fprintf (file, "cpen"); break;
	case 8:   fprintf (file, "bpsw"); break;
	case 9:   fprintf (file, "bpc"); break;
	case 0xa: fprintf (file, "isp"); break;
	case 0xb: fprintf (file, "fintv"); break;
	case 0xc: fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Print a condition code flag name; accepts either a small
	 index or the flag's letter in upper or lower case.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      /* Register used for small-data-area addressing.  */
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* Print high part of a DImode register, integer or address.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Print low part of a DImode register, integer or address.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      /* Print the negation of the immediate value.  */
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      /* Register used for PID addressing.  */
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      /* Like %Q but for zero-extending loads.  */
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      /* Accept the displacement on either side of the PLUS.  */
	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  /* Append the size suffix, checking the displacement is
	     aligned and within range for that access size.  */
	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* Strip a plain (const (unspec ...)); print
	 (const (plus (unspec ...) (const_int))) as "#(sym + n)".  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_SINGLE (rv, val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
931
6e507301 932/* Maybe convert an operand into its PID format. */
933
934rtx
935rx_maybe_pidify_operand (rtx op, int copy_to_reg)
936{
937 if (rx_pid_data_operand (op) == PID_UNENCODED)
938 {
939 if (GET_CODE (op) == MEM)
940 {
941 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
942 op = replace_equiv_address (op, a);
943 }
944 else
945 {
946 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
947 }
948
949 if (copy_to_reg)
950 op = copy_to_mode_reg (GET_MODE (op), op);
951 }
952 return op;
953}
954
24833e1a 955/* Returns an assembler template for a move instruction. */
956
char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  /* NOTE: returns a pointer to this static buffer, so the result is
     only valid until the next call.  */
  static char out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  /* PID and small-data sources need special operand templates; the
     8-byte modes cannot use them.  */
  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      /* 8-byte moves are split into two 4-byte moves; the order is
	 chosen so that an overlapping register pair is not clobbered
	 before it is read.  */
      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
24833e1a 1037\f
1038/* Return VALUE rounded up to the next ALIGNMENT boundary. */
1039
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1046
1047/* Return the number of bytes in the argument registers
1048 occupied by an argument of type TYPE and mode MODE. */
1049
ee4e8428 1050static unsigned int
4bccad5e 1051rx_function_arg_size (enum machine_mode mode, const_tree type)
24833e1a 1052{
1053 unsigned int num_bytes;
1054
1055 num_bytes = (mode == BLKmode)
1056 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1057 return rx_round_up (num_bytes, UNITS_PER_WORD);
1058}
1059
1060#define NUM_ARG_REGS 4
1061#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1062
1063/* Return an RTL expression describing the register holding a function
1064 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1065 be passed on the stack. CUM describes the previous parameters to the
1066 function and NAMED is false if the parameter is part of a variable
1067 parameter list, or the last named parameter before the start of a
1068 variable parameter list. */
1069
static rtx
rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.
     (A variable-sized BLKmode returns -1 from int_size_in_bytes,
     which wraps to a huge unsigned value and is rejected by the
     register-capacity check below.)  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  /* Argument registers start at r1 (register 1).  */
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
1107
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Advance past an argument of
   MODE and TYPE by adding its word-rounded size to the running byte
   count held in CUM.  */

static void
rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
1114
/* Implement TARGET_FUNCTION_ARG_BOUNDARY: the alignment (in bits) of
   an on-stack argument of MODE and TYPE.  */

static unsigned int
rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      /* NOTE(review): TYPE is named as a type, yet a decl is also
	 handled here — presumably some caller passes one; confirm.  */
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}
1134
24833e1a 1135/* Return an RTL describing where a function return value of type RET_TYPE
1136 is held. */
1137
static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  enum machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.
     Complex modes are excluded; they are returned in MODE as-is.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      )
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}
1155
1156/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1157 regard to function returns as does TARGET_FUNCTION_VALUE. */
1158
1159static enum machine_mode
1160rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1161 enum machine_mode mode,
0318c61a 1162 int * punsignedp ATTRIBUTE_UNUSED,
bd7d2835 1163 const_tree funtype ATTRIBUTE_UNUSED,
1164 int for_return)
1165{
1166 if (for_return != 1
1167 || GET_MODE_SIZE (mode) >= 4
02f06d23 1168 || COMPLEX_MODE_P (mode)
bd7d2835 1169 || GET_MODE_SIZE (mode) < 1)
1170 return mode;
1171
1172 return SImode;
24833e1a 1173}
1174
1175static bool
1176rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1177{
1178 HOST_WIDE_INT size;
1179
1180 if (TYPE_MODE (type) != BLKmode
1181 && ! AGGREGATE_TYPE_P (type))
1182 return false;
1183
1184 size = int_size_in_bytes (type);
1185 /* Large structs and those whose size is not an
1186 exact multiple of 4 are returned in memory. */
1187 return size < 1
1188 || size > 16
1189 || (size % UNITS_PER_WORD) != 0;
1190}
1191
/* Implement TARGET_STRUCT_VALUE_RTX: the register through which the
   address of a returned aggregate is passed.  */

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1198
1199static bool
1200rx_return_in_msb (const_tree valtype)
1201{
1202 return TARGET_BIG_ENDIAN_DATA
1203 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1204}
1205
1206/* Returns true if the provided function has the specified attribute. */
1207
1208static inline bool
1209has_func_attr (const_tree decl, const char * func_attr)
1210{
1211 if (decl == NULL_TREE)
1212 decl = current_function_decl;
1213
1214 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1215}
1216
/* Returns true if the provided function has the "fast_interrupt" attribute.
   NULL_TREE means the current function.  */

static inline bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}
1224
/* Returns true if the provided function has the "interrupt" attribute.
   NULL_TREE means the current function.  */

static inline bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}
1232
/* Returns true if the provided function has the "naked" attribute.
   NULL_TREE means the current function.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}
1240\f
/* True while compiling a fast interrupt handler.  Set by
   rx_set_current_function () (which then calls target_reinit ()) and
   consulted by rx_conditional_register_usage () to release registers
   r10-r13 for allocation.  */
static bool use_fixed_regs = false;
1242
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  Fixes the PID and
   small-data base registers when those features are enabled, and
   toggles the r10-r13 register masks for fast interrupt handlers
   according to the file-scope USE_FIXED_REGS flag.  */

static void
rx_conditional_register_usage (void)
{
  /* Tracks which register-mask state is currently installed, so the
     save/restore below only happens on a real transition.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* With PID the small-data pointer sits just below the PID base
	 register; otherwise it takes the PID base's slot.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1301
/* A node in a singly-linked list of function decls that have already
   been mentioned in a diagnostic (see rx_set_current_function).  */
struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;
1310
1311static void
1312add_warned_decl (tree fndecl)
1313{
1314 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1315
1316 warned->fndecl = fndecl;
1317 warned->next = warned_decls;
1318 warned_decls = warned;
1319}
1320
1321/* Returns TRUE if FNDECL is on our list of warned about decls. */
1322
1323static bool
1324already_warned (tree fndecl)
1325{
1326 struct decl_chain * warned;
1327
1328 for (warned = warned_decls;
1329 warned != NULL;
1330 warned = warned->next)
1331 if (warned->fndecl == fndecl)
1332 return true;
1333
1334 return false;
1335}
1336
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  /* Crossing into or out of a fast interrupt handler: flip the
     register masks (see rx_conditional_register_usage) and reinit
     the target so the new masks take effect.  */
  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1389\f
1390/* Typical stack layout should look like this after the function's prologue:
1391
1392 | |
1393 -- ^
1394 | | \ |
1395 | | arguments saved | Increasing
1396 | | on the stack | addresses
1397 PARENT arg pointer -> | | /
1398 -------------------------- ---- -------------------
1399 CHILD |ret | return address
1400 --
1401 | | \
1402 | | call saved
1403 | | registers
1404 | | /
1405 --
1406 | | \
1407 | | local
1408 | | variables
1409 frame pointer -> | | /
1410 --
1411 | | \
1412 | | outgoing | Decreasing
1413 | | arguments | addresses
1414 current stack pointer -> | | / |
1415 -------------------------- ---- ------------------ V
1416 | | */
1417
/* Return the number of set bits in X (population count).  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  while (x != 0)
    {
      x &= x - 1;
      count ++;
    }

  return count;
}
1432
/* Nonzero when the current function must preserve the accumulator
   register: only when TARGET_SAVE_ACC_REGISTER is enabled and the
   current function is an interrupt or fast interrupt handler.  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1437
/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  /* Scan r1..r15 (register 0 is never saved here) collecting the
     registers that need to be preserved.  */
  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registered must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.
     (The prologue uses two pushed registers as intermediaries
     for the accumulator halves.)  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     FIXME: Is it worth improving this heuristic ?  */
  /* NOTE(review): "-1 << low" left-shifts a negative value, which is
     formally undefined behavior in C (a form such as (~0U << low) would
     be well defined); the trailing "& pushed_mask" below is redundant.  */
  pushed_mask = (-1 << low) & ~(-1 << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
1569
/* Generate a PUSHM instruction that matches the given operands.
   OPERANDS[0] is the byte count being pushed; OPERANDS[1] is the
   PARALLEL built by gen_rx_store_vector, whose element 1 stores the
   highest-numbered register of the block.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  /* Distance in register numbers between the first and last register
     of the pushed block.  */
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* FIRST_PUSH is the highest register; print "pushm rLOW-rHIGH".  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1591
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.
   Element 0 decrements the stack pointer by the block size; elements
   1..N store registers HIGH down to LOW at successively lower addresses
   below the pre-adjustment stack pointer.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per register plus one for the stack-pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1617
67e66e16 1618/* Mark INSN as being frame related. If it is a PARALLEL
1619 then mark each element as being frame related as well. */
1620
1621static void
1622mark_frame_related (rtx insn)
1623{
1624 RTX_FRAME_RELATED_P (insn) = 1;
1625 insn = PATTERN (insn);
1626
1627 if (GET_CODE (insn) == PARALLEL)
1628 {
1629 unsigned int i;
1630
61fc50a0 1631 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1632 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1633 }
1634}
1635
95272799 1636static bool
1637ok_for_max_constant (HOST_WIDE_INT val)
1638{
1639 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1640 /* If there is no constraint on the size of constants
1641 used as operands, then any value is legitimate. */
1642 return true;
1643
1644 /* rx_max_constant_size specifies the maximum number
1645 of bytes that can be used to hold a signed value. */
1646 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1647 ( 1 << (rx_max_constant_size * 8)));
1648}
1649
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.
   A NULL_RTX VAL is treated as zero, degenerating into a plain copy.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* A zero addend reduces to a register copy, which would be
	 pointless (and wrong to emit) if DEST and SRC coincide.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (SImode, dest,
				   gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
  return;
}
1688
/* Expand the function prologue: push the call-saved registers (either
   piecemeal or with one PUSHM), optionally save the accumulator for
   interrupt handlers, set up the frame pointer and allocate local /
   outgoing-argument stack space.  See the stack-layout diagram above.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;
  rtx insn;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
	    mark_frame_related (insn);
	  }
    }
  else if (low)
    {
      /* Contiguous block: a single PUSH or PUSHM covers LOW..HIGH.  */
      if (high == low)
	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
      else
	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						    * UNITS_PER_WORD),
					   gen_rx_store_vector (low, high)));
      mark_frame_related (insn);
    }

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest-numbered pushed registers.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      true);
    }
}
1812
/* Emit global labels of the form $tableentry$<num>$<table> into FILE
   for every vector number given in the ANAME ("interrupt" or "vector")
   attribute of the current function.  A string/identifier argument
   selects the table name; "$default" emits the default entry label.
   NOTE(review): the consumer of these $tableentry$ symbols (linker
   script or startup code) is outside this file — confirm there.  */

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    /* Any other name selects the table for subsequent
	       integer arguments.  */
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1868
/* Implement TARGET_ASM_FUNCTION_PROLOGUE.  Emits the vector table
   entry labels required by any "interrupt"/"vector" attributes, then
   assembler comments noting special properties of the function.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1892
/* Generate a POPM or RTSD instruction that matches the given operands.
   OPERANDS[0] is the stack adjustment (used only for RTSD); OPERANDS[1]
   is the PARALLEL built by gen_rx_popm_vector (IS_POPM) or
   gen_rx_rtsd_vector, whose register loads start at element 1 with the
   lowest register.  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Number of registers beyond the first: the PARALLEL also contains
     the SP update (and, for RTSD, the return), hence the bias.  */
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1923
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.
   Element 0 releases ADJUST bytes of stack, elements 1..N reload
   registers LOW..HIGH from the bottom of the freed area, and the final
   element is the return itself.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* BIAS = SP adjustment + one extra load slot + the return.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1953
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.
   Element 0 pops the block off the stack (increments SP); elements 1..N
   load registers LOW..HIGH from successive words at the old SP.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per register plus one for the stack-pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
f35edb6f 1981
1982/* Returns true if a simple return insn can be used. */
1983
1984bool
1985rx_can_use_simple_return (void)
1986{
1987 unsigned int low;
1988 unsigned int high;
1989 unsigned int frame_size;
1990 unsigned int stack_size;
1991 unsigned int register_mask;
1992
1993 if (is_naked_func (NULL_TREE)
1994 || is_fast_interrupt_func (NULL_TREE)
1995 || is_interrupt_func (NULL_TREE))
1996 return false;
1997
1998 rx_get_stack_layout (& low, & high, & register_mask,
1999 & frame_size, & stack_size);
2000
2001 return (register_mask == 0
2002 && (frame_size + stack_size) == 0
2003 && low == 0);
2004}
2005
/* Expand the function epilogue, undoing rx_expand_prologue: release
   stack space, restore the accumulator and call-saved registers, and
   emit the appropriate return (or nothing but the pops, for sibcalls).
   Uses the RTSD frame-deconstruct instruction when possible.  */

void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment because we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can be copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
	 But, in order to keep gcc happy we have to generate some kind of
	 epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
		       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
	{
	  unsigned int acc_low, acc_high;

	  /* Reverse the saving of the accumulator register onto the stack.
	     Note we must adjust the saved "low" accumulator value as it
	     is really the middle 32-bits of the accumulator.  */
	  if (register_mask)
	    {
	      /* Locate the same two registers the prologue used as
		 intermediaries: the two lowest-numbered saved regs.  */
	      acc_low = acc_high = 0;

	      for (reg = 1; reg < CC_REGNUM; reg ++)
		if (register_mask & (1 << reg))
		  {
		    if (acc_low == 0)
		      acc_low = reg;
		    else
		      {
			acc_high = reg;
			break;
		      }
		  }
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
	    }
	  else
	    {
	      acc_low = low;
	      acc_high = low + 1;
	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
					 gen_rx_popm_vector (acc_low, acc_high)));
	    }

	  /* Shift the middle 32 bits back into place (the prologue read
	     them with MVFACMI) before reloading the accumulator.  */
	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
				  gen_rtx_REG (SImode, acc_low),
				  GEN_INT (16)));
	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
	}

      if (register_mask)
	{
	  for (reg = 0; reg < CC_REGNUM; reg ++)
	    if (register_mask & (1 << reg))
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
	}
      else if (low)
	{
	  if (high == low)
	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
	  else
	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
				       gen_rx_popm_vector (low, high)));
	}

      if (is_fast_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_fast_interrupt_return ());
	}
      else if (is_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_exception_return ());
	}
      else if (! is_sibcall)
	emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
	{
	  if (low)
	    emit_jump_insn (gen_pop_and_return
			    (GEN_INT (rtsd_size),
			     gen_rx_rtsd_vector (rtsd_size, low, high)));
	  else
	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

	  return;
	}

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
					gen_rx_rtsd_vector (regs_size,
							    low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
2175
2176
2177/* Compute the offset (in words) between FROM (arg pointer
2178 or frame pointer) and TO (frame pointer or stack pointer).
2179 See ASCII art comment at the start of rx_expand_prologue
2180 for more information. */
2181
2182int
2183rx_initial_elimination_offset (int from, int to)
2184{
2185 unsigned int low;
2186 unsigned int high;
2187 unsigned int frame_size;
2188 unsigned int stack_size;
2189 unsigned int mask;
2190
2191 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2192
2193 if (from == ARG_POINTER_REGNUM)
2194 {
2195 /* Extend the computed size of the stack frame to
2196 include the registers pushed in the prologue. */
2197 if (low)
2198 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2199 else
2200 frame_size += bit_count (mask) * UNITS_PER_WORD;
2201
2202 /* Remember to include the return address. */
2203 frame_size += 1 * UNITS_PER_WORD;
2204
2205 if (to == FRAME_POINTER_REGNUM)
2206 return frame_size;
2207
2208 gcc_assert (to == STACK_POINTER_REGNUM);
2209 return frame_size + stack_size;
2210 }
2211
2212 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2213 return stack_size;
2214}
2215
24833e1a 2216/* Decide if a variable should go into one of the small data sections. */
2217
2218static bool
2219rx_in_small_data (const_tree decl)
2220{
2221 int size;
738a6bda 2222 const char * section;
24833e1a 2223
2224 if (rx_small_data_limit == 0)
2225 return false;
2226
2227 if (TREE_CODE (decl) != VAR_DECL)
2228 return false;
2229
2230 /* We do not put read-only variables into a small data area because
2231 they would be placed with the other read-only sections, far away
2232 from the read-write data sections, and we only have one small
2233 data area pointer.
2234 Similarly commons are placed in the .bss section which might be
2235 far away (and out of alignment with respect to) the .data section. */
2236 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2237 return false;
2238
2239 section = DECL_SECTION_NAME (decl);
2240 if (section)
738a6bda 2241 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
24833e1a 2242
2243 size = int_size_in_bytes (TREE_TYPE (decl));
2244
2245 return (size > 0) && (size <= rx_small_data_limit);
2246}
2247
2248/* Return a section for X.
2249 The only special thing we do here is to honor small data. */
2250
2251static section *
2252rx_select_rtx_section (enum machine_mode mode,
2253 rtx x,
2254 unsigned HOST_WIDE_INT align)
2255{
2256 if (rx_small_data_limit > 0
2257 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2258 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2259 return sdata_section;
2260
2261 return default_elf_select_rtx_section (mode, x, align);
2262}
2263
2264static section *
2265rx_select_section (tree decl,
2266 int reloc,
2267 unsigned HOST_WIDE_INT align)
2268{
2269 if (rx_small_data_limit > 0)
2270 {
2271 switch (categorize_decl_for_section (decl, reloc))
2272 {
2273 case SECCAT_SDATA: return sdata_section;
2274 case SECCAT_SBSS: return sbss_section;
2275 case SECCAT_SRODATA:
2276 /* Fall through. We do not put small, read only
2277 data into the C_2 section because we are not
2278 using the C_2 section. We do not use the C_2
2279 section because it is located with the other
2280 read-only data sections, far away from the read-write
2281 data sections and we only have one small data
2282 pointer (r13). */
2283 default:
2284 break;
2285 }
2286 }
2287
2288 /* If we are supporting the Renesas assembler
2289 we cannot use mergeable sections. */
2290 if (TARGET_AS100_SYNTAX)
2291 switch (categorize_decl_for_section (decl, reloc))
2292 {
2293 case SECCAT_RODATA_MERGE_CONST:
2294 case SECCAT_RODATA_MERGE_STR_INIT:
2295 case SECCAT_RODATA_MERGE_STR:
2296 return readonly_data_section;
2297
2298 default:
2299 break;
2300 }
2301
2302 return default_elf_select_section (decl, reloc, align);
2303}
2304\f
/* Codes for the RX machine specific builtins, used to index
   rx_builtins[] and as DECL_FUNCTION_CODE values.  The order of
   the enumerators is part of the ABI of rx_builtins[] - do not
   reorder.  */

enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max
};
2329
103700c7 2330static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2331
24833e1a 2332static void
2333rx_init_builtins (void)
2334{
dbf38144 2335#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2336 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2337 add_builtin_function ("__builtin_rx_" LC_NAME, \
2338 build_function_type_list (RET_TYPE##_type_node, \
2339 NULL_TREE), \
2340 RX_BUILTIN_##UC_NAME, \
2341 BUILT_IN_MD, NULL, NULL_TREE)
2342
24833e1a 2343#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
103700c7 2344 rx_builtins[RX_BUILTIN_##UC_NAME] = \
f7fcec1a 2345 add_builtin_function ("__builtin_rx_" LC_NAME, \
24833e1a 2346 build_function_type_list (RET_TYPE##_type_node, \
2347 ARG_TYPE##_type_node, \
2348 NULL_TREE), \
2349 RX_BUILTIN_##UC_NAME, \
2350 BUILT_IN_MD, NULL, NULL_TREE)
2351
2352#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
103700c7 2353 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2354 add_builtin_function ("__builtin_rx_" LC_NAME, \
2355 build_function_type_list (RET_TYPE##_type_node, \
2356 ARG_TYPE1##_type_node,\
2357 ARG_TYPE2##_type_node,\
2358 NULL_TREE), \
2359 RX_BUILTIN_##UC_NAME, \
2360 BUILT_IN_MD, NULL, NULL_TREE)
2361
2362#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
103700c7 2363 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2364 add_builtin_function ("__builtin_rx_" LC_NAME, \
2365 build_function_type_list (RET_TYPE##_type_node, \
2366 ARG_TYPE1##_type_node,\
2367 ARG_TYPE2##_type_node,\
2368 ARG_TYPE3##_type_node,\
2369 NULL_TREE), \
2370 RX_BUILTIN_##UC_NAME, \
2371 BUILT_IN_MD, NULL, NULL_TREE)
2372
dbf38144 2373 ADD_RX_BUILTIN0 (BRK, "brk", void);
24833e1a 2374 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2375 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2376 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2377 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2378 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2379 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2380 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
dbf38144 2381 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2382 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
24833e1a 2383 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2384 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
dbf38144 2385 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
24833e1a 2386 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2387 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 2388 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 2389 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2390 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2391 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
dbf38144 2392 ADD_RX_BUILTIN0 (WAIT, "wait", void);
24833e1a 2393}
2394
103700c7 2395/* Return the RX builtin for CODE. */
2396
2397static tree
2398rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2399{
2400 if (code >= RX_BUILTIN_max)
2401 return error_mark_node;
2402
2403 return rx_builtins[code];
2404}
2405
24833e1a 2406static rtx
2407rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2408{
2409 if (reg && ! REG_P (arg))
2410 arg = force_reg (SImode, arg);
2411
2412 emit_insn (gen_func (arg));
2413
2414 return NULL_RTX;
2415}
2416
2417static rtx
2418rx_expand_builtin_mvtc (tree exp)
2419{
2420 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2421 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2422
2423 if (! CONST_INT_P (arg1))
2424 return NULL_RTX;
2425
2426 if (! REG_P (arg2))
2427 arg2 = force_reg (SImode, arg2);
2428
2429 emit_insn (gen_mvtc (arg1, arg2));
2430
2431 return NULL_RTX;
2432}
2433
2434static rtx
2435rx_expand_builtin_mvfc (tree t_arg, rtx target)
2436{
2437 rtx arg = expand_normal (t_arg);
2438
2439 if (! CONST_INT_P (arg))
2440 return NULL_RTX;
2441
e4d9e8e5 2442 if (target == NULL_RTX)
2443 return NULL_RTX;
2444
24833e1a 2445 if (! REG_P (target))
2446 target = force_reg (SImode, target);
2447
2448 emit_insn (gen_mvfc (target, arg));
2449
2450 return target;
2451}
2452
67e66e16 2453static rtx
2454rx_expand_builtin_mvtipl (rtx arg)
2455{
2456 /* The RX610 does not support the MVTIPL instruction. */
2457 if (rx_cpu_type == RX610)
2458 return NULL_RTX;
2459
e5743482 2460 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2461 return NULL_RTX;
2462
2463 emit_insn (gen_mvtipl (arg));
2464
2465 return NULL_RTX;
2466}
2467
24833e1a 2468static rtx
2469rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2470{
2471 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2472 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2473
2474 if (! REG_P (arg1))
2475 arg1 = force_reg (SImode, arg1);
2476
2477 if (! REG_P (arg2))
2478 arg2 = force_reg (SImode, arg2);
2479
2480 emit_insn (gen_func (arg1, arg2));
2481
2482 return NULL_RTX;
2483}
2484
2485static rtx
2486rx_expand_int_builtin_1_arg (rtx arg,
2487 rtx target,
2488 rtx (* gen_func)(rtx, rtx),
2489 bool mem_ok)
2490{
2491 if (! REG_P (arg))
2492 if (!mem_ok || ! MEM_P (arg))
2493 arg = force_reg (SImode, arg);
2494
2495 if (target == NULL_RTX || ! REG_P (target))
2496 target = gen_reg_rtx (SImode);
2497
2498 emit_insn (gen_func (target, arg));
2499
2500 return target;
2501}
2502
2503static rtx
2504rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2505{
2506 if (target == NULL_RTX || ! REG_P (target))
2507 target = gen_reg_rtx (SImode);
2508
2509 emit_insn (gen_func (target));
2510
2511 return target;
2512}
2513
2514static rtx
2515rx_expand_builtin_round (rtx arg, rtx target)
2516{
2517 if ((! REG_P (arg) && ! MEM_P (arg))
2518 || GET_MODE (arg) != SFmode)
2519 arg = force_reg (SFmode, arg);
2520
2521 if (target == NULL_RTX || ! REG_P (target))
2522 target = gen_reg_rtx (SImode);
2523
2524 emit_insn (gen_lrintsf2 (target, arg));
2525
2526 return target;
2527}
2528
e5743482 2529static int
0318c61a 2530valid_psw_flag (rtx op, const char *which)
e5743482 2531{
2532 static int mvtc_inform_done = 0;
2533
2534 if (GET_CODE (op) == CONST_INT)
2535 switch (INTVAL (op))
2536 {
2537 case 0: case 'c': case 'C':
2538 case 1: case 'z': case 'Z':
2539 case 2: case 's': case 'S':
2540 case 3: case 'o': case 'O':
2541 case 8: case 'i': case 'I':
2542 case 9: case 'u': case 'U':
2543 return 1;
2544 }
2545
2546 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2547 if (!mvtc_inform_done)
2548 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2549 mvtc_inform_done = 1;
2550
2551 return 0;
2552}
2553
24833e1a 2554static rtx
2555rx_expand_builtin (tree exp,
2556 rtx target,
2557 rtx subtarget ATTRIBUTE_UNUSED,
2558 enum machine_mode mode ATTRIBUTE_UNUSED,
2559 int ignore ATTRIBUTE_UNUSED)
2560{
2561 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 2562 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 2563 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2564 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2565
2566 switch (fcode)
2567 {
2568 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
e5743482 2569 case RX_BUILTIN_CLRPSW:
2570 if (!valid_psw_flag (op, "clrpsw"))
2571 return NULL_RTX;
2572 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2573 case RX_BUILTIN_SETPSW:
2574 if (!valid_psw_flag (op, "setpsw"))
2575 return NULL_RTX;
2576 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
24833e1a 2577 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2578 (op, gen_int, false);
2579 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2580 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2581 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2582 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2583 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2584 (target, gen_mvfachi);
2585 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2586 (target, gen_mvfacmi);
2587 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2588 (op, gen_mvtachi, true);
2589 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2590 (op, gen_mvtaclo, true);
2591 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2592 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2593 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2594 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2595 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2596 (op, gen_racw, false);
2597 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2598 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2599 (op, target, gen_revw, false);
24833e1a 2600 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2601
2602 default:
2603 internal_error ("bad builtin code");
2604 break;
2605 }
2606
2607 return NULL_RTX;
2608}
2609\f
2610/* Place an element into a constructor or destructor section.
2611 Like default_ctor_section_asm_out_constructor in varasm.c
2612 except that it uses .init_array (or .fini_array) and it
2613 handles constructor priorities. */
2614
2615static void
2616rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2617{
2618 section * s;
2619
2620 if (priority != DEFAULT_INIT_PRIORITY)
2621 {
2622 char buf[18];
2623
2624 sprintf (buf, "%s.%.5u",
2625 is_ctor ? ".init_array" : ".fini_array",
2626 priority);
2627 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2628 }
2629 else if (is_ctor)
2630 s = ctors_section;
2631 else
2632 s = dtors_section;
2633
2634 switch_to_section (s);
2635 assemble_align (POINTER_SIZE);
2636 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2637}
2638
2639static void
2640rx_elf_asm_constructor (rtx symbol, int priority)
2641{
2642 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2643}
2644
2645static void
2646rx_elf_asm_destructor (rtx symbol, int priority)
2647{
2648 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2649}
2650\f
67e66e16 2651/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2652
2653static tree
2654rx_handle_func_attribute (tree * node,
2655 tree name,
2656 tree args,
2657 int flags ATTRIBUTE_UNUSED,
2658 bool * no_add_attrs)
2659{
2660 gcc_assert (DECL_P (* node));
24833e1a 2661
2662 if (TREE_CODE (* node) != FUNCTION_DECL)
2663 {
2664 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2665 name);
2666 * no_add_attrs = true;
2667 }
2668
2669 /* FIXME: We ought to check for conflicting attributes. */
2670
2671 /* FIXME: We ought to check that the interrupt and exception
2672 handler attributes have been applied to void functions. */
2673 return NULL_TREE;
2674}
2675
7ce85a1f 2676/* Check "vector" attribute. */
2677
2678static tree
2679rx_handle_vector_attribute (tree * node,
2680 tree name,
2681 tree args,
2682 int flags ATTRIBUTE_UNUSED,
2683 bool * no_add_attrs)
2684{
2685 gcc_assert (DECL_P (* node));
2686 gcc_assert (args != NULL_TREE);
2687
2688 if (TREE_CODE (* node) != FUNCTION_DECL)
2689 {
2690 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2691 name);
2692 * no_add_attrs = true;
2693 }
2694
2695 return NULL_TREE;
2696}
2697
24833e1a 2698/* Table of RX specific attributes. */
2699const struct attribute_spec rx_attribute_table[] =
2700{
ac86af5d 2701 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2702 affects_type_identity. */
2703 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2704 false },
7ce85a1f 2705 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
ac86af5d 2706 false },
2707 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2708 false },
7ce85a1f 2709 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2710 false },
ac86af5d 2711 { NULL, 0, 0, false, false, false, NULL, false }
24833e1a 2712};
2713
42d89991 2714/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
02e53c17 2715
2716static void
42d89991 2717rx_override_options_after_change (void)
98cb9b5b 2718{
2719 static bool first_time = TRUE;
98cb9b5b 2720
2721 if (first_time)
2722 {
2723 /* If this is the first time through and the user has not disabled
42d89991 2724 the use of RX FPU hardware then enable -ffinite-math-only,
2725 since the FPU instructions do not support NaNs and infinities. */
98cb9b5b 2726 if (TARGET_USE_FPU)
42d89991 2727 flag_finite_math_only = 1;
98cb9b5b 2728
98cb9b5b 2729 first_time = FALSE;
2730 }
2731 else
2732 {
2733 /* Alert the user if they are changing the optimization options
2734 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2735 if (TARGET_USE_FPU
42d89991 2736 && !flag_finite_math_only)
2737 warning (0, "RX FPU instructions do not support NaNs and infinities");
98cb9b5b 2738 }
2739}
2740
1af17d44 2741static void
2742rx_option_override (void)
2743{
8cb00d70 2744 unsigned int i;
2745 cl_deferred_option *opt;
f1f41a6c 2746 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
8cb00d70 2747
f1f41a6c 2748 if (v)
2749 FOR_EACH_VEC_ELT (*v, i, opt)
2750 {
2751 switch (opt->opt_index)
2752 {
2753 case OPT_mint_register_:
2754 switch (opt->value)
2755 {
2756 case 4:
2757 fixed_regs[10] = call_used_regs [10] = 1;
2758 /* Fall through. */
2759 case 3:
2760 fixed_regs[11] = call_used_regs [11] = 1;
2761 /* Fall through. */
2762 case 2:
2763 fixed_regs[12] = call_used_regs [12] = 1;
2764 /* Fall through. */
2765 case 1:
2766 fixed_regs[13] = call_used_regs [13] = 1;
2767 /* Fall through. */
2768 case 0:
2769 rx_num_interrupt_regs = opt->value;
2770 break;
2771 default:
2772 rx_num_interrupt_regs = 0;
2773 /* Error message already given because rx_handle_option
2774 returned false. */
2775 break;
2776 }
2777 break;
8cb00d70 2778
f1f41a6c 2779 default:
2780 gcc_unreachable ();
2781 }
2782 }
8cb00d70 2783
1af17d44 2784 /* This target defaults to strict volatile bitfields. */
941a2396 2785 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
1af17d44 2786 flag_strict_volatile_bitfields = 1;
42d89991 2787
2788 rx_override_options_after_change ();
9f9a3b39 2789
2790 if (align_jumps == 0 && ! optimize_size)
958c4dc5 2791 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
9f9a3b39 2792 if (align_loops == 0 && ! optimize_size)
958c4dc5 2793 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
9f9a3b39 2794 if (align_labels == 0 && ! optimize_size)
958c4dc5 2795 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 2 : 3);
1af17d44 2796}
2797
98cb9b5b 2798\f
24833e1a 2799static bool
2800rx_allocate_stack_slots_for_args (void)
2801{
2802 /* Naked functions should not allocate stack slots for arguments. */
2803 return ! is_naked_func (NULL_TREE);
2804}
2805
2806static bool
2807rx_func_attr_inlinable (const_tree decl)
2808{
2809 return ! is_fast_interrupt_func (decl)
67e66e16 2810 && ! is_interrupt_func (decl)
24833e1a 2811 && ! is_naked_func (decl);
2812}
2813
08c6cbd2 2814static bool
2815rx_warn_func_return (tree decl)
2816{
2817 /* Naked functions are implemented entirely in assembly, including the
2818 return sequence, so suppress warnings about this. */
2819 return !is_naked_func (decl);
2820}
2821
61fc50a0 2822/* Return nonzero if it is ok to make a tail-call to DECL,
2823 a function_decl or NULL if this is an indirect call, using EXP */
2824
2825static bool
e4d9e8e5 2826rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2827{
2828 /* Do not allow indirect tailcalls. The
2829 sibcall patterns do not support them. */
2830 if (decl == NULL)
2831 return false;
2832
2833 /* Never tailcall from inside interrupt handlers or naked functions. */
2834 if (is_fast_interrupt_func (NULL_TREE)
2835 || is_interrupt_func (NULL_TREE)
2836 || is_naked_func (NULL_TREE))
2837 return false;
2838
2839 return true;
2840}
2841
24833e1a 2842static void
2843rx_file_start (void)
2844{
2845 if (! TARGET_AS100_SYNTAX)
2846 default_file_start ();
2847}
2848
2849static bool
2850rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2851{
c6347c7a 2852 /* The packed attribute overrides the MS behaviour. */
2853 return ! TYPE_PACKED (record_type);
24833e1a 2854}
24833e1a 2855\f
2856/* Returns true if X a legitimate constant for an immediate
2857 operand on the RX. X is already known to satisfy CONSTANT_P. */
2858
2859bool
f7fcec1a 2860rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
24833e1a 2861{
24833e1a 2862 switch (GET_CODE (x))
2863 {
2864 case CONST:
2865 x = XEXP (x, 0);
2866
2867 if (GET_CODE (x) == PLUS)
2868 {
2869 if (! CONST_INT_P (XEXP (x, 1)))
2870 return false;
2871
2872 /* GCC would not pass us CONST_INT + CONST_INT so we
2873 know that we have {SYMBOL|LABEL} + CONST_INT. */
2874 x = XEXP (x, 0);
2875 gcc_assert (! CONST_INT_P (x));
2876 }
2877
2878 switch (GET_CODE (x))
2879 {
2880 case LABEL_REF:
2881 case SYMBOL_REF:
2882 return true;
2883
95272799 2884 case UNSPEC:
6e507301 2885 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
95272799 2886
24833e1a 2887 default:
2888 /* FIXME: Can this ever happen ? */
776f1390 2889 gcc_unreachable ();
24833e1a 2890 }
2891 break;
2892
2893 case LABEL_REF:
2894 case SYMBOL_REF:
2895 return true;
2896 case CONST_DOUBLE:
09bb92cc 2897 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2898 case CONST_VECTOR:
2899 return false;
2900 default:
2901 gcc_assert (CONST_INT_P (x));
2902 break;
2903 }
2904
95272799 2905 return ok_for_max_constant (INTVAL (x));
24833e1a 2906}
2907
24833e1a 2908static int
d9c5e5f4 2909rx_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
2910 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
24833e1a 2911{
2912 rtx a, b;
2913
2914 if (GET_CODE (addr) != PLUS)
2915 return COSTS_N_INSNS (1);
2916
2917 a = XEXP (addr, 0);
2918 b = XEXP (addr, 1);
2919
2920 if (REG_P (a) && REG_P (b))
2921 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2922 return COSTS_N_INSNS (4);
2923
2924 if (speed)
2925 /* [REG+OFF] is just as fast as [REG]. */
2926 return COSTS_N_INSNS (1);
2927
2928 if (CONST_INT_P (b)
2929 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2930 /* Try to discourage REG + <large OFF> when optimizing for size. */
2931 return COSTS_N_INSNS (2);
2932
2933 return COSTS_N_INSNS (1);
2934}
2935
2936static bool
2937rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2938{
2939 /* We can always eliminate to the frame pointer.
2940 We can eliminate to the stack pointer unless a frame
2941 pointer is needed. */
2942
2943 return to == FRAME_POINTER_REGNUM
2944 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2945}
2946\f
2947
2948static void
2949rx_trampoline_template (FILE * file)
2950{
2951 /* Output assembler code for a block containing the constant
2952 part of a trampoline, leaving space for the variable parts.
2953
2954 On the RX, (where r8 is the static chain regnum) the trampoline
2955 looks like:
2956
2957 mov #<static chain value>, r8
2958 mov #<function's address>, r9
2959 jmp r9
2960
2961 In big-endian-data-mode however instructions are read into the CPU
2962 4 bytes at a time. These bytes are then swapped around before being
2963 passed to the decoder. So...we must partition our trampoline into
2964 4 byte packets and swap these packets around so that the instruction
2965 reader will reverse the process. But, in order to avoid splitting
2966 the 32-bit constants across these packet boundaries, (making inserting
2967 them into the constructed trampoline very difficult) we have to pad the
2968 instruction sequence with NOP insns. ie:
2969
2970 nop
2971 nop
2972 mov.l #<...>, r8
2973 nop
2974 nop
2975 mov.l #<...>, r9
2976 jmp r9
2977 nop
2978 nop */
2979
2980 if (! TARGET_BIG_ENDIAN_DATA)
2981 {
2982 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2983 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2984 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2985 }
2986 else
2987 {
2988 char r8 = '0' + STATIC_CHAIN_REGNUM;
2989 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2990
2991 if (TARGET_AS100_SYNTAX)
2992 {
2993 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2994 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2995 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2996 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2997 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2998 }
2999 else
3000 {
3001 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3002 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3003 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3004 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3005 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3006 }
3007 }
3008}
3009
3010static void
3011rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3012{
3013 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3014
3015 emit_block_move (tramp, assemble_trampoline_template (),
3016 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3017
3018 if (TARGET_BIG_ENDIAN_DATA)
3019 {
3020 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3021 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3022 }
3023 else
3024 {
3025 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3026 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3027 }
3028}
3029\f
ccfccd66 3030static int
3e8d9684 3031rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
3032 reg_class_t regclass ATTRIBUTE_UNUSED,
3033 bool in)
9d2f1b03 3034{
6145a46d 3035 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
9d2f1b03 3036}
3037
ccfccd66 3038/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 3039
3040static unsigned int
ccfccd66 3041flags_from_mode (enum machine_mode mode)
9d2f1b03 3042{
ccfccd66 3043 switch (mode)
9d2f1b03 3044 {
ccfccd66 3045 case CC_ZSmode:
3046 return CC_FLAG_S | CC_FLAG_Z;
3047 case CC_ZSOmode:
3048 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3049 case CC_ZSCmode:
3050 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3051 case CCmode:
3052 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3053 case CC_Fmode:
3054 return CC_FLAG_FP;
3055 default:
3056 gcc_unreachable ();
3057 }
3058}
9d2f1b03 3059
ccfccd66 3060/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 3061
ccfccd66 3062static enum machine_mode
3063mode_from_flags (unsigned int f)
3064{
3065 if (f & CC_FLAG_FP)
3066 return CC_Fmode;
3067 if (f & CC_FLAG_O)
3068 {
3069 if (f & CC_FLAG_C)
3070 return CCmode;
3071 else
3072 return CC_ZSOmode;
9d2f1b03 3073 }
ccfccd66 3074 else if (f & CC_FLAG_C)
3075 return CC_ZSCmode;
3076 else
3077 return CC_ZSmode;
9d2f1b03 3078}
3079
ccfccd66 3080/* Convert an RTX_CODE to the set of flags needed to implement it.
3081 This assumes an integer comparison. */
3082
9d2f1b03 3083static unsigned int
ccfccd66 3084flags_from_code (enum rtx_code code)
9d2f1b03 3085{
ccfccd66 3086 switch (code)
9d2f1b03 3087 {
ccfccd66 3088 case LT:
3089 case GE:
24ad6c43 3090 return CC_FLAG_S;
ccfccd66 3091 case GT:
3092 case LE:
3093 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3094 case GEU:
3095 case LTU:
3096 return CC_FLAG_C;
3097 case GTU:
3098 case LEU:
3099 return CC_FLAG_C | CC_FLAG_Z;
3100 case EQ:
3101 case NE:
3102 return CC_FLAG_Z;
3103 default:
3104 gcc_unreachable ();
9d2f1b03 3105 }
3106}
3107
ccfccd66 3108/* Return a CC_MODE of which both M1 and M2 are subsets. */
3109
3110static enum machine_mode
3111rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
9d2f1b03 3112{
ccfccd66 3113 unsigned f;
3114
3115 /* Early out for identical modes. */
3116 if (m1 == m2)
3117 return m1;
3118
3119 /* There's no valid combination for FP vs non-FP. */
3120 f = flags_from_mode (m1) | flags_from_mode (m2);
3121 if (f & CC_FLAG_FP)
3122 return VOIDmode;
3123
3124 /* Otherwise, see what mode can implement all the flags. */
3125 return mode_from_flags (f);
9d2f1b03 3126}
8b8777b9 3127
3128/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3129
3130enum machine_mode
24ad6c43 3131rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
8b8777b9 3132{
3133 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3134 return CC_Fmode;
3135
24ad6c43 3136 if (y != const0_rtx)
3137 return CCmode;
3138
ccfccd66 3139 return mode_from_flags (flags_from_code (cmp_code));
3140}
3141
ccfccd66 3142/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3143 CC_MODE, and use that in branches based on that compare. */
3144
3145void
3146rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
3147 rtx c1, rtx c2, rtx label)
3148{
3149 rtx flags, x;
3150
3151 flags = gen_rtx_REG (cc_mode, CC_REG);
3152 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3153 x = gen_rtx_SET (VOIDmode, flags, x);
3154 emit_insn (x);
3155
3156 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3157 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3158 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3159 emit_jump_insn (x);
8b8777b9 3160}
3161
fc3b02a9 3162/* A helper function for matching parallels that set the flags. */
3163
3164bool
3165rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
3166{
3167 rtx op1, flags;
3168 enum machine_mode flags_mode;
3169
3170 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3171
3172 op1 = XVECEXP (PATTERN (insn), 0, 1);
3173 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3174
3175 flags = SET_DEST (op1);
3176 flags_mode = GET_MODE (flags);
3177
3178 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3179 return false;
3180 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3181 return false;
3182
3183 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3184 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3185 return false;
3186
3187 return true;
3188}
9f9a3b39 3189\f
3190int
001afa63 3191rx_align_for_label (rtx lab, int uses_threshold)
9f9a3b39 3192{
001afa63 3193 /* This is a simple heuristic to guess when an alignment would not be useful
3194 because the delay due to the inserted NOPs would be greater than the delay
3195 due to the misaligned branch. If uses_threshold is zero then the alignment
3196 is always useful. */
f7fcec1a 3197 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
001afa63 3198 return 0;
3199
958c4dc5 3200 if (optimize_size)
3201 return 0;
3202 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3203 return 2;
3204 return 2;
9f9a3b39 3205}
3206
3207static int
3208rx_max_skip_for_label (rtx lab)
3209{
3210 int opsize;
3211 rtx op;
3212
e6cf07b2 3213 if (optimize_size)
3214 return 0;
3215
9f9a3b39 3216 if (lab == NULL_RTX)
3217 return 0;
fc3b02a9 3218
9f9a3b39 3219 op = lab;
3220 do
3221 {
3222 op = next_nonnote_nondebug_insn (op);
3223 }
3224 while (op && (LABEL_P (op)
3225 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3226 if (!op)
3227 return 0;
3228
3229 opsize = get_attr_length (op);
3230 if (opsize >= 0 && opsize < 8)
3231 return opsize - 1;
3232 return 0;
3233}
776f1390 3234
/* Compute the real length of the extending load-and-op instructions. */

/* Implement insn-length adjustment (used by the ADJUST_INSN_LENGTH
   machinery).  INSN is the instruction being measured and CURRENT_LENGTH
   is the length, in bytes, computed from the insn's "length" attribute.
   For the combined <op>-with-extending-memory-operand patterns the true
   encoding length depends on the addressing mode actually used, so we
   recompute it here; all other insns keep CURRENT_LENGTH.  */

int
rx_adjust_insn_length (rtx insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;	/* True for the zero-extending forms, false for sign.  */
  int factor;	/* Width of the memory operand in bytes: 1 (QI) or 2 (HI).  */

  if (!INSN_P (insn))
    return current_length;

  switch (INSN_CODE (insn))
    {
    default:
      return current_length;

    /* Zero-extending HImode memory operand.  */
    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    /* Sign-extending HImode memory operand.  */
    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    /* Zero-extending QImode memory operand.  */
    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    /* Sign-extending QImode memory operand.  */
    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  /* The extension may be either operand of the binary operation
     (e.g. subtraction and division are not commutative).  */
  extend = SET_SRC (extend);
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  /* The extension kind must agree with the insn code matched above.  */
  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  /* Plain register-indirect address: shortest encoding.  The QImode
     zero-extend forms are one byte shorter throughout.  */
  if (REG_P (XEXP (mem, 0)))
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  /* Offsets up to 255 * FACTOR use the short displacement form --
     presumably the displacement byte is scaled by the access size;
     confirm against the RX ISA manual.  */
  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  /* Larger offsets need the long displacement form.  */
  return (zero && factor == 1) ? 4 : 5;
}
ee1401ac 3344
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always permit volatile
   bit-field accesses to be narrowed to the declared field type rather
   than forcing the full container-word access.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3350
3351static bool
3352rx_ok_to_inline (tree caller, tree callee)
3353{
3354 /* Do not inline functions with local variables
3355 into a naked CALLER - naked function have no stack frame and
3356 locals need a frame in order to have somewhere to live.
3357
3358 Unfortunately we have no way to determine the presence of
3359 local variables in CALLEE, so we have to be cautious and
3360 assume that there might be some there.
3361
3362 We do allow inlining when CALLEE has the "inline" type
3363 modifier or the "always_inline" or "gnu_inline" attributes. */
3364 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3365 || DECL_DECLARED_INLINE_P (callee)
3366 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3367 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3368}
3369
f0964309 3370static bool
3371rx_enable_lra (void)
3372{
734bbdc0 3373 return TARGET_ENABLE_LRA;
f0964309 3374}
3375
/* Initialize the GCC target structure: override the default hook
   implementations with the RX-specific ones defined in this file,
   then instantiate the target vector.  */

/* Bit-field and inlining policy.  */
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

/* Alignment skip limits for labels, loops and barriers.  */
#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

/* Function return value and argument passing conventions.  */
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

/* Section selection.  */
#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	rx_select_section

/* Machine-specific builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

/* Assembly output.  */
#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

/* Address legitimacy and legitimization.  */
#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG		rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE	rx_function_arg_advance

#undef  TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY	rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION	rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER		rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

/* Costs and register elimination.  */
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

/* Trampolines for nested functions.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

/* Operand printing.  */
#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

/* Option handling.  */
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P 				rx_enable_lra

/* Instantiate the target vector with the hooks selected above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collection roots generated for this file by gengtype.  */
#include "gt-rx.h"