]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rx/rx.c
Replace printf with __builtin_printf
[thirdparty/gcc.git] / gcc / config / rx / rx.c
CommitLineData
24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
d353bf18 2 Copyright (C) 2008-2015 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
9ef16211 28#include "backend.h"
d040a5b0 29#include "cfghooks.h"
24833e1a 30#include "tree.h"
9ef16211 31#include "rtl.h"
32#include "df.h"
33#include "alias.h"
9ed99284 34#include "varasm.h"
35#include "stor-layout.h"
36#include "calls.h"
24833e1a 37#include "regs.h"
24833e1a 38#include "insn-config.h"
39#include "conditions.h"
40#include "output.h"
41#include "insn-attr.h"
42#include "flags.h"
d53441c8 43#include "expmed.h"
44#include "dojump.h"
45#include "explow.h"
46#include "emit-rtl.h"
47#include "stmt.h"
24833e1a 48#include "expr.h"
34517c64 49#include "insn-codes.h"
24833e1a 50#include "optabs.h"
51#include "libfuncs.h"
52#include "recog.h"
0b205f4c 53#include "diagnostic-core.h"
24833e1a 54#include "toplev.h"
55#include "reload.h"
94ea8568 56#include "cfgrtl.h"
57#include "cfganal.h"
58#include "lcm.h"
59#include "cfgbuild.h"
60#include "cfgcleanup.h"
24833e1a 61#include "tm_p.h"
62#include "debug.h"
63#include "target.h"
24833e1a 64#include "langhooks.h"
fba5dd52 65#include "opts.h"
367b1459 66#include "cgraph.h"
f7715905 67#include "builtins.h"
6e507301 68
0c71fb4f 69/* This file should be included last. */
4b498588 70#include "target-def.h"
71
/* Register numbers used for GP (small data area) and PID base addressing.
   They stay INVALID_REGNUM until assigned in rx_conditional_register_usage.  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* Number of registers reserved for interrupt handlers; presumably set from
   a command-line option — the initialisation site is not visible here.  */
static unsigned int rx_num_interrupt_regs;
24833e1a 75\f
6e507301 76static unsigned int
77rx_gp_base_regnum (void)
78{
79 if (rx_gp_base_regnum_val == INVALID_REGNUM)
80 gcc_unreachable ();
81 return rx_gp_base_regnum_val;
82}
83
84static unsigned int
85rx_pid_base_regnum (void)
86{
87 if (rx_pid_base_regnum_val == INVALID_REGNUM)
88 gcc_unreachable ();
89 return rx_pid_base_regnum_val;
90}
91
92/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
93
94static tree
95rx_decl_for_addr (rtx op)
96{
97 if (GET_CODE (op) == MEM)
98 op = XEXP (op, 0);
99 if (GET_CODE (op) == CONST)
100 op = XEXP (op, 0);
101 while (GET_CODE (op) == PLUS)
102 op = XEXP (op, 0);
103 if (GET_CODE (op) == SYMBOL_REF)
104 return SYMBOL_REF_DECL (op);
105 return NULL_TREE;
106}
107
/* Forward declaration — defined later in this file.  */
static void rx_print_operand (FILE *, rtx, int);

/* Condition-code flag bits used by flags_from_mode / flags_from_code.  */
#define CC_FLAG_S	(1 << 0)
#define CC_FLAG_Z	(1 << 1)
#define CC_FLAG_O	(1 << 2)
#define CC_FLAG_C	(1 << 3)
#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);
67e66e16 118\f
/* Classification of an operand with respect to the PID (position
   independent data) data area — returned by rx_pid_data_operand.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area, but it has not been placed there yet.  */
};
127
128static enum pid_type
129rx_pid_data_operand (rtx op)
130{
131 tree op_decl;
132
133 if (!TARGET_PID)
134 return PID_NOT_PID;
135
136 if (GET_CODE (op) == PLUS
137 && GET_CODE (XEXP (op, 0)) == REG
138 && GET_CODE (XEXP (op, 1)) == CONST
139 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
140 return PID_ENCODED;
141
142 op_decl = rx_decl_for_addr (op);
143
144 if (op_decl)
145 {
146 if (TREE_READONLY (op_decl))
147 return PID_UNENCODED;
148 }
149 else
150 {
151 /* Sigh, some special cases. */
152 if (GET_CODE (op) == SYMBOL_REF
153 || GET_CODE (op) == LABEL_REF)
154 return PID_UNENCODED;
155 }
156
157 return PID_NOT_PID;
158}
159
160static rtx
161rx_legitimize_address (rtx x,
162 rtx oldx ATTRIBUTE_UNUSED,
3754d046 163 machine_mode mode ATTRIBUTE_UNUSED)
6e507301 164{
165 if (rx_pid_data_operand (x) == PID_UNENCODED)
166 {
167 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
168 return rv;
169 }
170
171 if (GET_CODE (x) == PLUS
172 && GET_CODE (XEXP (x, 0)) == PLUS
173 && REG_P (XEXP (XEXP (x, 0), 0))
174 && REG_P (XEXP (x, 1)))
175 return force_reg (SImode, x);
176
177 return x;
178}
179
24833e1a 180/* Return true if OP is a reference to an object in a small data area. */
181
182static bool
183rx_small_data_operand (rtx op)
184{
185 if (rx_small_data_limit == 0)
186 return false;
187
188 if (GET_CODE (op) == SYMBOL_REF)
189 return SYMBOL_REF_SMALL_P (op);
190
191 return false;
192}
193
/* Return true if X is a legitimate address for a MODE-sized access —
   presumably the TARGET_LEGITIMATE_ADDRESS_P hook (the hook table is not
   visible in this chunk).  Accepted forms, per the cases below:
     - register indirect
     - pre-decrement / post-increment register indirect (1/2/4 byte modes)
     - already-encoded PID references
     - register relative (REG + positive, aligned, bounded displacement)
     - unscaled indexed REG + REG (byte accesses only)
     - scaled indexed REG + REG * FACTOR where FACTOR == mode size
     - small data area symbols.  */

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  /* PID data must be reached via the PID base register, so unencoded
     references are rejected and encoded ones accepted outright.  */
  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      /* Whichever operand is a valid base register leaves the other
	 operand as the index.  */
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    /* Sizes other than 1 and 2 (including 8-byte modes) fall
	       into the 4-byte scaling case.  */
	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    /* The encoded displacement field holds at most 65535 units
	       of the access size.  */
	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
287
/* Returns TRUE for simple memory addresses, ie ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  /* Anything that is not even a legitimate address is certainly not
     a restricted one.  Treat reload as requiring strict checking.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base  = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* Displacement must fit in 16 bits of access-size units.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}
332
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  Return true if the
   validity of ADDR depends on the mode of the access using it.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      /* Classify by the second (offset) operand.  */
      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
395\f
/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Register indirect: [Rn].  */
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      /* Pre-decrement: [-Rn].  */
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      /* Post-increment: [Rn+].  */
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    /* Neither operand is a base register; emit "arg1 + arg2"
	       and let the assembler sort it out.  */
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	/* Indexed forms go inside the brackets ([index,base]); constant
	   displacements go before them (disp[base]).  */
	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  /* Only UNSPEC_CONST wrappers are expected here; unwrap to the
	     constant they carry.  */
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
485
486static void
487rx_print_integer (FILE * file, HOST_WIDE_INT val)
488{
489 if (IN_RANGE (val, -64, 64))
490 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
491 else
492 fprintf (file,
493 TARGET_AS100_SYNTAX
494 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
495 val);
496}
497
498static bool
499rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
500{
501 const char * op = integer_asm_op (size, is_aligned);
502
503 if (! CONST_INT_P (x))
504 return default_assemble_integer (x, size, is_aligned);
505
506 if (op == NULL)
507 return false;
508 fputs (op, asm_out_file);
509
510 rx_print_integer (asm_out_file, INTVAL (x));
511 fputc ('\n', asm_out_file);
512 return true;
513}
514
515
/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
         register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* %A applied to an UNSPEC (possibly inside a CONST) is handled by the
     default case, with hash printing suppressed.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print the condition name for the comparison OP, chosen from the
	 flags actually provided by the comparison's CC mode.  */
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		/* Without the O flag, signed less-than degrades to a
		   sign (negative) test.  */
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    /* The comparison must not need flags the mode does not set.  */
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Control register name from its encoded number.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0:   fprintf (file, "psw"); break;
	case 2:   fprintf (file, "usp"); break;
	case 3:   fprintf (file, "fpsw"); break;
	case 4:   fprintf (file, "cpen"); break;
	case 8:   fprintf (file, "bpsw"); break;
	case 9:   fprintf (file, "bpc"); break;
	case 0xa: fprintf (file, "isp"); break;
	case 0xb: fprintf (file, "fintv"); break;
	case 0xc: fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Condition code flag letter; accepts either the flag's number or
	 its (upper or lower case) letter.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* High 32 bits of a 64-bit operand; which register/word that is
	 depends on target endianness.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Low 32 bits of a 64-bit operand; mirror image of %H.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      /* NOTE(review): offset is HOST_WIDE_INT; "%ld" is only
		 correct when HOST_WIDE_INT is long — consider
		 HOST_WIDE_INT_PRINT_DEC.  */
	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  /* Append the size/signedness suffix and sanity-check the
	     displacement against the encodable range.  */
	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* PID-related CONST wrappers get special treatment; everything
	 else is dispatched on the operand's RTL code.  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	/* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		/* Emit "(sym[+addend]-__pid_base)" for a PID address.  */
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
946
6e507301 947/* Maybe convert an operand into its PID format. */
948
949rtx
950rx_maybe_pidify_operand (rtx op, int copy_to_reg)
951{
952 if (rx_pid_data_operand (op) == PID_UNENCODED)
953 {
954 if (GET_CODE (op) == MEM)
955 {
956 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
957 op = replace_equiv_address (op, a);
958 }
959 else
960 {
961 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
962 }
963
964 if (copy_to_reg)
965 op = copy_to_mode_reg (GET_MODE (op), op);
966 }
967 return op;
968}
969
/* Returns an assembler template for a move instruction.
   The template is built in a static buffer, so the result is only
   valid until the next call.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  /* PID references become (sym - __pid_base)[pid-reg]; small data
     references become %gp(sym)[gp-reg].  Neither supports 64-bit moves.  */
  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      /* Emit the two word moves in an order that does not clobber the
	 source before it has been copied.  */
      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
24833e1a 1052\f
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT is assumed to be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1061
1062/* Return the number of bytes in the argument registers
1063 occupied by an argument of type TYPE and mode MODE. */
1064
ee4e8428 1065static unsigned int
3754d046 1066rx_function_arg_size (machine_mode mode, const_tree type)
24833e1a 1067{
1068 unsigned int num_bytes;
1069
1070 num_bytes = (mode == BLKmode)
1071 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1072 return rx_round_up (num_bytes, UNITS_PER_WORD);
1073}
1074
/* Up to four words of arguments are passed in registers; the first
   argument register is r1 (see rx_function_arg's next_reg computation).  */
#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1077
/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.
     NOTE(review): int_size_in_bytes returns -1 for variable-sized types,
     which wraps in this unsigned variable rather than being caught here —
     presumably such types never reach this point; confirm.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  /* Argument registers start at r1.  */
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
1122
/* Advance the argument-register cursor *CUM past an argument of
   MODE/TYPE.  CUM counts argument-register bytes consumed so far
   (see rx_function_arg).  */

static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
1129
/* Return the alignment, in bits, required for an on-stack argument of
   the given MODE/TYPE.  */

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      /* TYPE can apparently also be a decl here; use its alignment.  */
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}
1149
/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.
     Complex and vector values keep their own mode.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode)
      )
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}
1172
/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  /* Only promote small scalar integer return values (mirrors the
     conditions in rx_function_value); everything else is unchanged.  */
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}
1193
1194static bool
1195rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1196{
1197 HOST_WIDE_INT size;
1198
1199 if (TYPE_MODE (type) != BLKmode
1200 && ! AGGREGATE_TYPE_P (type))
1201 return false;
1202
1203 size = int_size_in_bytes (type);
1204 /* Large structs and those whose size is not an
1205 exact multiple of 4 are returned in memory. */
1206 return size < 1
1207 || size > 16
1208 || (size % UNITS_PER_WORD) != 0;
1209}
1210
/* Return the register holding the address used when an aggregate is
   returned in memory (STRUCT_VAL_REGNUM for both caller and callee).  */

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1217
1218static bool
1219rx_return_in_msb (const_tree valtype)
1220{
1221 return TARGET_BIG_ENDIAN_DATA
1222 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1223}
1224
/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  /* A NULL decl means "the function currently being compiled".  */
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}
1235
/* Returns true if the provided function has the "fast_interrupt" attribute.
   A NULL DECL refers to the function currently being compiled.  */

static inline bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}
1243
/* Returns true if the provided function has the "interrupt" attribute.
   A NULL DECL refers to the function currently being compiled.  */

static inline bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}
1251
/* Returns true if the provided function has the "naked" attribute.
   A NULL DECL refers to the function currently being compiled.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}
1259\f
1260static bool use_fixed_regs = false;
1261
/* Adjust the fixed_regs[] and call_used_regs[] arrays to reflect the
   current command line options and function attributes: reserve base
   registers for PID and small-data addressing, and toggle the fast
   interrupt register treatment when use_fixed_regs has changed.  */

static void
rx_conditional_register_usage (void)
{
  /* Tracks the state installed by the previous call, so that the
     fast-interrupt adjustment below is only applied/undone once.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* The small-data base register sits just below the PID base
	 register when both are in use.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  /* Save the masks so they can be restored when the next
	     non-fast-interrupt function is compiled.  */
	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1320
/* A node in a singly-linked list of function decls.  */

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;
1329
1330static void
1331add_warned_decl (tree fndecl)
1332{
1333 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1334
1335 warned->fndecl = fndecl;
1336 warned->next = warned_decls;
1337 warned_decls = warned;
1338}
1339
1340/* Returns TRUE if FNDECL is on our list of warned about decls. */
1341
1342static bool
1343already_warned (tree fndecl)
1344{
1345 struct decl_chain * warned;
1346
1347 for (warned = warned_decls;
1348 warned != NULL;
1349 warned = warned->next)
1350 if (warned->fndecl == fndecl)
1351 return true;
1352
1353 return false;
1354}
1355
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      /* Moving into or out of a fast interrupt handler changes the
	 register masks (see rx_conditional_register_usage), so the
	 target tables must be recomputed.  */
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1408\f
1409/* Typical stack layout should looks like this after the function's prologue:
1410
1411 | |
1412 -- ^
1413 | | \ |
1414 | | arguments saved | Increasing
1415 | | on the stack | addresses
1416 PARENT arg pointer -> | | /
1417 -------------------------- ---- -------------------
1418 CHILD |ret | return address
1419 --
1420 | | \
1421 | | call saved
1422 | | registers
1423 | | /
1424 --
1425 | | \
1426 | | local
1427 | | variables
1428 frame pointer -> | | /
1429 --
1430 | | \
1431 | | outgoing | Decreasing
1432 | | arguments | addresses
1433 current stack pointer -> | | / |
1434 -------------------------- ---- ------------------ V
1435 | | */
1436
/* Return the number of set bits in X.  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  while (x != 0)
    {
      x &= x - 1;
      count++;
    }

  return count;
}
1451
/* True when the accumulator register must be preserved across the
   current function: saving was requested (TARGET_SAVE_ACC_REGISTER)
   and the function is an interrupt or fast interrupt handler.  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1456
/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  /* Scan r1..r15 (register 0 is never saved here) collecting the set of
     registers that the prologue must push.  */
  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registered must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.
     (The prologue uses the first two pushed registers as
     intermediaries when copying the accumulator - see
     rx_expand_prologue.)  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic ?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
1592
/* Generate a PUSHM instruction that matches the given operands.
   OPERANDS[0] is the byte count being pushed; OPERANDS[1] is the
   PARALLEL built by gen_rx_store_vector, whose element 1 stores the
   highest-numbered register of the range.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  /* Number of registers in the range, minus one.  */
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Element 0 is the stack pointer adjustment; element 1 is the
     first register store.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* FIRST_PUSH holds the highest register, so the low end of the
     range is found by subtracting LAST_REG.  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1614
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.
   Element 0 decrements the stack pointer by the total size; elements
   1..N store registers HIGH down to LOW at descending addresses.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One element per register, plus one for the SP adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1639
67e66e16 1640/* Mark INSN as being frame related. If it is a PARALLEL
1641 then mark each element as being frame related as well. */
1642
1643static void
1644mark_frame_related (rtx insn)
1645{
1646 RTX_FRAME_RELATED_P (insn) = 1;
1647 insn = PATTERN (insn);
1648
1649 if (GET_CODE (insn) == PARALLEL)
1650 {
1651 unsigned int i;
1652
61fc50a0 1653 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1654 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1655 }
1656}
1657
95272799 1658static bool
1659ok_for_max_constant (HOST_WIDE_INT val)
1660{
1661 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1662 /* If there is no constraint on the size of constants
1663 used as operands, then any value is legitimate. */
1664 return true;
1665
1666 /* rx_max_constant_size specifies the maximum number
1667 of bytes that can be used to hold a signed value. */
1e9446db 1668 return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
95272799 1669 ( 1 << (rx_max_constant_size * 8)));
1670}
1671
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* Adding zero degenerates to a plain move, which would be a
	 no-op if DEST and SRC were the same register.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
1708
75759166 1709static void
1710push_regs (unsigned int high, unsigned int low)
1711{
1712 rtx insn;
1713
1714 if (low == high)
1715 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1716 else
1717 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1718 gen_rx_store_vector (low, high)));
1719 mark_frame_related (insn);
1720}
1721
/* Expand RTL for the current function's prologue: push call-saved
   registers, optionally save the accumulator, set up the frame
   pointer and allocate locals/outgoing-argument space.  See the
   stack layout diagram above.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    low = high = reg;

	    /* Look for a span of registers.
	       Note - we do not have to worry about -Os and whether
	       it is better to use a single, longer PUSHM as
	       rx_get_stack_layout has already done that for us.  */
	    while (reg-- > 0)
	      if ((mask & (1 << reg)) == 0)
		break;
	      else
		--low;

	    push_regs (high, low);
	    /* REG wrapped past zero: the whole mask has been scanned.  */
	    if (reg == (unsigned) -1)
	      break;
	  }
    }
  else if (low)
    push_regs (high, low);

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest-numbered registers in the mask.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      false /* False because the epilogue will use the FP not the SP.  */);
    }
}
1849
/* Emit global "$tableentry$<num>$<vect>" labels to FILE for each vector
   number listed in the ANAME attribute of the current function.  A
   string or identifier argument selects the vector table name; the
   special name "$default" emits a default table entry label.  */

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    /* A non-"$default" name changes the table used by
	       subsequent INTEGER_CST arguments.  */
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Other argument kinds are silently ignored.  */
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1905
/* Output assembler text at the start of a function: vector table
   labels for the "interrupt"/"vector" attributes, plus informational
   comments about the kind of function being emitted.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1929
/* Generate a POPM or RTSD instruction that matches the given operands.
   OPERANDS[0] is the stack adjustment; OPERANDS[1] is the PARALLEL
   built by gen_rx_popm_vector / gen_rx_rtsd_vector, whose element 1
   loads the lowest-numbered register of the range.  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Discount the SP-adjust element (and, for RTSD, the trailing
     return) to get the register count minus one.  */
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1960
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.
   It adjusts the stack pointer by ADJUST, reloads registers LOW..HIGH
   from the stack, and ends with a return.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* BIAS covers the SP-adjust SET, the trailing return, and the
     inclusive register range.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1989
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector
   predicate: pop registers LOW..HIGH from the stack, incrementing the
   stack pointer by the total size popped.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One element per register, plus one for the SP adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
f35edb6f 2016
2017/* Returns true if a simple return insn can be used. */
2018
2019bool
2020rx_can_use_simple_return (void)
2021{
2022 unsigned int low;
2023 unsigned int high;
2024 unsigned int frame_size;
2025 unsigned int stack_size;
2026 unsigned int register_mask;
2027
2028 if (is_naked_func (NULL_TREE)
2029 || is_fast_interrupt_func (NULL_TREE)
2030 || is_interrupt_func (NULL_TREE))
2031 return false;
2032
2033 rx_get_stack_layout (& low, & high, & register_mask,
2034 & frame_size, & stack_size);
2035
2036 return (register_mask == 0
2037 && (frame_size + stack_size) == 0
2038 && low == 0);
2039}
2040
75759166 2041static void
2042pop_regs (unsigned int high, unsigned int low)
2043{
2044 if (high == low)
2045 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2046 else
2047 emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
2048 gen_rx_popm_vector (low, high)));
2049}
2050
/* Expand RTL for the current function's epilogue (or for the
   deconstruction preceding a sibling call when IS_SIBCALL).  */

void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment because we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
	 But, in order to keep gcc happy we have to generate some kind of
	 epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
		       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
	{
	  unsigned int acc_low, acc_high;

	  /* Reverse the saving of the accumulator register onto the stack.
	     Note we must adjust the saved "low" accumulator value as it
	     is really the middle 32-bits of the accumulator.  */
	  if (register_mask)
	    {
	      /* Find the same two lowest-numbered registers that the
		 prologue used as intermediaries.  */
	      acc_low = acc_high = 0;

	      for (reg = 1; reg < CC_REGNUM; reg ++)
		if (register_mask & (1 << reg))
		  {
		    if (acc_low == 0)
		      acc_low = reg;
		    else
		      {
			acc_high = reg;
			break;
		      }
		  }
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
	    }
	  else
	    {
	      acc_low = low;
	      acc_high = low + 1;
	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
					 gen_rx_popm_vector (acc_low, acc_high)));
	    }

	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
				  gen_rtx_REG (SImode, acc_low),
				  GEN_INT (16)));
	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
	}

      if (register_mask)
	{
	  /* Pop each contiguous run of registers in the mask with a
	     single POP/POPM.  */
	  for (reg = 0; reg < CC_REGNUM; reg ++)
	    if (register_mask & (1 << reg))
	      {
		low = high = reg;
		while (register_mask & (1 << high))
		  high ++;
		pop_regs (high - 1, low);
		reg = high;
	      }
	}
      else if (low)
	pop_regs (high, low);

      if (is_fast_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_fast_interrupt_return ());
	}
      else if (is_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_exception_return ());
	}
      else if (! is_sibcall)
	emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
	{
	  if (low)
	    emit_jump_insn (gen_pop_and_return
			    (GEN_INT (rtsd_size),
			     gen_rx_rtsd_vector (rtsd_size, low, high)));
	  else
	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

	  return;
	}

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
					gen_rx_rtsd_vector (regs_size,
							    low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
2220
2221
2222/* Compute the offset (in words) between FROM (arg pointer
2223 or frame pointer) and TO (frame pointer or stack pointer).
2224 See ASCII art comment at the start of rx_expand_prologue
2225 for more information. */
2226
2227int
2228rx_initial_elimination_offset (int from, int to)
2229{
2230 unsigned int low;
2231 unsigned int high;
2232 unsigned int frame_size;
2233 unsigned int stack_size;
2234 unsigned int mask;
2235
2236 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2237
2238 if (from == ARG_POINTER_REGNUM)
2239 {
2240 /* Extend the computed size of the stack frame to
2241 include the registers pushed in the prologue. */
2242 if (low)
2243 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2244 else
2245 frame_size += bit_count (mask) * UNITS_PER_WORD;
2246
2247 /* Remember to include the return address. */
2248 frame_size += 1 * UNITS_PER_WORD;
2249
2250 if (to == FRAME_POINTER_REGNUM)
2251 return frame_size;
2252
2253 gcc_assert (to == STACK_POINTER_REGNUM);
2254 return frame_size + stack_size;
2255 }
2256
2257 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2258 return stack_size;
2259}
2260
24833e1a 2261/* Decide if a variable should go into one of the small data sections. */
2262
2263static bool
2264rx_in_small_data (const_tree decl)
2265{
2266 int size;
738a6bda 2267 const char * section;
24833e1a 2268
2269 if (rx_small_data_limit == 0)
2270 return false;
2271
2272 if (TREE_CODE (decl) != VAR_DECL)
2273 return false;
2274
2275 /* We do not put read-only variables into a small data area because
2276 they would be placed with the other read-only sections, far away
2277 from the read-write data sections, and we only have one small
2278 data area pointer.
2279 Similarly commons are placed in the .bss section which might be
2280 far away (and out of alignment with respect to) the .data section. */
2281 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2282 return false;
2283
2284 section = DECL_SECTION_NAME (decl);
2285 if (section)
738a6bda 2286 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
24833e1a 2287
2288 size = int_size_in_bytes (TREE_TYPE (decl));
2289
2290 return (size > 0) && (size <= rx_small_data_limit);
2291}
2292
2293/* Return a section for X.
2294 The only special thing we do here is to honor small data. */
2295
2296static section *
3754d046 2297rx_select_rtx_section (machine_mode mode,
24833e1a 2298 rtx x,
2299 unsigned HOST_WIDE_INT align)
2300{
2301 if (rx_small_data_limit > 0
2302 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2303 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2304 return sdata_section;
2305
2306 return default_elf_select_rtx_section (mode, x, align);
2307}
2308
2309static section *
2310rx_select_section (tree decl,
2311 int reloc,
2312 unsigned HOST_WIDE_INT align)
2313{
2314 if (rx_small_data_limit > 0)
2315 {
2316 switch (categorize_decl_for_section (decl, reloc))
2317 {
2318 case SECCAT_SDATA: return sdata_section;
2319 case SECCAT_SBSS: return sbss_section;
2320 case SECCAT_SRODATA:
2321 /* Fall through. We do not put small, read only
2322 data into the C_2 section because we are not
2323 using the C_2 section. We do not use the C_2
2324 section because it is located with the other
2325 read-only data sections, far away from the read-write
2326 data sections and we only have one small data
2327 pointer (r13). */
2328 default:
2329 break;
2330 }
2331 }
2332
2333 /* If we are supporting the Renesas assembler
2334 we cannot use mergeable sections. */
2335 if (TARGET_AS100_SYNTAX)
2336 switch (categorize_decl_for_section (decl, reloc))
2337 {
2338 case SECCAT_RODATA_MERGE_CONST:
2339 case SECCAT_RODATA_MERGE_STR_INIT:
2340 case SECCAT_RODATA_MERGE_STR:
2341 return readonly_data_section;
2342
2343 default:
2344 break;
2345 }
2346
2347 return default_elf_select_section (decl, reloc, align);
2348}
2349\f
2350enum rx_builtin
2351{
2352 RX_BUILTIN_BRK,
2353 RX_BUILTIN_CLRPSW,
2354 RX_BUILTIN_INT,
2355 RX_BUILTIN_MACHI,
2356 RX_BUILTIN_MACLO,
2357 RX_BUILTIN_MULHI,
2358 RX_BUILTIN_MULLO,
2359 RX_BUILTIN_MVFACHI,
2360 RX_BUILTIN_MVFACMI,
2361 RX_BUILTIN_MVFC,
2362 RX_BUILTIN_MVTACHI,
2363 RX_BUILTIN_MVTACLO,
2364 RX_BUILTIN_MVTC,
67e66e16 2365 RX_BUILTIN_MVTIPL,
24833e1a 2366 RX_BUILTIN_RACW,
2367 RX_BUILTIN_REVW,
2368 RX_BUILTIN_RMPA,
2369 RX_BUILTIN_ROUND,
24833e1a 2370 RX_BUILTIN_SETPSW,
2371 RX_BUILTIN_WAIT,
2372 RX_BUILTIN_max
2373};
2374
103700c7 2375static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2376
24833e1a 2377static void
2378rx_init_builtins (void)
2379{
dbf38144 2380#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2381 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2382 add_builtin_function ("__builtin_rx_" LC_NAME, \
2383 build_function_type_list (RET_TYPE##_type_node, \
2384 NULL_TREE), \
2385 RX_BUILTIN_##UC_NAME, \
2386 BUILT_IN_MD, NULL, NULL_TREE)
2387
24833e1a 2388#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
103700c7 2389 rx_builtins[RX_BUILTIN_##UC_NAME] = \
f7fcec1a 2390 add_builtin_function ("__builtin_rx_" LC_NAME, \
24833e1a 2391 build_function_type_list (RET_TYPE##_type_node, \
2392 ARG_TYPE##_type_node, \
2393 NULL_TREE), \
2394 RX_BUILTIN_##UC_NAME, \
2395 BUILT_IN_MD, NULL, NULL_TREE)
2396
2397#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
103700c7 2398 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2399 add_builtin_function ("__builtin_rx_" LC_NAME, \
2400 build_function_type_list (RET_TYPE##_type_node, \
2401 ARG_TYPE1##_type_node,\
2402 ARG_TYPE2##_type_node,\
2403 NULL_TREE), \
2404 RX_BUILTIN_##UC_NAME, \
2405 BUILT_IN_MD, NULL, NULL_TREE)
2406
2407#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
103700c7 2408 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2409 add_builtin_function ("__builtin_rx_" LC_NAME, \
2410 build_function_type_list (RET_TYPE##_type_node, \
2411 ARG_TYPE1##_type_node,\
2412 ARG_TYPE2##_type_node,\
2413 ARG_TYPE3##_type_node,\
2414 NULL_TREE), \
2415 RX_BUILTIN_##UC_NAME, \
2416 BUILT_IN_MD, NULL, NULL_TREE)
2417
dbf38144 2418 ADD_RX_BUILTIN0 (BRK, "brk", void);
24833e1a 2419 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2420 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2421 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2422 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2423 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2424 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2425 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
dbf38144 2426 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2427 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
24833e1a 2428 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2429 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
dbf38144 2430 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
24833e1a 2431 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2432 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 2433 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 2434 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2435 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2436 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
dbf38144 2437 ADD_RX_BUILTIN0 (WAIT, "wait", void);
24833e1a 2438}
2439
103700c7 2440/* Return the RX builtin for CODE. */
2441
2442static tree
2443rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2444{
2445 if (code >= RX_BUILTIN_max)
2446 return error_mark_node;
2447
2448 return rx_builtins[code];
2449}
2450
24833e1a 2451static rtx
2452rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2453{
2454 if (reg && ! REG_P (arg))
2455 arg = force_reg (SImode, arg);
2456
2457 emit_insn (gen_func (arg));
2458
2459 return NULL_RTX;
2460}
2461
2462static rtx
2463rx_expand_builtin_mvtc (tree exp)
2464{
2465 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2466 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2467
2468 if (! CONST_INT_P (arg1))
2469 return NULL_RTX;
2470
2471 if (! REG_P (arg2))
2472 arg2 = force_reg (SImode, arg2);
2473
2474 emit_insn (gen_mvtc (arg1, arg2));
2475
2476 return NULL_RTX;
2477}
2478
2479static rtx
2480rx_expand_builtin_mvfc (tree t_arg, rtx target)
2481{
2482 rtx arg = expand_normal (t_arg);
2483
2484 if (! CONST_INT_P (arg))
2485 return NULL_RTX;
2486
e4d9e8e5 2487 if (target == NULL_RTX)
2488 return NULL_RTX;
2489
24833e1a 2490 if (! REG_P (target))
2491 target = force_reg (SImode, target);
2492
2493 emit_insn (gen_mvfc (target, arg));
2494
2495 return target;
2496}
2497
67e66e16 2498static rtx
2499rx_expand_builtin_mvtipl (rtx arg)
2500{
2501 /* The RX610 does not support the MVTIPL instruction. */
2502 if (rx_cpu_type == RX610)
2503 return NULL_RTX;
2504
e5743482 2505 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2506 return NULL_RTX;
2507
2508 emit_insn (gen_mvtipl (arg));
2509
2510 return NULL_RTX;
2511}
2512
24833e1a 2513static rtx
2514rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2515{
2516 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2517 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2518
2519 if (! REG_P (arg1))
2520 arg1 = force_reg (SImode, arg1);
2521
2522 if (! REG_P (arg2))
2523 arg2 = force_reg (SImode, arg2);
2524
2525 emit_insn (gen_func (arg1, arg2));
2526
2527 return NULL_RTX;
2528}
2529
2530static rtx
2531rx_expand_int_builtin_1_arg (rtx arg,
2532 rtx target,
2533 rtx (* gen_func)(rtx, rtx),
2534 bool mem_ok)
2535{
2536 if (! REG_P (arg))
2537 if (!mem_ok || ! MEM_P (arg))
2538 arg = force_reg (SImode, arg);
2539
2540 if (target == NULL_RTX || ! REG_P (target))
2541 target = gen_reg_rtx (SImode);
2542
2543 emit_insn (gen_func (target, arg));
2544
2545 return target;
2546}
2547
2548static rtx
2549rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2550{
2551 if (target == NULL_RTX || ! REG_P (target))
2552 target = gen_reg_rtx (SImode);
2553
2554 emit_insn (gen_func (target));
2555
2556 return target;
2557}
2558
2559static rtx
2560rx_expand_builtin_round (rtx arg, rtx target)
2561{
2562 if ((! REG_P (arg) && ! MEM_P (arg))
2563 || GET_MODE (arg) != SFmode)
2564 arg = force_reg (SFmode, arg);
2565
2566 if (target == NULL_RTX || ! REG_P (target))
2567 target = gen_reg_rtx (SImode);
2568
2569 emit_insn (gen_lrintsf2 (target, arg));
2570
2571 return target;
2572}
2573
e5743482 2574static int
0318c61a 2575valid_psw_flag (rtx op, const char *which)
e5743482 2576{
2577 static int mvtc_inform_done = 0;
2578
2579 if (GET_CODE (op) == CONST_INT)
2580 switch (INTVAL (op))
2581 {
2582 case 0: case 'c': case 'C':
2583 case 1: case 'z': case 'Z':
2584 case 2: case 's': case 'S':
2585 case 3: case 'o': case 'O':
2586 case 8: case 'i': case 'I':
2587 case 9: case 'u': case 'U':
2588 return 1;
2589 }
2590
2591 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2592 if (!mvtc_inform_done)
2593 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2594 mvtc_inform_done = 1;
2595
2596 return 0;
2597}
2598
24833e1a 2599static rtx
2600rx_expand_builtin (tree exp,
2601 rtx target,
2602 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 2603 machine_mode mode ATTRIBUTE_UNUSED,
24833e1a 2604 int ignore ATTRIBUTE_UNUSED)
2605{
2606 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 2607 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 2608 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2609 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2610
2611 switch (fcode)
2612 {
2613 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
e5743482 2614 case RX_BUILTIN_CLRPSW:
2615 if (!valid_psw_flag (op, "clrpsw"))
2616 return NULL_RTX;
2617 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2618 case RX_BUILTIN_SETPSW:
2619 if (!valid_psw_flag (op, "setpsw"))
2620 return NULL_RTX;
2621 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
24833e1a 2622 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2623 (op, gen_int, false);
2624 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2625 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2626 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2627 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2628 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2629 (target, gen_mvfachi);
2630 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2631 (target, gen_mvfacmi);
2632 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2633 (op, gen_mvtachi, true);
2634 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2635 (op, gen_mvtaclo, true);
6202f892 2636 case RX_BUILTIN_RMPA:
2637 if (rx_allow_string_insns)
2638 emit_insn (gen_rmpa ());
2639 else
2640 error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2641 return NULL_RTX;
24833e1a 2642 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2643 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2644 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2645 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2646 (op, gen_racw, false);
2647 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2648 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2649 (op, target, gen_revw, false);
24833e1a 2650 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2651
2652 default:
2653 internal_error ("bad builtin code");
2654 break;
2655 }
2656
2657 return NULL_RTX;
2658}
2659\f
2660/* Place an element into a constructor or destructor section.
2661 Like default_ctor_section_asm_out_constructor in varasm.c
2662 except that it uses .init_array (or .fini_array) and it
2663 handles constructor priorities. */
2664
2665static void
2666rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2667{
2668 section * s;
2669
2670 if (priority != DEFAULT_INIT_PRIORITY)
2671 {
2672 char buf[18];
2673
2674 sprintf (buf, "%s.%.5u",
2675 is_ctor ? ".init_array" : ".fini_array",
2676 priority);
2677 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2678 }
2679 else if (is_ctor)
2680 s = ctors_section;
2681 else
2682 s = dtors_section;
2683
2684 switch_to_section (s);
2685 assemble_align (POINTER_SIZE);
2686 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2687}
2688
2689static void
2690rx_elf_asm_constructor (rtx symbol, int priority)
2691{
2692 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2693}
2694
2695static void
2696rx_elf_asm_destructor (rtx symbol, int priority)
2697{
2698 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2699}
2700\f
67e66e16 2701/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2702
2703static tree
2704rx_handle_func_attribute (tree * node,
2705 tree name,
277d3719 2706 tree args ATTRIBUTE_UNUSED,
24833e1a 2707 int flags ATTRIBUTE_UNUSED,
2708 bool * no_add_attrs)
2709{
2710 gcc_assert (DECL_P (* node));
24833e1a 2711
2712 if (TREE_CODE (* node) != FUNCTION_DECL)
2713 {
2714 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2715 name);
2716 * no_add_attrs = true;
2717 }
2718
2719 /* FIXME: We ought to check for conflicting attributes. */
2720
2721 /* FIXME: We ought to check that the interrupt and exception
2722 handler attributes have been applied to void functions. */
2723 return NULL_TREE;
2724}
2725
7ce85a1f 2726/* Check "vector" attribute. */
2727
2728static tree
2729rx_handle_vector_attribute (tree * node,
2730 tree name,
2731 tree args,
2732 int flags ATTRIBUTE_UNUSED,
2733 bool * no_add_attrs)
2734{
2735 gcc_assert (DECL_P (* node));
2736 gcc_assert (args != NULL_TREE);
2737
2738 if (TREE_CODE (* node) != FUNCTION_DECL)
2739 {
2740 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2741 name);
2742 * no_add_attrs = true;
2743 }
2744
2745 return NULL_TREE;
2746}
2747
24833e1a 2748/* Table of RX specific attributes. */
2749const struct attribute_spec rx_attribute_table[] =
2750{
ac86af5d 2751 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2752 affects_type_identity. */
2753 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2754 false },
7ce85a1f 2755 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
ac86af5d 2756 false },
2757 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2758 false },
7ce85a1f 2759 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2760 false },
ac86af5d 2761 { NULL, 0, 0, false, false, false, NULL, false }
24833e1a 2762};
2763
42d89991 2764/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
02e53c17 2765
2766static void
42d89991 2767rx_override_options_after_change (void)
98cb9b5b 2768{
2769 static bool first_time = TRUE;
98cb9b5b 2770
2771 if (first_time)
2772 {
2773 /* If this is the first time through and the user has not disabled
42d89991 2774 the use of RX FPU hardware then enable -ffinite-math-only,
2775 since the FPU instructions do not support NaNs and infinities. */
98cb9b5b 2776 if (TARGET_USE_FPU)
42d89991 2777 flag_finite_math_only = 1;
98cb9b5b 2778
98cb9b5b 2779 first_time = FALSE;
2780 }
2781 else
2782 {
2783 /* Alert the user if they are changing the optimization options
2784 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2785 if (TARGET_USE_FPU
42d89991 2786 && !flag_finite_math_only)
2787 warning (0, "RX FPU instructions do not support NaNs and infinities");
98cb9b5b 2788 }
2789}
2790
1af17d44 2791static void
2792rx_option_override (void)
2793{
8cb00d70 2794 unsigned int i;
2795 cl_deferred_option *opt;
f1f41a6c 2796 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
8cb00d70 2797
f1f41a6c 2798 if (v)
2799 FOR_EACH_VEC_ELT (*v, i, opt)
2800 {
2801 switch (opt->opt_index)
2802 {
2803 case OPT_mint_register_:
2804 switch (opt->value)
2805 {
2806 case 4:
2807 fixed_regs[10] = call_used_regs [10] = 1;
2808 /* Fall through. */
2809 case 3:
2810 fixed_regs[11] = call_used_regs [11] = 1;
2811 /* Fall through. */
2812 case 2:
2813 fixed_regs[12] = call_used_regs [12] = 1;
2814 /* Fall through. */
2815 case 1:
2816 fixed_regs[13] = call_used_regs [13] = 1;
2817 /* Fall through. */
2818 case 0:
2819 rx_num_interrupt_regs = opt->value;
2820 break;
2821 default:
2822 rx_num_interrupt_regs = 0;
2823 /* Error message already given because rx_handle_option
2824 returned false. */
2825 break;
2826 }
2827 break;
8cb00d70 2828
f1f41a6c 2829 default:
2830 gcc_unreachable ();
2831 }
2832 }
8cb00d70 2833
1af17d44 2834 /* This target defaults to strict volatile bitfields. */
941a2396 2835 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
1af17d44 2836 flag_strict_volatile_bitfields = 1;
42d89991 2837
2838 rx_override_options_after_change ();
9f9a3b39 2839
5005fc53 2840 /* These values are bytes, not log. */
9f9a3b39 2841 if (align_jumps == 0 && ! optimize_size)
5005fc53 2842 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
9f9a3b39 2843 if (align_loops == 0 && ! optimize_size)
5005fc53 2844 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
9f9a3b39 2845 if (align_labels == 0 && ! optimize_size)
5005fc53 2846 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
1af17d44 2847}
2848
98cb9b5b 2849\f
24833e1a 2850static bool
2851rx_allocate_stack_slots_for_args (void)
2852{
2853 /* Naked functions should not allocate stack slots for arguments. */
2854 return ! is_naked_func (NULL_TREE);
2855}
2856
2857static bool
2858rx_func_attr_inlinable (const_tree decl)
2859{
2860 return ! is_fast_interrupt_func (decl)
67e66e16 2861 && ! is_interrupt_func (decl)
24833e1a 2862 && ! is_naked_func (decl);
2863}
2864
08c6cbd2 2865static bool
2866rx_warn_func_return (tree decl)
2867{
2868 /* Naked functions are implemented entirely in assembly, including the
2869 return sequence, so suppress warnings about this. */
2870 return !is_naked_func (decl);
2871}
2872
61fc50a0 2873/* Return nonzero if it is ok to make a tail-call to DECL,
2874 a function_decl or NULL if this is an indirect call, using EXP */
2875
2876static bool
e4d9e8e5 2877rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2878{
2879 /* Do not allow indirect tailcalls. The
2880 sibcall patterns do not support them. */
2881 if (decl == NULL)
2882 return false;
2883
2884 /* Never tailcall from inside interrupt handlers or naked functions. */
2885 if (is_fast_interrupt_func (NULL_TREE)
2886 || is_interrupt_func (NULL_TREE)
2887 || is_naked_func (NULL_TREE))
2888 return false;
2889
2890 return true;
2891}
2892
24833e1a 2893static void
2894rx_file_start (void)
2895{
2896 if (! TARGET_AS100_SYNTAX)
2897 default_file_start ();
2898}
2899
2900static bool
2901rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2902{
c6347c7a 2903 /* The packed attribute overrides the MS behaviour. */
2904 return ! TYPE_PACKED (record_type);
24833e1a 2905}
24833e1a 2906\f
2907/* Returns true if X a legitimate constant for an immediate
2908 operand on the RX. X is already known to satisfy CONSTANT_P. */
2909
2910bool
3754d046 2911rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
24833e1a 2912{
24833e1a 2913 switch (GET_CODE (x))
2914 {
2915 case CONST:
2916 x = XEXP (x, 0);
2917
2918 if (GET_CODE (x) == PLUS)
2919 {
2920 if (! CONST_INT_P (XEXP (x, 1)))
2921 return false;
2922
2923 /* GCC would not pass us CONST_INT + CONST_INT so we
2924 know that we have {SYMBOL|LABEL} + CONST_INT. */
2925 x = XEXP (x, 0);
2926 gcc_assert (! CONST_INT_P (x));
2927 }
2928
2929 switch (GET_CODE (x))
2930 {
2931 case LABEL_REF:
2932 case SYMBOL_REF:
2933 return true;
2934
95272799 2935 case UNSPEC:
6e507301 2936 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
95272799 2937
24833e1a 2938 default:
2939 /* FIXME: Can this ever happen ? */
776f1390 2940 gcc_unreachable ();
24833e1a 2941 }
2942 break;
2943
2944 case LABEL_REF:
2945 case SYMBOL_REF:
2946 return true;
2947 case CONST_DOUBLE:
09bb92cc 2948 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2949 case CONST_VECTOR:
2950 return false;
2951 default:
2952 gcc_assert (CONST_INT_P (x));
2953 break;
2954 }
2955
95272799 2956 return ok_for_max_constant (INTVAL (x));
24833e1a 2957}
2958
24833e1a 2959static int
3754d046 2960rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
d9c5e5f4 2961 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
24833e1a 2962{
2963 rtx a, b;
2964
2965 if (GET_CODE (addr) != PLUS)
2966 return COSTS_N_INSNS (1);
2967
2968 a = XEXP (addr, 0);
2969 b = XEXP (addr, 1);
2970
2971 if (REG_P (a) && REG_P (b))
2972 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2973 return COSTS_N_INSNS (4);
2974
2975 if (speed)
2976 /* [REG+OFF] is just as fast as [REG]. */
2977 return COSTS_N_INSNS (1);
2978
2979 if (CONST_INT_P (b)
2980 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2981 /* Try to discourage REG + <large OFF> when optimizing for size. */
2982 return COSTS_N_INSNS (2);
2983
2984 return COSTS_N_INSNS (1);
2985}
2986
2987static bool
2988rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2989{
2990 /* We can always eliminate to the frame pointer.
2991 We can eliminate to the stack pointer unless a frame
2992 pointer is needed. */
2993
2994 return to == FRAME_POINTER_REGNUM
2995 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2996}
2997\f
2998
2999static void
3000rx_trampoline_template (FILE * file)
3001{
3002 /* Output assembler code for a block containing the constant
3003 part of a trampoline, leaving space for the variable parts.
3004
3005 On the RX, (where r8 is the static chain regnum) the trampoline
3006 looks like:
3007
3008 mov #<static chain value>, r8
3009 mov #<function's address>, r9
3010 jmp r9
3011
3012 In big-endian-data-mode however instructions are read into the CPU
3013 4 bytes at a time. These bytes are then swapped around before being
3014 passed to the decoder. So...we must partition our trampoline into
3015 4 byte packets and swap these packets around so that the instruction
3016 reader will reverse the process. But, in order to avoid splitting
3017 the 32-bit constants across these packet boundaries, (making inserting
3018 them into the constructed trampoline very difficult) we have to pad the
3019 instruction sequence with NOP insns. ie:
3020
3021 nop
3022 nop
3023 mov.l #<...>, r8
3024 nop
3025 nop
3026 mov.l #<...>, r9
3027 jmp r9
3028 nop
3029 nop */
3030
3031 if (! TARGET_BIG_ENDIAN_DATA)
3032 {
3033 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3034 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3035 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3036 }
3037 else
3038 {
3039 char r8 = '0' + STATIC_CHAIN_REGNUM;
3040 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3041
3042 if (TARGET_AS100_SYNTAX)
3043 {
3044 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3045 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3046 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3047 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3048 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3049 }
3050 else
3051 {
3052 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3053 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3054 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3055 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3056 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3057 }
3058 }
3059}
3060
3061static void
3062rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3063{
3064 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3065
3066 emit_block_move (tramp, assemble_trampoline_template (),
3067 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3068
3069 if (TARGET_BIG_ENDIAN_DATA)
3070 {
3071 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3072 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3073 }
3074 else
3075 {
3076 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3077 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3078 }
3079}
3080\f
ccfccd66 3081static int
3754d046 3082rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3e8d9684 3083 reg_class_t regclass ATTRIBUTE_UNUSED,
3084 bool in)
9d2f1b03 3085{
6145a46d 3086 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
9d2f1b03 3087}
3088
ccfccd66 3089/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 3090
3091static unsigned int
3754d046 3092flags_from_mode (machine_mode mode)
9d2f1b03 3093{
ccfccd66 3094 switch (mode)
9d2f1b03 3095 {
ccfccd66 3096 case CC_ZSmode:
3097 return CC_FLAG_S | CC_FLAG_Z;
3098 case CC_ZSOmode:
3099 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3100 case CC_ZSCmode:
3101 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3102 case CCmode:
3103 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3104 case CC_Fmode:
3105 return CC_FLAG_FP;
3106 default:
3107 gcc_unreachable ();
3108 }
3109}
9d2f1b03 3110
ccfccd66 3111/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 3112
3754d046 3113static machine_mode
ccfccd66 3114mode_from_flags (unsigned int f)
3115{
3116 if (f & CC_FLAG_FP)
3117 return CC_Fmode;
3118 if (f & CC_FLAG_O)
3119 {
3120 if (f & CC_FLAG_C)
3121 return CCmode;
3122 else
3123 return CC_ZSOmode;
9d2f1b03 3124 }
ccfccd66 3125 else if (f & CC_FLAG_C)
3126 return CC_ZSCmode;
3127 else
3128 return CC_ZSmode;
9d2f1b03 3129}
3130
ccfccd66 3131/* Convert an RTX_CODE to the set of flags needed to implement it.
3132 This assumes an integer comparison. */
3133
9d2f1b03 3134static unsigned int
ccfccd66 3135flags_from_code (enum rtx_code code)
9d2f1b03 3136{
ccfccd66 3137 switch (code)
9d2f1b03 3138 {
ccfccd66 3139 case LT:
3140 case GE:
24ad6c43 3141 return CC_FLAG_S;
ccfccd66 3142 case GT:
3143 case LE:
3144 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3145 case GEU:
3146 case LTU:
3147 return CC_FLAG_C;
3148 case GTU:
3149 case LEU:
3150 return CC_FLAG_C | CC_FLAG_Z;
3151 case EQ:
3152 case NE:
3153 return CC_FLAG_Z;
3154 default:
3155 gcc_unreachable ();
9d2f1b03 3156 }
3157}
3158
ccfccd66 3159/* Return a CC_MODE of which both M1 and M2 are subsets. */
3160
3754d046 3161static machine_mode
3162rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
9d2f1b03 3163{
ccfccd66 3164 unsigned f;
3165
3166 /* Early out for identical modes. */
3167 if (m1 == m2)
3168 return m1;
3169
3170 /* There's no valid combination for FP vs non-FP. */
3171 f = flags_from_mode (m1) | flags_from_mode (m2);
3172 if (f & CC_FLAG_FP)
3173 return VOIDmode;
3174
3175 /* Otherwise, see what mode can implement all the flags. */
3176 return mode_from_flags (f);
9d2f1b03 3177}
8b8777b9 3178
3179/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3180
3754d046 3181machine_mode
24ad6c43 3182rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
8b8777b9 3183{
3184 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3185 return CC_Fmode;
3186
24ad6c43 3187 if (y != const0_rtx)
3188 return CCmode;
3189
ccfccd66 3190 return mode_from_flags (flags_from_code (cmp_code));
3191}
3192
ccfccd66 3193/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3194 CC_MODE, and use that in branches based on that compare. */
3195
3196void
3754d046 3197rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
ccfccd66 3198 rtx c1, rtx c2, rtx label)
3199{
3200 rtx flags, x;
3201
3202 flags = gen_rtx_REG (cc_mode, CC_REG);
3203 x = gen_rtx_COMPARE (cc_mode, c1, c2);
d1f9b275 3204 x = gen_rtx_SET (flags, x);
ccfccd66 3205 emit_insn (x);
3206
3207 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3208 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
d1f9b275 3209 x = gen_rtx_SET (pc_rtx, x);
ccfccd66 3210 emit_jump_insn (x);
8b8777b9 3211}
3212
fc3b02a9 3213/* A helper function for matching parallels that set the flags. */
3214
3215bool
3754d046 3216rx_match_ccmode (rtx insn, machine_mode cc_mode)
fc3b02a9 3217{
3218 rtx op1, flags;
3754d046 3219 machine_mode flags_mode;
fc3b02a9 3220
3221 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3222
3223 op1 = XVECEXP (PATTERN (insn), 0, 1);
3224 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3225
3226 flags = SET_DEST (op1);
3227 flags_mode = GET_MODE (flags);
3228
3229 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3230 return false;
3231 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3232 return false;
3233
3234 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3235 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3236 return false;
3237
3238 return true;
3239}
9f9a3b39 3240\f
3241int
001afa63 3242rx_align_for_label (rtx lab, int uses_threshold)
9f9a3b39 3243{
001afa63 3244 /* This is a simple heuristic to guess when an alignment would not be useful
3245 because the delay due to the inserted NOPs would be greater than the delay
3246 due to the misaligned branch. If uses_threshold is zero then the alignment
3247 is always useful. */
f7fcec1a 3248 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
001afa63 3249 return 0;
3250
958c4dc5 3251 if (optimize_size)
3252 return 0;
5005fc53 3253 /* These values are log, not bytes. */
958c4dc5 3254 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
5005fc53 3255 return 2; /* 4 bytes */
3256 return 3; /* 8 bytes */
9f9a3b39 3257}
3258
3259static int
695d0571 3260rx_max_skip_for_label (rtx_insn *lab)
9f9a3b39 3261{
3262 int opsize;
695d0571 3263 rtx_insn *op;
9f9a3b39 3264
e6cf07b2 3265 if (optimize_size)
3266 return 0;
3267
695d0571 3268 if (lab == NULL)
9f9a3b39 3269 return 0;
fc3b02a9 3270
9f9a3b39 3271 op = lab;
3272 do
3273 {
3274 op = next_nonnote_nondebug_insn (op);
3275 }
3276 while (op && (LABEL_P (op)
3277 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3278 if (!op)
3279 return 0;
3280
3281 opsize = get_attr_length (op);
3282 if (opsize >= 0 && opsize < 8)
3283 return opsize - 1;
3284 return 0;
3285}
776f1390 3286
/* Compute the real length of the extending load-and-op instructions.

   INSN is the instruction being measured and CURRENT_LENGTH is the
   length computed from the insn attributes.  For the combined
   "load-with-extend then operate" patterns listed below, return the
   actual encoded length in bytes, which depends on the addressing mode
   of the memory operand and on whether the extension is zero or sign;
   for all other insns return CURRENT_LENGTH unchanged.  */

int
rx_adjust_insn_length (rtx_insn *insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;    /* True for a zero extension, false for sign.  */
  int factor;   /* Memory operand width in bytes: 2 for HI, 1 for QI.  */

  if (!INSN_P (insn))
    return current_length;

  switch (INSN_CODE (insn))
    {
    default:
      return current_length;

    /* Zero-extending HImode loads combined with an arithmetic op.  */
    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    /* Sign-extending HImode loads combined with an arithmetic op.  */
    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    /* Zero-extending QImode loads combined with an arithmetic op.  */
    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    /* Sign-extending QImode loads combined with an arithmetic op.  */
    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  /* The extend may be either operand of the operation; pick whichever
     side carries it.  */
  extend = SET_SRC (extend);
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  /* The extension kind must agree with the insn code we matched.  */
  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  /* Plain register-indirect addressing: the shortest encoding.  */
  if (REG_P (XEXP (mem, 0)))
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  /* A small (scaled-byte-representable) displacement gets a one-byte
     shorter encoding than the general displacement form below.  */
  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  return (zero && factor == 1) ? 4 : 5;
}
ee1401ac 3396
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always access volatile
   bit-fields using the narrow (declared) container type.
   NOTE(review): presumably required because wider accesses to volatile
   memory (e.g. memory-mapped peripheral registers) are unsafe on RX --
   confirm against hardware documentation.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3402
3403static bool
3404rx_ok_to_inline (tree caller, tree callee)
3405{
3406 /* Do not inline functions with local variables
3407 into a naked CALLER - naked function have no stack frame and
3408 locals need a frame in order to have somewhere to live.
3409
3410 Unfortunately we have no way to determine the presence of
3411 local variables in CALLEE, so we have to be cautious and
3412 assume that there might be some there.
3413
3414 We do allow inlining when CALLEE has the "inline" type
3415 modifier or the "always_inline" or "gnu_inline" attributes. */
3416 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3417 || DECL_DECLARED_INLINE_P (callee)
3418 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3419 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3420}
3421
f0964309 3422static bool
3423rx_enable_lra (void)
3424{
734bbdc0 3425 return TARGET_ENABLE_LRA;
f0964309 3426}
3427
9d2f1b03 3428\f
ee1401ac 3429#undef TARGET_NARROW_VOLATILE_BITFIELD
3430#define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3431
3432#undef TARGET_CAN_INLINE_P
3433#define TARGET_CAN_INLINE_P rx_ok_to_inline
3434
9f9a3b39 3435#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3436#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3437#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3438#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3439#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3440#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3441#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3442#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3443
24833e1a 3444#undef TARGET_FUNCTION_VALUE
3445#define TARGET_FUNCTION_VALUE rx_function_value
3446
3447#undef TARGET_RETURN_IN_MSB
3448#define TARGET_RETURN_IN_MSB rx_return_in_msb
3449
3450#undef TARGET_IN_SMALL_DATA_P
3451#define TARGET_IN_SMALL_DATA_P rx_in_small_data
3452
3453#undef TARGET_RETURN_IN_MEMORY
3454#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3455
3456#undef TARGET_HAVE_SRODATA_SECTION
3457#define TARGET_HAVE_SRODATA_SECTION true
3458
3459#undef TARGET_ASM_SELECT_RTX_SECTION
3460#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3461
3462#undef TARGET_ASM_SELECT_SECTION
3463#define TARGET_ASM_SELECT_SECTION rx_select_section
3464
3465#undef TARGET_INIT_BUILTINS
3466#define TARGET_INIT_BUILTINS rx_init_builtins
3467
103700c7 3468#undef TARGET_BUILTIN_DECL
3469#define TARGET_BUILTIN_DECL rx_builtin_decl
3470
24833e1a 3471#undef TARGET_EXPAND_BUILTIN
3472#define TARGET_EXPAND_BUILTIN rx_expand_builtin
3473
3474#undef TARGET_ASM_CONSTRUCTOR
3475#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3476
3477#undef TARGET_ASM_DESTRUCTOR
3478#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3479
3480#undef TARGET_STRUCT_VALUE_RTX
3481#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3482
3483#undef TARGET_ATTRIBUTE_TABLE
3484#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3485
3486#undef TARGET_ASM_FILE_START
3487#define TARGET_ASM_FILE_START rx_file_start
3488
3489#undef TARGET_MS_BITFIELD_LAYOUT_P
3490#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3491
3492#undef TARGET_LEGITIMATE_ADDRESS_P
3493#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3494
5afe50d9 3495#undef TARGET_MODE_DEPENDENT_ADDRESS_P
3496#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3497
24833e1a 3498#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3499#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3500
3501#undef TARGET_ASM_FUNCTION_PROLOGUE
3502#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3503
3504#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3505#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3506
61fc50a0 3507#undef TARGET_FUNCTION_OK_FOR_SIBCALL
3508#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3509
ee4e8428 3510#undef TARGET_FUNCTION_ARG
3511#define TARGET_FUNCTION_ARG rx_function_arg
3512
3513#undef TARGET_FUNCTION_ARG_ADVANCE
3514#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3515
bd99ba64 3516#undef TARGET_FUNCTION_ARG_BOUNDARY
3517#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3518
24833e1a 3519#undef TARGET_SET_CURRENT_FUNCTION
3520#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3521
24833e1a 3522#undef TARGET_ASM_INTEGER
3523#define TARGET_ASM_INTEGER rx_assemble_integer
3524
3525#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3526#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3527
3528#undef TARGET_MAX_ANCHOR_OFFSET
3529#define TARGET_MAX_ANCHOR_OFFSET 32
3530
3531#undef TARGET_ADDRESS_COST
3532#define TARGET_ADDRESS_COST rx_address_cost
3533
3534#undef TARGET_CAN_ELIMINATE
3535#define TARGET_CAN_ELIMINATE rx_can_eliminate
3536
b2d7ede1 3537#undef TARGET_CONDITIONAL_REGISTER_USAGE
3538#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3539
24833e1a 3540#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3541#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3542
3543#undef TARGET_TRAMPOLINE_INIT
3544#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3545
6bb30542 3546#undef TARGET_PRINT_OPERAND
3547#define TARGET_PRINT_OPERAND rx_print_operand
3548
3549#undef TARGET_PRINT_OPERAND_ADDRESS
3550#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3551
9d2f1b03 3552#undef TARGET_CC_MODES_COMPATIBLE
3553#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3554
3555#undef TARGET_MEMORY_MOVE_COST
3556#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3557
1af17d44 3558#undef TARGET_OPTION_OVERRIDE
3559#define TARGET_OPTION_OVERRIDE rx_option_override
3560
bd7d2835 3561#undef TARGET_PROMOTE_FUNCTION_MODE
3562#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3563
42d89991 3564#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3565#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
02e53c17 3566
77de4b78 3567#undef TARGET_FLAGS_REGNUM
3568#define TARGET_FLAGS_REGNUM CC_REG
3569
ca316360 3570#undef TARGET_LEGITIMATE_CONSTANT_P
f7fcec1a 3571#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
ca316360 3572
6e507301 3573#undef TARGET_LEGITIMIZE_ADDRESS
3574#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3575
ee1401ac 3576#undef TARGET_WARN_FUNC_RETURN
3577#define TARGET_WARN_FUNC_RETURN rx_warn_func_return
08c6cbd2 3578
f0964309 3579#undef TARGET_LRA_P
3580#define TARGET_LRA_P rx_enable_lra
3581
24833e1a 3582struct gcc_target targetm = TARGET_INITIALIZER;
3583
103700c7 3584#include "gt-rx.h"