/* Extracted from the GCC source tree: gcc/config/rx/rx.c
   (git-blame web-page residue removed).  */
24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
d353bf18 2 Copyright (C) 2008-2015 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
b20a8bb4 29#include "hash-set.h"
30#include "machmode.h"
31#include "vec.h"
32#include "double-int.h"
33#include "input.h"
34#include "alias.h"
35#include "symtab.h"
36#include "wide-int.h"
37#include "inchash.h"
24833e1a 38#include "tree.h"
9ed99284 39#include "varasm.h"
40#include "stor-layout.h"
41#include "calls.h"
24833e1a 42#include "rtl.h"
43#include "regs.h"
44#include "hard-reg-set.h"
24833e1a 45#include "insn-config.h"
46#include "conditions.h"
47#include "output.h"
48#include "insn-attr.h"
49#include "flags.h"
a3020f2f 50#include "input.h"
24833e1a 51#include "function.h"
52#include "expr.h"
34517c64 53#include "insn-codes.h"
24833e1a 54#include "optabs.h"
55#include "libfuncs.h"
56#include "recog.h"
0b205f4c 57#include "diagnostic-core.h"
24833e1a 58#include "toplev.h"
59#include "reload.h"
94ea8568 60#include "dominance.h"
61#include "cfg.h"
62#include "cfgrtl.h"
63#include "cfganal.h"
64#include "lcm.h"
65#include "cfgbuild.h"
66#include "cfgcleanup.h"
67#include "predict.h"
68#include "basic-block.h"
24833e1a 69#include "df.h"
70#include "ggc.h"
71#include "tm_p.h"
72#include "debug.h"
73#include "target.h"
74#include "target-def.h"
75#include "langhooks.h"
fba5dd52 76#include "opts.h"
1140c305 77#include "hash-map.h"
78#include "is-a.h"
79#include "plugin-api.h"
80#include "ipa-ref.h"
367b1459 81#include "cgraph.h"
f7715905 82#include "builtins.h"
6e507301 83
/* Registers chosen at run time to serve as the small-data (GP) base and
   the PID base; INVALID_REGNUM until the corresponding feature has been
   configured (see rx_conditional_register_usage, which assigns
   rx_pid_base_regnum_val when -mpid is active).  rx_num_interrupt_regs
   counts registers reserved for interrupt handlers.  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;
24833e1a 87\f
6e507301 88static unsigned int
89rx_gp_base_regnum (void)
90{
91 if (rx_gp_base_regnum_val == INVALID_REGNUM)
92 gcc_unreachable ();
93 return rx_gp_base_regnum_val;
94}
95
96static unsigned int
97rx_pid_base_regnum (void)
98{
99 if (rx_pid_base_regnum_val == INVALID_REGNUM)
100 gcc_unreachable ();
101 return rx_pid_base_regnum_val;
102}
103
104/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
105
106static tree
107rx_decl_for_addr (rtx op)
108{
109 if (GET_CODE (op) == MEM)
110 op = XEXP (op, 0);
111 if (GET_CODE (op) == CONST)
112 op = XEXP (op, 0);
113 while (GET_CODE (op) == PLUS)
114 op = XEXP (op, 0);
115 if (GET_CODE (op) == SYMBOL_REF)
116 return SYMBOL_REF_DECL (op);
117 return NULL_TREE;
118}
119
6bb30542 120static void rx_print_operand (FILE *, rtx, int);
121
ccfccd66 122#define CC_FLAG_S (1 << 0)
123#define CC_FLAG_Z (1 << 1)
124#define CC_FLAG_O (1 << 2)
125#define CC_FLAG_C (1 << 3)
f7fcec1a 126#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
ccfccd66 127
3754d046 128static unsigned int flags_from_mode (machine_mode mode);
ccfccd66 129static unsigned int flags_from_code (enum rtx_code code);
67e66e16 130\f
/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0, /* The object is not in the PID data area.  */
  PID_ENCODED,     /* The object is in the PID data area.  */
  PID_UNENCODED    /* The object will be placed in the PID data area, but it has not been placed there yet.  */
};

/* Classify address (or MEM address) OP with respect to position
   independent data addressing.  Always PID_NOT_PID unless -mpid
   (TARGET_PID) is in effect.  */

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  /* (REG + (const (unspec ...))) is an address that is already in
     encoded PID form.  */
  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      /* Read-only objects belong in the PID data area but have not
	 been encoded yet.  */
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}
171
/* Implement TARGET_LEGITIMIZE_ADDRESS.  Encode unencoded PID references
   and force doubly-indexed addresses into a register.  */

static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  /* An unencoded PID reference becomes base-register relative.  */
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  /* (REG + REG) + REG is not a representable RX address: compute the
     whole thing into a single register instead.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}
191
24833e1a 192/* Return true if OP is a reference to an object in a small data area. */
193
194static bool
195rx_small_data_operand (rtx op)
196{
197 if (rx_small_data_limit == 0)
198 return false;
199
200 if (GET_CODE (op) == SYMBOL_REF)
201 return SYMBOL_REF_SMALL_P (op);
202
203 return false;
204}
205
/* Implement TARGET_LEGITIMATE_ADDRESS_P for the RX.

   Accepted forms: register indirect; pre-decrement/post-increment
   register indirect for 1, 2 or 4 byte accesses; already-encoded PID
   addresses; REG + non-negative, mode-aligned displacement fitting a
   16-bit field scaled by the access size; REG + REG (QImode only);
   REG + REG * SCALE where SCALE equals the access size; and small-data
   symbols (which later become GP-relative).  */

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  /* NOTE(review): STRICT is in fact used below via RTX_OK_FOR_BASE;
     presumably ATTRIBUTE_UNUSED covers configurations in which that
     macro ignores its second argument -- confirm.  */
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      /* Must be legitimized (encoded) before it can be used.  */
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      /* Whichever operand is a valid base register, the other is
	 treated as the index.  */
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    /* The displacement field holds 16 bits, scaled by the
	       access size.  */
	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
299
/* Returns TRUE for simple memory addreses, ie ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  /* Must first be a legitimate address at all; use strict checking
     once reload has started.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* Displacement limited to a 16-bit field scaled by the
	   access size.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
	 Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}
344
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  An address is mode
   dependent when its validity (or meaning) would change if the access
   mode changed -- e.g. displacement ranges scale with the access size.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Look inside a CONST wrapper.  */
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      /* Examine the offset part of the address.  */
      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  gcc_assert (REG_P (XEXP (addr, 0)));
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
409\f
/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  RX syntax: [Rn], [-Rn] (pre-dec),
   [Rn+] (post-inc), disp[Rn] and [Ri,Rb] forms.  */

static void
rx_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	/* Whichever operand is a valid base register is the base;
	   the other is the index or displacement.  */
	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    /* Neither operand is a base register: print symbolically.  */
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    /* Indexed form: [index,base].  */
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    /* Displacement form: disp[base].  */
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  /* FIXME: Putting this case label here is an appalling abuse of the C language.  */
	case UNSPEC:
	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	}
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
492
/* Output VAL to FILE: small magnitudes (|VAL| <= 64) in decimal,
   anything larger in hex -- using the "0...H" suffix form when
   TARGET_AS100_SYNTAX is in effect.  */

static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (IN_RANGE (val, -64, 64))
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}
504
/* Implement TARGET_ASM_INTEGER.  Emit integer constants through
   rx_print_integer so the assembler-specific hex syntax is honoured;
   defer non-CONST_INT values to the default handler.  */

static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  /* No directive for this size/alignment: let the caller fall back.  */
  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}
521
522
/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
	 register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* A CONST/UNSPEC printed via %A must not get a leading '#' either;
     clear the letter and remember to suppress the hash when the
     default case handles the operand below.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print the condition suffix for a comparison OP, taking the
	 flags available in the comparison's mode into account.  */
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		/* Without a valid O flag, fall back to the N flag test.  */
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    /* The comparison must not require flags the mode lacks.  */
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Control register name from its encoded number.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0:   fprintf (file, "psw"); break;
	case 2:   fprintf (file, "usp"); break;
	case 3:   fprintf (file, "fpsw"); break;
	case 4:   fprintf (file, "cpen"); break;
	case 8:   fprintf (file, "bpsw"); break;
	case 9:   fprintf (file, "bpc"); break;
	case 0xa: fprintf (file, "isp"); break;
	case 0xb: fprintf (file, "fintv"); break;
	case 0xc: fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Flag name; accepts either the numeric encoding or a letter.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* High word of a double-word value.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Low word of a double-word value.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      /* The displacement may appear on either side of the PLUS.  */
	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      /* NOTE(review): "%ld" with a HOST_WIDE_INT argument is a
		 format mismatch on hosts where HOST_WIDE_INT is wider
		 than long -- HOST_WIDE_INT_PRINT_DEC would be safer.  */
	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  /* Emit the size suffix, verifying alignment and range of the
	     displacement against the access width.  */
	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* Strip a plain (const (unspec ...)) wrapper; print the
	 (const (plus (unspec ...) (const_int))) form symbolically.  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swaped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_SINGLE (rv, val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		/* PID references are emitted relative to __pid_base.  */
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
955
/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  /* Only unencoded PID references need rewriting.  */
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  /* Rewrite the address inside the MEM, preserving its
	     attributes.  */
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}
978
/* Returns an assembler template for a move instruction.
   OPERANDS[0] is the destination, OPERANDS[1] the source; IS_MOVU
   selects the zero-extending "movu" form.  NOTE: the returned string
   lives in a static buffer and is only valid until the next call.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      /* Double-word values cannot be accessed via PID addressing.  */
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      /* Order the two word moves so the destination's first write does
	 not clobber a source register that is still needed.  */
      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
24833e1a 1061\f
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1070
/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE, rounded up
   to a whole number of words.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  /* BLKmode arguments take their size from the type.  */
  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}
1083
/* At most NUM_ARG_REGS registers are used for argument passing.  */
#define NUM_ARG_REGS 4
#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  /* Argument registers start one above register 0 (i.e. at r1).  */
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
1131
/* Implement TARGET_FUNCTION_ARG_ADVANCE: record the bytes consumed by
   an argument of MODE/TYPE in the cumulative-args counter.  */

static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
1138
/* Implement TARGET_FUNCTION_ARG_BOUNDARY: alignment, in bits, of an
   on-stack argument of mode MODE / type TYPE.  */

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      /* NOTE(review): TYPE is apparently sometimes a decl rather than
	 a type here, hence the DECL_P check -- confirm with callers.  */
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}
1158
/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  Always the function-return register; narrow scalar values
   are widened to SImode per the RX ABI.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      )
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}
1179
/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  /* Promote only sub-word scalar return values (FOR_RETURN == 1);
     everything else keeps its natural mode.  */
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}
1198
/* Implement TARGET_RETURN_IN_MEMORY: decide whether a value of TYPE
   must be returned via memory rather than in registers.  */

static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  /* Scalar values are always returned in registers.  */
  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}
1215
/* Implement TARGET_STRUCT_VALUE_RTX: the register through which the
   address of an aggregate return value is passed is STRUCT_VAL_REGNUM,
   for both incoming and outgoing calls.  */

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1222
/* Implement TARGET_RETURN_IN_MSB.  Aggregates and complex values are
   returned most-significant-byte first, but only when compiling for
   big-endian data.  */

static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}
1229
1230/* Returns true if the provided function has the specified attribute. */
1231
1232static inline bool
1233has_func_attr (const_tree decl, const char * func_attr)
1234{
1235 if (decl == NULL_TREE)
1236 decl = current_function_decl;
1237
1238 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1239}
1240
/* Returns true if the provided function has the "fast_interrupt" attribute.
   NULL means the function currently being compiled.  */

static inline bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}
1248
/* Returns true if the provided function has the "interrupt" attribute.
   NULL means the function currently being compiled.  */

static inline bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}
1256
/* Returns true if the provided function has the "naked" attribute.
   NULL means the function currently being compiled.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}
1264\f
1265static bool use_fixed_regs = false;
1266
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.

   Adjusts the global fixed_regs[] / call_used_regs[] arrays for:
   - the PID base register (when -mpid is in force),
   - the small data base register (when -msmall-data-limit is in force),
   - fast interrupt handlers (driven by the file-scope flag
     use_fixed_regs, set by rx_set_current_function).

   The fast-interrupt adjustment is reversible: the first time it is
   applied the previous masks are saved in function-local statics, and
   they are restored when a non-fast-interrupt function follows.  */

static void
rx_conditional_register_usage (void)
{
  /* Tracks whether the fast-interrupt masks are currently installed.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* When PID is also active the small-data base register sits just
	 below the PID base register.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  /* Save the masks so that they can be restored later.  */
	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1325
/* Node in a singly-linked list of function decls that have already been
   the subject of a "multiple fast interrupt routines" warning.  */
struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.
   Note: entries are never freed - the list lives for the whole
   compilation.  */
static struct decl_chain * warned_decls = NULL;
1334
1335static void
1336add_warned_decl (tree fndecl)
1337{
1338 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1339
1340 warned->fndecl = fndecl;
1341 warned->next = warned_decls;
1342 warned_decls = warned;
1343}
1344
1345/* Returns TRUE if FNDECL is on our list of warned about decls. */
1346
1347static bool
1348already_warned (tree fndecl)
1349{
1350 struct decl_chain * warned;
1351
1352 for (warned = warned_decls;
1353 warned != NULL;
1354 warned = warned->next)
1355 if (warned->fndecl == fndecl)
1356 return true;
1357
1358 return false;
1359}
1360
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.

   Two jobs are done here:
   1. When the fast-interrupt-ness of the function changes relative to
      the previous one, flip use_fixed_regs and call target_reinit ()
      so rx_conditional_register_usage installs the right masks.
   2. Optionally warn when more than one fast interrupt routine is seen
      in the translation unit (-mwarn-multiple-fast-interrupts).  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1413\f
1414/* Typical stack layout should looks like this after the function's prologue:
1415
1416 | |
1417 -- ^
1418 | | \ |
1419 | | arguments saved | Increasing
1420 | | on the stack | addresses
1421 PARENT arg pointer -> | | /
1422 -------------------------- ---- -------------------
1423 CHILD |ret | return address
1424 --
1425 | | \
1426 | | call saved
1427 | | registers
1428 | | /
1429 --
1430 | | \
1431 | | local
1432 | | variables
1433 frame pointer -> | | /
1434 --
1435 | | \
1436 | | outgoing | Decreasing
1437 | | arguments | addresses
1438 current stack pointer -> | | / |
1439 -------------------------- ---- ------------------ V
1440 | | */
1441
/* Return the number of bits set in X.  Uses Kernighan's technique of
   repeatedly clearing the least significant set bit; for any 32-bit
   value this yields exactly the same result as the parallel-summing
   popcount it replaces.  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count = 0;

  while (x != 0)
    {
      /* Clear the lowest set bit.  */
      x &= x - 1;
      count++;
    }

  return count;
}
1456
/* Nonzero when the prologue/epilogue must preserve the accumulator:
   the TARGET_SAVE_ACC_REGISTER option is active and the current
   function is an interrupt or fast interrupt handler.  */
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
1461
24833e1a 1462/* Returns either the lowest numbered and highest numbered registers that
1463 occupy the call-saved area of the stack frame, if the registers are
1464 stored as a contiguous block, or else a bitmask of the individual
1465 registers if they are stored piecemeal.
1466
1467 Also computes the size of the frame and the size of the outgoing
1468 arguments block (in bytes). */
1469
1470static void
1471rx_get_stack_layout (unsigned int * lowest,
1472 unsigned int * highest,
1473 unsigned int * register_mask,
1474 unsigned int * frame_size,
1475 unsigned int * stack_size)
1476{
1477 unsigned int reg;
1478 unsigned int low;
1479 unsigned int high;
1480 unsigned int fixed_reg = 0;
1481 unsigned int save_mask;
1482 unsigned int pushed_mask;
1483 unsigned int unneeded_pushes;
1484
e4d9e8e5 1485 if (is_naked_func (NULL_TREE))
24833e1a 1486 {
1487 /* Naked functions do not create their own stack frame.
e4d9e8e5 1488 Instead the programmer must do that for us. */
24833e1a 1489 * lowest = 0;
1490 * highest = 0;
1491 * register_mask = 0;
1492 * frame_size = 0;
1493 * stack_size = 0;
1494 return;
1495 }
1496
9d2f1b03 1497 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
24833e1a 1498 {
21cde6ec 1499 if ((df_regs_ever_live_p (reg)
382ffb70 1500 /* Always save all call clobbered registers inside non-leaf
1501 interrupt handlers, even if they are not live - they may
1502 be used in (non-interrupt aware) routines called from this one. */
1503 || (call_used_regs[reg]
1504 && is_interrupt_func (NULL_TREE)
d5bf7b64 1505 && ! crtl->is_leaf))
24833e1a 1506 && (! call_used_regs[reg]
1507 /* Even call clobbered registered must
67e66e16 1508 be pushed inside interrupt handlers. */
e4d9e8e5 1509 || is_interrupt_func (NULL_TREE)
1510 /* Likewise for fast interrupt handlers, except registers r10 -
1511 r13. These are normally call-saved, but may have been set
1512 to call-used by rx_conditional_register_usage. If so then
1513 they can be used in the fast interrupt handler without
1514 saving them on the stack. */
1515 || (is_fast_interrupt_func (NULL_TREE)
1516 && ! IN_RANGE (reg, 10, 13))))
24833e1a 1517 {
1518 if (low == 0)
1519 low = reg;
1520 high = reg;
1521
1522 save_mask |= 1 << reg;
1523 }
1524
1525 /* Remember if we see a fixed register
1526 after having found the low register. */
1527 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1528 fixed_reg = reg;
1529 }
1530
e4d9e8e5 1531 /* If we have to save the accumulator register, make sure
1532 that at least two registers are pushed into the frame. */
1533 if (MUST_SAVE_ACC_REGISTER
1534 && bit_count (save_mask) < 2)
1535 {
1536 save_mask |= (1 << 13) | (1 << 14);
1537 if (low == 0)
1538 low = 13;
bc9bb967 1539 if (high == 0 || low == high)
1540 high = low + 1;
e4d9e8e5 1541 }
1542
24833e1a 1543 /* Decide if it would be faster fill in the call-saved area of the stack
1544 frame using multiple PUSH instructions instead of a single PUSHM
1545 instruction.
1546
1547 SAVE_MASK is a bitmask of the registers that must be stored in the
1548 call-save area. PUSHED_MASK is a bitmask of the registers that would
1549 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1550 is a bitmask of those registers in pushed_mask that are not in
1551 save_mask.
1552
1553 We use a simple heuristic that says that it is better to use
1554 multiple PUSH instructions if the number of unnecessary pushes is
1555 greater than the number of necessary pushes.
1556
1557 We also use multiple PUSH instructions if there are any fixed registers
1558 between LOW and HIGH. The only way that this can happen is if the user
1559 has specified --fixed-<reg-name> on the command line and in such
1560 circumstances we do not want to touch the fixed registers at all.
1561
1562 FIXME: Is it worth improving this heuristic ? */
1563 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1564 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1565
1566 if ((fixed_reg && fixed_reg <= high)
1567 || (optimize_function_for_speed_p (cfun)
1568 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1569 {
1570 /* Use multiple pushes. */
1571 * lowest = 0;
1572 * highest = 0;
1573 * register_mask = save_mask;
1574 }
1575 else
1576 {
1577 /* Use one push multiple instruction. */
1578 * lowest = low;
1579 * highest = high;
1580 * register_mask = 0;
1581 }
1582
1583 * frame_size = rx_round_up
1584 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1585
1586 if (crtl->args.size > 0)
1587 * frame_size += rx_round_up
1588 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1589
1590 * stack_size = rx_round_up
1591 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1592}
1593
/* Generate a PUSHM instruction that matches the given operands.

   OPERANDS[0] is the byte size of the block being pushed; OPERANDS[1]
   is the PARALLEL built by gen_rx_store_vector, whose element 1 stores
   the HIGHEST register in the range (the vector lists registers from
   high to low).  Hence the first name printed is reg[high - last_reg]
   (i.e. the low end of the range) and the second is reg[high].  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  /* Number of registers minus one, derived from the byte count.  */
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  /* Element 0 is the stack pointer adjustment; element 1 is the
     first register store.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1615
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.

   Element 0 adjusts the stack pointer downwards by one word per saved
   register; elements 1 .. count-1 store registers HIGH down to LOW at
   successively lower addresses below the (pre-adjustment) stack
   pointer.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per register plus one for the stack pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  /* Registers are listed from HIGH down to LOW - the order in which
     PUSHM stores them.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1641
67e66e16 1642/* Mark INSN as being frame related. If it is a PARALLEL
1643 then mark each element as being frame related as well. */
1644
1645static void
1646mark_frame_related (rtx insn)
1647{
1648 RTX_FRAME_RELATED_P (insn) = 1;
1649 insn = PATTERN (insn);
1650
1651 if (GET_CODE (insn) == PARALLEL)
1652 {
1653 unsigned int i;
1654
61fc50a0 1655 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1656 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1657 }
1658}
1659
95272799 1660static bool
1661ok_for_max_constant (HOST_WIDE_INT val)
1662{
1663 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1664 /* If there is no constraint on the size of constants
1665 used as operands, then any value is legitimate. */
1666 return true;
1667
1668 /* rx_max_constant_size specifies the maximum number
1669 of bytes that can be used to hold a signed value. */
1670 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1671 ( 1 << (rx_max_constant_size * 8)));
1672}
1673
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.

   Three cases:
   - VAL is NULL or zero: emit a plain register move (DEST must differ
     from SRC).
   - VAL fits within -mmax-constant-size: emit an ordinary add.
   - Otherwise: wrap VAL in an UNSPEC_CONST so the legitimate-constant
     check accepts it, and attach an explicit REG_FRAME_RELATED_EXPR
     note since dwarf2out cannot interpret the unspec.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (SImode, dest,
				   gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
  return;
}
1712
/* Expand the function prologue:
   1. push call-saved registers (individually, or with one PUSHM),
   2. optionally spill the DSP accumulator through two of the pushed
      registers (interrupt handlers with TARGET_SAVE_ACC_REGISTER),
   3. set up the frame pointer if needed,
   4. allocate the local frame and outgoing argument area.
   The layout matches the ASCII diagram earlier in this file.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;
  rtx insn;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  /* Record stack usage for -fstack-usage.  */
  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
	    mark_frame_related (insn);
	  }
    }
  else if (low)
    {
      /* Contiguous block: a single push, or one PUSHM for the range.  */
      if (high == low)
	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
      else
	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						    * UNITS_PER_WORD),
					   gen_rx_store_vector (low, high)));
      mark_frame_related (insn);
    }

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest-numbered registers in the mask.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	/* The frame pointer already points below the locals; just copy
	   it into the stack pointer.  */
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      true);
    }
}
1836
/* Emit "$tableentry$..." labels for the arguments of the attribute
   named ANAME ("interrupt" or "vector") on the current function.

   String/identifier arguments other than "$default" select the table
   name used for subsequent entries; integer arguments emit one global
   table-entry label per vector number.  The labels are picked up by
   the linker/startup code to build the interrupt vector table -
   presumably in cooperation with an RX-aware linker script; confirm
   against the toolchain before changing the label format.  */

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    /* A non-"$default" name changes the table used by the
	       entries that follow it.  */
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Other argument kinds are silently ignored.  */
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1892
/* Implement TARGET_ASM_FUNCTION_PROLOGUE.  Emits the interrupt vector
   table-entry labels for the function and then a set of informational
   assembler comments describing its special properties.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1916
/* Generate a POPM or RTSD instruction that matches the given operands.

   OPERANDS[0] is the stack adjustment (only printed for RTSD);
   OPERANDS[1] is the PARALLEL built by gen_rx_popm_vector or
   gen_rx_rtsd_vector, whose element 1 loads the LOWEST register of the
   range (the vectors list registers from low to high).  LAST_REG is the
   register count minus one, derived from the vector length after
   discounting the stack-pointer SET (and, for RTSD, the return).  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1947
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.

   Element 0 releases ADJUST bytes of stack; elements 1 .. count-2 load
   registers LOW .. HIGH from successive stack slots; the final element
   is the return.  This models the RX RTSD instruction, which pops a
   register range, deallocates the frame and returns in one insn.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One slot for the SP adjustment and one for the return.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  /* Registers are listed from LOW up to HIGH - the order in which
     RTSD reloads them.  */
  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1977
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.

   Element 0 raises the stack pointer by one word per popped register;
   elements 1 .. count-1 load registers LOW .. HIGH from successive
   stack slots.  This models the RX POPM instruction.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per register plus one for the stack pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  /* Registers are listed from LOW up to HIGH - the order in which
     POPM reloads them.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
f35edb6f 2005
2006/* Returns true if a simple return insn can be used. */
2007
2008bool
2009rx_can_use_simple_return (void)
2010{
2011 unsigned int low;
2012 unsigned int high;
2013 unsigned int frame_size;
2014 unsigned int stack_size;
2015 unsigned int register_mask;
2016
2017 if (is_naked_func (NULL_TREE)
2018 || is_fast_interrupt_func (NULL_TREE)
2019 || is_interrupt_func (NULL_TREE))
2020 return false;
2021
2022 rx_get_stack_layout (& low, & high, & register_mask,
2023 & frame_size, & stack_size);
2024
2025 return (register_mask == 0
2026 && (frame_size + stack_size) == 0
2027 && low == 0);
2028}
2029
/* Expand the function epilogue, undoing rx_expand_prologue.  Uses the
   combined RTSD deallocate-pop-return instruction when possible;
   otherwise deconstructs the frame by hand (required for sibcalls,
   interrupt handlers and piecemeal register saves) and emits the
   appropriate flavour of return.  */

void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment because we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
	 But, in order to keep gcc happy we have to generate some kind of
	 epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
		       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
	{
	  unsigned int acc_low, acc_high;

	  /* Reverse the saving of the accumulator register onto the stack.
	     Note we must adjust the saved "low" accumulator value as it
	     is really the middle 32-bits of the accumulator.  */
	  if (register_mask)
	    {
	      /* Locate the same two registers chosen by the prologue.  */
	      acc_low = acc_high = 0;

	      for (reg = 1; reg < CC_REGNUM; reg ++)
		if (register_mask & (1 << reg))
		  {
		    if (acc_low == 0)
		      acc_low = reg;
		    else
		      {
			acc_high = reg;
			break;
		      }
		  }
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
	    }
	  else
	    {
	      acc_low = low;
	      acc_high = low + 1;
	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
					 gen_rx_popm_vector (acc_low, acc_high)));
	    }

	  /* Shift the middle bits back into position before restoring
	     the accumulator halves.  */
	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
				  gen_rtx_REG (SImode, acc_low),
				  GEN_INT (16)));
	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
	}

      if (register_mask)
	{
	  for (reg = 0; reg < CC_REGNUM; reg ++)
	    if (register_mask & (1 << reg))
	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
	}
      else if (low)
	{
	  if (high == low)
	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
	  else
	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
				       gen_rx_popm_vector (low, high)));
	}

      if (is_fast_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_fast_interrupt_return ());
	}
      else if (is_interrupt_func (NULL_TREE))
	{
	  gcc_assert (! is_sibcall);
	  emit_jump_insn (gen_exception_return ());
	}
      else if (! is_sibcall)
	emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
	{
	  if (low)
	    emit_jump_insn (gen_pop_and_return
			    (GEN_INT (rtsd_size),
			     gen_rx_rtsd_vector (rtsd_size, low, high)));
	  else
	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

	  return;
	}

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
					gen_rx_rtsd_vector (regs_size,
							    low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
2199
2200
/* Compute the offset (in bytes) between FROM (arg pointer
   or frame pointer) and TO (frame pointer or stack pointer).
   (The previous comment said "in words", but every quantity here -
   frame_size, stack_size, UNITS_PER_WORD additions - is in bytes.)
   See ASCII art comment at the start of rx_expand_prologue
   for more information.  */

int
rx_initial_elimination_offset (int from, int to)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int mask;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (from == ARG_POINTER_REGNUM)
    {
      /* Extend the computed size of the stack frame to
	 include the registers pushed in the prologue.  */
      if (low)
	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
      else
	frame_size += bit_count (mask) * UNITS_PER_WORD;

      /* Remember to include the return address.  */
      frame_size += 1 * UNITS_PER_WORD;

      if (to == FRAME_POINTER_REGNUM)
	return frame_size;

      gcc_assert (to == STACK_POINTER_REGNUM);
      return frame_size + stack_size;
    }

  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
  return stack_size;
}
2239
24833e1a 2240/* Decide if a variable should go into one of the small data sections. */
2241
2242static bool
2243rx_in_small_data (const_tree decl)
2244{
2245 int size;
738a6bda 2246 const char * section;
24833e1a 2247
2248 if (rx_small_data_limit == 0)
2249 return false;
2250
2251 if (TREE_CODE (decl) != VAR_DECL)
2252 return false;
2253
2254 /* We do not put read-only variables into a small data area because
2255 they would be placed with the other read-only sections, far away
2256 from the read-write data sections, and we only have one small
2257 data area pointer.
2258 Similarly commons are placed in the .bss section which might be
2259 far away (and out of alignment with respect to) the .data section. */
2260 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2261 return false;
2262
2263 section = DECL_SECTION_NAME (decl);
2264 if (section)
738a6bda 2265 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
24833e1a 2266
2267 size = int_size_in_bytes (TREE_TYPE (decl));
2268
2269 return (size > 0) && (size <= rx_small_data_limit);
2270}
2271
2272/* Return a section for X.
2273 The only special thing we do here is to honor small data. */
2274
2275static section *
3754d046 2276rx_select_rtx_section (machine_mode mode,
24833e1a 2277 rtx x,
2278 unsigned HOST_WIDE_INT align)
2279{
2280 if (rx_small_data_limit > 0
2281 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2282 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2283 return sdata_section;
2284
2285 return default_elf_select_rtx_section (mode, x, align);
2286}
2287
2288static section *
2289rx_select_section (tree decl,
2290 int reloc,
2291 unsigned HOST_WIDE_INT align)
2292{
2293 if (rx_small_data_limit > 0)
2294 {
2295 switch (categorize_decl_for_section (decl, reloc))
2296 {
2297 case SECCAT_SDATA: return sdata_section;
2298 case SECCAT_SBSS: return sbss_section;
2299 case SECCAT_SRODATA:
2300 /* Fall through. We do not put small, read only
2301 data into the C_2 section because we are not
2302 using the C_2 section. We do not use the C_2
2303 section because it is located with the other
2304 read-only data sections, far away from the read-write
2305 data sections and we only have one small data
2306 pointer (r13). */
2307 default:
2308 break;
2309 }
2310 }
2311
2312 /* If we are supporting the Renesas assembler
2313 we cannot use mergeable sections. */
2314 if (TARGET_AS100_SYNTAX)
2315 switch (categorize_decl_for_section (decl, reloc))
2316 {
2317 case SECCAT_RODATA_MERGE_CONST:
2318 case SECCAT_RODATA_MERGE_STR_INIT:
2319 case SECCAT_RODATA_MERGE_STR:
2320 return readonly_data_section;
2321
2322 default:
2323 break;
2324 }
2325
2326 return default_elf_select_section (decl, reloc, align);
2327}
2328\f
/* Machine-specific builtin function codes.  Each corresponds to an
   RX instruction exposed as __builtin_rx_<name>; see rx_init_builtins
   and rx_expand_builtin below.  */
enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max
};

/* Decls for the builtins above, indexed by rx_builtin code;
   filled in by rx_init_builtins, queried by rx_builtin_decl.  */
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2355
/* Implement TARGET_INIT_BUILTINS: register the __builtin_rx_*
   functions.  The ADD_RX_BUILTINn helper macros build a function
   type with n arguments from *_type_node fragments and record the
   resulting decl in rx_builtins[].  */
static void
rx_init_builtins (void)
{
#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  ARG_TYPE3##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
}
2418
103700c7 2419/* Return the RX builtin for CODE. */
2420
2421static tree
2422rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2423{
2424 if (code >= RX_BUILTIN_max)
2425 return error_mark_node;
2426
2427 return rx_builtins[code];
2428}
2429
24833e1a 2430static rtx
2431rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2432{
2433 if (reg && ! REG_P (arg))
2434 arg = force_reg (SImode, arg);
2435
2436 emit_insn (gen_func (arg));
2437
2438 return NULL_RTX;
2439}
2440
2441static rtx
2442rx_expand_builtin_mvtc (tree exp)
2443{
2444 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2445 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2446
2447 if (! CONST_INT_P (arg1))
2448 return NULL_RTX;
2449
2450 if (! REG_P (arg2))
2451 arg2 = force_reg (SImode, arg2);
2452
2453 emit_insn (gen_mvtc (arg1, arg2));
2454
2455 return NULL_RTX;
2456}
2457
2458static rtx
2459rx_expand_builtin_mvfc (tree t_arg, rtx target)
2460{
2461 rtx arg = expand_normal (t_arg);
2462
2463 if (! CONST_INT_P (arg))
2464 return NULL_RTX;
2465
e4d9e8e5 2466 if (target == NULL_RTX)
2467 return NULL_RTX;
2468
24833e1a 2469 if (! REG_P (target))
2470 target = force_reg (SImode, target);
2471
2472 emit_insn (gen_mvfc (target, arg));
2473
2474 return target;
2475}
2476
67e66e16 2477static rtx
2478rx_expand_builtin_mvtipl (rtx arg)
2479{
2480 /* The RX610 does not support the MVTIPL instruction. */
2481 if (rx_cpu_type == RX610)
2482 return NULL_RTX;
2483
e5743482 2484 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2485 return NULL_RTX;
2486
2487 emit_insn (gen_mvtipl (arg));
2488
2489 return NULL_RTX;
2490}
2491
24833e1a 2492static rtx
2493rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2494{
2495 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2496 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2497
2498 if (! REG_P (arg1))
2499 arg1 = force_reg (SImode, arg1);
2500
2501 if (! REG_P (arg2))
2502 arg2 = force_reg (SImode, arg2);
2503
2504 emit_insn (gen_func (arg1, arg2));
2505
2506 return NULL_RTX;
2507}
2508
2509static rtx
2510rx_expand_int_builtin_1_arg (rtx arg,
2511 rtx target,
2512 rtx (* gen_func)(rtx, rtx),
2513 bool mem_ok)
2514{
2515 if (! REG_P (arg))
2516 if (!mem_ok || ! MEM_P (arg))
2517 arg = force_reg (SImode, arg);
2518
2519 if (target == NULL_RTX || ! REG_P (target))
2520 target = gen_reg_rtx (SImode);
2521
2522 emit_insn (gen_func (target, arg));
2523
2524 return target;
2525}
2526
2527static rtx
2528rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2529{
2530 if (target == NULL_RTX || ! REG_P (target))
2531 target = gen_reg_rtx (SImode);
2532
2533 emit_insn (gen_func (target));
2534
2535 return target;
2536}
2537
2538static rtx
2539rx_expand_builtin_round (rtx arg, rtx target)
2540{
2541 if ((! REG_P (arg) && ! MEM_P (arg))
2542 || GET_MODE (arg) != SFmode)
2543 arg = force_reg (SFmode, arg);
2544
2545 if (target == NULL_RTX || ! REG_P (target))
2546 target = gen_reg_rtx (SImode);
2547
2548 emit_insn (gen_lrintsf2 (target, arg));
2549
2550 return target;
2551}
2552
/* Validate the flag-selector operand OP of __builtin_rx_clrpsw /
   __builtin_rx_setpsw (WHICH names the builtin for diagnostics).
   The selector may be a PSW bit number (0-3, 8, 9) or the matching
   flag letter in either case.  Returns 1 if valid, otherwise emits
   an error (plus a one-time hint about mvtc) and returns 0.  */
static int
valid_psw_flag (rtx op, const char *which)
{
  /* Emit the mvtc hint only once per compilation.  */
  static int mvtc_inform_done = 0;

  if (GET_CODE (op) == CONST_INT)
    switch (INTVAL (op))
      {
      case 0: case 'c': case 'C':
      case 1: case 'z': case 'Z':
      case 2: case 's': case 'S':
      case 3: case 'o': case 'O':
      case 8: case 'i': case 'I':
      case 9: case 'u': case 'U':
	return 1;
      }

  error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
  if (!mvtc_inform_done)
    error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
  mvtc_inform_done = 1;

  return 0;
}
2577
/* Implement TARGET_EXPAND_BUILTIN: expand a call EXP to one of the
   __builtin_rx_* functions into RTL.  TARGET, if non-NULL, suggests
   where to put the result; NULL_RTX is returned for builtins that
   produce no value or could not be expanded.  */
static rtx
rx_expand_builtin (tree exp,
		   rtx target,
		   rtx subtarget ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  /* Some of these builtins (e.g. brk, wait) take no arguments.  */
  tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx op = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
	(op, gen_int, false);
    case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
	(op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
	(op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
    case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
	(op, gen_racw, false);
    case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
	(op, target, gen_revw, false);
    case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
2633\f
/* Place an element into a constructor or destructor section.
   Like default_ctor_section_asm_out_constructor in varasm.c
   except that it uses .init_array (or .fini_array) and it
   handles constructor priorities.  */

static void
rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
{
  section * s;

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      /* 18 bytes: ".fini_array" (11) + '.' + 5 priority digits + NUL.
	 "%.5u" zero-pads so the linker's lexical sort of section
	 names matches numeric priority order.  */
      char buf[18];

      sprintf (buf, "%s.%.5u",
	       is_ctor ? ".init_array" : ".fini_array",
	       priority);
      s  = get_section (buf, SECTION_WRITE, NULL_TREE);
    }
  else if (is_ctor)
    s = ctors_section;
  else
    s = dtors_section;

  switch_to_section (s);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
2662
2663static void
2664rx_elf_asm_constructor (rtx symbol, int priority)
2665{
2666 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2667}
2668
2669static void
2670rx_elf_asm_destructor (rtx symbol, int priority)
2671{
2672 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2673}
2674\f
/* Check "fast_interrupt", "interrupt" and "naked" attributes.
   These only make sense on function declarations; on anything else
   a warning is issued and the attribute is dropped via
   *NO_ADD_ATTRS.  Always returns NULL_TREE (no replacement
   attribute list).  */

static tree
rx_handle_func_attribute (tree * node,
			  tree name,
			  tree args ATTRIBUTE_UNUSED,
			  int flags ATTRIBUTE_UNUSED,
			  bool * no_add_attrs)
{
  gcc_assert (DECL_P (* node));

  if (TREE_CODE (* node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      * no_add_attrs = true;
    }

  /* FIXME: We ought to check for conflicting attributes.  */

  /* FIXME: We ought to check that the interrupt and exception
     handler attributes have been applied to void functions.  */
  return NULL_TREE;
}
2699
7ce85a1f 2700/* Check "vector" attribute. */
2701
2702static tree
2703rx_handle_vector_attribute (tree * node,
2704 tree name,
2705 tree args,
2706 int flags ATTRIBUTE_UNUSED,
2707 bool * no_add_attrs)
2708{
2709 gcc_assert (DECL_P (* node));
2710 gcc_assert (args != NULL_TREE);
2711
2712 if (TREE_CODE (* node) != FUNCTION_DECL)
2713 {
2714 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2715 name);
2716 * no_add_attrs = true;
2717 }
2718
2719 return NULL_TREE;
2720}
2721
/* Table of RX specific attributes.  "interrupt" and "vector" accept
   a variable number of arguments (max_len of -1); "vector" requires
   at least one.  All four may only appear on declarations
   (decl_req == true).  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "interrupt",      0, -1, true, false, false, rx_handle_func_attribute,
    false },
  { "naked",          0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "vector",         1, -1, true, false, false, rx_handle_vector_attribute,
    false },
  { NULL,             0, 0, false, false, false, NULL, false }
};
2737
/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  Called again
   whenever optimization options change (e.g. via function-level
   attributes), hence the first-time/subsequent split below.  */

static void
rx_override_options_after_change (void)
{
  static bool first_time = TRUE;

  if (first_time)
    {
      /* If this is the first time through and the user has not disabled
	 the use of RX FPU hardware then enable -ffinite-math-only,
	 since the FPU instructions do not support NaNs and infinities.  */
      if (TARGET_USE_FPU)
	flag_finite_math_only = 1;

      first_time = FALSE;
    }
  else
    {
      /* Alert the user if they are changing the optimization options
	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
      if (TARGET_USE_FPU
	  && !flag_finite_math_only)
	warning (0, "RX FPU instructions do not support NaNs and infinities");
    }
}
2764
/* Implement TARGET_OPTION_OVERRIDE: process deferred command-line
   options and set target-dependent defaults.  */
static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    /* -mint-register=N reserves registers r13 down to r10
	       (N of them) by marking them fixed; the cases fall
	       through deliberately so N=4 fixes all four.  */
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		   returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* These values are bytes, not log.  */
  if (align_jumps == 0 && ! optimize_size)
    align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_loops == 0 && ! optimize_size)
    align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_labels == 0 && ! optimize_size)
    align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
}
2822
98cb9b5b 2823\f
24833e1a 2824static bool
2825rx_allocate_stack_slots_for_args (void)
2826{
2827 /* Naked functions should not allocate stack slots for arguments. */
2828 return ! is_naked_func (NULL_TREE);
2829}
2830
2831static bool
2832rx_func_attr_inlinable (const_tree decl)
2833{
2834 return ! is_fast_interrupt_func (decl)
67e66e16 2835 && ! is_interrupt_func (decl)
24833e1a 2836 && ! is_naked_func (decl);
2837}
2838
08c6cbd2 2839static bool
2840rx_warn_func_return (tree decl)
2841{
2842 /* Naked functions are implemented entirely in assembly, including the
2843 return sequence, so suppress warnings about this. */
2844 return !is_naked_func (decl);
2845}
2846
61fc50a0 2847/* Return nonzero if it is ok to make a tail-call to DECL,
2848 a function_decl or NULL if this is an indirect call, using EXP */
2849
2850static bool
e4d9e8e5 2851rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2852{
2853 /* Do not allow indirect tailcalls. The
2854 sibcall patterns do not support them. */
2855 if (decl == NULL)
2856 return false;
2857
2858 /* Never tailcall from inside interrupt handlers or naked functions. */
2859 if (is_fast_interrupt_func (NULL_TREE)
2860 || is_interrupt_func (NULL_TREE)
2861 || is_naked_func (NULL_TREE))
2862 return false;
2863
2864 return true;
2865}
2866
24833e1a 2867static void
2868rx_file_start (void)
2869{
2870 if (! TARGET_AS100_SYNTAX)
2871 default_file_start ();
2872}
2873
2874static bool
2875rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2876{
c6347c7a 2877 /* The packed attribute overrides the MS behaviour. */
2878 return ! TYPE_PACKED (record_type);
24833e1a 2879}
24833e1a 2880\f
/* Returns true if X a legitimate constant for an immediate
   operand on the RX.  X is already known to satisfy CONSTANT_P.  */

bool
rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	case UNSPEC:
	  /* Wrapped constants and PID addresses are fine.  */
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;
    case CONST_DOUBLE:
      /* Doubles are only valid immediates when 32-bit (or unlimited)
	 constants are permitted.  */
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
    case CONST_VECTOR:
      return false;
    default:
      gcc_assert (CONST_INT_P (x));
      break;
    }

  /* Plain integers are checked against the -mmax-constant-size limit.  */
  return ok_for_max_constant (INTVAL (x));
}
2932
/* Implement TARGET_ADDRESS_COST: estimate the cost of using ADDR as
   a memory address.  Plain [REG] and [REG+OFF] are cheapest; REG+REG
   addressing is penalised because it keeps two registers live.  */
static int
rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
  rtx a, b;

  if (GET_CODE (addr) != PLUS)
    return COSTS_N_INSNS (1);

  a = XEXP (addr, 0);
  b = XEXP (addr, 1);

  if (REG_P (a) && REG_P (b))
    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
    return COSTS_N_INSNS (4);

  if (speed)
    /* [REG+OFF] is just as fast as [REG].  */
    return COSTS_N_INSNS (1);

  if (CONST_INT_P (b)
      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
    /* Try to discourage REG + <large OFF> when optimizing for size.
       NOTE(review): the bounds are asymmetric (> 128, < -127) where a
       signed-byte displacement range would be 127/-128 — presumably
       intentional for the RX displacement encoding, but worth
       confirming against the ISA manual.  */
    return COSTS_N_INSNS (2);

  return COSTS_N_INSNS (1);
}
2960
2961static bool
2962rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2963{
2964 /* We can always eliminate to the frame pointer.
2965 We can eliminate to the stack pointer unless a frame
2966 pointer is needed. */
2967
2968 return to == FRAME_POINTER_REGNUM
2969 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2970}
2971\f
2972
static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like:

	mov 	#<static chain value>, r8
	mov	#<function's address>, r9
	jmp	r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

	nop
	nop
        mov.l	#<...>, r8
	nop
	nop
	mov.l	#<...>, r9
	jmp	r9
	nop
	nop

     The 0deadbeef immediates below are placeholders; the real static
     chain and function address are patched in by rx_trampoline_init.  */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      /* Raw bytes (AS100 or GAS syntax): the packets described above,
	 with 03 = NOP padding around each word-swapped mov.l/jmp.  */
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
        {
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
          asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
          asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
          asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
        }
      else
        {
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
          asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
          asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
          asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
        }
    }
}
3034
/* Implement TARGET_TRAMPOLINE_INIT: copy the template emitted by
   rx_trampoline_template into TRAMP and patch the 0xdeadbeef
   placeholders with the static CHAIN value and FNDECL's address.  */
static void
rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_block_move (tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_BIG_ENDIAN_DATA)
    {
      /* The placeholders sit in word-swapped 4-byte packets at
	 offsets 4 and 12 (see rx_trampoline_template).  */
      emit_move_insn (adjust_address (tramp, SImode, 4), chain);
      emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
    }
  else
    {
      /* Little-endian: the immediates follow the 2-byte mov.L
	 opcodes, at offsets 2 and 8.  */
      emit_move_insn (adjust_address (tramp, SImode, 2), chain);
      emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
    }
}
3054\f
/* Implement TARGET_MEMORY_MOVE_COST: a load (IN) costs 2 more than a
   store on top of the basic register-move cost.  */
static int
rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
		     reg_class_t regclass ATTRIBUTE_UNUSED,
		     bool in)
{
  return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
}
3062
ccfccd66 3063/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 3064
3065static unsigned int
3754d046 3066flags_from_mode (machine_mode mode)
9d2f1b03 3067{
ccfccd66 3068 switch (mode)
9d2f1b03 3069 {
ccfccd66 3070 case CC_ZSmode:
3071 return CC_FLAG_S | CC_FLAG_Z;
3072 case CC_ZSOmode:
3073 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3074 case CC_ZSCmode:
3075 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3076 case CCmode:
3077 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3078 case CC_Fmode:
3079 return CC_FLAG_FP;
3080 default:
3081 gcc_unreachable ();
3082 }
3083}
9d2f1b03 3084
ccfccd66 3085/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 3086
3754d046 3087static machine_mode
ccfccd66 3088mode_from_flags (unsigned int f)
3089{
3090 if (f & CC_FLAG_FP)
3091 return CC_Fmode;
3092 if (f & CC_FLAG_O)
3093 {
3094 if (f & CC_FLAG_C)
3095 return CCmode;
3096 else
3097 return CC_ZSOmode;
9d2f1b03 3098 }
ccfccd66 3099 else if (f & CC_FLAG_C)
3100 return CC_ZSCmode;
3101 else
3102 return CC_ZSmode;
9d2f1b03 3103}
3104
ccfccd66 3105/* Convert an RTX_CODE to the set of flags needed to implement it.
3106 This assumes an integer comparison. */
3107
9d2f1b03 3108static unsigned int
ccfccd66 3109flags_from_code (enum rtx_code code)
9d2f1b03 3110{
ccfccd66 3111 switch (code)
9d2f1b03 3112 {
ccfccd66 3113 case LT:
3114 case GE:
24ad6c43 3115 return CC_FLAG_S;
ccfccd66 3116 case GT:
3117 case LE:
3118 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3119 case GEU:
3120 case LTU:
3121 return CC_FLAG_C;
3122 case GTU:
3123 case LEU:
3124 return CC_FLAG_C | CC_FLAG_Z;
3125 case EQ:
3126 case NE:
3127 return CC_FLAG_Z;
3128 default:
3129 gcc_unreachable ();
9d2f1b03 3130 }
3131}
3132
ccfccd66 3133/* Return a CC_MODE of which both M1 and M2 are subsets. */
3134
3754d046 3135static machine_mode
3136rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
9d2f1b03 3137{
ccfccd66 3138 unsigned f;
3139
3140 /* Early out for identical modes. */
3141 if (m1 == m2)
3142 return m1;
3143
3144 /* There's no valid combination for FP vs non-FP. */
3145 f = flags_from_mode (m1) | flags_from_mode (m2);
3146 if (f & CC_FLAG_FP)
3147 return VOIDmode;
3148
3149 /* Otherwise, see what mode can implement all the flags. */
3150 return mode_from_flags (f);
9d2f1b03 3151}
8b8777b9 3152
3153/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3154
3754d046 3155machine_mode
24ad6c43 3156rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
8b8777b9 3157{
3158 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3159 return CC_Fmode;
3160
24ad6c43 3161 if (y != const0_rtx)
3162 return CCmode;
3163
ccfccd66 3164 return mode_from_flags (flags_from_code (cmp_code));
3165}
3166
/* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
   CC_MODE, and use that in branches based on that compare.  */

void
rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
		  rtx c1, rtx c2, rtx label)
{
  rtx flags, x;

  /* First the compare, setting the flags register.  */
  flags = gen_rtx_REG (cc_mode, CC_REG);
  x = gen_rtx_COMPARE (cc_mode, c1, c2);
  x = gen_rtx_SET (VOIDmode, flags, x);
  emit_insn (x);

  /* Then the conditional jump on (CMP1 flags 0).  */
  x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
  x = gen_rtx_SET (VOIDmode, pc_rtx, x);
  emit_jump_insn (x);
}
3186
/* A helper function for matching parallels that set the flags.
   INSN must be a two-element PARALLEL whose second element is
   (set (reg:CC_? flags) (compare ...)).  Returns true if the flags
   mode used by INSN provides no flags beyond those of CC_MODE.  */

bool
rx_match_ccmode (rtx insn, machine_mode cc_mode)
{
  rtx op1, flags;
  machine_mode flags_mode;

  gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);

  op1 = XVECEXP (PATTERN (insn), 0, 1);
  gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);

  flags = SET_DEST (op1);
  flags_mode = GET_MODE (flags);

  if (GET_MODE (SET_SRC (op1)) != flags_mode)
    return false;
  if (GET_MODE_CLASS (flags_mode) != MODE_CC)
    return false;

  /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
  if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
    return false;

  return true;
}
9f9a3b39 3214\f
3215int
001afa63 3216rx_align_for_label (rtx lab, int uses_threshold)
9f9a3b39 3217{
001afa63 3218 /* This is a simple heuristic to guess when an alignment would not be useful
3219 because the delay due to the inserted NOPs would be greater than the delay
3220 due to the misaligned branch. If uses_threshold is zero then the alignment
3221 is always useful. */
f7fcec1a 3222 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
001afa63 3223 return 0;
3224
958c4dc5 3225 if (optimize_size)
3226 return 0;
5005fc53 3227 /* These values are log, not bytes. */
958c4dc5 3228 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
5005fc53 3229 return 2; /* 4 bytes */
3230 return 3; /* 8 bytes */
9f9a3b39 3231}
3232
/* Return the maximum number of padding bytes worth inserting before
   label LAB: one less than the length of the first real insn after
   the label, or zero when padding is pointless.  */
static int
rx_max_skip_for_label (rtx_insn *lab)
{
  int opsize;
  rtx_insn *op;

  if (optimize_size)
    return 0;

  if (lab == NULL)
    return 0;

  /* Find the first instruction after the label, skipping over
     further labels, notes, debug insns and USEs.  */
  op = lab;
  do
    {
      op = next_nonnote_nondebug_insn (op);
    }
  while (op && (LABEL_P (op)
		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
  if (!op)
    return 0;

  opsize = get_attr_length (op);
  if (opsize >= 0 && opsize < 8)
    return opsize - 1;
  return 0;
}
776f1390 3260
3261/* Compute the real length of the extending load-and-op instructions. */
3262
3263int
fd535fc1 3264rx_adjust_insn_length (rtx_insn *insn, int current_length)
776f1390 3265{
3266 rtx extend, mem, offset;
3267 bool zero;
3268 int factor;
3269
7ce85a1f 3270 if (!INSN_P (insn))
3271 return current_length;
3272
776f1390 3273 switch (INSN_CODE (insn))
3274 {
3275 default:
3276 return current_length;
3277
3278 case CODE_FOR_plussi3_zero_extendhi:
3279 case CODE_FOR_andsi3_zero_extendhi:
3280 case CODE_FOR_iorsi3_zero_extendhi:
3281 case CODE_FOR_xorsi3_zero_extendhi:
3282 case CODE_FOR_divsi3_zero_extendhi:
3283 case CODE_FOR_udivsi3_zero_extendhi:
3284 case CODE_FOR_minussi3_zero_extendhi:
3285 case CODE_FOR_smaxsi3_zero_extendhi:
3286 case CODE_FOR_sminsi3_zero_extendhi:
3287 case CODE_FOR_multsi3_zero_extendhi:
f7fcec1a 3288 case CODE_FOR_comparesi3_zero_extendhi:
776f1390 3289 zero = true;
3290 factor = 2;
3291 break;
3292
3293 case CODE_FOR_plussi3_sign_extendhi:
3294 case CODE_FOR_andsi3_sign_extendhi:
3295 case CODE_FOR_iorsi3_sign_extendhi:
3296 case CODE_FOR_xorsi3_sign_extendhi:
3297 case CODE_FOR_divsi3_sign_extendhi:
3298 case CODE_FOR_udivsi3_sign_extendhi:
3299 case CODE_FOR_minussi3_sign_extendhi:
3300 case CODE_FOR_smaxsi3_sign_extendhi:
3301 case CODE_FOR_sminsi3_sign_extendhi:
3302 case CODE_FOR_multsi3_sign_extendhi:
f7fcec1a 3303 case CODE_FOR_comparesi3_sign_extendhi:
776f1390 3304 zero = false;
3305 factor = 2;
3306 break;
3307
3308 case CODE_FOR_plussi3_zero_extendqi:
3309 case CODE_FOR_andsi3_zero_extendqi:
3310 case CODE_FOR_iorsi3_zero_extendqi:
3311 case CODE_FOR_xorsi3_zero_extendqi:
3312 case CODE_FOR_divsi3_zero_extendqi:
3313 case CODE_FOR_udivsi3_zero_extendqi:
3314 case CODE_FOR_minussi3_zero_extendqi:
3315 case CODE_FOR_smaxsi3_zero_extendqi:
3316 case CODE_FOR_sminsi3_zero_extendqi:
3317 case CODE_FOR_multsi3_zero_extendqi:
f7fcec1a 3318 case CODE_FOR_comparesi3_zero_extendqi:
776f1390 3319 zero = true;
3320 factor = 1;
3321 break;
3322
3323 case CODE_FOR_plussi3_sign_extendqi:
3324 case CODE_FOR_andsi3_sign_extendqi:
3325 case CODE_FOR_iorsi3_sign_extendqi:
3326 case CODE_FOR_xorsi3_sign_extendqi:
3327 case CODE_FOR_divsi3_sign_extendqi:
3328 case CODE_FOR_udivsi3_sign_extendqi:
3329 case CODE_FOR_minussi3_sign_extendqi:
3330 case CODE_FOR_smaxsi3_sign_extendqi:
3331 case CODE_FOR_sminsi3_sign_extendqi:
3332 case CODE_FOR_multsi3_sign_extendqi:
f7fcec1a 3333 case CODE_FOR_comparesi3_sign_extendqi:
776f1390 3334 zero = false;
3335 factor = 1;
3336 break;
3337 }
3338
3339 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3340 extend = single_set (insn);
3341 gcc_assert (extend != NULL_RTX);
3342
3343 extend = SET_SRC (extend);
3344 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3345 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3346 extend = XEXP (extend, 0);
3347 else
3348 extend = XEXP (extend, 1);
3349
3350 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3351 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3352
3353 mem = XEXP (extend, 0);
3354 gcc_checking_assert (MEM_P (mem));
3355 if (REG_P (XEXP (mem, 0)))
3356 return (zero && factor == 1) ? 2 : 3;
3357
3358 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3359 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3360 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3361
3362 offset = XEXP (XEXP (mem, 0), 1);
3363 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3364
3365 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3366 return (zero && factor == 1) ? 3 : 4;
3367
3368 return (zero && factor == 1) ? 4 : 5;
3369}
ee1401ac 3370
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always allow accesses to
   volatile bit-fields to be narrowed to the declared type of the field.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3376
3377static bool
3378rx_ok_to_inline (tree caller, tree callee)
3379{
3380 /* Do not inline functions with local variables
3381 into a naked CALLER - naked function have no stack frame and
3382 locals need a frame in order to have somewhere to live.
3383
3384 Unfortunately we have no way to determine the presence of
3385 local variables in CALLEE, so we have to be cautious and
3386 assume that there might be some there.
3387
3388 We do allow inlining when CALLEE has the "inline" type
3389 modifier or the "always_inline" or "gnu_inline" attributes. */
3390 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3391 || DECL_DECLARED_INLINE_P (callee)
3392 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3393 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3394}
3395
/* Implement TARGET_LRA_P.  Use the LRA register allocator only when it
   has been explicitly requested (-mlra).  */

static bool
rx_enable_lra (void)
{
  return TARGET_ENABLE_LRA;
}
3401

/* Initialize the GCC target structure: install the RX implementations
   of the target hooks defined in this file.  */

#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield

#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P rx_ok_to_inline

#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE rx_function_value

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB rx_return_in_msb

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P rx_in_small_data

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rx_return_in_memory

#undef TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION true

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION rx_select_section

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rx_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL rx_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rx_expand_builtin

#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor

#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rx_attribute_table

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START rx_file_start

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue

#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG rx_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rx_assemble_integer

#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true

#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST rx_address_cost

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE rx_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT rx_trampoline_init

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND rx_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE rx_option_override

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode

#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change

#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM CC_REG

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN rx_warn_func_return

#undef TARGET_LRA_P
#define TARGET_LRA_P rx_enable_lra

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated from this file by gengtype.  */
#include "gt-rx.h"