24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
f35edb6f 2 Copyright (C) 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
24833e1a 4 Contributed by Red Hat.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22/* To Do:
23
24 * Re-enable memory-to-memory copies and fix up reload. */
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "tree.h"
31#include "rtl.h"
32#include "regs.h"
33#include "hard-reg-set.h"
24833e1a 34#include "insn-config.h"
35#include "conditions.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "function.h"
40#include "expr.h"
41#include "optabs.h"
42#include "libfuncs.h"
43#include "recog.h"
0b205f4c 44#include "diagnostic-core.h"
24833e1a 45#include "toplev.h"
46#include "reload.h"
47#include "df.h"
48#include "ggc.h"
49#include "tm_p.h"
50#include "debug.h"
51#include "target.h"
52#include "target-def.h"
53#include "langhooks.h"
fba5dd52 54#include "opts.h"
367b1459 55#include "cgraph.h"
6e507301 56
57static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
58static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
59static unsigned int rx_num_interrupt_regs;
24833e1a 60\f
6e507301 61static unsigned int
62rx_gp_base_regnum (void)
63{
64 if (rx_gp_base_regnum_val == INVALID_REGNUM)
65 gcc_unreachable ();
66 return rx_gp_base_regnum_val;
67}
68
69static unsigned int
70rx_pid_base_regnum (void)
71{
72 if (rx_pid_base_regnum_val == INVALID_REGNUM)
73 gcc_unreachable ();
74 return rx_pid_base_regnum_val;
75}
76
77/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
78
79static tree
80rx_decl_for_addr (rtx op)
81{
82 if (GET_CODE (op) == MEM)
83 op = XEXP (op, 0);
84 if (GET_CODE (op) == CONST)
85 op = XEXP (op, 0);
86 while (GET_CODE (op) == PLUS)
87 op = XEXP (op, 0);
88 if (GET_CODE (op) == SYMBOL_REF)
89 return SYMBOL_REF_DECL (op);
90 return NULL_TREE;
91}
92
6bb30542 93static void rx_print_operand (FILE *, rtx, int);
94
ccfccd66 95#define CC_FLAG_S (1 << 0)
96#define CC_FLAG_Z (1 << 1)
97#define CC_FLAG_O (1 << 2)
98#define CC_FLAG_C (1 << 3)
f7fcec1a 99#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
ccfccd66 100
101static unsigned int flags_from_mode (enum machine_mode mode);
102static unsigned int flags_from_code (enum rtx_code code);
67e66e16 103\f
6e507301 104/* Return true if OP is a reference to an object in a PID data area. */
105
106enum pid_type
107{
108 PID_NOT_PID = 0, /* The object is not in the PID data area. */
109 PID_ENCODED, /* The object is in the PID data area. */
110 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
111};
112
113static enum pid_type
114rx_pid_data_operand (rtx op)
115{
116 tree op_decl;
117
118 if (!TARGET_PID)
119 return PID_NOT_PID;
120
121 if (GET_CODE (op) == PLUS
122 && GET_CODE (XEXP (op, 0)) == REG
123 && GET_CODE (XEXP (op, 1)) == CONST
124 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
125 return PID_ENCODED;
126
127 op_decl = rx_decl_for_addr (op);
128
129 if (op_decl)
130 {
131 if (TREE_READONLY (op_decl))
132 return PID_UNENCODED;
133 }
134 else
135 {
136 /* Sigh, some special cases. */
137 if (GET_CODE (op) == SYMBOL_REF
138 || GET_CODE (op) == LABEL_REF)
139 return PID_UNENCODED;
140 }
141
142 return PID_NOT_PID;
143}
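/* Illustrative sketch: when TARGET_PID is enabled, a reference to a
   read-only object starts out as PID_UNENCODED.  rx_legitimize_address
   (below) then wraps it, roughly as

     (plus (reg <PID base>)
           (const (unspec [(symbol_ref "obj")] UNSPEC_PID_ADDR)))

   which is the PLUS/CONST/UNSPEC shape recognized above as
   PID_ENCODED.  The exact pattern is whatever gen_pid_addr (defined
   in rx.md) produces.  */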
144
145static rtx
146rx_legitimize_address (rtx x,
147 rtx oldx ATTRIBUTE_UNUSED,
148 enum machine_mode mode ATTRIBUTE_UNUSED)
149{
150 if (rx_pid_data_operand (x) == PID_UNENCODED)
151 {
152 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
153 return rv;
154 }
155
156 if (GET_CODE (x) == PLUS
157 && GET_CODE (XEXP (x, 0)) == PLUS
158 && REG_P (XEXP (XEXP (x, 0), 0))
159 && REG_P (XEXP (x, 1)))
160 return force_reg (SImode, x);
161
162 return x;
163}
164
24833e1a 165/* Return true if OP is a reference to an object in a small data area. */
166
167static bool
168rx_small_data_operand (rtx op)
169{
170 if (rx_small_data_limit == 0)
171 return false;
172
173 if (GET_CODE (op) == SYMBOL_REF)
174 return SYMBOL_REF_SMALL_P (op);
175
176 return false;
177}
178
179static bool
4bccad5e 180rx_is_legitimate_address (enum machine_mode mode, rtx x,
181 bool strict ATTRIBUTE_UNUSED)
24833e1a 182{
183 if (RTX_OK_FOR_BASE (x, strict))
184 /* Register Indirect. */
185 return true;
186
f7fcec1a 187 if ((GET_MODE_SIZE (mode) == 4
188 || GET_MODE_SIZE (mode) == 2
189 || GET_MODE_SIZE (mode) == 1)
24833e1a 190 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
191 /* Pre-decrement Register Indirect or
192 Post-increment Register Indirect. */
193 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
194
6e507301 195 switch (rx_pid_data_operand (x))
196 {
197 case PID_UNENCODED:
198 return false;
199 case PID_ENCODED:
200 return true;
201 default:
202 break;
203 }
204
24833e1a 205 if (GET_CODE (x) == PLUS)
206 {
207 rtx arg1 = XEXP (x, 0);
208 rtx arg2 = XEXP (x, 1);
209 rtx index = NULL_RTX;
210
211 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
212 index = arg2;
213 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
214 index = arg1;
215 else
216 return false;
217
218 switch (GET_CODE (index))
219 {
220 case CONST_INT:
221 {
222 /* Register Relative: REG + INT.
223 Only positive, mode-aligned, mode-sized
224 displacements are allowed. */
225 HOST_WIDE_INT val = INTVAL (index);
226 int factor;
227
228 if (val < 0)
229 return false;
776f1390 230
24833e1a 231 switch (GET_MODE_SIZE (mode))
232 {
233 default:
234 case 4: factor = 4; break;
235 case 2: factor = 2; break;
236 case 1: factor = 1; break;
237 }
238
f7fcec1a 239 if (val > (65535 * factor))
24833e1a 240 return false;
241 return (val % factor) == 0;
242 }
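	    /* Worked example (illustrative): for an SImode access the
	       factor is 4, so a displacement of 8 is accepted (it is
	       non-negative, a multiple of 4 and no more than
	       65535 * 4 = 262140), while displacements of 6 or -4 are
	       rejected.  */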
243
244 case REG:
245 /* Unscaled Indexed Register Indirect: REG + REG
246 Size has to be "QI", REG has to be valid. */
247 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
248
249 case MULT:
250 {
251 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
252 Factor has to equal the mode size, REG has to be valid. */
253 rtx factor;
254
255 factor = XEXP (index, 1);
256 index = XEXP (index, 0);
257
258 return REG_P (index)
259 && RTX_OK_FOR_BASE (index, strict)
260 && CONST_INT_P (factor)
261 && GET_MODE_SIZE (mode) == INTVAL (factor);
262 }
263
264 default:
265 return false;
266 }
267 }
268
269 /* Small data area accesses turn into register relative offsets. */
270 return rx_small_data_operand (x);
271}
272
 273/* Returns TRUE for simple memory addresses, i.e. ones
 274   that do not involve register indexed addressing
275 or pre/post increment/decrement. */
276
277bool
278rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
279{
24833e1a 280 if (! rx_is_legitimate_address
281 (mode, mem, reload_in_progress || reload_completed))
282 return false;
283
284 switch (GET_CODE (mem))
285 {
286 case REG:
287 /* Simple memory addresses are OK. */
288 return true;
289
290 case PRE_DEC:
291 case POST_INC:
292 return false;
293
294 case PLUS:
776f1390 295 {
296 rtx base, index;
297
298 /* Only allow REG+INT addressing. */
299 base = XEXP (mem, 0);
300 index = XEXP (mem, 1);
24833e1a 301
776f1390 302 if (! RX_REG_P (base) || ! CONST_INT_P (index))
303 return false;
304
305 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
306 }
24833e1a 307
308 case SYMBOL_REF:
309 /* Can happen when small data is being supported.
310 Assume that it will be resolved into GP+INT. */
311 return true;
312
313 default:
314 gcc_unreachable ();
315 }
316}
317
5afe50d9 318/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
319
320static bool
321rx_mode_dependent_address_p (const_rtx addr)
24833e1a 322{
323 if (GET_CODE (addr) == CONST)
324 addr = XEXP (addr, 0);
325
326 switch (GET_CODE (addr))
327 {
328 /* --REG and REG++ only work in SImode. */
329 case PRE_DEC:
330 case POST_INC:
331 return true;
332
333 case MINUS:
334 case PLUS:
335 if (! REG_P (XEXP (addr, 0)))
336 return true;
337
338 addr = XEXP (addr, 1);
339
340 switch (GET_CODE (addr))
341 {
342 case REG:
343 /* REG+REG only works in SImode. */
344 return true;
345
346 case CONST_INT:
347 /* REG+INT is only mode independent if INT is a
 348	     multiple of 4, positive and fits into 8 bits. */
349 if (((INTVAL (addr) & 3) == 0)
350 && IN_RANGE (INTVAL (addr), 4, 252))
351 return false;
352 return true;
353
354 case SYMBOL_REF:
355 case LABEL_REF:
356 return true;
357
358 case MULT:
359 gcc_assert (REG_P (XEXP (addr, 0)));
360 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
361 /* REG+REG*SCALE is always mode dependent. */
362 return true;
363
364 default:
365 /* Not recognized, so treat as mode dependent. */
366 return true;
367 }
368
369 case CONST_INT:
370 case SYMBOL_REF:
371 case LABEL_REF:
372 case REG:
373 /* These are all mode independent. */
374 return false;
375
376 default:
377 /* Everything else is unrecognized,
378 so treat as mode dependent. */
379 return true;
380 }
381}
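/* For example (illustrative): "REG + 8" is mode independent - 8 is a
   positive multiple of 4 inside the 4..252 window - whereas pre/post
   modified and REG + REG addresses only exist for SImode accesses and
   are therefore reported as mode dependent.  */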
382\f
24833e1a 383/* A C compound statement to output to stdio stream FILE the
384 assembler syntax for an instruction operand that is a memory
385 reference whose address is ADDR. */
386
6bb30542 387static void
24833e1a 388rx_print_operand_address (FILE * file, rtx addr)
389{
390 switch (GET_CODE (addr))
391 {
392 case REG:
393 fprintf (file, "[");
394 rx_print_operand (file, addr, 0);
395 fprintf (file, "]");
396 break;
397
398 case PRE_DEC:
399 fprintf (file, "[-");
400 rx_print_operand (file, XEXP (addr, 0), 0);
401 fprintf (file, "]");
402 break;
403
404 case POST_INC:
405 fprintf (file, "[");
406 rx_print_operand (file, XEXP (addr, 0), 0);
407 fprintf (file, "+]");
408 break;
409
410 case PLUS:
411 {
412 rtx arg1 = XEXP (addr, 0);
413 rtx arg2 = XEXP (addr, 1);
414 rtx base, index;
415
416 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
417 base = arg1, index = arg2;
418 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
419 base = arg2, index = arg1;
420 else
421 {
422 rx_print_operand (file, arg1, 0);
423 fprintf (file, " + ");
424 rx_print_operand (file, arg2, 0);
425 break;
426 }
427
428 if (REG_P (index) || GET_CODE (index) == MULT)
429 {
430 fprintf (file, "[");
431 rx_print_operand (file, index, 'A');
432 fprintf (file, ",");
433 }
434 else /* GET_CODE (index) == CONST_INT */
435 {
436 rx_print_operand (file, index, 'A');
437 fprintf (file, "[");
438 }
439 rx_print_operand (file, base, 0);
440 fprintf (file, "]");
441 break;
442 }
443
95272799 444 case CONST:
445 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
446 {
447 addr = XEXP (addr, 0);
448 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
6e507301 449
450 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
451 case UNSPEC:
452 addr = XVECEXP (addr, 0, 0);
95272799 453 gcc_assert (CONST_INT_P (addr));
454 }
455 /* Fall through. */
24833e1a 456 case LABEL_REF:
457 case SYMBOL_REF:
24833e1a 458 fprintf (file, "#");
6e507301 459 /* Fall through. */
24833e1a 460 default:
461 output_addr_const (file, addr);
462 break;
463 }
464}
465
466static void
467rx_print_integer (FILE * file, HOST_WIDE_INT val)
468{
469 if (IN_RANGE (val, -64, 64))
470 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
471 else
472 fprintf (file,
473 TARGET_AS100_SYNTAX
474 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
475 val);
476}
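/* For example: 42 is emitted as "42", while 100 lies outside the
   -64..64 window and is emitted in hex - "0x64" for GAS syntax or
   "064H" for AS100 syntax (assuming the usual HOST_WIDE_INT format
   strings).  */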
477
478static bool
479rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
480{
481 const char * op = integer_asm_op (size, is_aligned);
482
483 if (! CONST_INT_P (x))
484 return default_assemble_integer (x, size, is_aligned);
485
486 if (op == NULL)
487 return false;
488 fputs (op, asm_out_file);
489
490 rx_print_integer (asm_out_file, INTVAL (x));
491 fputc ('\n', asm_out_file);
492 return true;
493}
494
495
24833e1a 496/* Handles the insertion of a single operand into the assembler output.
497 The %<letter> directives supported are:
498
499 %A Print an operand without a leading # character.
500 %B Print an integer comparison name.
501 %C Print a control register name.
502 %F Print a condition code flag name.
6e507301 503   %G Print the register used for small-data-area addressing.
24833e1a 504 %H Print high part of a DImode register, integer or address.
505 %L Print low part of a DImode register, integer or address.
6bb30542 506 %N Print the negation of the immediate value.
6e507301 507   %P Print the register used for PID addressing.
24833e1a 508 %Q If the operand is a MEM, then correctly generate
776f1390 509 register indirect or register relative addressing.
510 %R Like %Q but for zero-extending loads. */
24833e1a 511
6bb30542 512static void
24833e1a 513rx_print_operand (FILE * file, rtx op, int letter)
514{
776f1390 515 bool unsigned_load = false;
6e507301 516 bool print_hash = true;
517
518 if (letter == 'A'
519 && ((GET_CODE (op) == CONST
520 && GET_CODE (XEXP (op, 0)) == UNSPEC)
521 || GET_CODE (op) == UNSPEC))
522 {
523 print_hash = false;
524 letter = 0;
525 }
776f1390 526
24833e1a 527 switch (letter)
528 {
529 case 'A':
530 /* Print an operand without a leading #. */
531 if (MEM_P (op))
532 op = XEXP (op, 0);
533
534 switch (GET_CODE (op))
535 {
536 case LABEL_REF:
537 case SYMBOL_REF:
538 output_addr_const (file, op);
539 break;
540 case CONST_INT:
541 fprintf (file, "%ld", (long) INTVAL (op));
542 break;
543 default:
544 rx_print_operand (file, op, 0);
545 break;
546 }
547 break;
548
549 case 'B':
ccfccd66 550 {
551 enum rtx_code code = GET_CODE (op);
552 enum machine_mode mode = GET_MODE (XEXP (op, 0));
553 const char *ret;
554
555 if (mode == CC_Fmode)
556 {
557 /* C flag is undefined, and O flag carries unordered. None of the
558 branch combinations that include O use it helpfully. */
559 switch (code)
560 {
561 case ORDERED:
562 ret = "no";
563 break;
564 case UNORDERED:
565 ret = "o";
566 break;
567 case LT:
568 ret = "n";
569 break;
570 case GE:
571 ret = "pz";
572 break;
573 case EQ:
574 ret = "eq";
575 break;
576 case NE:
577 ret = "ne";
578 break;
579 default:
580 gcc_unreachable ();
581 }
582 }
583 else
584 {
24ad6c43 585 unsigned int flags = flags_from_mode (mode);
776f1390 586
ccfccd66 587 switch (code)
588 {
589 case LT:
24ad6c43 590 ret = (flags & CC_FLAG_O ? "lt" : "n");
ccfccd66 591 break;
592 case GE:
24ad6c43 593 ret = (flags & CC_FLAG_O ? "ge" : "pz");
ccfccd66 594 break;
595 case GT:
596 ret = "gt";
597 break;
598 case LE:
599 ret = "le";
600 break;
601 case GEU:
602 ret = "geu";
603 break;
604 case LTU:
605 ret = "ltu";
606 break;
607 case GTU:
608 ret = "gtu";
609 break;
610 case LEU:
611 ret = "leu";
612 break;
613 case EQ:
614 ret = "eq";
615 break;
616 case NE:
617 ret = "ne";
618 break;
619 default:
620 gcc_unreachable ();
621 }
24ad6c43 622 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
ccfccd66 623 }
624 fputs (ret, file);
625 break;
626 }
24833e1a 627
628 case 'C':
629 gcc_assert (CONST_INT_P (op));
630 switch (INTVAL (op))
631 {
632 case 0: fprintf (file, "psw"); break;
633 case 2: fprintf (file, "usp"); break;
634 case 3: fprintf (file, "fpsw"); break;
635 case 4: fprintf (file, "cpen"); break;
636 case 8: fprintf (file, "bpsw"); break;
637 case 9: fprintf (file, "bpc"); break;
638 case 0xa: fprintf (file, "isp"); break;
639 case 0xb: fprintf (file, "fintv"); break;
640 case 0xc: fprintf (file, "intb"); break;
641 default:
98a5f45d 642 warning (0, "unrecognized control register number: %d - using 'psw'",
6bb30542 643 (int) INTVAL (op));
98cb9b5b 644 fprintf (file, "psw");
645 break;
24833e1a 646 }
647 break;
648
649 case 'F':
650 gcc_assert (CONST_INT_P (op));
651 switch (INTVAL (op))
652 {
653 case 0: case 'c': case 'C': fprintf (file, "C"); break;
654 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
655 case 2: case 's': case 'S': fprintf (file, "S"); break;
656 case 3: case 'o': case 'O': fprintf (file, "O"); break;
657 case 8: case 'i': case 'I': fprintf (file, "I"); break;
658 case 9: case 'u': case 'U': fprintf (file, "U"); break;
659 default:
660 gcc_unreachable ();
661 }
662 break;
663
6e507301 664 case 'G':
665 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
666 break;
667
24833e1a 668 case 'H':
6bb30542 669 switch (GET_CODE (op))
24833e1a 670 {
6bb30542 671 case REG:
672 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
673 break;
674 case CONST_INT:
675 {
676 HOST_WIDE_INT v = INTVAL (op);
67e66e16 677
6bb30542 678 fprintf (file, "#");
679 /* Trickery to avoid problems with shifting 32 bits at a time. */
680 v = v >> 16;
681 v = v >> 16;
682 rx_print_integer (file, v);
683 break;
684 }
685 case CONST_DOUBLE:
24833e1a 686 fprintf (file, "#");
6bb30542 687 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
688 break;
689 case MEM:
24833e1a 690 if (! WORDS_BIG_ENDIAN)
691 op = adjust_address (op, SImode, 4);
692 output_address (XEXP (op, 0));
6bb30542 693 break;
694 default:
695 gcc_unreachable ();
24833e1a 696 }
697 break;
698
699 case 'L':
6bb30542 700 switch (GET_CODE (op))
24833e1a 701 {
6bb30542 702 case REG:
703 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
704 break;
705 case CONST_INT:
24833e1a 706 fprintf (file, "#");
707 rx_print_integer (file, INTVAL (op) & 0xffffffff);
6bb30542 708 break;
709 case CONST_DOUBLE:
710 fprintf (file, "#");
711 rx_print_integer (file, CONST_DOUBLE_LOW (op));
712 break;
713 case MEM:
24833e1a 714 if (WORDS_BIG_ENDIAN)
715 op = adjust_address (op, SImode, 4);
716 output_address (XEXP (op, 0));
6bb30542 717 break;
718 default:
719 gcc_unreachable ();
24833e1a 720 }
721 break;
722
39349585 723 case 'N':
724 gcc_assert (CONST_INT_P (op));
725 fprintf (file, "#");
726 rx_print_integer (file, - INTVAL (op));
727 break;
728
6e507301 729 case 'P':
730 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
731 break;
732
776f1390 733 case 'R':
734 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
735 unsigned_load = true;
736 /* Fall through. */
24833e1a 737 case 'Q':
738 if (MEM_P (op))
739 {
740 HOST_WIDE_INT offset;
776f1390 741 rtx mem = op;
24833e1a 742
743 op = XEXP (op, 0);
744
745 if (REG_P (op))
746 offset = 0;
747 else if (GET_CODE (op) == PLUS)
748 {
749 rtx displacement;
750
751 if (REG_P (XEXP (op, 0)))
752 {
753 displacement = XEXP (op, 1);
754 op = XEXP (op, 0);
755 }
756 else
757 {
758 displacement = XEXP (op, 0);
759 op = XEXP (op, 1);
760 gcc_assert (REG_P (op));
761 }
762
763 gcc_assert (CONST_INT_P (displacement));
764 offset = INTVAL (displacement);
765 gcc_assert (offset >= 0);
766
767 fprintf (file, "%ld", offset);
768 }
769 else
770 gcc_unreachable ();
771
772 fprintf (file, "[");
773 rx_print_operand (file, op, 0);
774 fprintf (file, "].");
775
776f1390 776 switch (GET_MODE_SIZE (GET_MODE (mem)))
24833e1a 777 {
778 case 1:
776f1390 779 gcc_assert (offset <= 65535 * 1);
780 fprintf (file, unsigned_load ? "UB" : "B");
24833e1a 781 break;
782 case 2:
783 gcc_assert (offset % 2 == 0);
776f1390 784 gcc_assert (offset <= 65535 * 2);
785 fprintf (file, unsigned_load ? "UW" : "W");
24833e1a 786 break;
776f1390 787 case 4:
24833e1a 788 gcc_assert (offset % 4 == 0);
776f1390 789 gcc_assert (offset <= 65535 * 4);
24833e1a 790 fprintf (file, "L");
791 break;
776f1390 792 default:
793 gcc_unreachable ();
24833e1a 794 }
795 break;
796 }
797
798 /* Fall through. */
799
800 default:
6e507301 801 if (GET_CODE (op) == CONST
802 && GET_CODE (XEXP (op, 0)) == UNSPEC)
803 op = XEXP (op, 0);
804 else if (GET_CODE (op) == CONST
805 && GET_CODE (XEXP (op, 0)) == PLUS
806 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
807 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
808 {
809 if (print_hash)
810 fprintf (file, "#");
811 fprintf (file, "(");
812 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
813 fprintf (file, " + ");
814 output_addr_const (file, XEXP (XEXP (op, 0), 1));
815 fprintf (file, ")");
816 return;
817 }
818
24833e1a 819 switch (GET_CODE (op))
820 {
821 case MULT:
822 /* Should be the scaled part of an
823 indexed register indirect address. */
824 {
825 rtx base = XEXP (op, 0);
826 rtx index = XEXP (op, 1);
827
 828	    /* Check for a swapped index register and scaling factor.
829 Not sure if this can happen, but be prepared to handle it. */
830 if (CONST_INT_P (base) && REG_P (index))
831 {
832 rtx tmp = base;
833 base = index;
834 index = tmp;
835 }
836
837 gcc_assert (REG_P (base));
838 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
839 gcc_assert (CONST_INT_P (index));
840 /* Do not try to verify the value of the scalar as it is based
841 on the mode of the MEM not the mode of the MULT. (Which
842 will always be SImode). */
843 fprintf (file, "%s", reg_names [REGNO (base)]);
844 break;
845 }
846
847 case MEM:
848 output_address (XEXP (op, 0));
849 break;
850
851 case PLUS:
852 output_address (op);
853 break;
854
855 case REG:
856 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
857 fprintf (file, "%s", reg_names [REGNO (op)]);
858 break;
859
860 case SUBREG:
861 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
862 fprintf (file, "%s", reg_names [subreg_regno (op)]);
863 break;
864
865 /* This will only be single precision.... */
866 case CONST_DOUBLE:
867 {
868 unsigned long val;
869 REAL_VALUE_TYPE rv;
870
871 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
872 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
6e507301 873 if (print_hash)
874 fprintf (file, "#");
875 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
24833e1a 876 break;
877 }
878
879 case CONST_INT:
6e507301 880 if (print_hash)
881 fprintf (file, "#");
24833e1a 882 rx_print_integer (file, INTVAL (op));
883 break;
884
6e507301 885 case UNSPEC:
886 switch (XINT (op, 1))
887 {
888 case UNSPEC_PID_ADDR:
889 {
890 rtx sym, add;
891
892 if (print_hash)
893 fprintf (file, "#");
894 sym = XVECEXP (op, 0, 0);
895 add = NULL_RTX;
896 fprintf (file, "(");
897 if (GET_CODE (sym) == PLUS)
898 {
899 add = XEXP (sym, 1);
900 sym = XEXP (sym, 0);
901 }
902 output_addr_const (file, sym);
903 if (add != NULL_RTX)
904 {
905 fprintf (file, "+");
906 output_addr_const (file, add);
907 }
908 fprintf (file, "-__pid_base");
909 fprintf (file, ")");
910 return;
911 }
912 }
913 /* Fall through */
914
24833e1a 915 case CONST:
6e507301 916 case SYMBOL_REF:
24833e1a 917 case LABEL_REF:
918 case CODE_LABEL:
24833e1a 919 rx_print_operand_address (file, op);
920 break;
921
922 default:
923 gcc_unreachable ();
924 }
925 break;
926 }
927}
928
6e507301 929/* Maybe convert an operand into its PID format. */
930
931rtx
932rx_maybe_pidify_operand (rtx op, int copy_to_reg)
933{
934 if (rx_pid_data_operand (op) == PID_UNENCODED)
935 {
936 if (GET_CODE (op) == MEM)
937 {
938 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
939 op = replace_equiv_address (op, a);
940 }
941 else
942 {
943 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
944 }
945
946 if (copy_to_reg)
947 op = copy_to_mode_reg (GET_MODE (op), op);
948 }
949 return op;
950}
951
24833e1a 952/* Returns an assembler template for a move instruction. */
953
954char *
955rx_gen_move_template (rtx * operands, bool is_movu)
956{
6bb30542 957 static char out_template [64];
24833e1a 958 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
959 const char * src_template;
960 const char * dst_template;
961 rtx dest = operands[0];
962 rtx src = operands[1];
963
964 /* Decide which extension, if any, should be given to the move instruction. */
965 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
966 {
967 case QImode:
968 /* The .B extension is not valid when
969 loading an immediate into a register. */
970 if (! REG_P (dest) || ! CONST_INT_P (src))
971 extension = ".B";
972 break;
973 case HImode:
974 if (! REG_P (dest) || ! CONST_INT_P (src))
975 /* The .W extension is not valid when
976 loading an immediate into a register. */
977 extension = ".W";
978 break;
979 case SFmode:
980 case SImode:
981 extension = ".L";
982 break;
983 case VOIDmode:
984 /* This mode is used by constants. */
985 break;
986 default:
987 debug_rtx (src);
988 gcc_unreachable ();
989 }
990
6e507301 991 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
992 src_template = "(%A1-__pid_base)[%P1]";
993 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
994 src_template = "%%gp(%A1)[%G1]";
24833e1a 995 else
996 src_template = "%1";
997
998 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
6e507301 999 dst_template = "%%gp(%A0)[%G0]";
24833e1a 1000 else
1001 dst_template = "%0";
1002
6bb30542 1003 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
24833e1a 1004 extension, src_template, dst_template);
6bb30542 1005 return out_template;
24833e1a 1006}
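/* Usage sketch (illustrative): a QImode load from memory yields the
   template "mov.B\t%1, %0", an SImode or SFmode move yields
   "mov.L\t%1, %0", and a zero-extending HImode load (IS_MOVU) yields
   "movu.W\t%1, %0".  Small-data and PID sources instead use the
   %%gp(%A1)[%G1] and (%A1-__pid_base)[%P1] forms built above.  */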
24833e1a 1007\f
1008/* Return VALUE rounded up to the next ALIGNMENT boundary. */
1009
1010static inline unsigned int
1011rx_round_up (unsigned int value, unsigned int alignment)
1012{
1013 alignment -= 1;
1014 return (value + alignment) & (~ alignment);
1015}
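/* For example, rx_round_up (13, 4) and rx_round_up (16, 4) both
   return 16; ALIGNMENT must be a power of two for the mask trick
   above to work.  */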
1016
1017/* Return the number of bytes in the argument registers
1018 occupied by an argument of type TYPE and mode MODE. */
1019
ee4e8428 1020static unsigned int
4bccad5e 1021rx_function_arg_size (enum machine_mode mode, const_tree type)
24833e1a 1022{
1023 unsigned int num_bytes;
1024
1025 num_bytes = (mode == BLKmode)
1026 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1027 return rx_round_up (num_bytes, UNITS_PER_WORD);
1028}
1029
1030#define NUM_ARG_REGS 4
1031#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1032
1033/* Return an RTL expression describing the register holding a function
1034 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1035 be passed on the stack. CUM describes the previous parameters to the
1036 function and NAMED is false if the parameter is part of a variable
1037 parameter list, or the last named parameter before the start of a
1038 variable parameter list. */
1039
ee4e8428 1040static rtx
39cba157 1041rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
4bccad5e 1042 const_tree type, bool named)
24833e1a 1043{
1044 unsigned int next_reg;
39cba157 1045 unsigned int bytes_so_far = *get_cumulative_args (cum);
24833e1a 1046 unsigned int size;
1047 unsigned int rounded_size;
1048
1049 /* An exploded version of rx_function_arg_size. */
1050 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
6bb30542 1051 /* If the size is not known it cannot be passed in registers. */
1052 if (size < 1)
1053 return NULL_RTX;
24833e1a 1054
1055 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1056
1057 /* Don't pass this arg via registers if there
1058 are insufficient registers to hold all of it. */
1059 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1060 return NULL_RTX;
1061
1062 /* Unnamed arguments and the last named argument in a
1063 variadic function are always passed on the stack. */
1064 if (!named)
1065 return NULL_RTX;
1066
1067 /* Structures must occupy an exact number of registers,
1068 otherwise they are passed on the stack. */
1069 if ((type == NULL || AGGREGATE_TYPE_P (type))
1070 && (size % UNITS_PER_WORD) != 0)
1071 return NULL_RTX;
1072
1073 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1074
1075 return gen_rtx_REG (mode, next_reg);
1076}
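/* Worked example (illustrative): with UNITS_PER_WORD == 4 the first
   four word-sized named arguments are placed in r1..r4 (next_reg is
   bytes_so_far / UNITS_PER_WORD + 1), a fifth word-sized argument
   exceeds MAX_NUM_ARG_BYTES and is passed on the stack, and a 6 byte
   struct always goes on the stack because it does not fill a whole
   number of registers.  */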
1077
ee4e8428 1078static void
39cba157 1079rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
4bccad5e 1080 const_tree type, bool named ATTRIBUTE_UNUSED)
ee4e8428 1081{
39cba157 1082 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
ee4e8428 1083}
1084
bd99ba64 1085static unsigned int
4bccad5e 1086rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
bd99ba64 1087 const_tree type ATTRIBUTE_UNUSED)
1088{
1089 return 32;
1090}
1091
24833e1a 1092/* Return an RTL describing where a function return value of type RET_TYPE
1093 is held. */
1094
1095static rtx
1096rx_function_value (const_tree ret_type,
1097 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1098 bool outgoing ATTRIBUTE_UNUSED)
1099{
bd7d2835 1100 enum machine_mode mode = TYPE_MODE (ret_type);
1101
1102 /* RX ABI specifies that small integer types are
1103 promoted to int when returned by a function. */
02f06d23 1104 if (GET_MODE_SIZE (mode) > 0
1105 && GET_MODE_SIZE (mode) < 4
1106 && ! COMPLEX_MODE_P (mode)
1107 )
bd7d2835 1108 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1109
1110 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1111}
1112
1113/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1114 regard to function returns as does TARGET_FUNCTION_VALUE. */
1115
1116static enum machine_mode
1117rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1118 enum machine_mode mode,
0318c61a 1119 int * punsignedp ATTRIBUTE_UNUSED,
bd7d2835 1120 const_tree funtype ATTRIBUTE_UNUSED,
1121 int for_return)
1122{
1123 if (for_return != 1
1124 || GET_MODE_SIZE (mode) >= 4
02f06d23 1125 || COMPLEX_MODE_P (mode)
bd7d2835 1126 || GET_MODE_SIZE (mode) < 1)
1127 return mode;
1128
1129 return SImode;
24833e1a 1130}
1131
1132static bool
1133rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1134{
1135 HOST_WIDE_INT size;
1136
1137 if (TYPE_MODE (type) != BLKmode
1138 && ! AGGREGATE_TYPE_P (type))
1139 return false;
1140
1141 size = int_size_in_bytes (type);
1142 /* Large structs and those whose size is not an
1143 exact multiple of 4 are returned in memory. */
1144 return size < 1
1145 || size > 16
1146 || (size % UNITS_PER_WORD) != 0;
1147}
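/* For example: an 8 byte struct of two ints is returned in registers,
   whereas a 6 byte struct (not a multiple of UNITS_PER_WORD) or a
   20 byte struct (larger than 16 bytes) is returned in memory.  */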
1148
1149static rtx
1150rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1151 int incoming ATTRIBUTE_UNUSED)
1152{
1153 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1154}
1155
1156static bool
1157rx_return_in_msb (const_tree valtype)
1158{
1159 return TARGET_BIG_ENDIAN_DATA
1160 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1161}
1162
1163/* Returns true if the provided function has the specified attribute. */
1164
1165static inline bool
1166has_func_attr (const_tree decl, const char * func_attr)
1167{
1168 if (decl == NULL_TREE)
1169 decl = current_function_decl;
1170
1171 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1172}
1173
67e66e16 1174/* Returns true if the provided function has the "fast_interrupt" attribute. */
24833e1a 1175
1176static inline bool
1177is_fast_interrupt_func (const_tree decl)
1178{
67e66e16 1179 return has_func_attr (decl, "fast_interrupt");
24833e1a 1180}
1181
67e66e16 1182/* Returns true if the provided function has the "interrupt" attribute. */
24833e1a 1183
1184static inline bool
67e66e16 1185is_interrupt_func (const_tree decl)
24833e1a 1186{
67e66e16 1187 return has_func_attr (decl, "interrupt");
24833e1a 1188}
1189
1190/* Returns true if the provided function has the "naked" attribute. */
1191
1192static inline bool
1193is_naked_func (const_tree decl)
1194{
1195 return has_func_attr (decl, "naked");
1196}
1197\f
1198static bool use_fixed_regs = false;
1199
b2d7ede1 1200static void
24833e1a 1201rx_conditional_register_usage (void)
1202{
1203 static bool using_fixed_regs = false;
1204
6e507301 1205 if (TARGET_PID)
1206 {
1207 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1208 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1209 }
1210
24833e1a 1211 if (rx_small_data_limit > 0)
6e507301 1212 {
1213 if (TARGET_PID)
1214 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1215 else
1216 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1217
1218 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1219 }
24833e1a 1220
1221 if (use_fixed_regs != using_fixed_regs)
1222 {
1223 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1224 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1225
1226 if (use_fixed_regs)
1227 {
24833e1a 1228 unsigned int r;
1229
24833e1a 1230 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1231 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
e4d9e8e5 1232
1233 /* This is for fast interrupt handlers. Any register in
1234 the range r10 to r13 (inclusive) that is currently
1235 marked as fixed is now a viable, call-used register. */
24833e1a 1236 for (r = 10; r <= 13; r++)
1237 if (fixed_regs[r])
1238 {
1239 fixed_regs[r] = 0;
1240 call_used_regs[r] = 1;
24833e1a 1241 }
1242
e4d9e8e5 1243 /* Mark r7 as fixed. This is just a hack to avoid
1244 altering the reg_alloc_order array so that the newly
1245 freed r10-r13 registers are the preferred registers. */
1246 fixed_regs[7] = call_used_regs[7] = 1;
24833e1a 1247 }
1248 else
1249 {
1250 /* Restore the normal register masks. */
1251 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1252 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1253 }
1254
1255 using_fixed_regs = use_fixed_regs;
1256 }
1257}
1258
1259/* Perform any actions necessary before starting to compile FNDECL.
1260 For the RX we use this to make sure that we have the correct
1261 set of register masks selected. If FNDECL is NULL then we are
1262 compiling top level things. */
1263
1264static void
1265rx_set_current_function (tree fndecl)
1266{
1267 /* Remember the last target of rx_set_current_function. */
1268 static tree rx_previous_fndecl;
67e66e16 1269 bool prev_was_fast_interrupt;
1270 bool current_is_fast_interrupt;
24833e1a 1271
1272 /* Only change the context if the function changes. This hook is called
1273 several times in the course of compiling a function, and we don't want
1274 to slow things down too much or call target_reinit when it isn't safe. */
1275 if (fndecl == rx_previous_fndecl)
1276 return;
1277
67e66e16 1278 prev_was_fast_interrupt
24833e1a 1279 = rx_previous_fndecl
1280 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
67e66e16 1281
1282 current_is_fast_interrupt
24833e1a 1283 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1284
67e66e16 1285 if (prev_was_fast_interrupt != current_is_fast_interrupt)
24833e1a 1286 {
67e66e16 1287 use_fixed_regs = current_is_fast_interrupt;
24833e1a 1288 target_reinit ();
1289 }
67e66e16 1290
24833e1a 1291 rx_previous_fndecl = fndecl;
1292}
1293\f
 1294/* The typical stack layout looks like this after the function's prologue:
1295
1296 | |
1297 -- ^
1298 | | \ |
1299 | | arguments saved | Increasing
1300 | | on the stack | addresses
1301 PARENT arg pointer -> | | /
1302 -------------------------- ---- -------------------
1303 CHILD |ret | return address
1304 --
1305 | | \
1306 | | call saved
1307 | | registers
1308 | | /
1309 --
1310 | | \
1311 | | local
1312 | | variables
1313 frame pointer -> | | /
1314 --
1315 | | \
1316 | | outgoing | Decreasing
1317 | | arguments | addresses
1318 current stack pointer -> | | / |
1319 -------------------------- ---- ------------------ V
1320 | | */
1321
1322static unsigned int
1323bit_count (unsigned int x)
1324{
1325 const unsigned int m1 = 0x55555555;
1326 const unsigned int m2 = 0x33333333;
1327 const unsigned int m4 = 0x0f0f0f0f;
1328
1329 x -= (x >> 1) & m1;
1330 x = (x & m2) + ((x >> 2) & m2);
1331 x = (x + (x >> 4)) & m4;
1332 x += x >> 8;
1333
1334 return (x + (x >> 16)) & 0x3f;
1335}
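/* Worked example (illustrative) for x = 0xff: the first step leaves a
   2-bit count in every bit pair (0xff -> 0xaa), the second sums pairs
   into nibbles (0xaa -> 0x44), the third folds nibbles into bytes
   (0x44 -> 0x08) and the final folds plus the 0x3f mask return 8.  */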
1336
e4d9e8e5 1337#define MUST_SAVE_ACC_REGISTER \
1338 (TARGET_SAVE_ACC_REGISTER \
1339 && (is_interrupt_func (NULL_TREE) \
1340 || is_fast_interrupt_func (NULL_TREE)))
1341
24833e1a 1342/* Returns either the lowest numbered and highest numbered registers that
1343 occupy the call-saved area of the stack frame, if the registers are
1344 stored as a contiguous block, or else a bitmask of the individual
1345 registers if they are stored piecemeal.
1346
1347 Also computes the size of the frame and the size of the outgoing
1348 arguments block (in bytes). */
1349
1350static void
1351rx_get_stack_layout (unsigned int * lowest,
1352 unsigned int * highest,
1353 unsigned int * register_mask,
1354 unsigned int * frame_size,
1355 unsigned int * stack_size)
1356{
1357 unsigned int reg;
1358 unsigned int low;
1359 unsigned int high;
1360 unsigned int fixed_reg = 0;
1361 unsigned int save_mask;
1362 unsigned int pushed_mask;
1363 unsigned int unneeded_pushes;
1364
e4d9e8e5 1365 if (is_naked_func (NULL_TREE))
24833e1a 1366 {
1367 /* Naked functions do not create their own stack frame.
e4d9e8e5 1368 Instead the programmer must do that for us. */
24833e1a 1369 * lowest = 0;
1370 * highest = 0;
1371 * register_mask = 0;
1372 * frame_size = 0;
1373 * stack_size = 0;
1374 return;
1375 }
1376
9d2f1b03 1377 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
24833e1a 1378 {
21cde6ec 1379 if ((df_regs_ever_live_p (reg)
382ffb70 1380 /* Always save all call clobbered registers inside non-leaf
1381 interrupt handlers, even if they are not live - they may
1382 be used in (non-interrupt aware) routines called from this one. */
1383 || (call_used_regs[reg]
1384 && is_interrupt_func (NULL_TREE)
d5bf7b64 1385 && ! crtl->is_leaf))
24833e1a 1386 && (! call_used_regs[reg]
 1387	     /* Even call clobbered registers must
67e66e16 1388 be pushed inside interrupt handlers. */
e4d9e8e5 1389 || is_interrupt_func (NULL_TREE)
1390 /* Likewise for fast interrupt handlers, except registers r10 -
1391 r13. These are normally call-saved, but may have been set
1392 to call-used by rx_conditional_register_usage. If so then
1393 they can be used in the fast interrupt handler without
1394 saving them on the stack. */
1395 || (is_fast_interrupt_func (NULL_TREE)
1396 && ! IN_RANGE (reg, 10, 13))))
24833e1a 1397 {
1398 if (low == 0)
1399 low = reg;
1400 high = reg;
1401
1402 save_mask |= 1 << reg;
1403 }
1404
1405 /* Remember if we see a fixed register
1406 after having found the low register. */
1407 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1408 fixed_reg = reg;
1409 }
1410
e4d9e8e5 1411 /* If we have to save the accumulator register, make sure
1412 that at least two registers are pushed into the frame. */
1413 if (MUST_SAVE_ACC_REGISTER
1414 && bit_count (save_mask) < 2)
1415 {
1416 save_mask |= (1 << 13) | (1 << 14);
1417 if (low == 0)
1418 low = 13;
bc9bb967 1419 if (high == 0 || low == high)
1420 high = low + 1;
e4d9e8e5 1421 }
1422
24833e1a 1423 /* Decide if it would be faster fill in the call-saved area of the stack
1424 frame using multiple PUSH instructions instead of a single PUSHM
1425 instruction.
1426
1427 SAVE_MASK is a bitmask of the registers that must be stored in the
1428 call-save area. PUSHED_MASK is a bitmask of the registers that would
1429 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1430 is a bitmask of those registers in pushed_mask that are not in
1431 save_mask.
1432
1433 We use a simple heuristic that says that it is better to use
1434 multiple PUSH instructions if the number of unnecessary pushes is
1435 greater than the number of necessary pushes.
1436
1437 We also use multiple PUSH instructions if there are any fixed registers
1438 between LOW and HIGH. The only way that this can happen is if the user
 1439     has specified -ffixed-<reg-name> on the command line and in such
1440 circumstances we do not want to touch the fixed registers at all.
1441
1442 FIXME: Is it worth improving this heuristic ? */
1443 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1444 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
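  /* Worked example (illustrative): if only r4 and r9 need saving,
     save_mask has two bits set but a PUSHM r4-r9 would push six
     registers, so unneeded_pushes has four bits set and, when
     optimizing for speed, we prefer two individual PUSH insns.  */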
1445
1446 if ((fixed_reg && fixed_reg <= high)
1447 || (optimize_function_for_speed_p (cfun)
1448 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1449 {
1450 /* Use multiple pushes. */
1451 * lowest = 0;
1452 * highest = 0;
1453 * register_mask = save_mask;
1454 }
1455 else
1456 {
1457 /* Use one push multiple instruction. */
1458 * lowest = low;
1459 * highest = high;
1460 * register_mask = 0;
1461 }
1462
1463 * frame_size = rx_round_up
1464 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1465
1466 if (crtl->args.size > 0)
1467 * frame_size += rx_round_up
1468 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1469
1470 * stack_size = rx_round_up
1471 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1472}
1473
1474/* Generate a PUSHM instruction that matches the given operands. */
1475
1476void
1477rx_emit_stack_pushm (rtx * operands)
1478{
1479 HOST_WIDE_INT last_reg;
1480 rtx first_push;
1481
1482 gcc_assert (CONST_INT_P (operands[0]));
1483 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1484
1485 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1486 first_push = XVECEXP (operands[1], 0, 1);
1487 gcc_assert (SET_P (first_push));
1488 first_push = SET_SRC (first_push);
1489 gcc_assert (REG_P (first_push));
1490
1491 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
67e66e16 1492 reg_names [REGNO (first_push) - last_reg],
1493 reg_names [REGNO (first_push)]);
24833e1a 1494}
1495
1496/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1497
1498static rtx
1499gen_rx_store_vector (unsigned int low, unsigned int high)
1500{
1501 unsigned int i;
1502 unsigned int count = (high - low) + 2;
1503 rtx vector;
1504
1505 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1506
1507 XVECEXP (vector, 0, 0) =
51e241f8 1508 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1509 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1510 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1511
1512 for (i = 0; i < count - 1; i++)
1513 XVECEXP (vector, 0, i + 1) =
51e241f8 1514 gen_rtx_SET (VOIDmode,
24833e1a 1515 gen_rtx_MEM (SImode,
67e66e16 1516 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1517 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1518 gen_rtx_REG (SImode, high - i));
24833e1a 1519 return vector;
1520}
1521
67e66e16 1522/* Mark INSN as being frame related. If it is a PARALLEL
1523 then mark each element as being frame related as well. */
1524
1525static void
1526mark_frame_related (rtx insn)
1527{
1528 RTX_FRAME_RELATED_P (insn) = 1;
1529 insn = PATTERN (insn);
1530
1531 if (GET_CODE (insn) == PARALLEL)
1532 {
1533 unsigned int i;
1534
61fc50a0 1535 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1536 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1537 }
1538}
1539
95272799 1540static bool
1541ok_for_max_constant (HOST_WIDE_INT val)
1542{
1543 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1544 /* If there is no constraint on the size of constants
1545 used as operands, then any value is legitimate. */
1546 return true;
1547
1548 /* rx_max_constant_size specifies the maximum number
1549 of bytes that can be used to hold a signed value. */
1550 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1551 ( 1 << (rx_max_constant_size * 8)));
1552}
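/* For example (illustrative): with rx_max_constant_size == 1 the
   range above is [-256, 256], so gen_safe_add below emits a plain
   add for a value of 100 but routes a value of 1000 through the
   UNSPEC_CONST wrapper.  */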
1553
1554/* Generate an ADD of SRC plus VAL into DEST.
1555 Handles the case where VAL is too big for max_constant_value.
1556 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1557
1558static void
1559gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1560{
1561 rtx insn;
1562
1563 if (val == NULL_RTX || INTVAL (val) == 0)
1564 {
1565 gcc_assert (dest != src);
1566
1567 insn = emit_move_insn (dest, src);
1568 }
1569 else if (ok_for_max_constant (INTVAL (val)))
1570 insn = emit_insn (gen_addsi3 (dest, src, val));
1571 else
1572 {
f7fcec1a 1573 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
02f06d23 1574 will not reject it. */
1575 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1576 insn = emit_insn (gen_addsi3 (dest, src, val));
95272799 1577
1578 if (is_frame_related)
1579 /* We have to provide our own frame related note here
1580 as the dwarf2out code cannot be expected to grok
1581 our unspec. */
1582 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1583 gen_rtx_SET (SImode, dest,
1584 gen_rtx_PLUS (SImode, src, val)));
1585 return;
1586 }
1587
1588 if (is_frame_related)
1589 RTX_FRAME_RELATED_P (insn) = 1;
1590 return;
1591}
1592
24833e1a 1593void
1594rx_expand_prologue (void)
1595{
1596 unsigned int stack_size;
1597 unsigned int frame_size;
1598 unsigned int mask;
1599 unsigned int low;
1600 unsigned int high;
67e66e16 1601 unsigned int reg;
24833e1a 1602 rtx insn;
1603
1604 /* Naked functions use their own, programmer provided prologues. */
e4d9e8e5 1605 if (is_naked_func (NULL_TREE))
24833e1a 1606 return;
1607
1608 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1609
ecfbd70a 1610 if (flag_stack_usage_info)
1611 current_function_static_stack_size = frame_size + stack_size;
1612
24833e1a 1613 /* If we use any of the callee-saved registers, save them now. */
1614 if (mask)
1615 {
24833e1a 1616 /* Push registers in reverse order. */
9d2f1b03 1617 for (reg = CC_REGNUM; reg --;)
24833e1a 1618 if (mask & (1 << reg))
1619 {
1620 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
67e66e16 1621 mark_frame_related (insn);
24833e1a 1622 }
1623 }
1624 else if (low)
1625 {
1626 if (high == low)
1627 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1628 else
1629 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1630 * UNITS_PER_WORD),
1631 gen_rx_store_vector (low, high)));
67e66e16 1632 mark_frame_related (insn);
1633 }
1634
e4d9e8e5 1635 if (MUST_SAVE_ACC_REGISTER)
67e66e16 1636 {
1637 unsigned int acc_high, acc_low;
1638
1639 /* Interrupt handlers have to preserve the accumulator
1640 register if so requested by the user. Use the first
e4d9e8e5 1641 two pushed registers as intermediaries. */
67e66e16 1642 if (mask)
1643 {
1644 acc_low = acc_high = 0;
1645
9d2f1b03 1646 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1647 if (mask & (1 << reg))
1648 {
1649 if (acc_low == 0)
1650 acc_low = reg;
1651 else
1652 {
1653 acc_high = reg;
1654 break;
1655 }
1656 }
1657
1658 /* We have assumed that there are at least two registers pushed... */
1659 gcc_assert (acc_high != 0);
1660
1661 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1662 We just assume that they are zero. */
1663 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1664 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1665 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1666 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1667 }
1668 else
1669 {
1670 acc_low = low;
1671 acc_high = low + 1;
1672
1673 /* We have assumed that there are at least two registers pushed... */
1674 gcc_assert (acc_high <= high);
1675
1676 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1677 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1678 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1679 gen_rx_store_vector (acc_low, acc_high)));
1680 }
24833e1a 1681 }
1682
1683 /* If needed, set up the frame pointer. */
1684 if (frame_pointer_needed)
95272799 1685 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1686 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1687
1688 /* Allocate space for the outgoing args.
1689 If the stack frame has not already been set up then handle this as well. */
1690 if (stack_size)
1691 {
1692 if (frame_size)
1693 {
1694 if (frame_pointer_needed)
95272799 1695 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1696 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1697 else
95272799 1698 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1699 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1700 true);
24833e1a 1701 }
1702 else
95272799 1703 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1704 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1705 }
1706 else if (frame_size)
1707 {
1708 if (! frame_pointer_needed)
95272799 1709 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1710 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1711 else
95272799 1712 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1713 true);
24833e1a 1714 }
24833e1a 1715}
1716
1717static void
1718rx_output_function_prologue (FILE * file,
1719 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1720{
1721 if (is_fast_interrupt_func (NULL_TREE))
1722 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1723
67e66e16 1724 if (is_interrupt_func (NULL_TREE))
1725 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
24833e1a 1726
1727 if (is_naked_func (NULL_TREE))
1728 asm_fprintf (file, "\t; Note: Naked Function\n");
1729
1730 if (cfun->static_chain_decl != NULL)
1731 asm_fprintf (file, "\t; Note: Nested function declared "
1732 "inside another function.\n");
1733
1734 if (crtl->calls_eh_return)
1735 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1736}
1737
1738/* Generate a POPM or RTSD instruction that matches the given operands. */
1739
1740void
1741rx_emit_stack_popm (rtx * operands, bool is_popm)
1742{
1743 HOST_WIDE_INT stack_adjust;
1744 HOST_WIDE_INT last_reg;
1745 rtx first_push;
1746
1747 gcc_assert (CONST_INT_P (operands[0]));
1748 stack_adjust = INTVAL (operands[0]);
1749
1750 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1751 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1752
1753 first_push = XVECEXP (operands[1], 0, 1);
1754 gcc_assert (SET_P (first_push));
1755 first_push = SET_DEST (first_push);
1756 gcc_assert (REG_P (first_push));
1757
1758 if (is_popm)
1759 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1760 reg_names [REGNO (first_push)],
1761 reg_names [REGNO (first_push) + last_reg]);
1762 else
1763 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1764 (int) stack_adjust,
1765 reg_names [REGNO (first_push)],
1766 reg_names [REGNO (first_push) + last_reg]);
1767}
1768
1769/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1770
1771static rtx
1772gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1773{
1774 unsigned int i;
1775 unsigned int bias = 3;
1776 unsigned int count = (high - low) + bias;
1777 rtx vector;
1778
1779 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1780
1781 XVECEXP (vector, 0, 0) =
51e241f8 1782 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
29c05e22 1783 plus_constant (Pmode, stack_pointer_rtx, adjust));
24833e1a 1784
1785 for (i = 0; i < count - 2; i++)
1786 XVECEXP (vector, 0, i + 1) =
51e241f8 1787 gen_rtx_SET (VOIDmode,
24833e1a 1788 gen_rtx_REG (SImode, low + i),
1789 gen_rtx_MEM (SImode,
1790 i == 0 ? stack_pointer_rtx
29c05e22 1791 : plus_constant (Pmode, stack_pointer_rtx,
24833e1a 1792 i * UNITS_PER_WORD)));
1793
1a860023 1794 XVECEXP (vector, 0, count - 1) = ret_rtx;
24833e1a 1795
1796 return vector;
1797}
1798
1799/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1800
1801static rtx
1802gen_rx_popm_vector (unsigned int low, unsigned int high)
1803{
1804 unsigned int i;
1805 unsigned int count = (high - low) + 2;
1806 rtx vector;
1807
1808 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1809
1810 XVECEXP (vector, 0, 0) =
51e241f8 1811 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
29c05e22 1812 plus_constant (Pmode, stack_pointer_rtx,
24833e1a 1813 (count - 1) * UNITS_PER_WORD));
1814
1815 for (i = 0; i < count - 1; i++)
1816 XVECEXP (vector, 0, i + 1) =
51e241f8 1817 gen_rtx_SET (VOIDmode,
24833e1a 1818 gen_rtx_REG (SImode, low + i),
1819 gen_rtx_MEM (SImode,
1820 i == 0 ? stack_pointer_rtx
29c05e22 1821 : plus_constant (Pmode, stack_pointer_rtx,
24833e1a 1822 i * UNITS_PER_WORD)));
1823
1824 return vector;
1825}
f35edb6f 1826
1827/* Returns true if a simple return insn can be used. */
1828
1829bool
1830rx_can_use_simple_return (void)
1831{
1832 unsigned int low;
1833 unsigned int high;
1834 unsigned int frame_size;
1835 unsigned int stack_size;
1836 unsigned int register_mask;
1837
1838 if (is_naked_func (NULL_TREE)
1839 || is_fast_interrupt_func (NULL_TREE)
1840 || is_interrupt_func (NULL_TREE))
1841 return false;
1842
1843 rx_get_stack_layout (& low, & high, & register_mask,
1844 & frame_size, & stack_size);
1845
1846 return (register_mask == 0
1847 && (frame_size + stack_size) == 0
1848 && low == 0);
1849}
1850
24833e1a 1851void
1852rx_expand_epilogue (bool is_sibcall)
1853{
1854 unsigned int low;
1855 unsigned int high;
1856 unsigned int frame_size;
1857 unsigned int stack_size;
1858 unsigned int register_mask;
1859 unsigned int regs_size;
67e66e16 1860 unsigned int reg;
24833e1a 1861 unsigned HOST_WIDE_INT total_size;
1862
61fc50a0 1863  /* FIXME: We do not support indirect sibcalls at the moment because we
1864 cannot guarantee that the register holding the function address is a
1865 call-used register. If it is a call-saved register then the stack
1866 pop instructions generated in the epilogue will corrupt the address
1867 before it is used.
1868
1869 Creating a new call-used-only register class works but then the
1870 reload pass gets stuck because it cannot always find a call-used
1871 register for spilling sibcalls.
1872
1873 The other possible solution is for this pass to scan forward for the
1874 sibcall instruction (if it has been generated) and work out if it
1875 is an indirect sibcall using a call-saved register. If it is then
 1876     the address can be copied into a call-used register in this epilogue
1877 code and the sibcall instruction modified to use that register. */
1878
24833e1a 1879 if (is_naked_func (NULL_TREE))
1880 {
61fc50a0 1881 gcc_assert (! is_sibcall);
1882
24833e1a 1883 /* Naked functions use their own, programmer provided epilogues.
1884 But, in order to keep gcc happy we have to generate some kind of
1885 epilogue RTL. */
1886 emit_jump_insn (gen_naked_return ());
1887 return;
1888 }
1889
1890 rx_get_stack_layout (& low, & high, & register_mask,
1891 & frame_size, & stack_size);
1892
1893 total_size = frame_size + stack_size;
1894 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1895
1896 /* See if we are unable to use the special stack frame deconstruct and
1897 return instructions. In most cases we can use them, but the exceptions
1898 are:
1899
1900 - Sibling calling functions deconstruct the frame but do not return to
1901 their caller. Instead they branch to their sibling and allow their
1902 return instruction to return to this function's parent.
1903
67e66e16 1904 - Fast and normal interrupt handling functions have to use special
24833e1a 1905 return instructions.
1906
1907 - Functions where we have pushed a fragmented set of registers into the
1908 call-save area must have the same set of registers popped. */
1909 if (is_sibcall
1910 || is_fast_interrupt_func (NULL_TREE)
67e66e16 1911 || is_interrupt_func (NULL_TREE)
24833e1a 1912 || register_mask)
1913 {
1914 /* Cannot use the special instructions - deconstruct by hand. */
1915 if (total_size)
95272799 1916 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1917 GEN_INT (total_size), false);
24833e1a 1918
e4d9e8e5 1919 if (MUST_SAVE_ACC_REGISTER)
24833e1a 1920 {
67e66e16 1921 unsigned int acc_low, acc_high;
1922
1923 /* Reverse the saving of the accumulator register onto the stack.
1924 Note we must adjust the saved "low" accumulator value as it
1925 is really the middle 32-bits of the accumulator. */
1926 if (register_mask)
1927 {
1928 acc_low = acc_high = 0;
9d2f1b03 1929
1930 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1931 if (register_mask & (1 << reg))
1932 {
1933 if (acc_low == 0)
1934 acc_low = reg;
1935 else
1936 {
1937 acc_high = reg;
1938 break;
1939 }
1940 }
1941 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1942 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1943 }
1944 else
1945 {
1946 acc_low = low;
1947 acc_high = low + 1;
1948 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1949 gen_rx_popm_vector (acc_low, acc_high)));
1950 }
1951
1952 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1953 gen_rtx_REG (SImode, acc_low),
1954 GEN_INT (16)));
1955 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1956 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1957 }
24833e1a 1958
67e66e16 1959 if (register_mask)
1960 {
9d2f1b03 1961 for (reg = 0; reg < CC_REGNUM; reg ++)
24833e1a 1962 if (register_mask & (1 << reg))
1963 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1964 }
1965 else if (low)
1966 {
1967 if (high == low)
1968 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1969 else
1970 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1971 gen_rx_popm_vector (low, high)));
1972 }
1973
1974 if (is_fast_interrupt_func (NULL_TREE))
61fc50a0 1975 {
1976 gcc_assert (! is_sibcall);
1977 emit_jump_insn (gen_fast_interrupt_return ());
1978 }
67e66e16 1979 else if (is_interrupt_func (NULL_TREE))
61fc50a0 1980 {
1981 gcc_assert (! is_sibcall);
1982 emit_jump_insn (gen_exception_return ());
1983 }
24833e1a 1984 else if (! is_sibcall)
1985 emit_jump_insn (gen_simple_return ());
1986
1987 return;
1988 }
1989
1990 /* If we allocated space on the stack, free it now. */
1991 if (total_size)
1992 {
1993 unsigned HOST_WIDE_INT rtsd_size;
1994
1995 /* See if we can use the RTSD instruction. */
1996 rtsd_size = total_size + regs_size;
1997 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1998 {
1999 if (low)
2000 emit_jump_insn (gen_pop_and_return
2001 (GEN_INT (rtsd_size),
2002 gen_rx_rtsd_vector (rtsd_size, low, high)));
2003 else
2004 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2005
2006 return;
2007 }
2008
95272799 2009 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2010 GEN_INT (total_size), false);
24833e1a 2011 }
2012
2013 if (low)
2014 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2015 gen_rx_rtsd_vector (regs_size,
2016 low, high)));
2017 else
2018 emit_jump_insn (gen_simple_return ());
2019}
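
/* A worked example of the two epilogue shapes above (illustrative only;
   the exact instruction sequences come from the patterns in rx.md):

   - An ordinary function that saved a contiguous block of registers,
     say low == 6 and high == 9 (regs_size == 16), with total_size == 20
     bytes of frame, gives rtsd_size == 36, which is below 1024 and a
     multiple of 4, so a single pop_and_return insn (the RX RTSD
     instruction) both releases the frame and returns.

   - The same layout inside an interrupt handler takes the manual path
     instead: the stack pointer is adjusted with gen_safe_add, the saved
     registers are popped, and the handler ends with an exception return
     rather than a normal return.  */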
2020
2021
 2022 /* Compute the offset (in bytes) between FROM (arg pointer
2023 or frame pointer) and TO (frame pointer or stack pointer).
2024 See ASCII art comment at the start of rx_expand_prologue
2025 for more information. */
2026
2027int
2028rx_initial_elimination_offset (int from, int to)
2029{
2030 unsigned int low;
2031 unsigned int high;
2032 unsigned int frame_size;
2033 unsigned int stack_size;
2034 unsigned int mask;
2035
2036 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2037
2038 if (from == ARG_POINTER_REGNUM)
2039 {
2040 /* Extend the computed size of the stack frame to
2041 include the registers pushed in the prologue. */
2042 if (low)
2043 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2044 else
2045 frame_size += bit_count (mask) * UNITS_PER_WORD;
2046
2047 /* Remember to include the return address. */
2048 frame_size += 1 * UNITS_PER_WORD;
2049
2050 if (to == FRAME_POINTER_REGNUM)
2051 return frame_size;
2052
2053 gcc_assert (to == STACK_POINTER_REGNUM);
2054 return frame_size + stack_size;
2055 }
2056
2057 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2058 return stack_size;
2059}
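
/* A hypothetical worked example of the computation above, assuming
   UNITS_PER_WORD == 4: with two registers saved (low == 6, high == 7,
   so 8 bytes of pushed registers), frame_size == 8 and stack_size == 12,
   the offsets are:

     ARG_POINTER   -> FRAME_POINTER : 8 + 8 + 4 (return address) = 20
     ARG_POINTER   -> STACK_POINTER : 20 + 12                    = 32
     FRAME_POINTER -> STACK_POINTER : 12

   The numbers are made up for illustration; only the formula comes
   from the code above.  */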
2060
24833e1a 2061/* Decide if a variable should go into one of the small data sections. */
2062
2063static bool
2064rx_in_small_data (const_tree decl)
2065{
2066 int size;
2067 const_tree section;
2068
2069 if (rx_small_data_limit == 0)
2070 return false;
2071
2072 if (TREE_CODE (decl) != VAR_DECL)
2073 return false;
2074
2075 /* We do not put read-only variables into a small data area because
2076 they would be placed with the other read-only sections, far away
2077 from the read-write data sections, and we only have one small
2078 data area pointer.
2079 Similarly commons are placed in the .bss section which might be
 2080 	     far away from (and out of alignment with respect to) the .data section.  */
2081 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2082 return false;
2083
2084 section = DECL_SECTION_NAME (decl);
2085 if (section)
2086 {
2087 const char * const name = TREE_STRING_POINTER (section);
2088
2089 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
2090 }
2091
2092 size = int_size_in_bytes (TREE_TYPE (decl));
2093
2094 return (size > 0) && (size <= rx_small_data_limit);
2095}
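
/* For illustration, assuming the small data limit has been set to 8
   (e.g. via -msmall-data-limit=8, so rx_small_data_limit == 8), the
   test above classifies file-scope variables like this:

     int counter = 1;          accepted - writable, initialized, 4 bytes
     const int limit = 10;     rejected - read-only
     static char buf[64];      rejected - larger than the limit

   The option spelling is given only as an illustration; the decision
   itself depends solely on rx_small_data_limit.  */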
2096
2097/* Return a section for X.
2098 The only special thing we do here is to honor small data. */
2099
2100static section *
2101rx_select_rtx_section (enum machine_mode mode,
2102 rtx x,
2103 unsigned HOST_WIDE_INT align)
2104{
2105 if (rx_small_data_limit > 0
2106 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2107 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2108 return sdata_section;
2109
2110 return default_elf_select_rtx_section (mode, x, align);
2111}
2112
2113static section *
2114rx_select_section (tree decl,
2115 int reloc,
2116 unsigned HOST_WIDE_INT align)
2117{
2118 if (rx_small_data_limit > 0)
2119 {
2120 switch (categorize_decl_for_section (decl, reloc))
2121 {
2122 case SECCAT_SDATA: return sdata_section;
2123 case SECCAT_SBSS: return sbss_section;
2124 case SECCAT_SRODATA:
 2125 	      /* Fall through.  We do not put small, read-only
 2126 	         data into the C_2 section because we do not
 2127 	         use the C_2 section at all: it is located
 2128 	         with the other read-only data sections, far
 2129 	         away from the read-write data sections, and
 2130 	         we only have one small data pointer
 2131 	         (r13).  */
2132 default:
2133 break;
2134 }
2135 }
2136
2137 /* If we are supporting the Renesas assembler
2138 we cannot use mergeable sections. */
2139 if (TARGET_AS100_SYNTAX)
2140 switch (categorize_decl_for_section (decl, reloc))
2141 {
2142 case SECCAT_RODATA_MERGE_CONST:
2143 case SECCAT_RODATA_MERGE_STR_INIT:
2144 case SECCAT_RODATA_MERGE_STR:
2145 return readonly_data_section;
2146
2147 default:
2148 break;
2149 }
2150
2151 return default_elf_select_section (decl, reloc, align);
2152}
2153\f
2154enum rx_builtin
2155{
2156 RX_BUILTIN_BRK,
2157 RX_BUILTIN_CLRPSW,
2158 RX_BUILTIN_INT,
2159 RX_BUILTIN_MACHI,
2160 RX_BUILTIN_MACLO,
2161 RX_BUILTIN_MULHI,
2162 RX_BUILTIN_MULLO,
2163 RX_BUILTIN_MVFACHI,
2164 RX_BUILTIN_MVFACMI,
2165 RX_BUILTIN_MVFC,
2166 RX_BUILTIN_MVTACHI,
2167 RX_BUILTIN_MVTACLO,
2168 RX_BUILTIN_MVTC,
67e66e16 2169 RX_BUILTIN_MVTIPL,
24833e1a 2170 RX_BUILTIN_RACW,
2171 RX_BUILTIN_REVW,
2172 RX_BUILTIN_RMPA,
2173 RX_BUILTIN_ROUND,
24833e1a 2174 RX_BUILTIN_SETPSW,
2175 RX_BUILTIN_WAIT,
2176 RX_BUILTIN_max
2177};
2178
103700c7 2179static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2180
24833e1a 2181static void
2182rx_init_builtins (void)
2183{
2184#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
103700c7 2185 rx_builtins[RX_BUILTIN_##UC_NAME] = \
f7fcec1a 2186 add_builtin_function ("__builtin_rx_" LC_NAME, \
24833e1a 2187 build_function_type_list (RET_TYPE##_type_node, \
2188 ARG_TYPE##_type_node, \
2189 NULL_TREE), \
2190 RX_BUILTIN_##UC_NAME, \
2191 BUILT_IN_MD, NULL, NULL_TREE)
2192
2193#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
103700c7 2194 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2195 add_builtin_function ("__builtin_rx_" LC_NAME, \
2196 build_function_type_list (RET_TYPE##_type_node, \
2197 ARG_TYPE1##_type_node,\
2198 ARG_TYPE2##_type_node,\
2199 NULL_TREE), \
2200 RX_BUILTIN_##UC_NAME, \
2201 BUILT_IN_MD, NULL, NULL_TREE)
2202
2203#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
103700c7 2204 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2205 add_builtin_function ("__builtin_rx_" LC_NAME, \
2206 build_function_type_list (RET_TYPE##_type_node, \
2207 ARG_TYPE1##_type_node,\
2208 ARG_TYPE2##_type_node,\
2209 ARG_TYPE3##_type_node,\
2210 NULL_TREE), \
2211 RX_BUILTIN_##UC_NAME, \
2212 BUILT_IN_MD, NULL, NULL_TREE)
2213
2214 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
2215 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2216 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2217 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2218 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2219 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2220 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2221 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2222 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
2223 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
2224 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2225 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2226 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
2227 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2228 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 2229 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 2230 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2231 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2232 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
24833e1a 2233 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
2234}
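
/* A usage sketch for the builtins registered above (illustrative only):

     int mac_high (int a, int b)
     {
       __builtin_rx_machi (a, b);
       return __builtin_rx_mvfachi ();
     }

   Here machi multiply-accumulates the upper halfwords of its operands
   and mvfachi reads back the top 32 bits of the accumulator, roughly as
   the RX MACHI and MVFACHI instructions do.  The builtin names and
   prototypes follow directly from the ADD_RX_BUILTIN* invocations
   above, e.g. __builtin_rx_round takes a float and returns an int.  */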
2235
103700c7 2236/* Return the RX builtin for CODE. */
2237
2238static tree
2239rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2240{
2241 if (code >= RX_BUILTIN_max)
2242 return error_mark_node;
2243
2244 return rx_builtins[code];
2245}
2246
24833e1a 2247static rtx
2248rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2249{
2250 if (reg && ! REG_P (arg))
2251 arg = force_reg (SImode, arg);
2252
2253 emit_insn (gen_func (arg));
2254
2255 return NULL_RTX;
2256}
2257
2258static rtx
2259rx_expand_builtin_mvtc (tree exp)
2260{
2261 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2262 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2263
2264 if (! CONST_INT_P (arg1))
2265 return NULL_RTX;
2266
2267 if (! REG_P (arg2))
2268 arg2 = force_reg (SImode, arg2);
2269
2270 emit_insn (gen_mvtc (arg1, arg2));
2271
2272 return NULL_RTX;
2273}
2274
2275static rtx
2276rx_expand_builtin_mvfc (tree t_arg, rtx target)
2277{
2278 rtx arg = expand_normal (t_arg);
2279
2280 if (! CONST_INT_P (arg))
2281 return NULL_RTX;
2282
e4d9e8e5 2283 if (target == NULL_RTX)
2284 return NULL_RTX;
2285
24833e1a 2286 if (! REG_P (target))
2287 target = force_reg (SImode, target);
2288
2289 emit_insn (gen_mvfc (target, arg));
2290
2291 return target;
2292}
2293
67e66e16 2294static rtx
2295rx_expand_builtin_mvtipl (rtx arg)
2296{
2297 /* The RX610 does not support the MVTIPL instruction. */
2298 if (rx_cpu_type == RX610)
2299 return NULL_RTX;
2300
e5743482 2301 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2302 return NULL_RTX;
2303
2304 emit_insn (gen_mvtipl (arg));
2305
2306 return NULL_RTX;
2307}
2308
24833e1a 2309static rtx
2310rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2311{
2312 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2313 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2314
2315 if (! REG_P (arg1))
2316 arg1 = force_reg (SImode, arg1);
2317
2318 if (! REG_P (arg2))
2319 arg2 = force_reg (SImode, arg2);
2320
2321 emit_insn (gen_func (arg1, arg2));
2322
2323 return NULL_RTX;
2324}
2325
2326static rtx
2327rx_expand_int_builtin_1_arg (rtx arg,
2328 rtx target,
2329 rtx (* gen_func)(rtx, rtx),
2330 bool mem_ok)
2331{
2332 if (! REG_P (arg))
2333 if (!mem_ok || ! MEM_P (arg))
2334 arg = force_reg (SImode, arg);
2335
2336 if (target == NULL_RTX || ! REG_P (target))
2337 target = gen_reg_rtx (SImode);
2338
2339 emit_insn (gen_func (target, arg));
2340
2341 return target;
2342}
2343
2344static rtx
2345rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2346{
2347 if (target == NULL_RTX || ! REG_P (target))
2348 target = gen_reg_rtx (SImode);
2349
2350 emit_insn (gen_func (target));
2351
2352 return target;
2353}
2354
2355static rtx
2356rx_expand_builtin_round (rtx arg, rtx target)
2357{
2358 if ((! REG_P (arg) && ! MEM_P (arg))
2359 || GET_MODE (arg) != SFmode)
2360 arg = force_reg (SFmode, arg);
2361
2362 if (target == NULL_RTX || ! REG_P (target))
2363 target = gen_reg_rtx (SImode);
2364
2365 emit_insn (gen_lrintsf2 (target, arg));
2366
2367 return target;
2368}
2369
e5743482 2370static int
0318c61a 2371valid_psw_flag (rtx op, const char *which)
e5743482 2372{
2373 static int mvtc_inform_done = 0;
2374
2375 if (GET_CODE (op) == CONST_INT)
2376 switch (INTVAL (op))
2377 {
2378 case 0: case 'c': case 'C':
2379 case 1: case 'z': case 'Z':
2380 case 2: case 's': case 'S':
2381 case 3: case 'o': case 'O':
2382 case 8: case 'i': case 'I':
2383 case 9: case 'u': case 'U':
2384 return 1;
2385 }
2386
2387 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2388 if (!mvtc_inform_done)
2389 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2390 mvtc_inform_done = 1;
2391
2392 return 0;
2393}
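
/* For illustration, the check above accepts either the PSW bit number
   or the corresponding flag letter, so both of these calls are valid:

     __builtin_rx_setpsw ('I');    set the interrupt enable flag
     __builtin_rx_clrpsw (0);      clear the carry flag (equivalent to 'C')

   Any other argument is rejected with the diagnostics above.  */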
2394
24833e1a 2395static rtx
2396rx_expand_builtin (tree exp,
2397 rtx target,
2398 rtx subtarget ATTRIBUTE_UNUSED,
2399 enum machine_mode mode ATTRIBUTE_UNUSED,
2400 int ignore ATTRIBUTE_UNUSED)
2401{
2402 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 2403 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 2404 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2405 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2406
2407 switch (fcode)
2408 {
2409 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
e5743482 2410 case RX_BUILTIN_CLRPSW:
2411 if (!valid_psw_flag (op, "clrpsw"))
2412 return NULL_RTX;
2413 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2414 case RX_BUILTIN_SETPSW:
2415 if (!valid_psw_flag (op, "setpsw"))
2416 return NULL_RTX;
2417 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
24833e1a 2418 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2419 (op, gen_int, false);
2420 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2421 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2422 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2423 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2424 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2425 (target, gen_mvfachi);
2426 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2427 (target, gen_mvfacmi);
2428 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2429 (op, gen_mvtachi, true);
2430 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2431 (op, gen_mvtaclo, true);
2432 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2433 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2434 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2435 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2436 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2437 (op, gen_racw, false);
2438 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2439 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2440 (op, target, gen_revw, false);
24833e1a 2441 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2442
2443 default:
2444 internal_error ("bad builtin code");
2445 break;
2446 }
2447
2448 return NULL_RTX;
2449}
2450\f
2451/* Place an element into a constructor or destructor section.
2452 Like default_ctor_section_asm_out_constructor in varasm.c
2453 except that it uses .init_array (or .fini_array) and it
2454 handles constructor priorities. */
2455
2456static void
2457rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2458{
2459 section * s;
2460
2461 if (priority != DEFAULT_INIT_PRIORITY)
2462 {
2463 char buf[18];
2464
2465 sprintf (buf, "%s.%.5u",
2466 is_ctor ? ".init_array" : ".fini_array",
2467 priority);
2468 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2469 }
2470 else if (is_ctor)
2471 s = ctors_section;
2472 else
2473 s = dtors_section;
2474
2475 switch_to_section (s);
2476 assemble_align (POINTER_SIZE);
2477 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2478}
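
/* For example, given the sprintf format above, a constructor with
   priority 101 is emitted into a section named ".init_array.00101",
   whereas one with DEFAULT_INIT_PRIORITY goes into the plain ctors
   section.  */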
2479
2480static void
2481rx_elf_asm_constructor (rtx symbol, int priority)
2482{
2483 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2484}
2485
2486static void
2487rx_elf_asm_destructor (rtx symbol, int priority)
2488{
2489 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2490}
2491\f
67e66e16 2492/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2493
2494static tree
2495rx_handle_func_attribute (tree * node,
2496 tree name,
2497 tree args,
2498 int flags ATTRIBUTE_UNUSED,
2499 bool * no_add_attrs)
2500{
2501 gcc_assert (DECL_P (* node));
2502 gcc_assert (args == NULL_TREE);
2503
2504 if (TREE_CODE (* node) != FUNCTION_DECL)
2505 {
2506 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2507 name);
2508 * no_add_attrs = true;
2509 }
2510
2511 /* FIXME: We ought to check for conflicting attributes. */
2512
2513 /* FIXME: We ought to check that the interrupt and exception
2514 handler attributes have been applied to void functions. */
2515 return NULL_TREE;
2516}
2517
2518/* Table of RX specific attributes. */
2519const struct attribute_spec rx_attribute_table[] =
2520{
ac86af5d 2521 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2522 affects_type_identity. */
2523 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2524 false },
2525 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2526 false },
2527 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2528 false },
2529 { NULL, 0, 0, false, false, false, NULL, false }
24833e1a 2530};
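
/* For illustration, these attributes are applied to function
   declarations in user code, for example:

     void uart_isr (void) __attribute__ ((interrupt));
     void tick_isr (void) __attribute__ ((fast_interrupt));
     void start    (void) __attribute__ ((naked));

   The function names are hypothetical; only the attribute names and
   the fact that they take no arguments come from the table above.  */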
2531
42d89991 2532/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
02e53c17 2533
2534static void
42d89991 2535rx_override_options_after_change (void)
98cb9b5b 2536{
2537 static bool first_time = TRUE;
98cb9b5b 2538
2539 if (first_time)
2540 {
2541 /* If this is the first time through and the user has not disabled
42d89991 2542 the use of RX FPU hardware then enable -ffinite-math-only,
2543 since the FPU instructions do not support NaNs and infinities. */
98cb9b5b 2544 if (TARGET_USE_FPU)
42d89991 2545 flag_finite_math_only = 1;
98cb9b5b 2546
98cb9b5b 2547 first_time = FALSE;
2548 }
2549 else
2550 {
2551 /* Alert the user if they are changing the optimization options
 2552 	         to use IEEE-compliant floating-point arithmetic with RX FPU insns.  */
2553 if (TARGET_USE_FPU
42d89991 2554 && !flag_finite_math_only)
2555 warning (0, "RX FPU instructions do not support NaNs and infinities");
98cb9b5b 2556 }
2557}
2558
1af17d44 2559static void
2560rx_option_override (void)
2561{
8cb00d70 2562 unsigned int i;
2563 cl_deferred_option *opt;
2564 VEC(cl_deferred_option,heap) *vec
2565 = (VEC(cl_deferred_option,heap) *) rx_deferred_options;
2566
2567 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
2568 {
2569 switch (opt->opt_index)
2570 {
2571 case OPT_mint_register_:
2572 switch (opt->value)
2573 {
2574 case 4:
2575 fixed_regs[10] = call_used_regs [10] = 1;
2576 /* Fall through. */
2577 case 3:
2578 fixed_regs[11] = call_used_regs [11] = 1;
2579 /* Fall through. */
2580 case 2:
2581 fixed_regs[12] = call_used_regs [12] = 1;
2582 /* Fall through. */
2583 case 1:
2584 fixed_regs[13] = call_used_regs [13] = 1;
2585 /* Fall through. */
2586 case 0:
6e507301 2587 rx_num_interrupt_regs = opt->value;
8cb00d70 2588 break;
2589 default:
6e507301 2590 rx_num_interrupt_regs = 0;
8cb00d70 2591 /* Error message already given because rx_handle_option
2592 returned false. */
2593 break;
2594 }
2595 break;
2596
2597 default:
2598 gcc_unreachable ();
2599 }
2600 }
2601
1af17d44 2602 /* This target defaults to strict volatile bitfields. */
941a2396 2603 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
1af17d44 2604 flag_strict_volatile_bitfields = 1;
42d89991 2605
2606 rx_override_options_after_change ();
9f9a3b39 2607
2608 if (align_jumps == 0 && ! optimize_size)
2609 align_jumps = 3;
2610 if (align_loops == 0 && ! optimize_size)
2611 align_loops = 3;
2612 if (align_labels == 0 && ! optimize_size)
2613 align_labels = 3;
1af17d44 2614}
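
/* For illustration, the fall-through cases above mean that
   -mint-register=2 reserves r12 and r13 (marking them fixed and
   call-used), while -mint-register=4 reserves r10 through r13; the
   value is recorded in rx_num_interrupt_regs either way.  */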
2615
98cb9b5b 2616\f
24833e1a 2617static bool
2618rx_allocate_stack_slots_for_args (void)
2619{
2620 /* Naked functions should not allocate stack slots for arguments. */
2621 return ! is_naked_func (NULL_TREE);
2622}
2623
2624static bool
2625rx_func_attr_inlinable (const_tree decl)
2626{
2627 return ! is_fast_interrupt_func (decl)
67e66e16 2628 && ! is_interrupt_func (decl)
24833e1a 2629 && ! is_naked_func (decl);
2630}
2631
08c6cbd2 2632static bool
2633rx_warn_func_return (tree decl)
2634{
2635 /* Naked functions are implemented entirely in assembly, including the
2636 return sequence, so suppress warnings about this. */
2637 return !is_naked_func (decl);
2638}
2639
61fc50a0 2640 /* Return nonzero if it is OK to make a tail call to DECL (a FUNCTION_DECL,
 2641    or NULL for an indirect call) using the call expression EXP.  */
2642
2643static bool
e4d9e8e5 2644rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2645{
2646 /* Do not allow indirect tailcalls. The
2647 sibcall patterns do not support them. */
2648 if (decl == NULL)
2649 return false;
2650
2651 /* Never tailcall from inside interrupt handlers or naked functions. */
2652 if (is_fast_interrupt_func (NULL_TREE)
2653 || is_interrupt_func (NULL_TREE)
2654 || is_naked_func (NULL_TREE))
2655 return false;
2656
2657 return true;
2658}
2659
24833e1a 2660static void
2661rx_file_start (void)
2662{
2663 if (! TARGET_AS100_SYNTAX)
2664 default_file_start ();
2665}
2666
2667static bool
2668rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2669{
c6347c7a 2670 /* The packed attribute overrides the MS behaviour. */
2671 return ! TYPE_PACKED (record_type);
24833e1a 2672}
24833e1a 2673\f
 2674 /* Returns true if X is a legitimate constant for an immediate
2675 operand on the RX. X is already known to satisfy CONSTANT_P. */
2676
2677bool
f7fcec1a 2678rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
24833e1a 2679{
24833e1a 2680 switch (GET_CODE (x))
2681 {
2682 case CONST:
2683 x = XEXP (x, 0);
2684
2685 if (GET_CODE (x) == PLUS)
2686 {
2687 if (! CONST_INT_P (XEXP (x, 1)))
2688 return false;
2689
2690 /* GCC would not pass us CONST_INT + CONST_INT so we
2691 know that we have {SYMBOL|LABEL} + CONST_INT. */
2692 x = XEXP (x, 0);
2693 gcc_assert (! CONST_INT_P (x));
2694 }
2695
2696 switch (GET_CODE (x))
2697 {
2698 case LABEL_REF:
2699 case SYMBOL_REF:
2700 return true;
2701
95272799 2702 case UNSPEC:
6e507301 2703 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
95272799 2704
24833e1a 2705 default:
2706 /* FIXME: Can this ever happen ? */
776f1390 2707 gcc_unreachable ();
24833e1a 2708 }
2709 break;
2710
2711 case LABEL_REF:
2712 case SYMBOL_REF:
2713 return true;
2714 case CONST_DOUBLE:
09bb92cc 2715 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2716 case CONST_VECTOR:
2717 return false;
2718 default:
2719 gcc_assert (CONST_INT_P (x));
2720 break;
2721 }
2722
95272799 2723 return ok_for_max_constant (INTVAL (x));
24833e1a 2724}
2725
24833e1a 2726static int
d9c5e5f4 2727rx_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
2728 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
24833e1a 2729{
2730 rtx a, b;
2731
2732 if (GET_CODE (addr) != PLUS)
2733 return COSTS_N_INSNS (1);
2734
2735 a = XEXP (addr, 0);
2736 b = XEXP (addr, 1);
2737
2738 if (REG_P (a) && REG_P (b))
2739 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2740 return COSTS_N_INSNS (4);
2741
2742 if (speed)
2743 /* [REG+OFF] is just as fast as [REG]. */
2744 return COSTS_N_INSNS (1);
2745
2746 if (CONST_INT_P (b)
2747 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2748 /* Try to discourage REG + <large OFF> when optimizing for size. */
2749 return COSTS_N_INSNS (2);
2750
2751 return COSTS_N_INSNS (1);
2752}
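
/* Worked examples of the address cost function above, in
   COSTS_N_INSNS units:

     [r1]         -> 1
     [r1 + r2]    -> 4  (discouraged: keeps two registers live)
     [r1 + 8]     -> 1  when optimizing for speed
     [r1 + 200]   -> 2  when optimizing for size (large offset)

   The register numbers are arbitrary; only the costs come from the
   code above.  */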
2753
2754static bool
2755rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2756{
2757 /* We can always eliminate to the frame pointer.
2758 We can eliminate to the stack pointer unless a frame
2759 pointer is needed. */
2760
2761 return to == FRAME_POINTER_REGNUM
2762 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2763}
2764\f
2765
2766static void
2767rx_trampoline_template (FILE * file)
2768{
2769 /* Output assembler code for a block containing the constant
2770 part of a trampoline, leaving space for the variable parts.
2771
2772 On the RX, (where r8 is the static chain regnum) the trampoline
2773 looks like:
2774
2775 mov #<static chain value>, r8
2776 mov #<function's address>, r9
2777 jmp r9
2778
 2779    In big-endian-data mode, however, instructions are read into the CPU
 2780    4 bytes at a time.  These bytes are then swapped around before being
 2781    passed to the decoder.  So we must partition our trampoline into
 2782    4-byte packets and swap these packets around so that the instruction
 2783    reader will reverse the process.  But, in order to avoid splitting
 2784    the 32-bit constants across these packet boundaries (which would make
 2785    inserting them into the constructed trampoline very difficult), we
 2786    have to pad the instruction sequence with NOP insns, i.e.:
2787
2788 nop
2789 nop
2790 mov.l #<...>, r8
2791 nop
2792 nop
2793 mov.l #<...>, r9
2794 jmp r9
2795 nop
2796 nop */
2797
2798 if (! TARGET_BIG_ENDIAN_DATA)
2799 {
2800 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2801 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2802 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2803 }
2804 else
2805 {
2806 char r8 = '0' + STATIC_CHAIN_REGNUM;
2807 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2808
2809 if (TARGET_AS100_SYNTAX)
2810 {
2811 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2812 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2813 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2814 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2815 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2816 }
2817 else
2818 {
2819 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2820 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2821 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2822 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2823 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2824 }
2825 }
2826}
2827
2828static void
2829rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2830{
2831 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2832
2833 emit_block_move (tramp, assemble_trampoline_template (),
2834 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2835
2836 if (TARGET_BIG_ENDIAN_DATA)
2837 {
2838 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2839 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2840 }
2841 else
2842 {
2843 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2844 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2845 }
2846}
2847\f
ccfccd66 2848static int
3e8d9684 2849rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2850 reg_class_t regclass ATTRIBUTE_UNUSED,
2851 bool in)
9d2f1b03 2852{
6145a46d 2853 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
9d2f1b03 2854}
2855
ccfccd66 2856/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 2857
2858static unsigned int
ccfccd66 2859flags_from_mode (enum machine_mode mode)
9d2f1b03 2860{
ccfccd66 2861 switch (mode)
9d2f1b03 2862 {
ccfccd66 2863 case CC_ZSmode:
2864 return CC_FLAG_S | CC_FLAG_Z;
2865 case CC_ZSOmode:
2866 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2867 case CC_ZSCmode:
2868 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2869 case CCmode:
2870 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2871 case CC_Fmode:
2872 return CC_FLAG_FP;
2873 default:
2874 gcc_unreachable ();
2875 }
2876}
9d2f1b03 2877
ccfccd66 2878/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 2879
ccfccd66 2880static enum machine_mode
2881mode_from_flags (unsigned int f)
2882{
2883 if (f & CC_FLAG_FP)
2884 return CC_Fmode;
2885 if (f & CC_FLAG_O)
2886 {
2887 if (f & CC_FLAG_C)
2888 return CCmode;
2889 else
2890 return CC_ZSOmode;
9d2f1b03 2891 }
ccfccd66 2892 else if (f & CC_FLAG_C)
2893 return CC_ZSCmode;
2894 else
2895 return CC_ZSmode;
9d2f1b03 2896}
2897
ccfccd66 2898/* Convert an RTX_CODE to the set of flags needed to implement it.
2899 This assumes an integer comparison. */
2900
9d2f1b03 2901static unsigned int
ccfccd66 2902flags_from_code (enum rtx_code code)
9d2f1b03 2903{
ccfccd66 2904 switch (code)
9d2f1b03 2905 {
ccfccd66 2906 case LT:
2907 case GE:
24ad6c43 2908 return CC_FLAG_S;
ccfccd66 2909 case GT:
2910 case LE:
2911 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2912 case GEU:
2913 case LTU:
2914 return CC_FLAG_C;
2915 case GTU:
2916 case LEU:
2917 return CC_FLAG_C | CC_FLAG_Z;
2918 case EQ:
2919 case NE:
2920 return CC_FLAG_Z;
2921 default:
2922 gcc_unreachable ();
9d2f1b03 2923 }
2924}
2925
ccfccd66 2926/* Return a CC_MODE of which both M1 and M2 are subsets. */
2927
2928static enum machine_mode
2929rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
9d2f1b03 2930{
ccfccd66 2931 unsigned f;
2932
2933 /* Early out for identical modes. */
2934 if (m1 == m2)
2935 return m1;
2936
2937 /* There's no valid combination for FP vs non-FP. */
2938 f = flags_from_mode (m1) | flags_from_mode (m2);
2939 if (f & CC_FLAG_FP)
2940 return VOIDmode;
2941
2942 /* Otherwise, see what mode can implement all the flags. */
2943 return mode_from_flags (f);
9d2f1b03 2944}
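
/* A worked example of the flag machinery above: a signed (GT x 0)
   comparison needs the S, O and Z flags, so flags_from_code (GT)
   returns CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z and mode_from_flags maps
   that to CC_ZSOmode.  Similarly, rx_cc_modes_compatible (CC_ZSmode,
   CC_ZSCmode) yields CC_ZSCmode, while combining CC_Fmode with any
   integer CC mode yields VOIDmode.  */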
8b8777b9 2945
2946/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2947
2948enum machine_mode
24ad6c43 2949rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
8b8777b9 2950{
2951 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2952 return CC_Fmode;
2953
24ad6c43 2954 if (y != const0_rtx)
2955 return CCmode;
2956
ccfccd66 2957 return mode_from_flags (flags_from_code (cmp_code));
2958}
2959
ccfccd66 2960/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
2961 CC_MODE, and use that in branches based on that compare. */
2962
2963void
2964rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2965 rtx c1, rtx c2, rtx label)
2966{
2967 rtx flags, x;
2968
2969 flags = gen_rtx_REG (cc_mode, CC_REG);
2970 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2971 x = gen_rtx_SET (VOIDmode, flags, x);
2972 emit_insn (x);
2973
2974 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2975 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2976 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2977 emit_jump_insn (x);
8b8777b9 2978}
2979
fc3b02a9 2980/* A helper function for matching parallels that set the flags. */
2981
2982bool
2983rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2984{
2985 rtx op1, flags;
2986 enum machine_mode flags_mode;
2987
2988 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2989
2990 op1 = XVECEXP (PATTERN (insn), 0, 1);
2991 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2992
2993 flags = SET_DEST (op1);
2994 flags_mode = GET_MODE (flags);
2995
2996 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2997 return false;
2998 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2999 return false;
3000
3001 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3002 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3003 return false;
3004
3005 return true;
3006}
9f9a3b39 3007\f
3008int
001afa63 3009rx_align_for_label (rtx lab, int uses_threshold)
9f9a3b39 3010{
001afa63 3011 /* This is a simple heuristic to guess when an alignment would not be useful
3012 because the delay due to the inserted NOPs would be greater than the delay
3013 due to the misaligned branch. If uses_threshold is zero then the alignment
3014 is always useful. */
f7fcec1a 3015 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
001afa63 3016 return 0;
3017
9f9a3b39 3018 return optimize_size ? 1 : 3;
3019}
3020
3021static int
3022rx_max_skip_for_label (rtx lab)
3023{
3024 int opsize;
3025 rtx op;
3026
3027 if (lab == NULL_RTX)
3028 return 0;
fc3b02a9 3029
9f9a3b39 3030 op = lab;
3031 do
3032 {
3033 op = next_nonnote_nondebug_insn (op);
3034 }
3035 while (op && (LABEL_P (op)
3036 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3037 if (!op)
3038 return 0;
3039
3040 opsize = get_attr_length (op);
3041 if (opsize >= 0 && opsize < 8)
3042 return opsize - 1;
3043 return 0;
3044}
776f1390 3045
3046/* Compute the real length of the extending load-and-op instructions. */
3047
3048int
3049rx_adjust_insn_length (rtx insn, int current_length)
3050{
3051 rtx extend, mem, offset;
3052 bool zero;
3053 int factor;
3054
3055 switch (INSN_CODE (insn))
3056 {
3057 default:
3058 return current_length;
3059
3060 case CODE_FOR_plussi3_zero_extendhi:
3061 case CODE_FOR_andsi3_zero_extendhi:
3062 case CODE_FOR_iorsi3_zero_extendhi:
3063 case CODE_FOR_xorsi3_zero_extendhi:
3064 case CODE_FOR_divsi3_zero_extendhi:
3065 case CODE_FOR_udivsi3_zero_extendhi:
3066 case CODE_FOR_minussi3_zero_extendhi:
3067 case CODE_FOR_smaxsi3_zero_extendhi:
3068 case CODE_FOR_sminsi3_zero_extendhi:
3069 case CODE_FOR_multsi3_zero_extendhi:
f7fcec1a 3070 case CODE_FOR_comparesi3_zero_extendhi:
776f1390 3071 zero = true;
3072 factor = 2;
3073 break;
3074
3075 case CODE_FOR_plussi3_sign_extendhi:
3076 case CODE_FOR_andsi3_sign_extendhi:
3077 case CODE_FOR_iorsi3_sign_extendhi:
3078 case CODE_FOR_xorsi3_sign_extendhi:
3079 case CODE_FOR_divsi3_sign_extendhi:
3080 case CODE_FOR_udivsi3_sign_extendhi:
3081 case CODE_FOR_minussi3_sign_extendhi:
3082 case CODE_FOR_smaxsi3_sign_extendhi:
3083 case CODE_FOR_sminsi3_sign_extendhi:
3084 case CODE_FOR_multsi3_sign_extendhi:
f7fcec1a 3085 case CODE_FOR_comparesi3_sign_extendhi:
776f1390 3086 zero = false;
3087 factor = 2;
3088 break;
3089
3090 case CODE_FOR_plussi3_zero_extendqi:
3091 case CODE_FOR_andsi3_zero_extendqi:
3092 case CODE_FOR_iorsi3_zero_extendqi:
3093 case CODE_FOR_xorsi3_zero_extendqi:
3094 case CODE_FOR_divsi3_zero_extendqi:
3095 case CODE_FOR_udivsi3_zero_extendqi:
3096 case CODE_FOR_minussi3_zero_extendqi:
3097 case CODE_FOR_smaxsi3_zero_extendqi:
3098 case CODE_FOR_sminsi3_zero_extendqi:
3099 case CODE_FOR_multsi3_zero_extendqi:
f7fcec1a 3100 case CODE_FOR_comparesi3_zero_extendqi:
776f1390 3101 zero = true;
3102 factor = 1;
3103 break;
3104
3105 case CODE_FOR_plussi3_sign_extendqi:
3106 case CODE_FOR_andsi3_sign_extendqi:
3107 case CODE_FOR_iorsi3_sign_extendqi:
3108 case CODE_FOR_xorsi3_sign_extendqi:
3109 case CODE_FOR_divsi3_sign_extendqi:
3110 case CODE_FOR_udivsi3_sign_extendqi:
3111 case CODE_FOR_minussi3_sign_extendqi:
3112 case CODE_FOR_smaxsi3_sign_extendqi:
3113 case CODE_FOR_sminsi3_sign_extendqi:
3114 case CODE_FOR_multsi3_sign_extendqi:
f7fcec1a 3115 case CODE_FOR_comparesi3_sign_extendqi:
776f1390 3116 zero = false;
3117 factor = 1;
3118 break;
3119 }
3120
3121 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3122 extend = single_set (insn);
3123 gcc_assert (extend != NULL_RTX);
3124
3125 extend = SET_SRC (extend);
3126 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3127 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3128 extend = XEXP (extend, 0);
3129 else
3130 extend = XEXP (extend, 1);
3131
3132 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3133 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3134
3135 mem = XEXP (extend, 0);
3136 gcc_checking_assert (MEM_P (mem));
3137 if (REG_P (XEXP (mem, 0)))
3138 return (zero && factor == 1) ? 2 : 3;
3139
3140 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3141 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3142 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3143
3144 offset = XEXP (XEXP (mem, 0), 1);
3145 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3146
3147 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3148 return (zero && factor == 1) ? 3 : 4;
3149
3150 return (zero && factor == 1) ? 4 : 5;
3151}
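
/* A worked example of the length adjustment above: for

     (set (reg:SI r1)
          (plus:SI (reg:SI r1) (zero_extend:SI (mem:QI (reg:SI r2)))))

   the memory address is a plain register and the extension is a QImode
   zero extend (factor == 1), so the real length is 2 bytes.  If the
   address were (plus (reg) (const_int 300)) the offset would no longer
   fit in an 8-bit displacement and the length would become 4.  */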
9d2f1b03 3152\f
9f9a3b39 3153#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3154#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3155#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3156#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3157#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3158#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3159#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3160#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3161
24833e1a 3162#undef TARGET_FUNCTION_VALUE
3163#define TARGET_FUNCTION_VALUE rx_function_value
3164
3165#undef TARGET_RETURN_IN_MSB
3166#define TARGET_RETURN_IN_MSB rx_return_in_msb
3167
3168#undef TARGET_IN_SMALL_DATA_P
3169#define TARGET_IN_SMALL_DATA_P rx_in_small_data
3170
3171#undef TARGET_RETURN_IN_MEMORY
3172#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3173
3174#undef TARGET_HAVE_SRODATA_SECTION
3175#define TARGET_HAVE_SRODATA_SECTION true
3176
3177#undef TARGET_ASM_SELECT_RTX_SECTION
3178#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3179
3180#undef TARGET_ASM_SELECT_SECTION
3181#define TARGET_ASM_SELECT_SECTION rx_select_section
3182
3183#undef TARGET_INIT_BUILTINS
3184#define TARGET_INIT_BUILTINS rx_init_builtins
3185
103700c7 3186#undef TARGET_BUILTIN_DECL
3187#define TARGET_BUILTIN_DECL rx_builtin_decl
3188
24833e1a 3189#undef TARGET_EXPAND_BUILTIN
3190#define TARGET_EXPAND_BUILTIN rx_expand_builtin
3191
3192#undef TARGET_ASM_CONSTRUCTOR
3193#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3194
3195#undef TARGET_ASM_DESTRUCTOR
3196#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3197
3198#undef TARGET_STRUCT_VALUE_RTX
3199#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3200
3201#undef TARGET_ATTRIBUTE_TABLE
3202#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3203
3204#undef TARGET_ASM_FILE_START
3205#define TARGET_ASM_FILE_START rx_file_start
3206
3207#undef TARGET_MS_BITFIELD_LAYOUT_P
3208#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3209
3210#undef TARGET_LEGITIMATE_ADDRESS_P
3211#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3212
5afe50d9 3213#undef TARGET_MODE_DEPENDENT_ADDRESS_P
3214#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3215
24833e1a 3216#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3217#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3218
3219#undef TARGET_ASM_FUNCTION_PROLOGUE
3220#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3221
3222#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3223#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3224
61fc50a0 3225#undef TARGET_FUNCTION_OK_FOR_SIBCALL
3226#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3227
ee4e8428 3228#undef TARGET_FUNCTION_ARG
3229#define TARGET_FUNCTION_ARG rx_function_arg
3230
3231#undef TARGET_FUNCTION_ARG_ADVANCE
3232#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3233
bd99ba64 3234#undef TARGET_FUNCTION_ARG_BOUNDARY
3235#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3236
24833e1a 3237#undef TARGET_SET_CURRENT_FUNCTION
3238#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3239
24833e1a 3240#undef TARGET_ASM_INTEGER
3241#define TARGET_ASM_INTEGER rx_assemble_integer
3242
3243#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3244#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3245
3246#undef TARGET_MAX_ANCHOR_OFFSET
3247#define TARGET_MAX_ANCHOR_OFFSET 32
3248
3249#undef TARGET_ADDRESS_COST
3250#define TARGET_ADDRESS_COST rx_address_cost
3251
3252#undef TARGET_CAN_ELIMINATE
3253#define TARGET_CAN_ELIMINATE rx_can_eliminate
3254
b2d7ede1 3255#undef TARGET_CONDITIONAL_REGISTER_USAGE
3256#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3257
24833e1a 3258#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3259#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3260
3261#undef TARGET_TRAMPOLINE_INIT
3262#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3263
6bb30542 3264#undef TARGET_PRINT_OPERAND
3265#define TARGET_PRINT_OPERAND rx_print_operand
3266
3267#undef TARGET_PRINT_OPERAND_ADDRESS
3268#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3269
9d2f1b03 3270#undef TARGET_CC_MODES_COMPATIBLE
3271#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3272
3273#undef TARGET_MEMORY_MOVE_COST
3274#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3275
1af17d44 3276#undef TARGET_OPTION_OVERRIDE
3277#define TARGET_OPTION_OVERRIDE rx_option_override
3278
bd7d2835 3279#undef TARGET_PROMOTE_FUNCTION_MODE
3280#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3281
42d89991 3282#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3283#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
02e53c17 3284
77de4b78 3285#undef TARGET_FLAGS_REGNUM
3286#define TARGET_FLAGS_REGNUM CC_REG
3287
ca316360 3288#undef TARGET_LEGITIMATE_CONSTANT_P
f7fcec1a 3289#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
ca316360 3290
6e507301 3291#undef TARGET_LEGITIMIZE_ADDRESS
3292#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3293
08c6cbd2 3294#undef TARGET_WARN_FUNC_RETURN
3295#define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3296
24833e1a 3297struct gcc_target targetm = TARGET_INITIALIZER;
3298
103700c7 3299#include "gt-rx.h"