24833e1a 1/* Subroutines used for code generation on Renesas RX processors.
95272799 2 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
24833e1a 3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "tree.h"
30#include "rtl.h"
31#include "regs.h"
32#include "hard-reg-set.h"
24833e1a 33#include "insn-config.h"
34#include "conditions.h"
35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
38#include "function.h"
39#include "expr.h"
40#include "optabs.h"
41#include "libfuncs.h"
42#include "recog.h"
0b205f4c 43#include "diagnostic-core.h"
24833e1a 44#include "toplev.h"
45#include "reload.h"
46#include "df.h"
47#include "ggc.h"
48#include "tm_p.h"
49#include "debug.h"
50#include "target.h"
51#include "target-def.h"
52#include "langhooks.h"
fba5dd52 53#include "opts.h"
6e507301 54
55static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
56static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
57static unsigned int rx_num_interrupt_regs;
24833e1a 58\f
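/* Return the register number used as the base register for small data area
   (GP relative) addressing.  The value is established by
   rx_conditional_register_usage; asking for it before then is a bug.  */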
6e507301 59static unsigned int
60rx_gp_base_regnum (void)
61{
62 if (rx_gp_base_regnum_val == INVALID_REGNUM)
63 gcc_unreachable ();
64 return rx_gp_base_regnum_val;
65}
66
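/* Likewise for the base register used for PID (position independent data)
   addressing.  */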
67static unsigned int
68rx_pid_base_regnum (void)
69{
70 if (rx_pid_base_regnum_val == INVALID_REGNUM)
71 gcc_unreachable ();
72 return rx_pid_base_regnum_val;
73}
74
75/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
76
77static tree
78rx_decl_for_addr (rtx op)
79{
80 if (GET_CODE (op) == MEM)
81 op = XEXP (op, 0);
82 if (GET_CODE (op) == CONST)
83 op = XEXP (op, 0);
84 while (GET_CODE (op) == PLUS)
85 op = XEXP (op, 0);
86 if (GET_CODE (op) == SYMBOL_REF)
87 return SYMBOL_REF_DECL (op);
88 return NULL_TREE;
89}
90
6bb30542 91static void rx_print_operand (FILE *, rtx, int);
92
ccfccd66 93#define CC_FLAG_S (1 << 0)
94#define CC_FLAG_Z (1 << 1)
95#define CC_FLAG_O (1 << 2)
96#define CC_FLAG_C (1 << 3)
f7fcec1a 97#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
ccfccd66 98
99static unsigned int flags_from_mode (enum machine_mode mode);
100static unsigned int flags_from_code (enum rtx_code code);
67e66e16 101\f
6e507301 102/* Classify OP's reference to the PID data area - see enum pid_type below. */
103
104enum pid_type
105{
106 PID_NOT_PID = 0, /* The object is not in the PID data area. */
107 PID_ENCODED, /* The object is in the PID data area. */
108 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
109};
110
111static enum pid_type
112rx_pid_data_operand (rtx op)
113{
114 tree op_decl;
115
116 if (!TARGET_PID)
117 return PID_NOT_PID;
118
119 if (GET_CODE (op) == PLUS
120 && GET_CODE (XEXP (op, 0)) == REG
121 && GET_CODE (XEXP (op, 1)) == CONST
122 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
123 return PID_ENCODED;
124
125 op_decl = rx_decl_for_addr (op);
126
127 if (op_decl)
128 {
129 if (TREE_READONLY (op_decl))
130 return PID_UNENCODED;
131 }
132 else
133 {
134 /* Sigh, some special cases. */
135 if (GET_CODE (op) == SYMBOL_REF
136 || GET_CODE (op) == LABEL_REF)
137 return PID_UNENCODED;
138 }
139
140 return PID_NOT_PID;
141}
142
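/* Fix up address X so that it becomes a legitimate address: references to
   objects destined for the PID data area are rewritten to be relative to the
   PID base register, and addresses of the form (REG + ...) + REG are forced
   into a register.  */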
143static rtx
144rx_legitimize_address (rtx x,
145 rtx oldx ATTRIBUTE_UNUSED,
146 enum machine_mode mode ATTRIBUTE_UNUSED)
147{
148 if (rx_pid_data_operand (x) == PID_UNENCODED)
149 {
150 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
151 return rv;
152 }
153
154 if (GET_CODE (x) == PLUS
155 && GET_CODE (XEXP (x, 0)) == PLUS
156 && REG_P (XEXP (XEXP (x, 0), 0))
157 && REG_P (XEXP (x, 1)))
158 return force_reg (SImode, x);
159
160 return x;
161}
162
24833e1a 163/* Return true if OP is a reference to an object in a small data area. */
164
165static bool
166rx_small_data_operand (rtx op)
167{
168 if (rx_small_data_limit == 0)
169 return false;
170
171 if (GET_CODE (op) == SYMBOL_REF)
172 return SYMBOL_REF_SMALL_P (op);
173
174 return false;
175}
176
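/* Return true if X is a valid address for a memory access of MODE, i.e. one
   of the RX addressing modes: register indirect, pre-decrement/post-increment,
   register relative, or (un)scaled indexed register indirect.  */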
177static bool
4bccad5e 178rx_is_legitimate_address (enum machine_mode mode, rtx x,
179 bool strict ATTRIBUTE_UNUSED)
24833e1a 180{
181 if (RTX_OK_FOR_BASE (x, strict))
182 /* Register Indirect. */
183 return true;
184
f7fcec1a 185 if ((GET_MODE_SIZE (mode) == 4
186 || GET_MODE_SIZE (mode) == 2
187 || GET_MODE_SIZE (mode) == 1)
24833e1a 188 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
189 /* Pre-decrement Register Indirect or
190 Post-increment Register Indirect. */
191 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
192
6e507301 193 switch (rx_pid_data_operand (x))
194 {
195 case PID_UNENCODED:
196 return false;
197 case PID_ENCODED:
198 return true;
199 default:
200 break;
201 }
202
24833e1a 203 if (GET_CODE (x) == PLUS)
204 {
205 rtx arg1 = XEXP (x, 0);
206 rtx arg2 = XEXP (x, 1);
207 rtx index = NULL_RTX;
208
209 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
210 index = arg2;
211 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
212 index = arg1;
213 else
214 return false;
215
216 switch (GET_CODE (index))
217 {
218 case CONST_INT:
219 {
220 /* Register Relative: REG + INT.
221 Only positive, mode-aligned, mode-sized
222 displacements are allowed. */
223 HOST_WIDE_INT val = INTVAL (index);
224 int factor;
225
226 if (val < 0)
227 return false;
776f1390 228
24833e1a 229 switch (GET_MODE_SIZE (mode))
230 {
231 default:
232 case 4: factor = 4; break;
233 case 2: factor = 2; break;
234 case 1: factor = 1; break;
235 }
236
f7fcec1a 237 if (val > (65535 * factor))
24833e1a 238 return false;
239 return (val % factor) == 0;
240 }
241
242 case REG:
243 /* Unscaled Indexed Register Indirect: REG + REG
244 Size has to be "QI", REG has to be valid. */
245 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
246
247 case MULT:
248 {
249 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
250 Factor has to equal the mode size, REG has to be valid. */
251 rtx factor;
252
253 factor = XEXP (index, 1);
254 index = XEXP (index, 0);
255
256 return REG_P (index)
257 && RTX_OK_FOR_BASE (index, strict)
258 && CONST_INT_P (factor)
259 && GET_MODE_SIZE (mode) == INTVAL (factor);
260 }
261
262 default:
263 return false;
264 }
265 }
266
267 /* Small data area accesses turn into register relative offsets. */
268 return rx_small_data_operand (x);
269}
270
 271/* Returns TRUE for simple memory addresses, i.e. ones
 272 that do not involve indexed register indirect addressing
 273 or pre/post increment/decrement. */
274
275bool
276rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
277{
24833e1a 278 if (! rx_is_legitimate_address
279 (mode, mem, reload_in_progress || reload_completed))
280 return false;
281
282 switch (GET_CODE (mem))
283 {
284 case REG:
285 /* Simple memory addresses are OK. */
286 return true;
287
288 case PRE_DEC:
289 case POST_INC:
290 return false;
291
292 case PLUS:
776f1390 293 {
294 rtx base, index;
295
296 /* Only allow REG+INT addressing. */
297 base = XEXP (mem, 0);
298 index = XEXP (mem, 1);
24833e1a 299
776f1390 300 if (! RX_REG_P (base) || ! CONST_INT_P (index))
301 return false;
302
303 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
304 }
24833e1a 305
306 case SYMBOL_REF:
307 /* Can happen when small data is being supported.
308 Assume that it will be resolved into GP+INT. */
309 return true;
310
311 default:
312 gcc_unreachable ();
313 }
314}
315
5afe50d9 316/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
317
318static bool
319rx_mode_dependent_address_p (const_rtx addr)
24833e1a 320{
321 if (GET_CODE (addr) == CONST)
322 addr = XEXP (addr, 0);
323
324 switch (GET_CODE (addr))
325 {
326 /* --REG and REG++ only work in SImode. */
327 case PRE_DEC:
328 case POST_INC:
329 return true;
330
331 case MINUS:
332 case PLUS:
333 if (! REG_P (XEXP (addr, 0)))
334 return true;
335
336 addr = XEXP (addr, 1);
337
338 switch (GET_CODE (addr))
339 {
340 case REG:
341 /* REG+REG only works in SImode. */
342 return true;
343
344 case CONST_INT:
345 /* REG+INT is only mode independent if INT is a
 346 multiple of 4, positive and will fit into 8 bits.
347 if (((INTVAL (addr) & 3) == 0)
348 && IN_RANGE (INTVAL (addr), 4, 252))
349 return false;
350 return true;
351
352 case SYMBOL_REF:
353 case LABEL_REF:
354 return true;
355
356 case MULT:
357 gcc_assert (REG_P (XEXP (addr, 0)));
358 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
359 /* REG+REG*SCALE is always mode dependent. */
360 return true;
361
362 default:
363 /* Not recognized, so treat as mode dependent. */
364 return true;
365 }
366
367 case CONST_INT:
368 case SYMBOL_REF:
369 case LABEL_REF:
370 case REG:
371 /* These are all mode independent. */
372 return false;
373
374 default:
375 /* Everything else is unrecognized,
376 so treat as mode dependent. */
377 return true;
378 }
379}
380\f
24833e1a 381/* A C compound statement to output to stdio stream FILE the
382 assembler syntax for an instruction operand that is a memory
383 reference whose address is ADDR. */
384
6bb30542 385static void
24833e1a 386rx_print_operand_address (FILE * file, rtx addr)
387{
388 switch (GET_CODE (addr))
389 {
390 case REG:
391 fprintf (file, "[");
392 rx_print_operand (file, addr, 0);
393 fprintf (file, "]");
394 break;
395
396 case PRE_DEC:
397 fprintf (file, "[-");
398 rx_print_operand (file, XEXP (addr, 0), 0);
399 fprintf (file, "]");
400 break;
401
402 case POST_INC:
403 fprintf (file, "[");
404 rx_print_operand (file, XEXP (addr, 0), 0);
405 fprintf (file, "+]");
406 break;
407
408 case PLUS:
409 {
410 rtx arg1 = XEXP (addr, 0);
411 rtx arg2 = XEXP (addr, 1);
412 rtx base, index;
413
414 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
415 base = arg1, index = arg2;
416 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
417 base = arg2, index = arg1;
418 else
419 {
420 rx_print_operand (file, arg1, 0);
421 fprintf (file, " + ");
422 rx_print_operand (file, arg2, 0);
423 break;
424 }
425
426 if (REG_P (index) || GET_CODE (index) == MULT)
427 {
428 fprintf (file, "[");
429 rx_print_operand (file, index, 'A');
430 fprintf (file, ",");
431 }
432 else /* GET_CODE (index) == CONST_INT */
433 {
434 rx_print_operand (file, index, 'A');
435 fprintf (file, "[");
436 }
437 rx_print_operand (file, base, 0);
438 fprintf (file, "]");
439 break;
440 }
441
95272799 442 case CONST:
443 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
444 {
445 addr = XEXP (addr, 0);
446 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
6e507301 447
448 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
449 case UNSPEC:
450 addr = XVECEXP (addr, 0, 0);
95272799 451 gcc_assert (CONST_INT_P (addr));
452 }
453 /* Fall through. */
24833e1a 454 case LABEL_REF:
455 case SYMBOL_REF:
24833e1a 456 fprintf (file, "#");
6e507301 457 /* Fall through. */
24833e1a 458 default:
459 output_addr_const (file, addr);
460 break;
461 }
462}
463
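/* Output the integer constant VAL to FILE.  Small values (within -64..64)
   are printed in decimal; anything larger is printed in hexadecimal, using
   the AS100 form when that syntax has been selected.  */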
464static void
465rx_print_integer (FILE * file, HOST_WIDE_INT val)
466{
467 if (IN_RANGE (val, -64, 64))
468 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
469 else
470 fprintf (file,
471 TARGET_AS100_SYNTAX
472 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
473 val);
474}
475
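/* Emit assembler code for the integer constant X of the given SIZE, using
   rx_print_integer for CONST_INTs and deferring to the default handler for
   anything else.  */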
476static bool
477rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
478{
479 const char * op = integer_asm_op (size, is_aligned);
480
481 if (! CONST_INT_P (x))
482 return default_assemble_integer (x, size, is_aligned);
483
484 if (op == NULL)
485 return false;
486 fputs (op, asm_out_file);
487
488 rx_print_integer (asm_out_file, INTVAL (x));
489 fputc ('\n', asm_out_file);
490 return true;
491}
492
493
24833e1a 494/* Handles the insertion of a single operand into the assembler output.
495 The %<letter> directives supported are:
496
497 %A Print an operand without a leading # character.
498 %B Print an integer comparison name.
499 %C Print a control register name.
500 %F Print a condition code flag name.
6e507301 501 %G Register used for small-data-area addressing
24833e1a 502 %H Print high part of a DImode register, integer or address.
503 %L Print low part of a DImode register, integer or address.
6bb30542 504 %N Print the negation of the immediate value.
6e507301 505 %P Register used for PID addressing
24833e1a 506 %Q If the operand is a MEM, then correctly generate
776f1390 507 register indirect or register relative addressing.
508 %R Like %Q but for zero-extending loads. */
24833e1a 509
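/* For example, applying the %Q directive to the operand
   (mem:SI (plus:SI (reg:SI 1) (const_int 8))) prints "8[r1].L":
   the displacement, the register indirect base and a size suffix.  */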
6bb30542 510static void
24833e1a 511rx_print_operand (FILE * file, rtx op, int letter)
512{
776f1390 513 bool unsigned_load = false;
6e507301 514 bool print_hash = true;
515
516 if (letter == 'A'
517 && ((GET_CODE (op) == CONST
518 && GET_CODE (XEXP (op, 0)) == UNSPEC)
519 || GET_CODE (op) == UNSPEC))
520 {
521 print_hash = false;
522 letter = 0;
523 }
776f1390 524
24833e1a 525 switch (letter)
526 {
527 case 'A':
528 /* Print an operand without a leading #. */
529 if (MEM_P (op))
530 op = XEXP (op, 0);
531
532 switch (GET_CODE (op))
533 {
534 case LABEL_REF:
535 case SYMBOL_REF:
536 output_addr_const (file, op);
537 break;
538 case CONST_INT:
539 fprintf (file, "%ld", (long) INTVAL (op));
540 break;
541 default:
542 rx_print_operand (file, op, 0);
543 break;
544 }
545 break;
546
547 case 'B':
ccfccd66 548 {
549 enum rtx_code code = GET_CODE (op);
550 enum machine_mode mode = GET_MODE (XEXP (op, 0));
551 const char *ret;
552
553 if (mode == CC_Fmode)
554 {
555 /* C flag is undefined, and O flag carries unordered. None of the
556 branch combinations that include O use it helpfully. */
557 switch (code)
558 {
559 case ORDERED:
560 ret = "no";
561 break;
562 case UNORDERED:
563 ret = "o";
564 break;
565 case LT:
566 ret = "n";
567 break;
568 case GE:
569 ret = "pz";
570 break;
571 case EQ:
572 ret = "eq";
573 break;
574 case NE:
575 ret = "ne";
576 break;
577 default:
578 gcc_unreachable ();
579 }
580 }
581 else
582 {
24ad6c43 583 unsigned int flags = flags_from_mode (mode);
776f1390 584
ccfccd66 585 switch (code)
586 {
587 case LT:
24ad6c43 588 ret = (flags & CC_FLAG_O ? "lt" : "n");
ccfccd66 589 break;
590 case GE:
24ad6c43 591 ret = (flags & CC_FLAG_O ? "ge" : "pz");
ccfccd66 592 break;
593 case GT:
594 ret = "gt";
595 break;
596 case LE:
597 ret = "le";
598 break;
599 case GEU:
600 ret = "geu";
601 break;
602 case LTU:
603 ret = "ltu";
604 break;
605 case GTU:
606 ret = "gtu";
607 break;
608 case LEU:
609 ret = "leu";
610 break;
611 case EQ:
612 ret = "eq";
613 break;
614 case NE:
615 ret = "ne";
616 break;
617 default:
618 gcc_unreachable ();
619 }
24ad6c43 620 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
ccfccd66 621 }
622 fputs (ret, file);
623 break;
624 }
24833e1a 625
626 case 'C':
627 gcc_assert (CONST_INT_P (op));
628 switch (INTVAL (op))
629 {
630 case 0: fprintf (file, "psw"); break;
631 case 2: fprintf (file, "usp"); break;
632 case 3: fprintf (file, "fpsw"); break;
633 case 4: fprintf (file, "cpen"); break;
634 case 8: fprintf (file, "bpsw"); break;
635 case 9: fprintf (file, "bpc"); break;
636 case 0xa: fprintf (file, "isp"); break;
637 case 0xb: fprintf (file, "fintv"); break;
638 case 0xc: fprintf (file, "intb"); break;
639 default:
98a5f45d 640 warning (0, "unrecognized control register number: %d - using 'psw'",
6bb30542 641 (int) INTVAL (op));
98cb9b5b 642 fprintf (file, "psw");
643 break;
24833e1a 644 }
645 break;
646
647 case 'F':
648 gcc_assert (CONST_INT_P (op));
649 switch (INTVAL (op))
650 {
651 case 0: case 'c': case 'C': fprintf (file, "C"); break;
652 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
653 case 2: case 's': case 'S': fprintf (file, "S"); break;
654 case 3: case 'o': case 'O': fprintf (file, "O"); break;
655 case 8: case 'i': case 'I': fprintf (file, "I"); break;
656 case 9: case 'u': case 'U': fprintf (file, "U"); break;
657 default:
658 gcc_unreachable ();
659 }
660 break;
661
6e507301 662 case 'G':
663 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
664 break;
665
24833e1a 666 case 'H':
6bb30542 667 switch (GET_CODE (op))
24833e1a 668 {
6bb30542 669 case REG:
670 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
671 break;
672 case CONST_INT:
673 {
674 HOST_WIDE_INT v = INTVAL (op);
67e66e16 675
6bb30542 676 fprintf (file, "#");
677 /* Trickery to avoid problems with shifting 32 bits at a time. */
678 v = v >> 16;
679 v = v >> 16;
680 rx_print_integer (file, v);
681 break;
682 }
683 case CONST_DOUBLE:
24833e1a 684 fprintf (file, "#");
6bb30542 685 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
686 break;
687 case MEM:
24833e1a 688 if (! WORDS_BIG_ENDIAN)
689 op = adjust_address (op, SImode, 4);
690 output_address (XEXP (op, 0));
6bb30542 691 break;
692 default:
693 gcc_unreachable ();
24833e1a 694 }
695 break;
696
697 case 'L':
6bb30542 698 switch (GET_CODE (op))
24833e1a 699 {
6bb30542 700 case REG:
701 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
702 break;
703 case CONST_INT:
24833e1a 704 fprintf (file, "#");
705 rx_print_integer (file, INTVAL (op) & 0xffffffff);
6bb30542 706 break;
707 case CONST_DOUBLE:
708 fprintf (file, "#");
709 rx_print_integer (file, CONST_DOUBLE_LOW (op));
710 break;
711 case MEM:
24833e1a 712 if (WORDS_BIG_ENDIAN)
713 op = adjust_address (op, SImode, 4);
714 output_address (XEXP (op, 0));
6bb30542 715 break;
716 default:
717 gcc_unreachable ();
24833e1a 718 }
719 break;
720
39349585 721 case 'N':
722 gcc_assert (CONST_INT_P (op));
723 fprintf (file, "#");
724 rx_print_integer (file, - INTVAL (op));
725 break;
726
6e507301 727 case 'P':
728 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
729 break;
730
776f1390 731 case 'R':
732 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
733 unsigned_load = true;
734 /* Fall through. */
24833e1a 735 case 'Q':
736 if (MEM_P (op))
737 {
738 HOST_WIDE_INT offset;
776f1390 739 rtx mem = op;
24833e1a 740
741 op = XEXP (op, 0);
742
743 if (REG_P (op))
744 offset = 0;
745 else if (GET_CODE (op) == PLUS)
746 {
747 rtx displacement;
748
749 if (REG_P (XEXP (op, 0)))
750 {
751 displacement = XEXP (op, 1);
752 op = XEXP (op, 0);
753 }
754 else
755 {
756 displacement = XEXP (op, 0);
757 op = XEXP (op, 1);
758 gcc_assert (REG_P (op));
759 }
760
761 gcc_assert (CONST_INT_P (displacement));
762 offset = INTVAL (displacement);
763 gcc_assert (offset >= 0);
764
765 fprintf (file, "%ld", offset);
766 }
767 else
768 gcc_unreachable ();
769
770 fprintf (file, "[");
771 rx_print_operand (file, op, 0);
772 fprintf (file, "].");
773
776f1390 774 switch (GET_MODE_SIZE (GET_MODE (mem)))
24833e1a 775 {
776 case 1:
776f1390 777 gcc_assert (offset <= 65535 * 1);
778 fprintf (file, unsigned_load ? "UB" : "B");
24833e1a 779 break;
780 case 2:
781 gcc_assert (offset % 2 == 0);
776f1390 782 gcc_assert (offset <= 65535 * 2);
783 fprintf (file, unsigned_load ? "UW" : "W");
24833e1a 784 break;
776f1390 785 case 4:
24833e1a 786 gcc_assert (offset % 4 == 0);
776f1390 787 gcc_assert (offset <= 65535 * 4);
24833e1a 788 fprintf (file, "L");
789 break;
776f1390 790 default:
791 gcc_unreachable ();
24833e1a 792 }
793 break;
794 }
795
796 /* Fall through. */
797
798 default:
6e507301 799 if (GET_CODE (op) == CONST
800 && GET_CODE (XEXP (op, 0)) == UNSPEC)
801 op = XEXP (op, 0);
802 else if (GET_CODE (op) == CONST
803 && GET_CODE (XEXP (op, 0)) == PLUS
804 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
805 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
806 {
807 if (print_hash)
808 fprintf (file, "#");
809 fprintf (file, "(");
810 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
811 fprintf (file, " + ");
812 output_addr_const (file, XEXP (XEXP (op, 0), 1));
813 fprintf (file, ")");
814 return;
815 }
816
24833e1a 817 switch (GET_CODE (op))
818 {
819 case MULT:
820 /* Should be the scaled part of an
821 indexed register indirect address. */
822 {
823 rtx base = XEXP (op, 0);
824 rtx index = XEXP (op, 1);
825
 826 /* Check for a swapped index register and scaling factor.
827 Not sure if this can happen, but be prepared to handle it. */
828 if (CONST_INT_P (base) && REG_P (index))
829 {
830 rtx tmp = base;
831 base = index;
832 index = tmp;
833 }
834
835 gcc_assert (REG_P (base));
836 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
837 gcc_assert (CONST_INT_P (index));
 838 /* Do not try to verify the value of the scale factor as it is based
 839 on the mode of the MEM, not the mode of the MULT (which
 840 will always be SImode). */
841 fprintf (file, "%s", reg_names [REGNO (base)]);
842 break;
843 }
844
845 case MEM:
846 output_address (XEXP (op, 0));
847 break;
848
849 case PLUS:
850 output_address (op);
851 break;
852
853 case REG:
854 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
855 fprintf (file, "%s", reg_names [REGNO (op)]);
856 break;
857
858 case SUBREG:
859 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
860 fprintf (file, "%s", reg_names [subreg_regno (op)]);
861 break;
862
863 /* This will only be single precision.... */
864 case CONST_DOUBLE:
865 {
866 unsigned long val;
867 REAL_VALUE_TYPE rv;
868
869 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
870 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
6e507301 871 if (print_hash)
872 fprintf (file, "#");
873 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
24833e1a 874 break;
875 }
876
877 case CONST_INT:
6e507301 878 if (print_hash)
879 fprintf (file, "#");
24833e1a 880 rx_print_integer (file, INTVAL (op));
881 break;
882
6e507301 883 case UNSPEC:
884 switch (XINT (op, 1))
885 {
886 case UNSPEC_PID_ADDR:
887 {
888 rtx sym, add;
889
890 if (print_hash)
891 fprintf (file, "#");
892 sym = XVECEXP (op, 0, 0);
893 add = NULL_RTX;
894 fprintf (file, "(");
895 if (GET_CODE (sym) == PLUS)
896 {
897 add = XEXP (sym, 1);
898 sym = XEXP (sym, 0);
899 }
900 output_addr_const (file, sym);
901 if (add != NULL_RTX)
902 {
903 fprintf (file, "+");
904 output_addr_const (file, add);
905 }
906 fprintf (file, "-__pid_base");
907 fprintf (file, ")");
908 return;
909 }
910 }
911 /* Fall through */
912
24833e1a 913 case CONST:
6e507301 914 case SYMBOL_REF:
24833e1a 915 case LABEL_REF:
916 case CODE_LABEL:
24833e1a 917 rx_print_operand_address (file, op);
918 break;
919
920 default:
921 gcc_unreachable ();
922 }
923 break;
924 }
925}
926
6e507301 927/* Maybe convert an operand into its PID format. */
928
929rtx
930rx_maybe_pidify_operand (rtx op, int copy_to_reg)
931{
932 if (rx_pid_data_operand (op) == PID_UNENCODED)
933 {
934 if (GET_CODE (op) == MEM)
935 {
936 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
937 op = replace_equiv_address (op, a);
938 }
939 else
940 {
941 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
942 }
943
944 if (copy_to_reg)
945 op = copy_to_mode_reg (GET_MODE (op), op);
946 }
947 return op;
948}
949
24833e1a 950/* Returns an assembler template for a move instruction. */
951
952char *
953rx_gen_move_template (rtx * operands, bool is_movu)
954{
6bb30542 955 static char out_template [64];
24833e1a 956 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
957 const char * src_template;
958 const char * dst_template;
959 rtx dest = operands[0];
960 rtx src = operands[1];
961
962 /* Decide which extension, if any, should be given to the move instruction. */
963 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
964 {
965 case QImode:
966 /* The .B extension is not valid when
967 loading an immediate into a register. */
968 if (! REG_P (dest) || ! CONST_INT_P (src))
969 extension = ".B";
970 break;
971 case HImode:
972 if (! REG_P (dest) || ! CONST_INT_P (src))
973 /* The .W extension is not valid when
974 loading an immediate into a register. */
975 extension = ".W";
976 break;
977 case SFmode:
978 case SImode:
979 extension = ".L";
980 break;
981 case VOIDmode:
982 /* This mode is used by constants. */
983 break;
984 default:
985 debug_rtx (src);
986 gcc_unreachable ();
987 }
988
6e507301 989 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
990 src_template = "(%A1-__pid_base)[%P1]";
991 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
992 src_template = "%%gp(%A1)[%G1]";
24833e1a 993 else
994 src_template = "%1";
995
996 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
6e507301 997 dst_template = "%%gp(%A0)[%G0]";
24833e1a 998 else
999 dst_template = "%0";
1000
6bb30542 1001 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
24833e1a 1002 extension, src_template, dst_template);
6bb30542 1003 return out_template;
24833e1a 1004}
24833e1a 1005\f
1006/* Return VALUE rounded up to the next ALIGNMENT boundary. */
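/* For example, rx_round_up (5, 4) and rx_round_up (8, 4) both yield 8.  */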
1007
1008static inline unsigned int
1009rx_round_up (unsigned int value, unsigned int alignment)
1010{
1011 alignment -= 1;
1012 return (value + alignment) & (~ alignment);
1013}
1014
1015/* Return the number of bytes in the argument registers
1016 occupied by an argument of type TYPE and mode MODE. */
1017
ee4e8428 1018static unsigned int
4bccad5e 1019rx_function_arg_size (enum machine_mode mode, const_tree type)
24833e1a 1020{
1021 unsigned int num_bytes;
1022
1023 num_bytes = (mode == BLKmode)
1024 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1025 return rx_round_up (num_bytes, UNITS_PER_WORD);
1026}
1027
1028#define NUM_ARG_REGS 4
1029#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1030
1031/* Return an RTL expression describing the register holding a function
1032 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1033 be passed on the stack. CUM describes the previous parameters to the
1034 function and NAMED is false if the parameter is part of a variable
1035 parameter list, or the last named parameter before the start of a
1036 variable parameter list. */
1037
ee4e8428 1038static rtx
39cba157 1039rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
4bccad5e 1040 const_tree type, bool named)
24833e1a 1041{
1042 unsigned int next_reg;
39cba157 1043 unsigned int bytes_so_far = *get_cumulative_args (cum);
24833e1a 1044 unsigned int size;
1045 unsigned int rounded_size;
1046
1047 /* An exploded version of rx_function_arg_size. */
1048 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
6bb30542 1049 /* If the size is not known it cannot be passed in registers. */
1050 if (size < 1)
1051 return NULL_RTX;
24833e1a 1052
1053 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1054
1055 /* Don't pass this arg via registers if there
1056 are insufficient registers to hold all of it. */
1057 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1058 return NULL_RTX;
1059
1060 /* Unnamed arguments and the last named argument in a
1061 variadic function are always passed on the stack. */
1062 if (!named)
1063 return NULL_RTX;
1064
1065 /* Structures must occupy an exact number of registers,
1066 otherwise they are passed on the stack. */
1067 if ((type == NULL || AGGREGATE_TYPE_P (type))
1068 && (size % UNITS_PER_WORD) != 0)
1069 return NULL_RTX;
1070
1071 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1072
1073 return gen_rtx_REG (mode, next_reg);
1074}
1075
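/* Advance the cumulative argument byte count pointed to by CUM past an
   argument of the given MODE and TYPE.  */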
ee4e8428 1076static void
39cba157 1077rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
4bccad5e 1078 const_tree type, bool named ATTRIBUTE_UNUSED)
ee4e8428 1079{
39cba157 1080 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
ee4e8428 1081}
1082
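/* All function arguments are passed with 32-bit alignment.  */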
bd99ba64 1083static unsigned int
4bccad5e 1084rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
bd99ba64 1085 const_tree type ATTRIBUTE_UNUSED)
1086{
1087 return 32;
1088}
1089
24833e1a 1090/* Return an RTL describing where a function return value of type RET_TYPE
1091 is held. */
1092
1093static rtx
1094rx_function_value (const_tree ret_type,
1095 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1096 bool outgoing ATTRIBUTE_UNUSED)
1097{
bd7d2835 1098 enum machine_mode mode = TYPE_MODE (ret_type);
1099
1100 /* RX ABI specifies that small integer types are
1101 promoted to int when returned by a function. */
02f06d23 1102 if (GET_MODE_SIZE (mode) > 0
1103 && GET_MODE_SIZE (mode) < 4
1104 && ! COMPLEX_MODE_P (mode)
1105 )
bd7d2835 1106 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1107
1108 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1109}
1110
1111/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1112 regard to function returns as does TARGET_FUNCTION_VALUE. */
1113
1114static enum machine_mode
1115rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1116 enum machine_mode mode,
0318c61a 1117 int * punsignedp ATTRIBUTE_UNUSED,
bd7d2835 1118 const_tree funtype ATTRIBUTE_UNUSED,
1119 int for_return)
1120{
1121 if (for_return != 1
1122 || GET_MODE_SIZE (mode) >= 4
02f06d23 1123 || COMPLEX_MODE_P (mode)
bd7d2835 1124 || GET_MODE_SIZE (mode) < 1)
1125 return mode;
1126
1127 return SImode;
24833e1a 1128}
1129
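/* Return true if a value of type TYPE should be returned in memory rather
   than in registers.  */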
1130static bool
1131rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1132{
1133 HOST_WIDE_INT size;
1134
1135 if (TYPE_MODE (type) != BLKmode
1136 && ! AGGREGATE_TYPE_P (type))
1137 return false;
1138
1139 size = int_size_in_bytes (type);
1140 /* Large structs and those whose size is not an
1141 exact multiple of 4 are returned in memory. */
1142 return size < 1
1143 || size > 16
1144 || (size % UNITS_PER_WORD) != 0;
1145}
1146
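/* Return the register used to pass the address of an aggregate return
   value.  */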
1147static rtx
1148rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1149 int incoming ATTRIBUTE_UNUSED)
1150{
1151 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1152}
1153
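/* Return true if VALTYPE should be returned at the most significant end of
   a register, which is the case for aggregates and complex values when the
   data is big-endian.  */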
1154static bool
1155rx_return_in_msb (const_tree valtype)
1156{
1157 return TARGET_BIG_ENDIAN_DATA
1158 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1159}
1160
1161/* Returns true if the provided function has the specified attribute. */
1162
1163static inline bool
1164has_func_attr (const_tree decl, const char * func_attr)
1165{
1166 if (decl == NULL_TREE)
1167 decl = current_function_decl;
1168
1169 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1170}
1171
67e66e16 1172/* Returns true if the provided function has the "fast_interrupt" attribute. */
24833e1a 1173
1174static inline bool
1175is_fast_interrupt_func (const_tree decl)
1176{
67e66e16 1177 return has_func_attr (decl, "fast_interrupt");
24833e1a 1178}
1179
67e66e16 1180/* Returns true if the provided function has the "interrupt" attribute. */
24833e1a 1181
1182static inline bool
67e66e16 1183is_interrupt_func (const_tree decl)
24833e1a 1184{
67e66e16 1185 return has_func_attr (decl, "interrupt");
24833e1a 1186}
1187
1188/* Returns true if the provided function has the "naked" attribute. */
1189
1190static inline bool
1191is_naked_func (const_tree decl)
1192{
1193 return has_func_attr (decl, "naked");
1194}
1195\f
1196static bool use_fixed_regs = false;
1197
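/* Adjust the fixed and call-used register sets: reserve the PID and small
   data base registers when those features are in use, and inside fast
   interrupt handlers turn any fixed registers in the range r10-r13 into
   call-used registers.  */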
b2d7ede1 1198static void
24833e1a 1199rx_conditional_register_usage (void)
1200{
1201 static bool using_fixed_regs = false;
1202
6e507301 1203 if (TARGET_PID)
1204 {
1205 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1206 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1207 }
1208
24833e1a 1209 if (rx_small_data_limit > 0)
6e507301 1210 {
1211 if (TARGET_PID)
1212 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1213 else
1214 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1215
1216 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1217 }
24833e1a 1218
1219 if (use_fixed_regs != using_fixed_regs)
1220 {
1221 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1222 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1223
1224 if (use_fixed_regs)
1225 {
24833e1a 1226 unsigned int r;
1227
24833e1a 1228 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1229 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
e4d9e8e5 1230
1231 /* This is for fast interrupt handlers. Any register in
1232 the range r10 to r13 (inclusive) that is currently
1233 marked as fixed is now a viable, call-used register. */
24833e1a 1234 for (r = 10; r <= 13; r++)
1235 if (fixed_regs[r])
1236 {
1237 fixed_regs[r] = 0;
1238 call_used_regs[r] = 1;
24833e1a 1239 }
1240
e4d9e8e5 1241 /* Mark r7 as fixed. This is just a hack to avoid
1242 altering the reg_alloc_order array so that the newly
1243 freed r10-r13 registers are the preferred registers. */
1244 fixed_regs[7] = call_used_regs[7] = 1;
24833e1a 1245 }
1246 else
1247 {
1248 /* Restore the normal register masks. */
1249 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1250 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1251 }
1252
1253 using_fixed_regs = use_fixed_regs;
1254 }
1255}
1256
1257/* Perform any actions necessary before starting to compile FNDECL.
1258 For the RX we use this to make sure that we have the correct
1259 set of register masks selected. If FNDECL is NULL then we are
1260 compiling top level things. */
1261
1262static void
1263rx_set_current_function (tree fndecl)
1264{
1265 /* Remember the last target of rx_set_current_function. */
1266 static tree rx_previous_fndecl;
67e66e16 1267 bool prev_was_fast_interrupt;
1268 bool current_is_fast_interrupt;
24833e1a 1269
1270 /* Only change the context if the function changes. This hook is called
1271 several times in the course of compiling a function, and we don't want
1272 to slow things down too much or call target_reinit when it isn't safe. */
1273 if (fndecl == rx_previous_fndecl)
1274 return;
1275
67e66e16 1276 prev_was_fast_interrupt
24833e1a 1277 = rx_previous_fndecl
1278 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
67e66e16 1279
1280 current_is_fast_interrupt
24833e1a 1281 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1282
67e66e16 1283 if (prev_was_fast_interrupt != current_is_fast_interrupt)
24833e1a 1284 {
67e66e16 1285 use_fixed_regs = current_is_fast_interrupt;
24833e1a 1286 target_reinit ();
1287 }
67e66e16 1288
24833e1a 1289 rx_previous_fndecl = fndecl;
1290}
1291\f
 1292/* Typical stack layout should look like this after the function's prologue:
1293
1294 | |
1295 -- ^
1296 | | \ |
1297 | | arguments saved | Increasing
1298 | | on the stack | addresses
1299 PARENT arg pointer -> | | /
1300 -------------------------- ---- -------------------
1301 CHILD |ret | return address
1302 --
1303 | | \
1304 | | call saved
1305 | | registers
1306 | | /
1307 --
1308 | | \
1309 | | local
1310 | | variables
1311 frame pointer -> | | /
1312 --
1313 | | \
1314 | | outgoing | Decreasing
1315 | | arguments | addresses
1316 current stack pointer -> | | / |
1317 -------------------------- ---- ------------------ V
1318 | | */
1319
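/* Return the number of bits set in X, e.g. bit_count (0x22) == 2.  */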
1320static unsigned int
1321bit_count (unsigned int x)
1322{
1323 const unsigned int m1 = 0x55555555;
1324 const unsigned int m2 = 0x33333333;
1325 const unsigned int m4 = 0x0f0f0f0f;
1326
1327 x -= (x >> 1) & m1;
1328 x = (x & m2) + ((x >> 2) & m2);
1329 x = (x + (x >> 4)) & m4;
1330 x += x >> 8;
1331
1332 return (x + (x >> 16)) & 0x3f;
1333}
1334
e4d9e8e5 1335#define MUST_SAVE_ACC_REGISTER \
1336 (TARGET_SAVE_ACC_REGISTER \
1337 && (is_interrupt_func (NULL_TREE) \
1338 || is_fast_interrupt_func (NULL_TREE)))
1339
24833e1a 1340/* Returns either the lowest numbered and highest numbered registers that
1341 occupy the call-saved area of the stack frame, if the registers are
1342 stored as a contiguous block, or else a bitmask of the individual
1343 registers if they are stored piecemeal.
1344
1345 Also computes the size of the frame and the size of the outgoing
1346 arguments block (in bytes). */
1347
1348static void
1349rx_get_stack_layout (unsigned int * lowest,
1350 unsigned int * highest,
1351 unsigned int * register_mask,
1352 unsigned int * frame_size,
1353 unsigned int * stack_size)
1354{
1355 unsigned int reg;
1356 unsigned int low;
1357 unsigned int high;
1358 unsigned int fixed_reg = 0;
1359 unsigned int save_mask;
1360 unsigned int pushed_mask;
1361 unsigned int unneeded_pushes;
1362
e4d9e8e5 1363 if (is_naked_func (NULL_TREE))
24833e1a 1364 {
1365 /* Naked functions do not create their own stack frame.
e4d9e8e5 1366 Instead the programmer must do that for us. */
24833e1a 1367 * lowest = 0;
1368 * highest = 0;
1369 * register_mask = 0;
1370 * frame_size = 0;
1371 * stack_size = 0;
1372 return;
1373 }
1374
9d2f1b03 1375 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
24833e1a 1376 {
21cde6ec 1377 if ((df_regs_ever_live_p (reg)
382ffb70 1378 /* Always save all call clobbered registers inside non-leaf
1379 interrupt handlers, even if they are not live - they may
1380 be used in (non-interrupt aware) routines called from this one. */
1381 || (call_used_regs[reg]
1382 && is_interrupt_func (NULL_TREE)
1383 && ! current_function_is_leaf))
24833e1a 1384 && (! call_used_regs[reg]
 1385 /* Even call clobbered registers must
67e66e16 1386 be pushed inside interrupt handlers. */
e4d9e8e5 1387 || is_interrupt_func (NULL_TREE)
1388 /* Likewise for fast interrupt handlers, except registers r10 -
1389 r13. These are normally call-saved, but may have been set
1390 to call-used by rx_conditional_register_usage. If so then
1391 they can be used in the fast interrupt handler without
1392 saving them on the stack. */
1393 || (is_fast_interrupt_func (NULL_TREE)
1394 && ! IN_RANGE (reg, 10, 13))))
24833e1a 1395 {
1396 if (low == 0)
1397 low = reg;
1398 high = reg;
1399
1400 save_mask |= 1 << reg;
1401 }
1402
1403 /* Remember if we see a fixed register
1404 after having found the low register. */
1405 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1406 fixed_reg = reg;
1407 }
1408
e4d9e8e5 1409 /* If we have to save the accumulator register, make sure
1410 that at least two registers are pushed into the frame. */
1411 if (MUST_SAVE_ACC_REGISTER
1412 && bit_count (save_mask) < 2)
1413 {
1414 save_mask |= (1 << 13) | (1 << 14);
1415 if (low == 0)
1416 low = 13;
bc9bb967 1417 if (high == 0 || low == high)
1418 high = low + 1;
e4d9e8e5 1419 }
1420
24833e1a 1421 /* Decide if it would be faster to fill in the call-saved area of the stack
1422 frame using multiple PUSH instructions instead of a single PUSHM
1423 instruction.
1424
1425 SAVE_MASK is a bitmask of the registers that must be stored in the
1426 call-save area. PUSHED_MASK is a bitmask of the registers that would
1427 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1428 is a bitmask of those registers in pushed_mask that are not in
1429 save_mask.
1430
1431 We use a simple heuristic that says that it is better to use
1432 multiple PUSH instructions if the number of unnecessary pushes is
1433 greater than the number of necessary pushes.
1434
1435 We also use multiple PUSH instructions if there are any fixed registers
1436 between LOW and HIGH. The only way that this can happen is if the user
1437 has specified --fixed-<reg-name> on the command line and in such
1438 circumstances we do not want to touch the fixed registers at all.
1439
1440 FIXME: Is it worth improving this heuristic ? */
1441 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1442 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1443
1444 if ((fixed_reg && fixed_reg <= high)
1445 || (optimize_function_for_speed_p (cfun)
1446 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1447 {
1448 /* Use multiple pushes. */
1449 * lowest = 0;
1450 * highest = 0;
1451 * register_mask = save_mask;
1452 }
1453 else
1454 {
1455 /* Use one push multiple instruction. */
1456 * lowest = low;
1457 * highest = high;
1458 * register_mask = 0;
1459 }
1460
1461 * frame_size = rx_round_up
1462 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1463
1464 if (crtl->args.size > 0)
1465 * frame_size += rx_round_up
1466 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1467
1468 * stack_size = rx_round_up
1469 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1470}
1471
1472/* Generate a PUSHM instruction that matches the given operands. */
1473
1474void
1475rx_emit_stack_pushm (rtx * operands)
1476{
1477 HOST_WIDE_INT last_reg;
1478 rtx first_push;
1479
1480 gcc_assert (CONST_INT_P (operands[0]));
1481 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1482
1483 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1484 first_push = XVECEXP (operands[1], 0, 1);
1485 gcc_assert (SET_P (first_push));
1486 first_push = SET_SRC (first_push);
1487 gcc_assert (REG_P (first_push));
1488
1489 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
67e66e16 1490 reg_names [REGNO (first_push) - last_reg],
1491 reg_names [REGNO (first_push)]);
24833e1a 1492}
1493
1494/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1495
1496static rtx
1497gen_rx_store_vector (unsigned int low, unsigned int high)
1498{
1499 unsigned int i;
1500 unsigned int count = (high - low) + 2;
1501 rtx vector;
1502
1503 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1504
1505 XVECEXP (vector, 0, 0) =
51e241f8 1506 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1507 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1508 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1509
1510 for (i = 0; i < count - 1; i++)
1511 XVECEXP (vector, 0, i + 1) =
51e241f8 1512 gen_rtx_SET (VOIDmode,
24833e1a 1513 gen_rtx_MEM (SImode,
67e66e16 1514 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1515 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1516 gen_rtx_REG (SImode, high - i));
24833e1a 1517 return vector;
1518}
1519
67e66e16 1520/* Mark INSN as being frame related. If it is a PARALLEL
1521 then mark each element as being frame related as well. */
1522
1523static void
1524mark_frame_related (rtx insn)
1525{
1526 RTX_FRAME_RELATED_P (insn) = 1;
1527 insn = PATTERN (insn);
1528
1529 if (GET_CODE (insn) == PARALLEL)
1530 {
1531 unsigned int i;
1532
61fc50a0 1533 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
67e66e16 1534 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1535 }
1536}
1537
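/* Return true if the constant VAL fits within the user imposed limit
   (rx_max_constant_size) on the size of immediate operands.  */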
95272799 1538static bool
1539ok_for_max_constant (HOST_WIDE_INT val)
1540{
1541 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1542 /* If there is no constraint on the size of constants
1543 used as operands, then any value is legitimate. */
1544 return true;
1545
1546 /* rx_max_constant_size specifies the maximum number
1547 of bytes that can be used to hold a signed value. */
1548 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1549 ( 1 << (rx_max_constant_size * 8)));
1550}
1551
1552/* Generate an ADD of SRC plus VAL into DEST.
1553 Handles the case where VAL is too big for max_constant_value.
1554 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1555
1556static void
1557gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1558{
1559 rtx insn;
1560
1561 if (val == NULL_RTX || INTVAL (val) == 0)
1562 {
1563 gcc_assert (dest != src);
1564
1565 insn = emit_move_insn (dest, src);
1566 }
1567 else if (ok_for_max_constant (INTVAL (val)))
1568 insn = emit_insn (gen_addsi3 (dest, src, val));
1569 else
1570 {
f7fcec1a 1571 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
02f06d23 1572 will not reject it. */
1573 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1574 insn = emit_insn (gen_addsi3 (dest, src, val));
95272799 1575
1576 if (is_frame_related)
1577 /* We have to provide our own frame related note here
1578 as the dwarf2out code cannot be expected to grok
1579 our unspec. */
1580 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1581 gen_rtx_SET (SImode, dest,
1582 gen_rtx_PLUS (SImode, src, val)));
1583 return;
1584 }
1585
1586 if (is_frame_related)
1587 RTX_FRAME_RELATED_P (insn) = 1;
1588 return;
1589}
1590
24833e1a 1591void
1592rx_expand_prologue (void)
1593{
1594 unsigned int stack_size;
1595 unsigned int frame_size;
1596 unsigned int mask;
1597 unsigned int low;
1598 unsigned int high;
67e66e16 1599 unsigned int reg;
24833e1a 1600 rtx insn;
1601
1602 /* Naked functions use their own, programmer provided prologues. */
e4d9e8e5 1603 if (is_naked_func (NULL_TREE))
24833e1a 1604 return;
1605
1606 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1607
1608 /* If we use any of the callee-saved registers, save them now. */
1609 if (mask)
1610 {
24833e1a 1611 /* Push registers in reverse order. */
9d2f1b03 1612 for (reg = CC_REGNUM; reg --;)
24833e1a 1613 if (mask & (1 << reg))
1614 {
1615 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
67e66e16 1616 mark_frame_related (insn);
24833e1a 1617 }
1618 }
1619 else if (low)
1620 {
1621 if (high == low)
1622 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1623 else
1624 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1625 * UNITS_PER_WORD),
1626 gen_rx_store_vector (low, high)));
67e66e16 1627 mark_frame_related (insn);
1628 }
1629
e4d9e8e5 1630 if (MUST_SAVE_ACC_REGISTER)
67e66e16 1631 {
1632 unsigned int acc_high, acc_low;
1633
1634 /* Interrupt handlers have to preserve the accumulator
1635 register if so requested by the user. Use the first
e4d9e8e5 1636 two pushed registers as intermediaries. */
67e66e16 1637 if (mask)
1638 {
1639 acc_low = acc_high = 0;
1640
9d2f1b03 1641 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1642 if (mask & (1 << reg))
1643 {
1644 if (acc_low == 0)
1645 acc_low = reg;
1646 else
1647 {
1648 acc_high = reg;
1649 break;
1650 }
1651 }
1652
1653 /* We have assumed that there are at least two registers pushed... */
1654 gcc_assert (acc_high != 0);
1655
1656 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1657 We just assume that they are zero. */
1658 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1659 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1660 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1661 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1662 }
1663 else
1664 {
1665 acc_low = low;
1666 acc_high = low + 1;
1667
1668 /* We have assumed that there are at least two registers pushed... */
1669 gcc_assert (acc_high <= high);
1670
1671 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1672 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1673 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1674 gen_rx_store_vector (acc_low, acc_high)));
1675 }
24833e1a 1676 }
1677
1678 /* If needed, set up the frame pointer. */
1679 if (frame_pointer_needed)
95272799 1680 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1681 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1682
1683 /* Allocate space for the outgoing args.
1684 If the stack frame has not already been set up then handle this as well. */
1685 if (stack_size)
1686 {
1687 if (frame_size)
1688 {
1689 if (frame_pointer_needed)
95272799 1690 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1691 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1692 else
95272799 1693 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1694 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1695 true);
24833e1a 1696 }
1697 else
95272799 1698 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1699 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
24833e1a 1700 }
1701 else if (frame_size)
1702 {
1703 if (! frame_pointer_needed)
95272799 1704 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1705 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
24833e1a 1706 else
95272799 1707 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1708 true);
24833e1a 1709 }
24833e1a 1710}
1711
1712static void
1713rx_output_function_prologue (FILE * file,
1714 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1715{
1716 if (is_fast_interrupt_func (NULL_TREE))
1717 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1718
67e66e16 1719 if (is_interrupt_func (NULL_TREE))
1720 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
24833e1a 1721
1722 if (is_naked_func (NULL_TREE))
1723 asm_fprintf (file, "\t; Note: Naked Function\n");
1724
1725 if (cfun->static_chain_decl != NULL)
1726 asm_fprintf (file, "\t; Note: Nested function declared "
1727 "inside another function.\n");
1728
1729 if (crtl->calls_eh_return)
1730 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1731}
1732
1733/* Generate a POPM or RTSD instruction that matches the given operands. */
1734
1735void
1736rx_emit_stack_popm (rtx * operands, bool is_popm)
1737{
1738 HOST_WIDE_INT stack_adjust;
1739 HOST_WIDE_INT last_reg;
1740 rtx first_push;
1741
1742 gcc_assert (CONST_INT_P (operands[0]));
1743 stack_adjust = INTVAL (operands[0]);
1744
1745 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1746 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1747
1748 first_push = XVECEXP (operands[1], 0, 1);
1749 gcc_assert (SET_P (first_push));
1750 first_push = SET_DEST (first_push);
1751 gcc_assert (REG_P (first_push));
1752
1753 if (is_popm)
1754 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1755 reg_names [REGNO (first_push)],
1756 reg_names [REGNO (first_push) + last_reg]);
1757 else
1758 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1759 (int) stack_adjust,
1760 reg_names [REGNO (first_push)],
1761 reg_names [REGNO (first_push) + last_reg]);
1762}
1763
1764/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1765
1766static rtx
1767gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1768{
1769 unsigned int i;
1770 unsigned int bias = 3;
1771 unsigned int count = (high - low) + bias;
1772 rtx vector;
1773
1774 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1775
1776 XVECEXP (vector, 0, 0) =
51e241f8 1777 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1778 plus_constant (stack_pointer_rtx, adjust));
1779
1780 for (i = 0; i < count - 2; i++)
1781 XVECEXP (vector, 0, i + 1) =
51e241f8 1782 gen_rtx_SET (VOIDmode,
24833e1a 1783 gen_rtx_REG (SImode, low + i),
1784 gen_rtx_MEM (SImode,
1785 i == 0 ? stack_pointer_rtx
1786 : plus_constant (stack_pointer_rtx,
1787 i * UNITS_PER_WORD)));
1788
1a860023 1789 XVECEXP (vector, 0, count - 1) = ret_rtx;
24833e1a 1790
1791 return vector;
1792}
1793
1794/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1795
1796static rtx
1797gen_rx_popm_vector (unsigned int low, unsigned int high)
1798{
1799 unsigned int i;
1800 unsigned int count = (high - low) + 2;
1801 rtx vector;
1802
1803 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1804
1805 XVECEXP (vector, 0, 0) =
51e241f8 1806 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
24833e1a 1807 plus_constant (stack_pointer_rtx,
1808 (count - 1) * UNITS_PER_WORD));
1809
1810 for (i = 0; i < count - 1; i++)
1811 XVECEXP (vector, 0, i + 1) =
51e241f8 1812 gen_rtx_SET (VOIDmode,
24833e1a 1813 gen_rtx_REG (SImode, low + i),
1814 gen_rtx_MEM (SImode,
1815 i == 0 ? stack_pointer_rtx
1816 : plus_constant (stack_pointer_rtx,
1817 i * UNITS_PER_WORD)));
1818
1819 return vector;
1820}
1821
1822void
1823rx_expand_epilogue (bool is_sibcall)
1824{
1825 unsigned int low;
1826 unsigned int high;
1827 unsigned int frame_size;
1828 unsigned int stack_size;
1829 unsigned int register_mask;
1830 unsigned int regs_size;
67e66e16 1831 unsigned int reg;
24833e1a 1832 unsigned HOST_WIDE_INT total_size;
1833
 61fc50a0 1834 /* FIXME: We do not support indirect sibcalls at the moment because we
1835 cannot guarantee that the register holding the function address is a
1836 call-used register. If it is a call-saved register then the stack
1837 pop instructions generated in the epilogue will corrupt the address
1838 before it is used.
1839
1840 Creating a new call-used-only register class works but then the
1841 reload pass gets stuck because it cannot always find a call-used
1842 register for spilling sibcalls.
1843
1844 The other possible solution is for this pass to scan forward for the
1845 sibcall instruction (if it has been generated) and work out if it
1846 is an indirect sibcall using a call-saved register. If it is then
 1847 the address can be copied into a call-used register in this epilogue
1848 code and the sibcall instruction modified to use that register. */
1849
24833e1a 1850 if (is_naked_func (NULL_TREE))
1851 {
61fc50a0 1852 gcc_assert (! is_sibcall);
1853
24833e1a 1854 /* Naked functions use their own, programmer provided epilogues.
1855 But, in order to keep gcc happy we have to generate some kind of
1856 epilogue RTL. */
1857 emit_jump_insn (gen_naked_return ());
1858 return;
1859 }
1860
1861 rx_get_stack_layout (& low, & high, & register_mask,
1862 & frame_size, & stack_size);
1863
1864 total_size = frame_size + stack_size;
1865 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1866
1867 /* See if we are unable to use the special stack frame deconstruct and
1868 return instructions. In most cases we can use them, but the exceptions
1869 are:
1870
1871 - Sibling calling functions deconstruct the frame but do not return to
1872 their caller. Instead they branch to their sibling and allow their
1873 return instruction to return to this function's parent.
1874
67e66e16 1875 - Fast and normal interrupt handling functions have to use special
24833e1a 1876 return instructions.
1877
1878 - Functions where we have pushed a fragmented set of registers into the
1879 call-save area must have the same set of registers popped. */
1880 if (is_sibcall
1881 || is_fast_interrupt_func (NULL_TREE)
67e66e16 1882 || is_interrupt_func (NULL_TREE)
24833e1a 1883 || register_mask)
1884 {
1885 /* Cannot use the special instructions - deconstruct by hand. */
1886 if (total_size)
95272799 1887 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1888 GEN_INT (total_size), false);
24833e1a 1889
e4d9e8e5 1890 if (MUST_SAVE_ACC_REGISTER)
24833e1a 1891 {
67e66e16 1892 unsigned int acc_low, acc_high;
1893
1894 /* Reverse the saving of the accumulator register onto the stack.
1895 Note we must adjust the saved "low" accumulator value as it
1896 is really the middle 32-bits of the accumulator. */
1897 if (register_mask)
1898 {
1899 acc_low = acc_high = 0;
9d2f1b03 1900
1901 for (reg = 1; reg < CC_REGNUM; reg ++)
67e66e16 1902 if (register_mask & (1 << reg))
1903 {
1904 if (acc_low == 0)
1905 acc_low = reg;
1906 else
1907 {
1908 acc_high = reg;
1909 break;
1910 }
1911 }
1912 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1913 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1914 }
1915 else
1916 {
1917 acc_low = low;
1918 acc_high = low + 1;
1919 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1920 gen_rx_popm_vector (acc_low, acc_high)));
1921 }
1922
1923 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1924 gen_rtx_REG (SImode, acc_low),
1925 GEN_INT (16)));
1926 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1927 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1928 }
24833e1a 1929
67e66e16 1930 if (register_mask)
1931 {
9d2f1b03 1932 for (reg = 0; reg < CC_REGNUM; reg ++)
24833e1a 1933 if (register_mask & (1 << reg))
1934 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1935 }
1936 else if (low)
1937 {
1938 if (high == low)
1939 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1940 else
1941 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1942 gen_rx_popm_vector (low, high)));
1943 }
1944
1945 if (is_fast_interrupt_func (NULL_TREE))
61fc50a0 1946 {
1947 gcc_assert (! is_sibcall);
1948 emit_jump_insn (gen_fast_interrupt_return ());
1949 }
67e66e16 1950 else if (is_interrupt_func (NULL_TREE))
61fc50a0 1951 {
1952 gcc_assert (! is_sibcall);
1953 emit_jump_insn (gen_exception_return ());
1954 }
24833e1a 1955 else if (! is_sibcall)
1956 emit_jump_insn (gen_simple_return ());
1957
1958 return;
1959 }
1960
1961 /* If we allocated space on the stack, free it now. */
1962 if (total_size)
1963 {
1964 unsigned HOST_WIDE_INT rtsd_size;
1965
1966 /* See if we can use the RTSD instruction. */
1967 rtsd_size = total_size + regs_size;
1968 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1969 {
1970 if (low)
1971 emit_jump_insn (gen_pop_and_return
1972 (GEN_INT (rtsd_size),
1973 gen_rx_rtsd_vector (rtsd_size, low, high)));
1974 else
1975 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1976
1977 return;
1978 }
1979
95272799 1980 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1981 GEN_INT (total_size), false);
24833e1a 1982 }
1983
1984 if (low)
1985 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1986 gen_rx_rtsd_vector (regs_size,
1987 low, high)));
1988 else
1989 emit_jump_insn (gen_simple_return ());
1990}
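
/* A worked example of the fast path above (the numbers are invented for
   illustration only): a function that pushed r6..r8 in its prologue
   (regs_size == 12) and allocated 20 bytes of locals and outgoing
   arguments (total_size == 20) has rtsd_size == 32, which is below 1024
   and a multiple of 4, so the epilogue collapses into one pop-and-return
   insn that both restores r6..r8 and releases the whole 32-byte frame.
   If no registers were pushed, the deallocate_and_return pattern is used
   instead, and if rtsd_size were too large or misaligned we fall back to
   an explicit add to the stack pointer followed by pop_and_return.  */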
1991
1992
 1993/* Compute the offset (in bytes) between FROM (arg pointer
 1994   or frame pointer) and TO (frame pointer or stack pointer).
 1995   See the ASCII art comment at the start of rx_expand_prologue
 1996   for more information. */
1997
1998int
1999rx_initial_elimination_offset (int from, int to)
2000{
2001 unsigned int low;
2002 unsigned int high;
2003 unsigned int frame_size;
2004 unsigned int stack_size;
2005 unsigned int mask;
2006
2007 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2008
2009 if (from == ARG_POINTER_REGNUM)
2010 {
2011 /* Extend the computed size of the stack frame to
2012 include the registers pushed in the prologue. */
2013 if (low)
2014 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2015 else
2016 frame_size += bit_count (mask) * UNITS_PER_WORD;
2017
2018 /* Remember to include the return address. */
2019 frame_size += 1 * UNITS_PER_WORD;
2020
2021 if (to == FRAME_POINTER_REGNUM)
2022 return frame_size;
2023
2024 gcc_assert (to == STACK_POINTER_REGNUM);
2025 return frame_size + stack_size;
2026 }
2027
2028 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2029 return stack_size;
2030}
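
/* A sketch with made-up numbers: if the prologue pushed r6..r8
   (low == 6, high == 8), the function has 8 bytes of locals
   (frame_size == 8) and 4 bytes of outgoing arguments
   (stack_size == 4), then eliminating the arg pointer gives
   frame_size = 8 + 3 * 4 + 4 = 24, so ARG_POINTER -> FRAME_POINTER
   is 24 and ARG_POINTER -> STACK_POINTER is 24 + 4 = 28, while
   FRAME_POINTER -> STACK_POINTER is just stack_size == 4.  */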
2031
24833e1a 2032/* Decide if a variable should go into one of the small data sections. */
2033
2034static bool
2035rx_in_small_data (const_tree decl)
2036{
2037 int size;
2038 const_tree section;
2039
2040 if (rx_small_data_limit == 0)
2041 return false;
2042
2043 if (TREE_CODE (decl) != VAR_DECL)
2044 return false;
2045
2046 /* We do not put read-only variables into a small data area because
2047 they would be placed with the other read-only sections, far away
2048 from the read-write data sections, and we only have one small
2049 data area pointer.
 2050	 Similarly, commons are placed in the .bss section, which might be
 2051	 far away from (and out of alignment with respect to) the .data section. */
2052 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2053 return false;
2054
2055 section = DECL_SECTION_NAME (decl);
2056 if (section)
2057 {
2058 const char * const name = TREE_STRING_POINTER (section);
2059
2060 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
2061 }
2062
2063 size = int_size_in_bytes (TREE_TYPE (decl));
2064
2065 return (size > 0) && (size <= rx_small_data_limit);
2066}
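
/* For illustration (hypothetical declarations, assuming
   -msmall-data-limit=8): "int counter = 1;" is writable, not common and
   only 4 bytes, so it qualifies for the small data area; "const int
   limit = 10;" is rejected because it is read-only, and "char buf[64];"
   is rejected because it exceeds the limit.  A variable explicitly
   placed in section "D_2" or "B_2" is also treated as small data.  */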
2067
2068/* Return a section for X.
2069 The only special thing we do here is to honor small data. */
2070
2071static section *
2072rx_select_rtx_section (enum machine_mode mode,
2073 rtx x,
2074 unsigned HOST_WIDE_INT align)
2075{
2076 if (rx_small_data_limit > 0
2077 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2078 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2079 return sdata_section;
2080
2081 return default_elf_select_rtx_section (mode, x, align);
2082}
2083
2084static section *
2085rx_select_section (tree decl,
2086 int reloc,
2087 unsigned HOST_WIDE_INT align)
2088{
2089 if (rx_small_data_limit > 0)
2090 {
2091 switch (categorize_decl_for_section (decl, reloc))
2092 {
2093 case SECCAT_SDATA: return sdata_section;
2094 case SECCAT_SBSS: return sbss_section;
2095 case SECCAT_SRODATA:
 2096	 /* Fall through. We do not put small, read-only
 2097	 data into the C_2 section because we do not
 2098	 use the C_2 section at all: it is located
 2099	 with the other read-only data sections, far
 2100	 away from the read-write data sections, and
 2101	 we only have one small data
 2102	 pointer (r13). */
2103 default:
2104 break;
2105 }
2106 }
2107
2108 /* If we are supporting the Renesas assembler
2109 we cannot use mergeable sections. */
2110 if (TARGET_AS100_SYNTAX)
2111 switch (categorize_decl_for_section (decl, reloc))
2112 {
2113 case SECCAT_RODATA_MERGE_CONST:
2114 case SECCAT_RODATA_MERGE_STR_INIT:
2115 case SECCAT_RODATA_MERGE_STR:
2116 return readonly_data_section;
2117
2118 default:
2119 break;
2120 }
2121
2122 return default_elf_select_section (decl, reloc, align);
2123}
2124\f
2125enum rx_builtin
2126{
2127 RX_BUILTIN_BRK,
2128 RX_BUILTIN_CLRPSW,
2129 RX_BUILTIN_INT,
2130 RX_BUILTIN_MACHI,
2131 RX_BUILTIN_MACLO,
2132 RX_BUILTIN_MULHI,
2133 RX_BUILTIN_MULLO,
2134 RX_BUILTIN_MVFACHI,
2135 RX_BUILTIN_MVFACMI,
2136 RX_BUILTIN_MVFC,
2137 RX_BUILTIN_MVTACHI,
2138 RX_BUILTIN_MVTACLO,
2139 RX_BUILTIN_MVTC,
67e66e16 2140 RX_BUILTIN_MVTIPL,
24833e1a 2141 RX_BUILTIN_RACW,
2142 RX_BUILTIN_REVW,
2143 RX_BUILTIN_RMPA,
2144 RX_BUILTIN_ROUND,
24833e1a 2145 RX_BUILTIN_SETPSW,
2146 RX_BUILTIN_WAIT,
2147 RX_BUILTIN_max
2148};
2149
103700c7 2150static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2151
24833e1a 2152static void
2153rx_init_builtins (void)
2154{
2155#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
103700c7 2156 rx_builtins[RX_BUILTIN_##UC_NAME] = \
f7fcec1a 2157 add_builtin_function ("__builtin_rx_" LC_NAME, \
24833e1a 2158 build_function_type_list (RET_TYPE##_type_node, \
2159 ARG_TYPE##_type_node, \
2160 NULL_TREE), \
2161 RX_BUILTIN_##UC_NAME, \
2162 BUILT_IN_MD, NULL, NULL_TREE)
2163
2164#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
103700c7 2165 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2166 add_builtin_function ("__builtin_rx_" LC_NAME, \
2167 build_function_type_list (RET_TYPE##_type_node, \
2168 ARG_TYPE1##_type_node,\
2169 ARG_TYPE2##_type_node,\
2170 NULL_TREE), \
2171 RX_BUILTIN_##UC_NAME, \
2172 BUILT_IN_MD, NULL, NULL_TREE)
2173
2174#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
103700c7 2175 rx_builtins[RX_BUILTIN_##UC_NAME] = \
24833e1a 2176 add_builtin_function ("__builtin_rx_" LC_NAME, \
2177 build_function_type_list (RET_TYPE##_type_node, \
2178 ARG_TYPE1##_type_node,\
2179 ARG_TYPE2##_type_node,\
2180 ARG_TYPE3##_type_node,\
2181 NULL_TREE), \
2182 RX_BUILTIN_##UC_NAME, \
2183 BUILT_IN_MD, NULL, NULL_TREE)
2184
2185 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
2186 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2187 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2188 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2189 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2190 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2191 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2192 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2193 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
2194 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
2195 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2196 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2197 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
2198 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2199 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
67e66e16 2200 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
24833e1a 2201 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2202 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2203 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
24833e1a 2204 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
2205}
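
/* A purely illustrative sketch of how these registrations are meant to
   be used from C code compiled for RX.  The builtin names and
   signatures come from the ADD_RX_BUILTIN calls above; the surrounding
   code is invented for the example:

     int mac_high (const int * a, const int * b, int n)
     {
       int i;

       __builtin_rx_mvtachi (0);            // clear the accumulator
       __builtin_rx_mvtaclo (0);
       for (i = 0; i < n; i++)
         __builtin_rx_machi (a[i], b[i]);   // multiply-accumulate
       return __builtin_rx_mvfachi ();      // read the result back
     }
*/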
2206
103700c7 2207/* Return the RX builtin for CODE. */
2208
2209static tree
2210rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2211{
2212 if (code >= RX_BUILTIN_max)
2213 return error_mark_node;
2214
2215 return rx_builtins[code];
2216}
2217
24833e1a 2218static rtx
2219rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2220{
2221 if (reg && ! REG_P (arg))
2222 arg = force_reg (SImode, arg);
2223
2224 emit_insn (gen_func (arg));
2225
2226 return NULL_RTX;
2227}
2228
2229static rtx
2230rx_expand_builtin_mvtc (tree exp)
2231{
2232 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2233 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2234
2235 if (! CONST_INT_P (arg1))
2236 return NULL_RTX;
2237
2238 if (! REG_P (arg2))
2239 arg2 = force_reg (SImode, arg2);
2240
2241 emit_insn (gen_mvtc (arg1, arg2));
2242
2243 return NULL_RTX;
2244}
2245
2246static rtx
2247rx_expand_builtin_mvfc (tree t_arg, rtx target)
2248{
2249 rtx arg = expand_normal (t_arg);
2250
2251 if (! CONST_INT_P (arg))
2252 return NULL_RTX;
2253
e4d9e8e5 2254 if (target == NULL_RTX)
2255 return NULL_RTX;
2256
24833e1a 2257 if (! REG_P (target))
2258 target = force_reg (SImode, target);
2259
2260 emit_insn (gen_mvfc (target, arg));
2261
2262 return target;
2263}
2264
67e66e16 2265static rtx
2266rx_expand_builtin_mvtipl (rtx arg)
2267{
2268 /* The RX610 does not support the MVTIPL instruction. */
2269 if (rx_cpu_type == RX610)
2270 return NULL_RTX;
2271
e5743482 2272 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
67e66e16 2273 return NULL_RTX;
2274
2275 emit_insn (gen_mvtipl (arg));
2276
2277 return NULL_RTX;
2278}
2279
24833e1a 2280static rtx
2281rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2282{
2283 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2284 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2285
2286 if (! REG_P (arg1))
2287 arg1 = force_reg (SImode, arg1);
2288
2289 if (! REG_P (arg2))
2290 arg2 = force_reg (SImode, arg2);
2291
2292 emit_insn (gen_func (arg1, arg2));
2293
2294 return NULL_RTX;
2295}
2296
2297static rtx
2298rx_expand_int_builtin_1_arg (rtx arg,
2299 rtx target,
2300 rtx (* gen_func)(rtx, rtx),
2301 bool mem_ok)
2302{
2303 if (! REG_P (arg))
2304 if (!mem_ok || ! MEM_P (arg))
2305 arg = force_reg (SImode, arg);
2306
2307 if (target == NULL_RTX || ! REG_P (target))
2308 target = gen_reg_rtx (SImode);
2309
2310 emit_insn (gen_func (target, arg));
2311
2312 return target;
2313}
2314
2315static rtx
2316rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2317{
2318 if (target == NULL_RTX || ! REG_P (target))
2319 target = gen_reg_rtx (SImode);
2320
2321 emit_insn (gen_func (target));
2322
2323 return target;
2324}
2325
2326static rtx
2327rx_expand_builtin_round (rtx arg, rtx target)
2328{
2329 if ((! REG_P (arg) && ! MEM_P (arg))
2330 || GET_MODE (arg) != SFmode)
2331 arg = force_reg (SFmode, arg);
2332
2333 if (target == NULL_RTX || ! REG_P (target))
2334 target = gen_reg_rtx (SImode);
2335
2336 emit_insn (gen_lrintsf2 (target, arg));
2337
2338 return target;
2339}
2340
e5743482 2341static int
0318c61a 2342valid_psw_flag (rtx op, const char *which)
e5743482 2343{
2344 static int mvtc_inform_done = 0;
2345
2346 if (GET_CODE (op) == CONST_INT)
2347 switch (INTVAL (op))
2348 {
2349 case 0: case 'c': case 'C':
2350 case 1: case 'z': case 'Z':
2351 case 2: case 's': case 'S':
2352 case 3: case 'o': case 'O':
2353 case 8: case 'i': case 'I':
2354 case 9: case 'u': case 'U':
2355 return 1;
2356 }
2357
2358 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2359 if (!mvtc_inform_done)
2360 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2361 mvtc_inform_done = 1;
2362
2363 return 0;
2364}
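
/* Illustrative (hypothetical) uses, based on the values accepted above:
   __builtin_rx_setpsw ('I') and __builtin_rx_clrpsw ('I') flip what is
   presumably the interrupt-enable flag, and a numeric argument such as
   __builtin_rx_clrpsw (0) designates the same flag as 'C' (carry).  As
   the second diagnostic says, arbitrary PSW values have to go through
   __builtin_rx_mvtc (0, value) instead.  */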
2365
24833e1a 2366static rtx
2367rx_expand_builtin (tree exp,
2368 rtx target,
2369 rtx subtarget ATTRIBUTE_UNUSED,
2370 enum machine_mode mode ATTRIBUTE_UNUSED,
2371 int ignore ATTRIBUTE_UNUSED)
2372{
2373 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
432093e5 2374 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
24833e1a 2375 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2376 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2377
2378 switch (fcode)
2379 {
2380 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
e5743482 2381 case RX_BUILTIN_CLRPSW:
2382 if (!valid_psw_flag (op, "clrpsw"))
2383 return NULL_RTX;
2384 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2385 case RX_BUILTIN_SETPSW:
2386 if (!valid_psw_flag (op, "setpsw"))
2387 return NULL_RTX;
2388 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
24833e1a 2389 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2390 (op, gen_int, false);
2391 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2392 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2393 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2394 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2395 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2396 (target, gen_mvfachi);
2397 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2398 (target, gen_mvfacmi);
2399 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2400 (op, gen_mvtachi, true);
2401 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2402 (op, gen_mvtaclo, true);
2403 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2404 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2405 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
67e66e16 2406 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
24833e1a 2407 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2408 (op, gen_racw, false);
2409 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2410 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2411 (op, target, gen_revw, false);
24833e1a 2412 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2413
2414 default:
2415 internal_error ("bad builtin code");
2416 break;
2417 }
2418
2419 return NULL_RTX;
2420}
2421\f
2422/* Place an element into a constructor or destructor section.
2423 Like default_ctor_section_asm_out_constructor in varasm.c
2424 except that it uses .init_array (or .fini_array) and it
2425 handles constructor priorities. */
2426
2427static void
2428rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2429{
2430 section * s;
2431
2432 if (priority != DEFAULT_INIT_PRIORITY)
2433 {
2434 char buf[18];
2435
2436 sprintf (buf, "%s.%.5u",
2437 is_ctor ? ".init_array" : ".fini_array",
2438 priority);
2439 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2440 }
2441 else if (is_ctor)
2442 s = ctors_section;
2443 else
2444 s = dtors_section;
2445
2446 switch_to_section (s);
2447 assemble_align (POINTER_SIZE);
2448 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2449}
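
/* For example (a sketch): a global constructor declared with
   __attribute__((constructor (101))) arrives here with priority 101, so
   the sprintf above produces the section name ".init_array.00101" and a
   pointer-sized reference to the function is emitted there.
   Constructors with DEFAULT_INIT_PRIORITY simply land in ctors_section
   (and destructors in dtors_section).  */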
2450
2451static void
2452rx_elf_asm_constructor (rtx symbol, int priority)
2453{
2454 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2455}
2456
2457static void
2458rx_elf_asm_destructor (rtx symbol, int priority)
2459{
2460 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2461}
2462\f
67e66e16 2463/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
24833e1a 2464
2465static tree
2466rx_handle_func_attribute (tree * node,
2467 tree name,
2468 tree args,
2469 int flags ATTRIBUTE_UNUSED,
2470 bool * no_add_attrs)
2471{
2472 gcc_assert (DECL_P (* node));
2473 gcc_assert (args == NULL_TREE);
2474
2475 if (TREE_CODE (* node) != FUNCTION_DECL)
2476 {
2477 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2478 name);
2479 * no_add_attrs = true;
2480 }
2481
2482 /* FIXME: We ought to check for conflicting attributes. */
2483
2484 /* FIXME: We ought to check that the interrupt and exception
2485 handler attributes have been applied to void functions. */
2486 return NULL_TREE;
2487}
2488
2489/* Table of RX specific attributes. */
2490const struct attribute_spec rx_attribute_table[] =
2491{
ac86af5d 2492 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2493 affects_type_identity. */
2494 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2495 false },
2496 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2497 false },
2498 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2499 false },
2500 { NULL, 0, 0, false, false, false, NULL, false }
24833e1a 2501};
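
/* Hypothetical uses of the attributes in this table:

     void timer_handler (void) __attribute__((interrupt));
     void nmi_handler (void) __attribute__((fast_interrupt));
     void reset_stub (void) __attribute__((naked));

   All three take no arguments (min_len == max_len == 0) and must be
   placed on function declarations, as rx_handle_func_attribute
   enforces above.  */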
2502
42d89991 2503/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
02e53c17 2504
2505static void
42d89991 2506rx_override_options_after_change (void)
98cb9b5b 2507{
2508 static bool first_time = TRUE;
98cb9b5b 2509
2510 if (first_time)
2511 {
2512 /* If this is the first time through and the user has not disabled
42d89991 2513 the use of RX FPU hardware then enable -ffinite-math-only,
2514 since the FPU instructions do not support NaNs and infinities. */
98cb9b5b 2515 if (TARGET_USE_FPU)
42d89991 2516 flag_finite_math_only = 1;
98cb9b5b 2517
98cb9b5b 2518 first_time = FALSE;
2519 }
2520 else
2521 {
 2522	 /* Alert the user if they are changing the optimization options to use
 2523	 IEEE-compliant floating-point arithmetic while RX FPU insns are in use. */
2524 if (TARGET_USE_FPU
42d89991 2525 && !flag_finite_math_only)
2526 warning (0, "RX FPU instructions do not support NaNs and infinities");
98cb9b5b 2527 }
2528}
2529
1af17d44 2530static void
2531rx_option_override (void)
2532{
8cb00d70 2533 unsigned int i;
2534 cl_deferred_option *opt;
2535 VEC(cl_deferred_option,heap) *vec
2536 = (VEC(cl_deferred_option,heap) *) rx_deferred_options;
2537
2538 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
2539 {
2540 switch (opt->opt_index)
2541 {
2542 case OPT_mint_register_:
2543 switch (opt->value)
2544 {
2545 case 4:
2546 fixed_regs[10] = call_used_regs [10] = 1;
2547 /* Fall through. */
2548 case 3:
2549 fixed_regs[11] = call_used_regs [11] = 1;
2550 /* Fall through. */
2551 case 2:
2552 fixed_regs[12] = call_used_regs [12] = 1;
2553 /* Fall through. */
2554 case 1:
2555 fixed_regs[13] = call_used_regs [13] = 1;
2556 /* Fall through. */
2557 case 0:
6e507301 2558 rx_num_interrupt_regs = opt->value;
8cb00d70 2559 break;
2560 default:
6e507301 2561 rx_num_interrupt_regs = 0;
8cb00d70 2562 /* Error message already given because rx_handle_option
2563 returned false. */
2564 break;
2565 }
2566 break;
2567
2568 default:
2569 gcc_unreachable ();
2570 }
2571 }
2572
1af17d44 2573 /* This target defaults to strict volatile bitfields. */
941a2396 2574 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
1af17d44 2575 flag_strict_volatile_bitfields = 1;
42d89991 2576
2577 rx_override_options_after_change ();
9f9a3b39 2578
2579 if (align_jumps == 0 && ! optimize_size)
2580 align_jumps = 3;
2581 if (align_loops == 0 && ! optimize_size)
2582 align_loops = 3;
2583 if (align_labels == 0 && ! optimize_size)
2584 align_labels = 3;
1af17d44 2585}
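
/* A concrete reading of the -mint-register= handling above:
   -mint-register=2 marks r12 and r13 as fixed and call-used (the switch
   cases fall through), -mint-register=4 reserves r10 through r13, and
   rx_num_interrupt_regs records the requested count.  A value outside
   0..4 has already been diagnosed by rx_handle_option, so the count is
   simply reset to zero here.  */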
2586
98cb9b5b 2587\f
24833e1a 2588static bool
2589rx_allocate_stack_slots_for_args (void)
2590{
2591 /* Naked functions should not allocate stack slots for arguments. */
2592 return ! is_naked_func (NULL_TREE);
2593}
2594
2595static bool
2596rx_func_attr_inlinable (const_tree decl)
2597{
2598 return ! is_fast_interrupt_func (decl)
67e66e16 2599 && ! is_interrupt_func (decl)
24833e1a 2600 && ! is_naked_func (decl);
2601}
2602
61fc50a0 2603/* Return nonzero if it is OK to make a tail-call to DECL, which is
 2604   either a function_decl or NULL (for an indirect call), using EXP. */
2605
2606static bool
e4d9e8e5 2607rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
61fc50a0 2608{
2609 /* Do not allow indirect tailcalls. The
2610 sibcall patterns do not support them. */
2611 if (decl == NULL)
2612 return false;
2613
2614 /* Never tailcall from inside interrupt handlers or naked functions. */
2615 if (is_fast_interrupt_func (NULL_TREE)
2616 || is_interrupt_func (NULL_TREE)
2617 || is_naked_func (NULL_TREE))
2618 return false;
2619
2620 return true;
2621}
2622
24833e1a 2623static void
2624rx_file_start (void)
2625{
2626 if (! TARGET_AS100_SYNTAX)
2627 default_file_start ();
2628}
2629
2630static bool
2631rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2632{
c6347c7a 2633 /* The packed attribute overrides the MS behaviour. */
2634 return ! TYPE_PACKED (record_type);
24833e1a 2635}
24833e1a 2636\f
 2637/* Returns true if X is a legitimate constant for an immediate
2638 operand on the RX. X is already known to satisfy CONSTANT_P. */
2639
2640bool
f7fcec1a 2641rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
24833e1a 2642{
24833e1a 2643 switch (GET_CODE (x))
2644 {
2645 case CONST:
2646 x = XEXP (x, 0);
2647
2648 if (GET_CODE (x) == PLUS)
2649 {
2650 if (! CONST_INT_P (XEXP (x, 1)))
2651 return false;
2652
2653 /* GCC would not pass us CONST_INT + CONST_INT so we
2654 know that we have {SYMBOL|LABEL} + CONST_INT. */
2655 x = XEXP (x, 0);
2656 gcc_assert (! CONST_INT_P (x));
2657 }
2658
2659 switch (GET_CODE (x))
2660 {
2661 case LABEL_REF:
2662 case SYMBOL_REF:
2663 return true;
2664
95272799 2665 case UNSPEC:
6e507301 2666 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
95272799 2667
24833e1a 2668 default:
2669 /* FIXME: Can this ever happen ? */
776f1390 2670 gcc_unreachable ();
24833e1a 2671 }
2672 break;
2673
2674 case LABEL_REF:
2675 case SYMBOL_REF:
2676 return true;
2677 case CONST_DOUBLE:
09bb92cc 2678 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
24833e1a 2679 case CONST_VECTOR:
2680 return false;
2681 default:
2682 gcc_assert (CONST_INT_P (x));
2683 break;
2684 }
2685
95272799 2686 return ok_for_max_constant (INTVAL (x));
24833e1a 2687}
2688
24833e1a 2689static int
2690rx_address_cost (rtx addr, bool speed)
2691{
2692 rtx a, b;
2693
2694 if (GET_CODE (addr) != PLUS)
2695 return COSTS_N_INSNS (1);
2696
2697 a = XEXP (addr, 0);
2698 b = XEXP (addr, 1);
2699
2700 if (REG_P (a) && REG_P (b))
2701 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2702 return COSTS_N_INSNS (4);
2703
2704 if (speed)
2705 /* [REG+OFF] is just as fast as [REG]. */
2706 return COSTS_N_INSNS (1);
2707
2708 if (CONST_INT_P (b)
2709 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2710 /* Try to discourage REG + <large OFF> when optimizing for size. */
2711 return COSTS_N_INSNS (2);
2712
2713 return COSTS_N_INSNS (1);
2714}
2715
2716static bool
2717rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2718{
2719 /* We can always eliminate to the frame pointer.
2720 We can eliminate to the stack pointer unless a frame
2721 pointer is needed. */
2722
2723 return to == FRAME_POINTER_REGNUM
2724 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2725}
2726\f
2727
2728static void
2729rx_trampoline_template (FILE * file)
2730{
2731 /* Output assembler code for a block containing the constant
2732 part of a trampoline, leaving space for the variable parts.
2733
2734 On the RX, (where r8 is the static chain regnum) the trampoline
2735 looks like:
2736
2737 mov #<static chain value>, r8
2738 mov #<function's address>, r9
2739 jmp r9
2740
 2741	 In big-endian-data mode, however, instructions are read into the CPU
 2742	 4 bytes at a time. These bytes are then swapped around before being
 2743	 passed to the decoder. So we must partition our trampoline into
 2744	 4-byte packets and swap these packets around so that the instruction
 2745	 reader will reverse the process. But, in order to avoid splitting
 2746	 the 32-bit constants across these packet boundaries (which would make
 2747	 inserting them into the constructed trampoline very difficult), we
 2748	 have to pad the instruction sequence with NOP insns, i.e.:
2749
2750 nop
2751 nop
2752 mov.l #<...>, r8
2753 nop
2754 nop
2755 mov.l #<...>, r9
2756 jmp r9
2757 nop
2758 nop */
2759
2760 if (! TARGET_BIG_ENDIAN_DATA)
2761 {
2762 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2763 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2764 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2765 }
2766 else
2767 {
2768 char r8 = '0' + STATIC_CHAIN_REGNUM;
2769 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2770
2771 if (TARGET_AS100_SYNTAX)
2772 {
2773 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2774 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2775 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2776 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2777 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2778 }
2779 else
2780 {
2781 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2782 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2783 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2784 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2785 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2786 }
2787 }
2788}
2789
2790static void
2791rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2792{
2793 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2794
2795 emit_block_move (tramp, assemble_trampoline_template (),
2796 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2797
2798 if (TARGET_BIG_ENDIAN_DATA)
2799 {
2800 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2801 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2802 }
2803 else
2804 {
2805 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2806 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2807 }
2808}
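
/* Putting the two routines together (offsets in bytes, read off the
   code above): in little-endian mode the template is "mov.L #chain, r8;
   mov.L #fn, r9; jmp r9", so the 32-bit constants occupy bytes 2-5 and
   8-11 and rx_trampoline_init patches offsets 2 and 6 + 2.  In
   big-endian-data mode the NOP padding keeps each constant inside its
   own 4-byte packet, so after the packet swapping the constants sit at
   the aligned offsets 4-7 and 12-15, which is why offsets 4 and 12 are
   patched instead.  */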
2809\f
ccfccd66 2810static int
3e8d9684 2811rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2812 reg_class_t regclass ATTRIBUTE_UNUSED,
2813 bool in)
9d2f1b03 2814{
6145a46d 2815 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
9d2f1b03 2816}
2817
ccfccd66 2818/* Convert a CC_MODE to the set of flags that it represents. */
9d2f1b03 2819
2820static unsigned int
ccfccd66 2821flags_from_mode (enum machine_mode mode)
9d2f1b03 2822{
ccfccd66 2823 switch (mode)
9d2f1b03 2824 {
ccfccd66 2825 case CC_ZSmode:
2826 return CC_FLAG_S | CC_FLAG_Z;
2827 case CC_ZSOmode:
2828 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2829 case CC_ZSCmode:
2830 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2831 case CCmode:
2832 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2833 case CC_Fmode:
2834 return CC_FLAG_FP;
2835 default:
2836 gcc_unreachable ();
2837 }
2838}
9d2f1b03 2839
ccfccd66 2840/* Convert a set of flags to a CC_MODE that can implement it. */
9d2f1b03 2841
ccfccd66 2842static enum machine_mode
2843mode_from_flags (unsigned int f)
2844{
2845 if (f & CC_FLAG_FP)
2846 return CC_Fmode;
2847 if (f & CC_FLAG_O)
2848 {
2849 if (f & CC_FLAG_C)
2850 return CCmode;
2851 else
2852 return CC_ZSOmode;
9d2f1b03 2853 }
ccfccd66 2854 else if (f & CC_FLAG_C)
2855 return CC_ZSCmode;
2856 else
2857 return CC_ZSmode;
9d2f1b03 2858}
2859
ccfccd66 2860/* Convert an RTX_CODE to the set of flags needed to implement it.
2861 This assumes an integer comparison. */
2862
9d2f1b03 2863static unsigned int
ccfccd66 2864flags_from_code (enum rtx_code code)
9d2f1b03 2865{
ccfccd66 2866 switch (code)
9d2f1b03 2867 {
ccfccd66 2868 case LT:
2869 case GE:
24ad6c43 2870 return CC_FLAG_S;
ccfccd66 2871 case GT:
2872 case LE:
2873 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2874 case GEU:
2875 case LTU:
2876 return CC_FLAG_C;
2877 case GTU:
2878 case LEU:
2879 return CC_FLAG_C | CC_FLAG_Z;
2880 case EQ:
2881 case NE:
2882 return CC_FLAG_Z;
2883 default:
2884 gcc_unreachable ();
9d2f1b03 2885 }
2886}
2887
ccfccd66 2888/* Return a CC_MODE of which both M1 and M2 are subsets. */
2889
2890static enum machine_mode
2891rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
9d2f1b03 2892{
ccfccd66 2893 unsigned f;
2894
2895 /* Early out for identical modes. */
2896 if (m1 == m2)
2897 return m1;
2898
2899 /* There's no valid combination for FP vs non-FP. */
2900 f = flags_from_mode (m1) | flags_from_mode (m2);
2901 if (f & CC_FLAG_FP)
2902 return VOIDmode;
2903
2904 /* Otherwise, see what mode can implement all the flags. */
2905 return mode_from_flags (f);
9d2f1b03 2906}
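
/* Worked examples of the lattice above: an EQ/NE test only needs the Z
   flag, which mode_from_flags maps to CC_ZSmode; a GTU/LEU test needs C
   and Z, giving CC_ZSCmode.  Combining CC_ZSmode with CC_ZSCmode
   therefore yields CC_ZSCmode, combining any integer mode with CCmode
   yields CCmode, and mixing CC_Fmode with an integer mode yields
   VOIDmode because no single mode implements both sets of flags.  */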
8b8777b9 2907
2908/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2909
2910enum machine_mode
24ad6c43 2911rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
8b8777b9 2912{
2913 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2914 return CC_Fmode;
2915
24ad6c43 2916 if (y != const0_rtx)
2917 return CCmode;
2918
ccfccd66 2919 return mode_from_flags (flags_from_code (cmp_code));
2920}
2921
ccfccd66 2922/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
2923 CC_MODE, and use that in branches based on that compare. */
2924
2925void
2926rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2927 rtx c1, rtx c2, rtx label)
2928{
2929 rtx flags, x;
2930
2931 flags = gen_rtx_REG (cc_mode, CC_REG);
2932 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2933 x = gen_rtx_SET (VOIDmode, flags, x);
2934 emit_insn (x);
2935
2936 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2937 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2938 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2939 emit_jump_insn (x);
8b8777b9 2940}
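
/* Schematically, the RTL emitted above is (register numbers omitted):

     (set (reg:CC_MODE CC_REG) (compare:CC_MODE C1 C2))
     (set (pc) (if_then_else (CMP1 (reg:CC_MODE CC_REG) (const_int 0))
                             LABEL
                             (pc)))
*/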
2941
fc3b02a9 2942/* A helper function for matching parallels that set the flags. */
2943
2944bool
2945rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2946{
2947 rtx op1, flags;
2948 enum machine_mode flags_mode;
2949
2950 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2951
2952 op1 = XVECEXP (PATTERN (insn), 0, 1);
2953 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2954
2955 flags = SET_DEST (op1);
2956 flags_mode = GET_MODE (flags);
2957
2958 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2959 return false;
2960 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2961 return false;
2962
2963 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
2964 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
2965 return false;
2966
2967 return true;
2968}
9f9a3b39 2969\f
2970int
001afa63 2971rx_align_for_label (rtx lab, int uses_threshold)
9f9a3b39 2972{
001afa63 2973 /* This is a simple heuristic to guess when an alignment would not be useful
2974 because the delay due to the inserted NOPs would be greater than the delay
2975 due to the misaligned branch. If uses_threshold is zero then the alignment
2976 is always useful. */
f7fcec1a 2977 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
001afa63 2978 return 0;
2979
9f9a3b39 2980 return optimize_size ? 1 : 3;
2981}
2982
2983static int
2984rx_max_skip_for_label (rtx lab)
2985{
2986 int opsize;
2987 rtx op;
2988
2989 if (lab == NULL_RTX)
2990 return 0;
fc3b02a9 2991
9f9a3b39 2992 op = lab;
2993 do
2994 {
2995 op = next_nonnote_nondebug_insn (op);
2996 }
2997 while (op && (LABEL_P (op)
2998 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
2999 if (!op)
3000 return 0;
3001
3002 opsize = get_attr_length (op);
3003 if (opsize >= 0 && opsize < 8)
3004 return opsize - 1;
3005 return 0;
3006}
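
/* So, for example, when not optimizing for size a sufficiently-used
   label is aligned to 2**3 == 8 bytes (2**1 == 2 bytes with -Os), but
   padding is only emitted when it would be shorter than the next real
   insn: if the insn after the label is 3 bytes long we pad by at most
   2 bytes, and an insn of 8 bytes or more suppresses the padding
   entirely.  */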
776f1390 3007
3008/* Compute the real length of the extending load-and-op instructions. */
3009
3010int
3011rx_adjust_insn_length (rtx insn, int current_length)
3012{
3013 rtx extend, mem, offset;
3014 bool zero;
3015 int factor;
3016
3017 switch (INSN_CODE (insn))
3018 {
3019 default:
3020 return current_length;
3021
3022 case CODE_FOR_plussi3_zero_extendhi:
3023 case CODE_FOR_andsi3_zero_extendhi:
3024 case CODE_FOR_iorsi3_zero_extendhi:
3025 case CODE_FOR_xorsi3_zero_extendhi:
3026 case CODE_FOR_divsi3_zero_extendhi:
3027 case CODE_FOR_udivsi3_zero_extendhi:
3028 case CODE_FOR_minussi3_zero_extendhi:
3029 case CODE_FOR_smaxsi3_zero_extendhi:
3030 case CODE_FOR_sminsi3_zero_extendhi:
3031 case CODE_FOR_multsi3_zero_extendhi:
f7fcec1a 3032 case CODE_FOR_comparesi3_zero_extendhi:
776f1390 3033 zero = true;
3034 factor = 2;
3035 break;
3036
3037 case CODE_FOR_plussi3_sign_extendhi:
3038 case CODE_FOR_andsi3_sign_extendhi:
3039 case CODE_FOR_iorsi3_sign_extendhi:
3040 case CODE_FOR_xorsi3_sign_extendhi:
3041 case CODE_FOR_divsi3_sign_extendhi:
3042 case CODE_FOR_udivsi3_sign_extendhi:
3043 case CODE_FOR_minussi3_sign_extendhi:
3044 case CODE_FOR_smaxsi3_sign_extendhi:
3045 case CODE_FOR_sminsi3_sign_extendhi:
3046 case CODE_FOR_multsi3_sign_extendhi:
f7fcec1a 3047 case CODE_FOR_comparesi3_sign_extendhi:
776f1390 3048 zero = false;
3049 factor = 2;
3050 break;
3051
3052 case CODE_FOR_plussi3_zero_extendqi:
3053 case CODE_FOR_andsi3_zero_extendqi:
3054 case CODE_FOR_iorsi3_zero_extendqi:
3055 case CODE_FOR_xorsi3_zero_extendqi:
3056 case CODE_FOR_divsi3_zero_extendqi:
3057 case CODE_FOR_udivsi3_zero_extendqi:
3058 case CODE_FOR_minussi3_zero_extendqi:
3059 case CODE_FOR_smaxsi3_zero_extendqi:
3060 case CODE_FOR_sminsi3_zero_extendqi:
3061 case CODE_FOR_multsi3_zero_extendqi:
f7fcec1a 3062 case CODE_FOR_comparesi3_zero_extendqi:
776f1390 3063 zero = true;
3064 factor = 1;
3065 break;
3066
3067 case CODE_FOR_plussi3_sign_extendqi:
3068 case CODE_FOR_andsi3_sign_extendqi:
3069 case CODE_FOR_iorsi3_sign_extendqi:
3070 case CODE_FOR_xorsi3_sign_extendqi:
3071 case CODE_FOR_divsi3_sign_extendqi:
3072 case CODE_FOR_udivsi3_sign_extendqi:
3073 case CODE_FOR_minussi3_sign_extendqi:
3074 case CODE_FOR_smaxsi3_sign_extendqi:
3075 case CODE_FOR_sminsi3_sign_extendqi:
3076 case CODE_FOR_multsi3_sign_extendqi:
f7fcec1a 3077 case CODE_FOR_comparesi3_sign_extendqi:
776f1390 3078 zero = false;
3079 factor = 1;
3080 break;
3081 }
3082
3083 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3084 extend = single_set (insn);
3085 gcc_assert (extend != NULL_RTX);
3086
3087 extend = SET_SRC (extend);
3088 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3089 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3090 extend = XEXP (extend, 0);
3091 else
3092 extend = XEXP (extend, 1);
3093
3094 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3095 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3096
3097 mem = XEXP (extend, 0);
3098 gcc_checking_assert (MEM_P (mem));
3099 if (REG_P (XEXP (mem, 0)))
3100 return (zero && factor == 1) ? 2 : 3;
3101
3102 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3103 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3104 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3105
3106 offset = XEXP (XEXP (mem, 0), 1);
3107 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3108
3109 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3110 return (zero && factor == 1) ? 3 : 4;
3111
3112 return (zero && factor == 1) ? 4 : 5;
3113}
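
/* For example (values read off the table above): a zero-extending byte
   form such as plussi3_zero_extendqi is the compact case, so it is 2
   bytes with a plain register address, 3 bytes with a displacement of
   0..255, and 4 bytes otherwise.  The sign-extending and halfword
   variants are one byte longer in each case, with the halfword
   displacement limit scaled by factor == 2 (0..510).  */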
9d2f1b03 3114\f
9f9a3b39 3115#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3116#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3117#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3118#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3119#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3120#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3121#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3122#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3123
24833e1a 3124#undef TARGET_FUNCTION_VALUE
3125#define TARGET_FUNCTION_VALUE rx_function_value
3126
3127#undef TARGET_RETURN_IN_MSB
3128#define TARGET_RETURN_IN_MSB rx_return_in_msb
3129
3130#undef TARGET_IN_SMALL_DATA_P
3131#define TARGET_IN_SMALL_DATA_P rx_in_small_data
3132
3133#undef TARGET_RETURN_IN_MEMORY
3134#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3135
3136#undef TARGET_HAVE_SRODATA_SECTION
3137#define TARGET_HAVE_SRODATA_SECTION true
3138
3139#undef TARGET_ASM_SELECT_RTX_SECTION
3140#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3141
3142#undef TARGET_ASM_SELECT_SECTION
3143#define TARGET_ASM_SELECT_SECTION rx_select_section
3144
3145#undef TARGET_INIT_BUILTINS
3146#define TARGET_INIT_BUILTINS rx_init_builtins
3147
103700c7 3148#undef TARGET_BUILTIN_DECL
3149#define TARGET_BUILTIN_DECL rx_builtin_decl
3150
24833e1a 3151#undef TARGET_EXPAND_BUILTIN
3152#define TARGET_EXPAND_BUILTIN rx_expand_builtin
3153
3154#undef TARGET_ASM_CONSTRUCTOR
3155#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3156
3157#undef TARGET_ASM_DESTRUCTOR
3158#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3159
3160#undef TARGET_STRUCT_VALUE_RTX
3161#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3162
3163#undef TARGET_ATTRIBUTE_TABLE
3164#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3165
3166#undef TARGET_ASM_FILE_START
3167#define TARGET_ASM_FILE_START rx_file_start
3168
3169#undef TARGET_MS_BITFIELD_LAYOUT_P
3170#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3171
3172#undef TARGET_LEGITIMATE_ADDRESS_P
3173#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3174
5afe50d9 3175#undef TARGET_MODE_DEPENDENT_ADDRESS_P
3176#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3177
24833e1a 3178#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3179#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3180
3181#undef TARGET_ASM_FUNCTION_PROLOGUE
3182#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3183
3184#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3185#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3186
61fc50a0 3187#undef TARGET_FUNCTION_OK_FOR_SIBCALL
3188#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3189
ee4e8428 3190#undef TARGET_FUNCTION_ARG
3191#define TARGET_FUNCTION_ARG rx_function_arg
3192
3193#undef TARGET_FUNCTION_ARG_ADVANCE
3194#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3195
bd99ba64 3196#undef TARGET_FUNCTION_ARG_BOUNDARY
3197#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3198
24833e1a 3199#undef TARGET_SET_CURRENT_FUNCTION
3200#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3201
24833e1a 3202#undef TARGET_ASM_INTEGER
3203#define TARGET_ASM_INTEGER rx_assemble_integer
3204
3205#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3206#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3207
3208#undef TARGET_MAX_ANCHOR_OFFSET
3209#define TARGET_MAX_ANCHOR_OFFSET 32
3210
3211#undef TARGET_ADDRESS_COST
3212#define TARGET_ADDRESS_COST rx_address_cost
3213
3214#undef TARGET_CAN_ELIMINATE
3215#define TARGET_CAN_ELIMINATE rx_can_eliminate
3216
b2d7ede1 3217#undef TARGET_CONDITIONAL_REGISTER_USAGE
3218#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3219
24833e1a 3220#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3221#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3222
3223#undef TARGET_TRAMPOLINE_INIT
3224#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3225
6bb30542 3226#undef TARGET_PRINT_OPERAND
3227#define TARGET_PRINT_OPERAND rx_print_operand
3228
3229#undef TARGET_PRINT_OPERAND_ADDRESS
3230#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3231
9d2f1b03 3232#undef TARGET_CC_MODES_COMPATIBLE
3233#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3234
3235#undef TARGET_MEMORY_MOVE_COST
3236#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3237
1af17d44 3238#undef TARGET_OPTION_OVERRIDE
3239#define TARGET_OPTION_OVERRIDE rx_option_override
3240
bd7d2835 3241#undef TARGET_PROMOTE_FUNCTION_MODE
3242#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3243
42d89991 3244#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3245#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
02e53c17 3246
77de4b78 3247#undef TARGET_FLAGS_REGNUM
3248#define TARGET_FLAGS_REGNUM CC_REG
3249
ca316360 3250#undef TARGET_LEGITIMATE_CONSTANT_P
f7fcec1a 3251#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
ca316360 3252
6e507301 3253#undef TARGET_LEGITIMIZE_ADDRESS
3254#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3255
24833e1a 3256struct gcc_target targetm = TARGET_INITIALIZER;
3257
103700c7 3258#include "gt-rx.h"