65a324b4 1/* Subroutines used for code generation on Renesas RX processors.
23a5b65a 2 Copyright (C) 2008-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21/* To Do:
22
23 * Re-enable memory-to-memory copies and fix up reload. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "tree.h"
30#include "varasm.h"
31#include "stor-layout.h"
32#include "calls.h"
33#include "rtl.h"
34#include "regs.h"
35#include "hard-reg-set.h"
36#include "insn-config.h"
37#include "conditions.h"
38#include "output.h"
39#include "insn-attr.h"
40#include "flags.h"
41#include "hashtab.h"
42#include "hash-set.h"
43#include "vec.h"
44#include "machmode.h"
45#include "input.h"
46#include "function.h"
47#include "expr.h"
48#include "optabs.h"
49#include "libfuncs.h"
50#include "recog.h"
718f9c0f 51#include "diagnostic-core.h"
52#include "toplev.h"
53#include "reload.h"
54#include "dominance.h"
55#include "cfg.h"
56#include "cfgrtl.h"
57#include "cfganal.h"
58#include "lcm.h"
59#include "cfgbuild.h"
60#include "cfgcleanup.h"
61#include "predict.h"
62#include "basic-block.h"
63#include "df.h"
64#include "ggc.h"
65#include "tm_p.h"
66#include "debug.h"
67#include "target.h"
68#include "target-def.h"
69#include "langhooks.h"
96e45421 70#include "opts.h"
71#include "hash-map.h"
72#include "is-a.h"
73#include "plugin-api.h"
74#include "ipa-ref.h"
a8781821 75#include "cgraph.h"
9b2b7279 76#include "builtins.h"
77
78static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
79static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
80static unsigned int rx_num_interrupt_regs;
65a324b4 81\f
82static unsigned int
83rx_gp_base_regnum (void)
84{
85 if (rx_gp_base_regnum_val == INVALID_REGNUM)
86 gcc_unreachable ();
87 return rx_gp_base_regnum_val;
88}
89
90static unsigned int
91rx_pid_base_regnum (void)
92{
93 if (rx_pid_base_regnum_val == INVALID_REGNUM)
94 gcc_unreachable ();
95 return rx_pid_base_regnum_val;
96}
97
98/* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
99
100static tree
101rx_decl_for_addr (rtx op)
102{
103 if (GET_CODE (op) == MEM)
104 op = XEXP (op, 0);
105 if (GET_CODE (op) == CONST)
106 op = XEXP (op, 0);
107 while (GET_CODE (op) == PLUS)
108 op = XEXP (op, 0);
109 if (GET_CODE (op) == SYMBOL_REF)
110 return SYMBOL_REF_DECL (op);
111 return NULL_TREE;
112}
113
114static void rx_print_operand (FILE *, rtx, int);
115
116#define CC_FLAG_S (1 << 0)
117#define CC_FLAG_Z (1 << 1)
118#define CC_FLAG_O (1 << 2)
119#define CC_FLAG_C (1 << 3)
5f2f13fd 120#define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
e963cb1a 121
ef4bddc2 122static unsigned int flags_from_mode (machine_mode mode);
e963cb1a 123static unsigned int flags_from_code (enum rtx_code code);
9595a419 124\f
125/* Return true if OP is a reference to an object in a PID data area. */
126
127enum pid_type
128{
129 PID_NOT_PID = 0, /* The object is not in the PID data area. */
130 PID_ENCODED, /* The object is in the PID data area. */
131 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
132};
133
134static enum pid_type
135rx_pid_data_operand (rtx op)
136{
137 tree op_decl;
138
139 if (!TARGET_PID)
140 return PID_NOT_PID;
141
142 if (GET_CODE (op) == PLUS
143 && GET_CODE (XEXP (op, 0)) == REG
144 && GET_CODE (XEXP (op, 1)) == CONST
145 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
146 return PID_ENCODED;
147
148 op_decl = rx_decl_for_addr (op);
149
150 if (op_decl)
151 {
152 if (TREE_READONLY (op_decl))
153 return PID_UNENCODED;
154 }
155 else
156 {
157 /* Sigh, some special cases. */
158 if (GET_CODE (op) == SYMBOL_REF
159 || GET_CODE (op) == LABEL_REF)
160 return PID_UNENCODED;
161 }
162
163 return PID_NOT_PID;
164}
165
166static rtx
167rx_legitimize_address (rtx x,
168 rtx oldx ATTRIBUTE_UNUSED,
ef4bddc2 169 machine_mode mode ATTRIBUTE_UNUSED)
170{
171 if (rx_pid_data_operand (x) == PID_UNENCODED)
172 {
173 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
174 return rv;
175 }
176
177 if (GET_CODE (x) == PLUS
178 && GET_CODE (XEXP (x, 0)) == PLUS
179 && REG_P (XEXP (XEXP (x, 0), 0))
180 && REG_P (XEXP (x, 1)))
181 return force_reg (SImode, x);
182
183 return x;
184}
185
186/* Return true if OP is a reference to an object in a small data area. */
187
188static bool
189rx_small_data_operand (rtx op)
190{
191 if (rx_small_data_limit == 0)
192 return false;
193
194 if (GET_CODE (op) == SYMBOL_REF)
195 return SYMBOL_REF_SMALL_P (op);
196
197 return false;
198}
199
200static bool
ef4bddc2 201rx_is_legitimate_address (machine_mode mode, rtx x,
6b0e4cbb 202 bool strict ATTRIBUTE_UNUSED)
203{
204 if (RTX_OK_FOR_BASE (x, strict))
205 /* Register Indirect. */
206 return true;
207
208 if ((GET_MODE_SIZE (mode) == 4
209 || GET_MODE_SIZE (mode) == 2
210 || GET_MODE_SIZE (mode) == 1)
211 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
212 /* Pre-decrement Register Indirect or
213 Post-increment Register Indirect. */
214 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
215
216 switch (rx_pid_data_operand (x))
217 {
218 case PID_UNENCODED:
219 return false;
220 case PID_ENCODED:
221 return true;
222 default:
223 break;
224 }
225
226 if (GET_CODE (x) == PLUS)
227 {
228 rtx arg1 = XEXP (x, 0);
229 rtx arg2 = XEXP (x, 1);
230 rtx index = NULL_RTX;
231
232 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
233 index = arg2;
234 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
235 index = arg1;
236 else
237 return false;
238
239 switch (GET_CODE (index))
240 {
241 case CONST_INT:
242 {
243 /* Register Relative: REG + INT.
244 Only positive, mode-aligned, mode-sized
245 displacements are allowed. */
246 HOST_WIDE_INT val = INTVAL (index);
247 int factor;
248
249 if (val < 0)
250 return false;
e9c0470a 251
252 switch (GET_MODE_SIZE (mode))
253 {
254 default:
255 case 4: factor = 4; break;
256 case 2: factor = 2; break;
257 case 1: factor = 1; break;
258 }
259
5f2f13fd 260 if (val > (65535 * factor))
261 return false;
262 return (val % factor) == 0;
263 }
264
265 case REG:
266 /* Unscaled Indexed Register Indirect: REG + REG
267 Size has to be "QI", REG has to be valid. */
268 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
269
270 case MULT:
271 {
272 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
273 Factor has to equal the mode size, REG has to be valid. */
274 rtx factor;
275
276 factor = XEXP (index, 1);
277 index = XEXP (index, 0);
278
279 return REG_P (index)
280 && RTX_OK_FOR_BASE (index, strict)
281 && CONST_INT_P (factor)
282 && GET_MODE_SIZE (mode) == INTVAL (factor);
283 }
284
285 default:
286 return false;
287 }
288 }
289
290 /* Small data area accesses turn into register relative offsets. */
291 return rx_small_data_operand (x);
292}
293
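/* A minimal stand-alone sketch (not part of the original sources) of the
   "Register Relative" displacement rule enforced above: the offset must be
   non-negative, a multiple of the access size, and no larger than 65535
   scaled by that size.  The helper name and plain C types are illustrative
   assumptions only.  */

static int
example_valid_reg_relative_disp (long val, int mode_size)
{
  /* Mirror the switch above: anything other than 1 or 2 scales by 4.  */
  int factor = (mode_size == 1 || mode_size == 2) ? mode_size : 4;

  if (val < 0)
    return 0;                      /* Negative displacements are rejected.  */
  if (val > 65535L * factor)
    return 0;                      /* Too large for the scaled 16-bit field.  */
  return (val % factor) == 0;      /* Must be aligned to the access size.  */
}
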
 294/* Returns TRUE for simple memory addresses, i.e. ones
295 that do not involve register indirect addressing
296 or pre/post increment/decrement. */
297
298bool
ef4bddc2 299rx_is_restricted_memory_address (rtx mem, machine_mode mode)
65a324b4 300{
301 if (! rx_is_legitimate_address
302 (mode, mem, reload_in_progress || reload_completed))
303 return false;
304
305 switch (GET_CODE (mem))
306 {
307 case REG:
308 /* Simple memory addresses are OK. */
309 return true;
310
311 case PRE_DEC:
312 case POST_INC:
313 return false;
314
315 case PLUS:
316 {
317 rtx base, index;
318
319 /* Only allow REG+INT addressing. */
320 base = XEXP (mem, 0);
321 index = XEXP (mem, 1);
65a324b4 322
323 if (! RX_REG_P (base) || ! CONST_INT_P (index))
324 return false;
325
326 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
327 }
328
329 case SYMBOL_REF:
330 /* Can happen when small data is being supported.
331 Assume that it will be resolved into GP+INT. */
332 return true;
333
334 default:
335 gcc_unreachable ();
336 }
337}
338
339/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
340
341static bool
5bfed9a9 342rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
343{
344 if (GET_CODE (addr) == CONST)
345 addr = XEXP (addr, 0);
346
347 switch (GET_CODE (addr))
348 {
349 /* --REG and REG++ only work in SImode. */
350 case PRE_DEC:
351 case POST_INC:
352 return true;
353
354 case MINUS:
355 case PLUS:
356 if (! REG_P (XEXP (addr, 0)))
357 return true;
358
359 addr = XEXP (addr, 1);
360
361 switch (GET_CODE (addr))
362 {
363 case REG:
364 /* REG+REG only works in SImode. */
365 return true;
366
367 case CONST_INT:
368 /* REG+INT is only mode independent if INT is a
eb1c879c 369 multiple of 4, positive and will fit into 16-bits. */
65a324b4 370 if (((INTVAL (addr) & 3) == 0)
eb1c879c 371 && IN_RANGE (INTVAL (addr), 4, 0xfffc))
372 return false;
373 return true;
374
375 case SYMBOL_REF:
376 case LABEL_REF:
377 return true;
378
379 case MULT:
380 gcc_assert (REG_P (XEXP (addr, 0)));
381 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
382 /* REG+REG*SCALE is always mode dependent. */
383 return true;
384
385 default:
386 /* Not recognized, so treat as mode dependent. */
387 return true;
388 }
389
390 case CONST_INT:
391 case SYMBOL_REF:
392 case LABEL_REF:
393 case REG:
394 /* These are all mode independent. */
395 return false;
396
397 default:
398 /* Everything else is unrecognized,
399 so treat as mode dependent. */
400 return true;
401 }
402}
403\f
404/* A C compound statement to output to stdio stream FILE the
405 assembler syntax for an instruction operand that is a memory
406 reference whose address is ADDR. */
407
31e727b0 408static void
409rx_print_operand_address (FILE * file, rtx addr)
410{
411 switch (GET_CODE (addr))
412 {
413 case REG:
414 fprintf (file, "[");
415 rx_print_operand (file, addr, 0);
416 fprintf (file, "]");
417 break;
418
419 case PRE_DEC:
420 fprintf (file, "[-");
421 rx_print_operand (file, XEXP (addr, 0), 0);
422 fprintf (file, "]");
423 break;
424
425 case POST_INC:
426 fprintf (file, "[");
427 rx_print_operand (file, XEXP (addr, 0), 0);
428 fprintf (file, "+]");
429 break;
430
431 case PLUS:
432 {
433 rtx arg1 = XEXP (addr, 0);
434 rtx arg2 = XEXP (addr, 1);
435 rtx base, index;
436
437 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
438 base = arg1, index = arg2;
439 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
440 base = arg2, index = arg1;
441 else
442 {
443 rx_print_operand (file, arg1, 0);
444 fprintf (file, " + ");
445 rx_print_operand (file, arg2, 0);
446 break;
447 }
448
449 if (REG_P (index) || GET_CODE (index) == MULT)
450 {
451 fprintf (file, "[");
452 rx_print_operand (file, index, 'A');
453 fprintf (file, ",");
454 }
455 else /* GET_CODE (index) == CONST_INT */
456 {
457 rx_print_operand (file, index, 'A');
458 fprintf (file, "[");
459 }
460 rx_print_operand (file, base, 0);
461 fprintf (file, "]");
462 break;
463 }
464
465 case CONST:
466 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
467 {
468 addr = XEXP (addr, 0);
469 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
470
471 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
472 case UNSPEC:
473 addr = XVECEXP (addr, 0, 0);
474 gcc_assert (CONST_INT_P (addr));
475 }
476 /* Fall through. */
477 case LABEL_REF:
478 case SYMBOL_REF:
65a324b4 479 fprintf (file, "#");
878a9174 480 /* Fall through. */
481 default:
482 output_addr_const (file, addr);
483 break;
484 }
485}
486
487static void
488rx_print_integer (FILE * file, HOST_WIDE_INT val)
489{
490 if (IN_RANGE (val, -64, 64))
491 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
492 else
493 fprintf (file,
494 TARGET_AS100_SYNTAX
495 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
496 val);
497}
498
499static bool
500rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
501{
502 const char * op = integer_asm_op (size, is_aligned);
503
504 if (! CONST_INT_P (x))
505 return default_assemble_integer (x, size, is_aligned);
506
507 if (op == NULL)
508 return false;
509 fputs (op, asm_out_file);
510
511 rx_print_integer (asm_out_file, INTVAL (x));
512 fputc ('\n', asm_out_file);
513 return true;
514}
515
516
517/* Handles the insertion of a single operand into the assembler output.
518 The %<letter> directives supported are:
519
520 %A Print an operand without a leading # character.
521 %B Print an integer comparison name.
522 %C Print a control register name.
523 %F Print a condition code flag name.
878a9174 524 %G Register used for small-data-area addressing
525 %H Print high part of a DImode register, integer or address.
526 %L Print low part of a DImode register, integer or address.
31e727b0 527 %N Print the negation of the immediate value.
878a9174 528 %P Register used for PID addressing
65a324b4 529 %Q If the operand is a MEM, then correctly generate
530 register indirect or register relative addressing.
531 %R Like %Q but for zero-extending loads. */
65a324b4 532
31e727b0 533static void
534rx_print_operand (FILE * file, rtx op, int letter)
535{
e9c0470a 536 bool unsigned_load = false;
537 bool print_hash = true;
538
539 if (letter == 'A'
540 && ((GET_CODE (op) == CONST
541 && GET_CODE (XEXP (op, 0)) == UNSPEC)
542 || GET_CODE (op) == UNSPEC))
543 {
544 print_hash = false;
545 letter = 0;
546 }
e9c0470a 547
548 switch (letter)
549 {
550 case 'A':
551 /* Print an operand without a leading #. */
552 if (MEM_P (op))
553 op = XEXP (op, 0);
554
555 switch (GET_CODE (op))
556 {
557 case LABEL_REF:
558 case SYMBOL_REF:
559 output_addr_const (file, op);
560 break;
561 case CONST_INT:
562 fprintf (file, "%ld", (long) INTVAL (op));
563 break;
564 default:
565 rx_print_operand (file, op, 0);
566 break;
567 }
568 break;
569
570 case 'B':
571 {
572 enum rtx_code code = GET_CODE (op);
ef4bddc2 573 machine_mode mode = GET_MODE (XEXP (op, 0));
574 const char *ret;
575
576 if (mode == CC_Fmode)
577 {
578 /* C flag is undefined, and O flag carries unordered. None of the
579 branch combinations that include O use it helpfully. */
580 switch (code)
581 {
582 case ORDERED:
583 ret = "no";
584 break;
585 case UNORDERED:
586 ret = "o";
587 break;
588 case LT:
589 ret = "n";
590 break;
591 case GE:
592 ret = "pz";
593 break;
594 case EQ:
595 ret = "eq";
596 break;
597 case NE:
598 ret = "ne";
599 break;
600 default:
601 gcc_unreachable ();
602 }
603 }
604 else
605 {
72602cd1 606 unsigned int flags = flags_from_mode (mode);
e9c0470a 607
608 switch (code)
609 {
610 case LT:
72602cd1 611 ret = (flags & CC_FLAG_O ? "lt" : "n");
612 break;
613 case GE:
72602cd1 614 ret = (flags & CC_FLAG_O ? "ge" : "pz");
615 break;
616 case GT:
617 ret = "gt";
618 break;
619 case LE:
620 ret = "le";
621 break;
622 case GEU:
623 ret = "geu";
624 break;
625 case LTU:
626 ret = "ltu";
627 break;
628 case GTU:
629 ret = "gtu";
630 break;
631 case LEU:
632 ret = "leu";
633 break;
634 case EQ:
635 ret = "eq";
636 break;
637 case NE:
638 ret = "ne";
639 break;
640 default:
641 gcc_unreachable ();
642 }
72602cd1 643 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
644 }
645 fputs (ret, file);
646 break;
647 }
648
649 case 'C':
650 gcc_assert (CONST_INT_P (op));
651 switch (INTVAL (op))
652 {
653 case 0: fprintf (file, "psw"); break;
654 case 2: fprintf (file, "usp"); break;
655 case 3: fprintf (file, "fpsw"); break;
656 case 4: fprintf (file, "cpen"); break;
657 case 8: fprintf (file, "bpsw"); break;
658 case 9: fprintf (file, "bpc"); break;
659 case 0xa: fprintf (file, "isp"); break;
660 case 0xb: fprintf (file, "fintv"); break;
661 case 0xc: fprintf (file, "intb"); break;
662 default:
9aaa9e89 663 warning (0, "unrecognized control register number: %d - using 'psw'",
31e727b0 664 (int) INTVAL (op));
665 fprintf (file, "psw");
666 break;
667 }
668 break;
669
670 case 'F':
671 gcc_assert (CONST_INT_P (op));
672 switch (INTVAL (op))
673 {
674 case 0: case 'c': case 'C': fprintf (file, "C"); break;
675 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
676 case 2: case 's': case 'S': fprintf (file, "S"); break;
677 case 3: case 'o': case 'O': fprintf (file, "O"); break;
678 case 8: case 'i': case 'I': fprintf (file, "I"); break;
679 case 9: case 'u': case 'U': fprintf (file, "U"); break;
680 default:
681 gcc_unreachable ();
682 }
683 break;
684
685 case 'G':
686 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
687 break;
688
65a324b4 689 case 'H':
31e727b0 690 switch (GET_CODE (op))
65a324b4 691 {
692 case REG:
693 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
694 break;
695 case CONST_INT:
696 {
697 HOST_WIDE_INT v = INTVAL (op);
9595a419 698
699 fprintf (file, "#");
700 /* Trickery to avoid problems with shifting 32 bits at a time. */
701 v = v >> 16;
702 v = v >> 16;
703 rx_print_integer (file, v);
704 break;
705 }
706 case CONST_DOUBLE:
65a324b4 707 fprintf (file, "#");
708 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
709 break;
710 case MEM:
711 if (! WORDS_BIG_ENDIAN)
712 op = adjust_address (op, SImode, 4);
713 output_address (XEXP (op, 0));
714 break;
715 default:
716 gcc_unreachable ();
717 }
718 break;
719
720 case 'L':
31e727b0 721 switch (GET_CODE (op))
65a324b4 722 {
723 case REG:
724 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
725 break;
726 case CONST_INT:
727 fprintf (file, "#");
728 rx_print_integer (file, INTVAL (op) & 0xffffffff);
729 break;
730 case CONST_DOUBLE:
731 fprintf (file, "#");
732 rx_print_integer (file, CONST_DOUBLE_LOW (op));
733 break;
734 case MEM:
735 if (WORDS_BIG_ENDIAN)
736 op = adjust_address (op, SImode, 4);
737 output_address (XEXP (op, 0));
738 break;
739 default:
740 gcc_unreachable ();
741 }
742 break;
743
744 case 'N':
745 gcc_assert (CONST_INT_P (op));
746 fprintf (file, "#");
747 rx_print_integer (file, - INTVAL (op));
748 break;
749
750 case 'P':
751 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
752 break;
753
e9c0470a 754 case 'R':
8ae9698d 755 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
756 unsigned_load = true;
757 /* Fall through. */
758 case 'Q':
759 if (MEM_P (op))
760 {
761 HOST_WIDE_INT offset;
e9c0470a 762 rtx mem = op;
763
764 op = XEXP (op, 0);
765
766 if (REG_P (op))
767 offset = 0;
768 else if (GET_CODE (op) == PLUS)
769 {
770 rtx displacement;
771
772 if (REG_P (XEXP (op, 0)))
773 {
774 displacement = XEXP (op, 1);
775 op = XEXP (op, 0);
776 }
777 else
778 {
779 displacement = XEXP (op, 0);
780 op = XEXP (op, 1);
781 gcc_assert (REG_P (op));
782 }
783
784 gcc_assert (CONST_INT_P (displacement));
785 offset = INTVAL (displacement);
786 gcc_assert (offset >= 0);
787
788 fprintf (file, "%ld", offset);
789 }
790 else
791 gcc_unreachable ();
792
793 fprintf (file, "[");
794 rx_print_operand (file, op, 0);
795 fprintf (file, "].");
796
e9c0470a 797 switch (GET_MODE_SIZE (GET_MODE (mem)))
798 {
799 case 1:
800 gcc_assert (offset <= 65535 * 1);
801 fprintf (file, unsigned_load ? "UB" : "B");
802 break;
803 case 2:
804 gcc_assert (offset % 2 == 0);
805 gcc_assert (offset <= 65535 * 2);
806 fprintf (file, unsigned_load ? "UW" : "W");
65a324b4 807 break;
e9c0470a 808 case 4:
65a324b4 809 gcc_assert (offset % 4 == 0);
e9c0470a 810 gcc_assert (offset <= 65535 * 4);
811 fprintf (file, "L");
812 break;
813 default:
814 gcc_unreachable ();
815 }
816 break;
817 }
818
819 /* Fall through. */
820
821 default:
822 if (GET_CODE (op) == CONST
823 && GET_CODE (XEXP (op, 0)) == UNSPEC)
824 op = XEXP (op, 0);
825 else if (GET_CODE (op) == CONST
826 && GET_CODE (XEXP (op, 0)) == PLUS
827 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
828 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
829 {
830 if (print_hash)
831 fprintf (file, "#");
832 fprintf (file, "(");
833 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
834 fprintf (file, " + ");
835 output_addr_const (file, XEXP (XEXP (op, 0), 1));
836 fprintf (file, ")");
837 return;
838 }
839
840 switch (GET_CODE (op))
841 {
842 case MULT:
843 /* Should be the scaled part of an
844 indexed register indirect address. */
845 {
846 rtx base = XEXP (op, 0);
847 rtx index = XEXP (op, 1);
848
 849	 /* Check for a swapped index register and scaling factor.
850 Not sure if this can happen, but be prepared to handle it. */
851 if (CONST_INT_P (base) && REG_P (index))
852 {
853 rtx tmp = base;
854 base = index;
855 index = tmp;
856 }
857
858 gcc_assert (REG_P (base));
859 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
860 gcc_assert (CONST_INT_P (index));
 861	 /* Do not try to verify the value of the scale factor, as it is based
 862	 on the mode of the MEM, not the mode of the MULT (which
 863	 will always be SImode). */
864 fprintf (file, "%s", reg_names [REGNO (base)]);
865 break;
866 }
867
868 case MEM:
869 output_address (XEXP (op, 0));
870 break;
871
872 case PLUS:
873 output_address (op);
874 break;
875
876 case REG:
877 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
878 fprintf (file, "%s", reg_names [REGNO (op)]);
879 break;
880
881 case SUBREG:
882 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
883 fprintf (file, "%s", reg_names [subreg_regno (op)]);
884 break;
885
886 /* This will only be single precision.... */
887 case CONST_DOUBLE:
888 {
889 unsigned long val;
890 REAL_VALUE_TYPE rv;
891
892 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
893 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
894 if (print_hash)
895 fprintf (file, "#");
896 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
897 break;
898 }
899
900 case CONST_INT:
901 if (print_hash)
902 fprintf (file, "#");
903 rx_print_integer (file, INTVAL (op));
904 break;
905
906 case UNSPEC:
907 switch (XINT (op, 1))
908 {
909 case UNSPEC_PID_ADDR:
910 {
911 rtx sym, add;
912
913 if (print_hash)
914 fprintf (file, "#");
915 sym = XVECEXP (op, 0, 0);
916 add = NULL_RTX;
917 fprintf (file, "(");
918 if (GET_CODE (sym) == PLUS)
919 {
920 add = XEXP (sym, 1);
921 sym = XEXP (sym, 0);
922 }
923 output_addr_const (file, sym);
924 if (add != NULL_RTX)
925 {
926 fprintf (file, "+");
927 output_addr_const (file, add);
928 }
929 fprintf (file, "-__pid_base");
930 fprintf (file, ")");
931 return;
932 }
933 }
934 /* Fall through */
935
65a324b4 936 case CONST:
878a9174 937 case SYMBOL_REF:
938 case LABEL_REF:
939 case CODE_LABEL:
940 rx_print_operand_address (file, op);
941 break;
942
943 default:
944 gcc_unreachable ();
945 }
946 break;
947 }
948}
949
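/* An illustrative sketch (not part of the original sources) of the register
   relative operand syntax produced by the %Q/%R handling above: an optional
   decimal displacement, the base register in square brackets, then a size
   suffix (.B/.UB/.W/.UW/.L).  The helper below and its buffer handling are
   assumptions made purely for demonstration; the <stdio.h> declarations are
   assumed to be available via system.h above.  */

static void
example_format_reg_relative (char * buf, size_t len, long offset,
                             const char * reg, int size, int unsigned_load)
{
  const char * suffix
    = (size == 1) ? (unsigned_load ? "UB" : "B")
    : (size == 2) ? (unsigned_load ? "UW" : "W")
    : "L";

  if (offset)
    snprintf (buf, len, "%ld[%s].%s", offset, reg, suffix);
  else
    snprintf (buf, len, "[%s].%s", reg, suffix);
}

/* For example, offset 8, base "r1", size 2 and unsigned_load true
   produce "8[r1].UW".  */
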
950/* Maybe convert an operand into its PID format. */
951
952rtx
953rx_maybe_pidify_operand (rtx op, int copy_to_reg)
954{
955 if (rx_pid_data_operand (op) == PID_UNENCODED)
956 {
957 if (GET_CODE (op) == MEM)
958 {
959 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
960 op = replace_equiv_address (op, a);
961 }
962 else
963 {
964 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
965 }
966
967 if (copy_to_reg)
968 op = copy_to_mode_reg (GET_MODE (op), op);
969 }
970 return op;
971}
972
973/* Returns an assembler template for a move instruction. */
974
975char *
976rx_gen_move_template (rtx * operands, bool is_movu)
977{
31e727b0 978 static char out_template [64];
979 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
980 const char * src_template;
981 const char * dst_template;
982 rtx dest = operands[0];
983 rtx src = operands[1];
984
985 /* Decide which extension, if any, should be given to the move instruction. */
986 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
987 {
988 case QImode:
989 /* The .B extension is not valid when
990 loading an immediate into a register. */
991 if (! REG_P (dest) || ! CONST_INT_P (src))
992 extension = ".B";
993 break;
994 case HImode:
995 if (! REG_P (dest) || ! CONST_INT_P (src))
996 /* The .W extension is not valid when
997 loading an immediate into a register. */
998 extension = ".W";
999 break;
1000 case DFmode:
1001 case DImode:
1002 case SFmode:
1003 case SImode:
1004 extension = ".L";
1005 break;
1006 case VOIDmode:
1007 /* This mode is used by constants. */
1008 break;
1009 default:
1010 debug_rtx (src);
1011 gcc_unreachable ();
1012 }
1013
878a9174 1014 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
1015 {
1016 gcc_assert (GET_MODE (src) != DImode);
1017 gcc_assert (GET_MODE (src) != DFmode);
1018
1019 src_template = "(%A1 - __pid_base)[%P1]";
1020 }
878a9174 1021 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1022 {
1023 gcc_assert (GET_MODE (src) != DImode);
1024 gcc_assert (GET_MODE (src) != DFmode);
1025
1026 src_template = "%%gp(%A1)[%G1]";
1027 }
1028 else
1029 src_template = "%1";
1030
1031 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
1032 {
1033 gcc_assert (GET_MODE (dest) != DImode);
1034 gcc_assert (GET_MODE (dest) != DFmode);
1035
1036 dst_template = "%%gp(%A0)[%G0]";
1037 }
1038 else
1039 dst_template = "%0";
1040
1041 if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
1042 {
1043 gcc_assert (! is_movu);
1044
1045 if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
da02a644 1046 sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
69f5aa9b 1047 else
da02a644 1048 sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1049 }
1050 else
1051 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1052 extension, src_template, dst_template);
31e727b0 1053 return out_template;
65a324b4 1054}
1055\f
1056/* Return VALUE rounded up to the next ALIGNMENT boundary. */
1057
1058static inline unsigned int
1059rx_round_up (unsigned int value, unsigned int alignment)
1060{
1061 alignment -= 1;
1062 return (value + alignment) & (~ alignment);
1063}
1064
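/* A small illustrative self-check (not part of the original sources) for the
   rounding trick above: adding ALIGNMENT - 1 and masking with its complement
   rounds VALUE up to the next multiple of a power-of-two ALIGNMENT.  The
   helper assumes it is compiled inside this file, where rx_round_up is
   visible; it exists only to document the expected results.  */

static void
example_check_round_up (void)
{
  /* Only power-of-two alignments are meaningful here.  */
  gcc_assert (rx_round_up (0, 4) == 0);
  gcc_assert (rx_round_up (1, 4) == 4);
  gcc_assert (rx_round_up (13, 4) == 16);
  gcc_assert (rx_round_up (16, 4) == 16);
}
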
1065/* Return the number of bytes in the argument registers
1066 occupied by an argument of type TYPE and mode MODE. */
1067
3968a1c0 1068static unsigned int
ef4bddc2 1069rx_function_arg_size (machine_mode mode, const_tree type)
1070{
1071 unsigned int num_bytes;
1072
1073 num_bytes = (mode == BLKmode)
1074 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1075 return rx_round_up (num_bytes, UNITS_PER_WORD);
1076}
1077
1078#define NUM_ARG_REGS 4
1079#define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1080
1081/* Return an RTL expression describing the register holding a function
1082 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1083 be passed on the stack. CUM describes the previous parameters to the
1084 function and NAMED is false if the parameter is part of a variable
1085 parameter list, or the last named parameter before the start of a
1086 variable parameter list. */
1087
3968a1c0 1088static rtx
ef4bddc2 1089rx_function_arg (cumulative_args_t cum, machine_mode mode,
6b0e4cbb 1090 const_tree type, bool named)
1091{
1092 unsigned int next_reg;
d5cc9181 1093 unsigned int bytes_so_far = *get_cumulative_args (cum);
1094 unsigned int size;
1095 unsigned int rounded_size;
1096
1097 /* An exploded version of rx_function_arg_size. */
1098 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1099 /* If the size is not known it cannot be passed in registers. */
1100 if (size < 1)
1101 return NULL_RTX;
1102
1103 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1104
1105 /* Don't pass this arg via registers if there
1106 are insufficient registers to hold all of it. */
1107 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1108 return NULL_RTX;
1109
1110 /* Unnamed arguments and the last named argument in a
1111 variadic function are always passed on the stack. */
1112 if (!named)
1113 return NULL_RTX;
1114
1115 /* Structures must occupy an exact number of registers,
1116 otherwise they are passed on the stack. */
1117 if ((type == NULL || AGGREGATE_TYPE_P (type))
1118 && (size % UNITS_PER_WORD) != 0)
1119 return NULL_RTX;
1120
1121 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1122
1123 return gen_rtx_REG (mode, next_reg);
1124}
1125
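/* A stand-alone sketch (not part of the original sources) of the register
   assignment arithmetic used above: the first sixteen bytes of named,
   register-sized arguments travel in r1-r4, and the register number follows
   directly from the bytes consumed so far.  The aggregate and unnamed
   argument special cases handled above are deliberately ignored here.
   Returns the argument register number (1-4), or 0 for "on the stack".  */

static int
example_arg_register (unsigned int bytes_so_far, unsigned int arg_size)
{
  unsigned int rounded = (arg_size + 3) & ~3U;   /* Round up to 4 bytes.  */

  if (arg_size == 0 || bytes_so_far + rounded > 16)
    return 0;                                    /* Does not fit in r1-r4.  */

  return (bytes_so_far / 4) + 1;                 /* Next free argument register.  */
}
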
3968a1c0 1126static void
ef4bddc2 1127rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
6b0e4cbb 1128 const_tree type, bool named ATTRIBUTE_UNUSED)
3968a1c0 1129{
d5cc9181 1130 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1131}
1132
c2ed6cf8 1133static unsigned int
ef4bddc2 1134rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1135 const_tree type ATTRIBUTE_UNUSED)
1136{
631b20a7 1137 /* Older versions of the RX backend aligned all on-stack arguments
1138 to 32-bits. The RX C ABI however says that they should be
1139 aligned to their natural alignment. (See section 5.2.2 of the ABI). */
1140 if (TARGET_GCC_ABI)
1141 return STACK_BOUNDARY;
1142
1143 if (type)
1144 {
1145 if (DECL_P (type))
1146 return DECL_ALIGN (type);
1147 return TYPE_ALIGN (type);
1148 }
1149
1150 return PARM_BOUNDARY;
1151}
1152
1153/* Return an RTL describing where a function return value of type RET_TYPE
1154 is held. */
1155
1156static rtx
1157rx_function_value (const_tree ret_type,
1158 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1159 bool outgoing ATTRIBUTE_UNUSED)
1160{
ef4bddc2 1161 machine_mode mode = TYPE_MODE (ret_type);
1162
1163 /* RX ABI specifies that small integer types are
1164 promoted to int when returned by a function. */
1165 if (GET_MODE_SIZE (mode) > 0
1166 && GET_MODE_SIZE (mode) < 4
1167 && ! COMPLEX_MODE_P (mode)
1168 )
1169 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1170
1171 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1172}
1173
1174/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1175 regard to function returns as does TARGET_FUNCTION_VALUE. */
1176
ef4bddc2 1177static machine_mode
e2f289f3 1178rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
ef4bddc2 1179 machine_mode mode,
197a830e 1180 int * punsignedp ATTRIBUTE_UNUSED,
1181 const_tree funtype ATTRIBUTE_UNUSED,
1182 int for_return)
1183{
1184 if (for_return != 1
1185 || GET_MODE_SIZE (mode) >= 4
bcddd3b9 1186 || COMPLEX_MODE_P (mode)
1187 || GET_MODE_SIZE (mode) < 1)
1188 return mode;
1189
1190 return SImode;
1191}
1192
1193static bool
1194rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1195{
1196 HOST_WIDE_INT size;
1197
1198 if (TYPE_MODE (type) != BLKmode
1199 && ! AGGREGATE_TYPE_P (type))
1200 return false;
1201
1202 size = int_size_in_bytes (type);
1203 /* Large structs and those whose size is not an
1204 exact multiple of 4 are returned in memory. */
1205 return size < 1
1206 || size > 16
1207 || (size % UNITS_PER_WORD) != 0;
1208}
1209
1210static rtx
1211rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1212 int incoming ATTRIBUTE_UNUSED)
1213{
1214 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
1215}
1216
1217static bool
1218rx_return_in_msb (const_tree valtype)
1219{
1220 return TARGET_BIG_ENDIAN_DATA
1221 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1222}
1223
1224/* Returns true if the provided function has the specified attribute. */
1225
1226static inline bool
1227has_func_attr (const_tree decl, const char * func_attr)
1228{
1229 if (decl == NULL_TREE)
1230 decl = current_function_decl;
1231
1232 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1233}
1234
9595a419 1235/* Returns true if the provided function has the "fast_interrupt" attribute. */
1236
1237static inline bool
1238is_fast_interrupt_func (const_tree decl)
1239{
9595a419 1240 return has_func_attr (decl, "fast_interrupt");
1241}
1242
9595a419 1243/* Returns true if the provided function has the "interrupt" attribute. */
1244
1245static inline bool
9595a419 1246is_interrupt_func (const_tree decl)
65a324b4 1247{
9595a419 1248 return has_func_attr (decl, "interrupt");
1249}
1250
1251/* Returns true if the provided function has the "naked" attribute. */
1252
1253static inline bool
1254is_naked_func (const_tree decl)
1255{
1256 return has_func_attr (decl, "naked");
1257}
1258\f
1259static bool use_fixed_regs = false;
1260
5efd84c5 1261static void
1262rx_conditional_register_usage (void)
1263{
1264 static bool using_fixed_regs = false;
1265
1266 if (TARGET_PID)
1267 {
1268 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1269 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1270 }
1271
65a324b4 1272 if (rx_small_data_limit > 0)
1273 {
1274 if (TARGET_PID)
1275 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1276 else
1277 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1278
1279 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1280 }
1281
1282 if (use_fixed_regs != using_fixed_regs)
1283 {
1284 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1285 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1286
1287 if (use_fixed_regs)
1288 {
1289 unsigned int r;
1290
1291 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1292 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1293
1294 /* This is for fast interrupt handlers. Any register in
1295 the range r10 to r13 (inclusive) that is currently
1296 marked as fixed is now a viable, call-used register. */
1297 for (r = 10; r <= 13; r++)
1298 if (fixed_regs[r])
1299 {
1300 fixed_regs[r] = 0;
1301 call_used_regs[r] = 1;
1302 }
1303
1304 /* Mark r7 as fixed. This is just a hack to avoid
1305 altering the reg_alloc_order array so that the newly
1306 freed r10-r13 registers are the preferred registers. */
1307 fixed_regs[7] = call_used_regs[7] = 1;
1308 }
1309 else
1310 {
1311 /* Restore the normal register masks. */
1312 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1313 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1314 }
1315
1316 using_fixed_regs = use_fixed_regs;
1317 }
1318}
1319
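/* A hypothetical stand-alone sketch (not part of the original sources) of
   the base register selection above: when PID addressing (TARGET_PID) is in
   force the PID base claims the first register below the interrupt
   registers, and the small data (GP) base, if needed, sits immediately
   below it.  GP_BASE_REGNUM is taken as a parameter here instead of coming
   from rx.h.  */

static void
example_base_regnums (unsigned int gp_base_regnum,
                      unsigned int num_interrupt_regs,
                      int target_pid, int use_small_data,
                      unsigned int * pid_base, unsigned int * gp_base)
{
  *pid_base = *gp_base = (unsigned int) -1;      /* i.e. INVALID_REGNUM.  */

  if (target_pid)
    *pid_base = gp_base_regnum - num_interrupt_regs;

  if (use_small_data)
    *gp_base = target_pid ? *pid_base - 1
                          : gp_base_regnum - num_interrupt_regs;
}
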
1320struct decl_chain
1321{
1322 tree fndecl;
1323 struct decl_chain * next;
1324};
1325
1326/* Stack of decls for which we have issued warnings. */
1327static struct decl_chain * warned_decls = NULL;
1328
1329static void
1330add_warned_decl (tree fndecl)
1331{
1332 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1333
1334 warned->fndecl = fndecl;
1335 warned->next = warned_decls;
1336 warned_decls = warned;
1337}
1338
1339/* Returns TRUE if FNDECL is on our list of warned about decls. */
1340
1341static bool
1342already_warned (tree fndecl)
1343{
1344 struct decl_chain * warned;
1345
1346 for (warned = warned_decls;
1347 warned != NULL;
1348 warned = warned->next)
1349 if (warned->fndecl == fndecl)
1350 return true;
1351
1352 return false;
1353}
1354
1355/* Perform any actions necessary before starting to compile FNDECL.
1356 For the RX we use this to make sure that we have the correct
1357 set of register masks selected. If FNDECL is NULL then we are
1358 compiling top level things. */
1359
1360static void
1361rx_set_current_function (tree fndecl)
1362{
1363 /* Remember the last target of rx_set_current_function. */
1364 static tree rx_previous_fndecl;
1365 bool prev_was_fast_interrupt;
1366 bool current_is_fast_interrupt;
1367
1368 /* Only change the context if the function changes. This hook is called
1369 several times in the course of compiling a function, and we don't want
1370 to slow things down too much or call target_reinit when it isn't safe. */
1371 if (fndecl == rx_previous_fndecl)
1372 return;
1373
9595a419 1374 prev_was_fast_interrupt
1375 = rx_previous_fndecl
1376 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1377
1378 current_is_fast_interrupt
1379 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1380
9595a419 1381 if (prev_was_fast_interrupt != current_is_fast_interrupt)
65a324b4 1382 {
9595a419 1383 use_fixed_regs = current_is_fast_interrupt;
65a324b4
NC
1384 target_reinit ();
1385 }
9595a419 1386
1387 if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
1388 {
1389 /* We do not warn about the first fast interrupt routine that
1390 we see. Instead we just push it onto the stack. */
1391 if (warned_decls == NULL)
1392 add_warned_decl (fndecl);
1393
1394 /* Otherwise if this fast interrupt is one for which we have
1395 not already issued a warning, generate one and then push
1396 it onto the stack as well. */
1397 else if (! already_warned (fndecl))
1398 {
1399 warning (0, "multiple fast interrupt routines seen: %qE and %qE",
1400 fndecl, warned_decls->fndecl);
1401 add_warned_decl (fndecl);
1402 }
1403 }
1404
1405 rx_previous_fndecl = fndecl;
1406}
1407\f
 1408/* Typical stack layout should look like this after the function's prologue:
1409
1410 | |
1411 -- ^
1412 | | \ |
1413 | | arguments saved | Increasing
1414 | | on the stack | addresses
1415 PARENT arg pointer -> | | /
1416 -------------------------- ---- -------------------
1417 CHILD |ret | return address
1418 --
1419 | | \
1420 | | call saved
1421 | | registers
1422 | | /
1423 --
1424 | | \
1425 | | local
1426 | | variables
1427 frame pointer -> | | /
1428 --
1429 | | \
1430 | | outgoing | Decreasing
1431 | | arguments | addresses
1432 current stack pointer -> | | / |
1433 -------------------------- ---- ------------------ V
1434 | | */
1435
1436static unsigned int
1437bit_count (unsigned int x)
1438{
1439 const unsigned int m1 = 0x55555555;
1440 const unsigned int m2 = 0x33333333;
1441 const unsigned int m4 = 0x0f0f0f0f;
1442
1443 x -= (x >> 1) & m1;
1444 x = (x & m2) + ((x >> 2) & m2);
1445 x = (x + (x >> 4)) & m4;
1446 x += x >> 8;
1447
1448 return (x + (x >> 16)) & 0x3f;
1449}
1450
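/* An illustrative cross-check (not part of the original sources) for the
   SWAR population count above: each step folds pairwise sums of 1-, 2- and
   4-bit fields, so the result equals the naive one-bit-at-a-time count
   computed by the reference loop below.  */

static unsigned int
example_bit_count_reference (unsigned int x)
{
  unsigned int count = 0;

  while (x)
    {
      count += x & 1;
      x >>= 1;
    }
  /* For every x: bit_count (x) == example_bit_count_reference (x).  */
  return count;
}
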
1451#define MUST_SAVE_ACC_REGISTER \
1452 (TARGET_SAVE_ACC_REGISTER \
1453 && (is_interrupt_func (NULL_TREE) \
1454 || is_fast_interrupt_func (NULL_TREE)))
1455
1456/* Returns either the lowest numbered and highest numbered registers that
1457 occupy the call-saved area of the stack frame, if the registers are
1458 stored as a contiguous block, or else a bitmask of the individual
1459 registers if they are stored piecemeal.
1460
1461 Also computes the size of the frame and the size of the outgoing
1462 arguments block (in bytes). */
1463
1464static void
1465rx_get_stack_layout (unsigned int * lowest,
1466 unsigned int * highest,
1467 unsigned int * register_mask,
1468 unsigned int * frame_size,
1469 unsigned int * stack_size)
1470{
1471 unsigned int reg;
1472 unsigned int low;
1473 unsigned int high;
1474 unsigned int fixed_reg = 0;
1475 unsigned int save_mask;
1476 unsigned int pushed_mask;
1477 unsigned int unneeded_pushes;
1478
dafcb54e 1479 if (is_naked_func (NULL_TREE))
1480 {
1481 /* Naked functions do not create their own stack frame.
dafcb54e 1482 Instead the programmer must do that for us. */
1483 * lowest = 0;
1484 * highest = 0;
1485 * register_mask = 0;
1486 * frame_size = 0;
1487 * stack_size = 0;
1488 return;
1489 }
1490
aea8fc97 1491 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
65a324b4 1492 {
e14ca1ce 1493 if ((df_regs_ever_live_p (reg)
1494 /* Always save all call clobbered registers inside non-leaf
1495 interrupt handlers, even if they are not live - they may
1496 be used in (non-interrupt aware) routines called from this one. */
1497 || (call_used_regs[reg]
1498 && is_interrupt_func (NULL_TREE)
416ff32e 1499 && ! crtl->is_leaf))
1500 && (! call_used_regs[reg]
 1501	 /* Even call clobbered registers must
9595a419 1502 be pushed inside interrupt handlers. */
1503 || is_interrupt_func (NULL_TREE)
1504 /* Likewise for fast interrupt handlers, except registers r10 -
1505 r13. These are normally call-saved, but may have been set
1506 to call-used by rx_conditional_register_usage. If so then
1507 they can be used in the fast interrupt handler without
1508 saving them on the stack. */
1509 || (is_fast_interrupt_func (NULL_TREE)
1510 && ! IN_RANGE (reg, 10, 13))))
1511 {
1512 if (low == 0)
1513 low = reg;
1514 high = reg;
1515
1516 save_mask |= 1 << reg;
1517 }
1518
1519 /* Remember if we see a fixed register
1520 after having found the low register. */
1521 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1522 fixed_reg = reg;
1523 }
1524
1525 /* If we have to save the accumulator register, make sure
1526 that at least two registers are pushed into the frame. */
1527 if (MUST_SAVE_ACC_REGISTER
1528 && bit_count (save_mask) < 2)
1529 {
1530 save_mask |= (1 << 13) | (1 << 14);
1531 if (low == 0)
1532 low = 13;
1533 if (high == 0 || low == high)
1534 high = low + 1;
1535 }
1536
 1537 /* Decide if it would be faster to fill in the call-saved area of the stack
1538 frame using multiple PUSH instructions instead of a single PUSHM
1539 instruction.
1540
1541 SAVE_MASK is a bitmask of the registers that must be stored in the
1542 call-save area. PUSHED_MASK is a bitmask of the registers that would
1543 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1544 is a bitmask of those registers in pushed_mask that are not in
1545 save_mask.
1546
1547 We use a simple heuristic that says that it is better to use
1548 multiple PUSH instructions if the number of unnecessary pushes is
1549 greater than the number of necessary pushes.
1550
1551 We also use multiple PUSH instructions if there are any fixed registers
1552 between LOW and HIGH. The only way that this can happen is if the user
 1553 has specified -ffixed-<reg-name> on the command line and in such
1554 circumstances we do not want to touch the fixed registers at all.
1555
1556 FIXME: Is it worth improving this heuristic ? */
1557 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1558 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1559
1560 if ((fixed_reg && fixed_reg <= high)
1561 || (optimize_function_for_speed_p (cfun)
1562 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1563 {
1564 /* Use multiple pushes. */
1565 * lowest = 0;
1566 * highest = 0;
1567 * register_mask = save_mask;
1568 }
1569 else
1570 {
1571 /* Use one push multiple instruction. */
1572 * lowest = low;
1573 * highest = high;
1574 * register_mask = 0;
1575 }
1576
1577 * frame_size = rx_round_up
1578 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1579
1580 if (crtl->args.size > 0)
1581 * frame_size += rx_round_up
1582 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1583
1584 * stack_size = rx_round_up
1585 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1586}
1587
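/* A worked example (hypothetical values, not part of the original sources)
   of the PUSH-versus-PUSHM heuristic above.  Suppose only r4 and r9 need
   saving:

     save_mask       = (1 << 4) | (1 << 9)
     pushed_mask     = bits 4..9 inclusive      (what PUSHM r4-r9 would store)
     unneeded_pushes = pushed_mask & ~save_mask (bits 5, 6, 7 and 8)

   bit_count (save_mask) == 2 is less than bit_count (unneeded_pushes) == 4,
   so when optimizing for speed two individual PUSH instructions win over a
   single PUSHM r4-r9.  The helper below merely restates the mask arithmetic
   and assumes HIGH is below 31.  */

static unsigned int
example_unneeded_pushes (unsigned int save_mask,
                         unsigned int low, unsigned int high)
{
  unsigned int pushed_mask = (~0U << low) & ~(~0U << (high + 1));

  return pushed_mask & ~save_mask;
}
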
1588/* Generate a PUSHM instruction that matches the given operands. */
1589
1590void
1591rx_emit_stack_pushm (rtx * operands)
1592{
1593 HOST_WIDE_INT last_reg;
1594 rtx first_push;
1595
1596 gcc_assert (CONST_INT_P (operands[0]));
1597 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1598
1599 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1600 first_push = XVECEXP (operands[1], 0, 1);
1601 gcc_assert (SET_P (first_push));
1602 first_push = SET_SRC (first_push);
1603 gcc_assert (REG_P (first_push));
1604
1605 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1606 reg_names [REGNO (first_push) - last_reg],
1607 reg_names [REGNO (first_push)]);
1608}
1609
1610/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
1611
1612static rtx
1613gen_rx_store_vector (unsigned int low, unsigned int high)
1614{
1615 unsigned int i;
1616 unsigned int count = (high - low) + 2;
1617 rtx vector;
1618
1619 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1620
1621 XVECEXP (vector, 0, 0) =
265c835f 1622 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
65a324b4
NC
1623 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1624 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1625
1626 for (i = 0; i < count - 1; i++)
1627 XVECEXP (vector, 0, i + 1) =
265c835f 1628 gen_rtx_SET (VOIDmode,
65a324b4 1629 gen_rtx_MEM (SImode,
1630 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1631 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1632 gen_rtx_REG (SImode, high - i));
1633 return vector;
1634}
1635
1636/* Mark INSN as being frame related. If it is a PARALLEL
1637 then mark each element as being frame related as well. */
1638
1639static void
1640mark_frame_related (rtx insn)
1641{
1642 RTX_FRAME_RELATED_P (insn) = 1;
1643 insn = PATTERN (insn);
1644
1645 if (GET_CODE (insn) == PARALLEL)
1646 {
1647 unsigned int i;
1648
0d8f38d3 1649 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1650 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1651 }
1652}
1653
1654static bool
1655ok_for_max_constant (HOST_WIDE_INT val)
1656{
1657 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1658 /* If there is no constraint on the size of constants
1659 used as operands, then any value is legitimate. */
1660 return true;
1661
1662 /* rx_max_constant_size specifies the maximum number
1663 of bytes that can be used to hold a signed value. */
1664 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1665 ( 1 << (rx_max_constant_size * 8)));
1666}
1667
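/* An illustrative restatement (not part of the original sources) of the
   ranges accepted above.  For rx_max_constant_size of 1, 2 or 3 the signed
   value must lie in -256..256, -65536..65536 or -16777216..16777216
   respectively (the IN_RANGE test above is inclusive at both ends); 0 or 4
   means the constant is unrestricted.  Plain long long arithmetic is used
   here to avoid shifting a negative value.  */

static int
example_ok_for_max_constant (long long val, int max_constant_size)
{
  if (max_constant_size == 0 || max_constant_size == 4)
    return 1;                                    /* No restriction.  */

  return val >= -(1LL << (max_constant_size * 8))
         && val <= (1LL << (max_constant_size * 8));
}
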
1668/* Generate an ADD of SRC plus VAL into DEST.
1669 Handles the case where VAL is too big for max_constant_value.
1670 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
1671
1672static void
1673gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1674{
1675 rtx insn;
1676
1677 if (val == NULL_RTX || INTVAL (val) == 0)
1678 {
1679 gcc_assert (dest != src);
1680
1681 insn = emit_move_insn (dest, src);
1682 }
1683 else if (ok_for_max_constant (INTVAL (val)))
1684 insn = emit_insn (gen_addsi3 (dest, src, val));
1685 else
1686 {
5f2f13fd 1687 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1688 will not reject it. */
1689 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1690 insn = emit_insn (gen_addsi3 (dest, src, val));
1691
1692 if (is_frame_related)
1693 /* We have to provide our own frame related note here
1694 as the dwarf2out code cannot be expected to grok
1695 our unspec. */
1696 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1697 gen_rtx_SET (SImode, dest,
1698 gen_rtx_PLUS (SImode, src, val)));
1699 return;
1700 }
1701
1702 if (is_frame_related)
1703 RTX_FRAME_RELATED_P (insn) = 1;
1704 return;
1705}
1706
1707void
1708rx_expand_prologue (void)
1709{
1710 unsigned int stack_size;
1711 unsigned int frame_size;
1712 unsigned int mask;
1713 unsigned int low;
1714 unsigned int high;
9595a419 1715 unsigned int reg;
1716 rtx insn;
1717
1718 /* Naked functions use their own, programmer provided prologues. */
dafcb54e 1719 if (is_naked_func (NULL_TREE))
1720 return;
1721
1722 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1723
1724 if (flag_stack_usage_info)
1725 current_function_static_stack_size = frame_size + stack_size;
1726
1727 /* If we use any of the callee-saved registers, save them now. */
1728 if (mask)
1729 {
65a324b4 1730 /* Push registers in reverse order. */
aea8fc97 1731 for (reg = CC_REGNUM; reg --;)
1732 if (mask & (1 << reg))
1733 {
1734 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
9595a419 1735 mark_frame_related (insn);
1736 }
1737 }
1738 else if (low)
1739 {
1740 if (high == low)
1741 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1742 else
1743 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1744 * UNITS_PER_WORD),
1745 gen_rx_store_vector (low, high)));
1746 mark_frame_related (insn);
1747 }
1748
dafcb54e 1749 if (MUST_SAVE_ACC_REGISTER)
1750 {
1751 unsigned int acc_high, acc_low;
1752
1753 /* Interrupt handlers have to preserve the accumulator
1754 register if so requested by the user. Use the first
dafcb54e 1755 two pushed registers as intermediaries. */
1756 if (mask)
1757 {
1758 acc_low = acc_high = 0;
1759
aea8fc97 1760 for (reg = 1; reg < CC_REGNUM; reg ++)
1761 if (mask & (1 << reg))
1762 {
1763 if (acc_low == 0)
1764 acc_low = reg;
1765 else
1766 {
1767 acc_high = reg;
1768 break;
1769 }
1770 }
1771
1772 /* We have assumed that there are at least two registers pushed... */
1773 gcc_assert (acc_high != 0);
1774
1775 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1776 We just assume that they are zero. */
1777 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1778 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1779 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1780 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1781 }
1782 else
1783 {
1784 acc_low = low;
1785 acc_high = low + 1;
1786
1787 /* We have assumed that there are at least two registers pushed... */
1788 gcc_assert (acc_high <= high);
1789
1790 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1791 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1792 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1793 gen_rx_store_vector (acc_low, acc_high)));
1794 }
1795 }
1796
1797 /* If needed, set up the frame pointer. */
1798 if (frame_pointer_needed)
1799 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1800 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1801
1802 /* Allocate space for the outgoing args.
1803 If the stack frame has not already been set up then handle this as well. */
1804 if (stack_size)
1805 {
1806 if (frame_size)
1807 {
1808 if (frame_pointer_needed)
1809 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1810 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
65a324b4 1811 else
1812 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1813 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1814 true);
1815 }
1816 else
1817 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1818 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1819 }
1820 else if (frame_size)
1821 {
1822 if (! frame_pointer_needed)
1823 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1824 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
65a324b4 1825 else
1826 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1827 true);
65a324b4 1828 }
1829}
1830
1831static void
1832add_vector_labels (FILE *file, const char *aname)
1833{
1834 tree vec_attr;
1835 tree val_attr;
1836 const char *vname = "vect";
1837 const char *s;
1838 int vnum;
1839
1840 /* This node is for the vector/interrupt tag itself */
1841 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1842 if (!vec_attr)
1843 return;
1844
1845 /* Now point it at the first argument */
1846 vec_attr = TREE_VALUE (vec_attr);
1847
1848 /* Iterate through the arguments. */
1849 while (vec_attr)
1850 {
1851 val_attr = TREE_VALUE (vec_attr);
1852 switch (TREE_CODE (val_attr))
1853 {
1854 case STRING_CST:
1855 s = TREE_STRING_POINTER (val_attr);
1856 goto string_id_common;
1857
1858 case IDENTIFIER_NODE:
1859 s = IDENTIFIER_POINTER (val_attr);
1860
1861 string_id_common:
1862 if (strcmp (s, "$default") == 0)
1863 {
1864 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1865 fprintf (file, "$tableentry$default$%s:\n", vname);
1866 }
1867 else
1868 vname = s;
1869 break;
1870
1871 case INTEGER_CST:
1872 vnum = TREE_INT_CST_LOW (val_attr);
1873
1874 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1875 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1876 break;
1877
1878 default:
1879 ;
1880 }
1881
1882 vec_attr = TREE_CHAIN (vec_attr);
1883 }
1884
1885}
1886
1887static void
1888rx_output_function_prologue (FILE * file,
1889 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1890{
1891 add_vector_labels (file, "interrupt");
1892 add_vector_labels (file, "vector");
1893
1894 if (is_fast_interrupt_func (NULL_TREE))
1895 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1896
1897 if (is_interrupt_func (NULL_TREE))
1898 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1899
1900 if (is_naked_func (NULL_TREE))
1901 asm_fprintf (file, "\t; Note: Naked Function\n");
1902
1903 if (cfun->static_chain_decl != NULL)
1904 asm_fprintf (file, "\t; Note: Nested function declared "
1905 "inside another function.\n");
1906
1907 if (crtl->calls_eh_return)
1908 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1909}
1910
1911/* Generate a POPM or RTSD instruction that matches the given operands. */
1912
1913void
1914rx_emit_stack_popm (rtx * operands, bool is_popm)
1915{
1916 HOST_WIDE_INT stack_adjust;
1917 HOST_WIDE_INT last_reg;
1918 rtx first_push;
1919
1920 gcc_assert (CONST_INT_P (operands[0]));
1921 stack_adjust = INTVAL (operands[0]);
1922
1923 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1924 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1925
1926 first_push = XVECEXP (operands[1], 0, 1);
1927 gcc_assert (SET_P (first_push));
1928 first_push = SET_DEST (first_push);
1929 gcc_assert (REG_P (first_push));
1930
1931 if (is_popm)
1932 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1933 reg_names [REGNO (first_push)],
1934 reg_names [REGNO (first_push) + last_reg]);
1935 else
1936 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1937 (int) stack_adjust,
1938 reg_names [REGNO (first_push)],
1939 reg_names [REGNO (first_push) + last_reg]);
1940}
1941
1942/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1943
1944static rtx
1945gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1946{
1947 unsigned int i;
1948 unsigned int bias = 3;
1949 unsigned int count = (high - low) + bias;
1950 rtx vector;
1951
1952 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1953
1954 XVECEXP (vector, 0, 0) =
265c835f 1955 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
0a81f074 1956 plus_constant (Pmode, stack_pointer_rtx, adjust));
1957
1958 for (i = 0; i < count - 2; i++)
1959 XVECEXP (vector, 0, i + 1) =
265c835f 1960 gen_rtx_SET (VOIDmode,
1961 gen_rtx_REG (SImode, low + i),
1962 gen_rtx_MEM (SImode,
1963 i == 0 ? stack_pointer_rtx
0a81f074 1964 : plus_constant (Pmode, stack_pointer_rtx,
1965 i * UNITS_PER_WORD)));
1966
3810076b 1967 XVECEXP (vector, 0, count - 1) = ret_rtx;
1968
1969 return vector;
1970}
1971
1972/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1973
1974static rtx
1975gen_rx_popm_vector (unsigned int low, unsigned int high)
1976{
1977 unsigned int i;
1978 unsigned int count = (high - low) + 2;
1979 rtx vector;
1980
1981 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1982
1983 XVECEXP (vector, 0, 0) =
265c835f 1984 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
0a81f074 1985 plus_constant (Pmode, stack_pointer_rtx,
1986 (count - 1) * UNITS_PER_WORD));
1987
1988 for (i = 0; i < count - 1; i++)
1989 XVECEXP (vector, 0, i + 1) =
265c835f 1990 gen_rtx_SET (VOIDmode,
65a324b4
NC
1991 gen_rtx_REG (SImode, low + i),
1992 gen_rtx_MEM (SImode,
1993 i == 0 ? stack_pointer_rtx
0a81f074 1994 : plus_constant (Pmode, stack_pointer_rtx,
65a324b4
NC
1995 i * UNITS_PER_WORD)));
1996
1997 return vector;
1998}
be61ce52
NC
1999
2000/* Returns true if a simple return insn can be used. */
2001
2002bool
2003rx_can_use_simple_return (void)
2004{
2005 unsigned int low;
2006 unsigned int high;
2007 unsigned int frame_size;
2008 unsigned int stack_size;
2009 unsigned int register_mask;
2010
2011 if (is_naked_func (NULL_TREE)
2012 || is_fast_interrupt_func (NULL_TREE)
2013 || is_interrupt_func (NULL_TREE))
2014 return false;
2015
2016 rx_get_stack_layout (& low, & high, & register_mask,
2017 & frame_size, & stack_size);
2018
2019 return (register_mask == 0
2020 && (frame_size + stack_size) == 0
2021 && low == 0);
2022}
2023
65a324b4
NC
2024void
2025rx_expand_epilogue (bool is_sibcall)
2026{
2027 unsigned int low;
2028 unsigned int high;
2029 unsigned int frame_size;
2030 unsigned int stack_size;
2031 unsigned int register_mask;
2032 unsigned int regs_size;
9595a419 2033 unsigned int reg;
65a324b4
NC
2034 unsigned HOST_WIDE_INT total_size;
2035
0d8f38d3
NC
 2036 /* FIXME: We do not support indirect sibcalls at the moment because we
2037 cannot guarantee that the register holding the function address is a
2038 call-used register. If it is a call-saved register then the stack
2039 pop instructions generated in the epilogue will corrupt the address
2040 before it is used.
2041
2042 Creating a new call-used-only register class works but then the
2043 reload pass gets stuck because it cannot always find a call-used
2044 register for spilling sibcalls.
2045
2046 The other possible solution is for this pass to scan forward for the
2047 sibcall instruction (if it has been generated) and work out if it
2048 is an indirect sibcall using a call-saved register. If it is then
 2049 the address can be copied into a call-used register in this epilogue
2050 code and the sibcall instruction modified to use that register. */
2051
65a324b4
NC
2052 if (is_naked_func (NULL_TREE))
2053 {
0d8f38d3
NC
2054 gcc_assert (! is_sibcall);
2055
65a324b4
NC
 2056 /* Naked functions use their own, programmer-provided epilogues.
2057 But, in order to keep gcc happy we have to generate some kind of
2058 epilogue RTL. */
2059 emit_jump_insn (gen_naked_return ());
2060 return;
2061 }
2062
2063 rx_get_stack_layout (& low, & high, & register_mask,
2064 & frame_size, & stack_size);
2065
2066 total_size = frame_size + stack_size;
2067 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2068
2069 /* See if we are unable to use the special stack frame deconstruct and
2070 return instructions. In most cases we can use them, but the exceptions
2071 are:
2072
2073 - Sibling calling functions deconstruct the frame but do not return to
2074 their caller. Instead they branch to their sibling and allow their
2075 return instruction to return to this function's parent.
2076
9595a419 2077 - Fast and normal interrupt handling functions have to use special
65a324b4
NC
2078 return instructions.
2079
2080 - Functions where we have pushed a fragmented set of registers into the
2081 call-save area must have the same set of registers popped. */
2082 if (is_sibcall
2083 || is_fast_interrupt_func (NULL_TREE)
9595a419 2084 || is_interrupt_func (NULL_TREE)
65a324b4
NC
2085 || register_mask)
2086 {
2087 /* Cannot use the special instructions - deconstruct by hand. */
2088 if (total_size)
15ba5696
NC
2089 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2090 GEN_INT (total_size), false);
65a324b4 2091
dafcb54e 2092 if (MUST_SAVE_ACC_REGISTER)
65a324b4 2093 {
9595a419
NC
2094 unsigned int acc_low, acc_high;
2095
2096 /* Reverse the saving of the accumulator register onto the stack.
 2097	     Note that we must adjust the saved "low" accumulator value, as it
 2098	     is really the middle 32 bits of the accumulator.  */
2099 if (register_mask)
2100 {
2101 acc_low = acc_high = 0;
aea8fc97
NC
2102
2103 for (reg = 1; reg < CC_REGNUM; reg ++)
9595a419
NC
2104 if (register_mask & (1 << reg))
2105 {
2106 if (acc_low == 0)
2107 acc_low = reg;
2108 else
2109 {
2110 acc_high = reg;
2111 break;
2112 }
2113 }
2114 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2115 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2116 }
2117 else
2118 {
2119 acc_low = low;
2120 acc_high = low + 1;
2121 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2122 gen_rx_popm_vector (acc_low, acc_high)));
2123 }
2124
2125 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2126 gen_rtx_REG (SImode, acc_low),
2127 GEN_INT (16)));
2128 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2129 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2130 }
65a324b4 2131
9595a419
NC
2132 if (register_mask)
2133 {
aea8fc97 2134 for (reg = 0; reg < CC_REGNUM; reg ++)
65a324b4
NC
2135 if (register_mask & (1 << reg))
2136 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
2137 }
2138 else if (low)
2139 {
2140 if (high == low)
2141 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2142 else
2143 emit_insn (gen_stack_popm (GEN_INT (regs_size),
2144 gen_rx_popm_vector (low, high)));
2145 }
2146
2147 if (is_fast_interrupt_func (NULL_TREE))
0d8f38d3
NC
2148 {
2149 gcc_assert (! is_sibcall);
2150 emit_jump_insn (gen_fast_interrupt_return ());
2151 }
9595a419 2152 else if (is_interrupt_func (NULL_TREE))
0d8f38d3
NC
2153 {
2154 gcc_assert (! is_sibcall);
2155 emit_jump_insn (gen_exception_return ());
2156 }
65a324b4
NC
2157 else if (! is_sibcall)
2158 emit_jump_insn (gen_simple_return ());
2159
2160 return;
2161 }
2162
2163 /* If we allocated space on the stack, free it now. */
2164 if (total_size)
2165 {
2166 unsigned HOST_WIDE_INT rtsd_size;
2167
2168 /* See if we can use the RTSD instruction. */
2169 rtsd_size = total_size + regs_size;
2170 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2171 {
2172 if (low)
2173 emit_jump_insn (gen_pop_and_return
2174 (GEN_INT (rtsd_size),
2175 gen_rx_rtsd_vector (rtsd_size, low, high)));
2176 else
2177 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2178
2179 return;
2180 }
2181
15ba5696
NC
2182 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2183 GEN_INT (total_size), false);
65a324b4
NC
2184 }
2185
2186 if (low)
2187 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2188 gen_rx_rtsd_vector (regs_size,
2189 low, high)));
2190 else
2191 emit_jump_insn (gen_simple_return ());
2192}
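/* A worked example (illustrative, assuming a typical layout returned by
   rx_get_stack_layout): a non-interrupt, non-sibcall function that saved
   r6..r8 contiguously (register_mask == 0) and allocated 12 bytes of
   frame/stack space has total_size == 12 and regs_size == 12.  Since
   rtsd_size == 24 is a multiple of four and below 1024, the code above
   emits a single

       rtsd    #24, r6-r8

   which deallocates the frame, restores the registers and returns.  */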
2193
2194
2195/* Compute the offset (in bytes) between FROM (arg pointer
2196 or frame pointer) and TO (frame pointer or stack pointer).
2197 See ASCII art comment at the start of rx_expand_prologue
2198 for more information. */
2199
2200int
2201rx_initial_elimination_offset (int from, int to)
2202{
2203 unsigned int low;
2204 unsigned int high;
2205 unsigned int frame_size;
2206 unsigned int stack_size;
2207 unsigned int mask;
2208
2209 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2210
2211 if (from == ARG_POINTER_REGNUM)
2212 {
2213 /* Extend the computed size of the stack frame to
2214 include the registers pushed in the prologue. */
2215 if (low)
2216 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2217 else
2218 frame_size += bit_count (mask) * UNITS_PER_WORD;
2219
2220 /* Remember to include the return address. */
2221 frame_size += 1 * UNITS_PER_WORD;
2222
2223 if (to == FRAME_POINTER_REGNUM)
2224 return frame_size;
2225
2226 gcc_assert (to == STACK_POINTER_REGNUM);
2227 return frame_size + stack_size;
2228 }
2229
2230 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2231 return stack_size;
2232}
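/* Worked example (illustrative values only): with low == 6, high == 8,
   mask == 0, frame_size == 8 and stack_size == 12, eliminating
   ARG_POINTER_REGNUM to FRAME_POINTER_REGNUM yields
   8 + 3 * UNITS_PER_WORD + UNITS_PER_WORD == 24 bytes (the saved registers
   plus the return address); eliminating to STACK_POINTER_REGNUM adds the
   outgoing stack area for a total of 36, and FRAME_POINTER_REGNUM to
   STACK_POINTER_REGNUM is just stack_size, i.e. 12.  */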
2233
65a324b4
NC
2234/* Decide if a variable should go into one of the small data sections. */
2235
2236static bool
2237rx_in_small_data (const_tree decl)
2238{
2239 int size;
f961457f 2240 const char * section;
65a324b4
NC
2241
2242 if (rx_small_data_limit == 0)
2243 return false;
2244
2245 if (TREE_CODE (decl) != VAR_DECL)
2246 return false;
2247
2248 /* We do not put read-only variables into a small data area because
2249 they would be placed with the other read-only sections, far away
2250 from the read-write data sections, and we only have one small
2251 data area pointer.
 2252     Similarly, commons are placed in the .bss section, which might be
 2253     far away from (and out of alignment with respect to) the .data section. */
2254 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2255 return false;
2256
2257 section = DECL_SECTION_NAME (decl);
2258 if (section)
f961457f 2259 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
65a324b4
NC
2260
2261 size = int_size_in_bytes (TREE_TYPE (decl));
2262
2263 return (size > 0) && (size <= rx_small_data_limit);
2264}
2265
2266/* Return a section for X.
2267 The only special thing we do here is to honor small data. */
2268
2269static section *
ef4bddc2 2270rx_select_rtx_section (machine_mode mode,
65a324b4
NC
2271 rtx x,
2272 unsigned HOST_WIDE_INT align)
2273{
2274 if (rx_small_data_limit > 0
2275 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2276 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2277 return sdata_section;
2278
2279 return default_elf_select_rtx_section (mode, x, align);
2280}
2281
2282static section *
2283rx_select_section (tree decl,
2284 int reloc,
2285 unsigned HOST_WIDE_INT align)
2286{
2287 if (rx_small_data_limit > 0)
2288 {
2289 switch (categorize_decl_for_section (decl, reloc))
2290 {
2291 case SECCAT_SDATA: return sdata_section;
2292 case SECCAT_SBSS: return sbss_section;
2293 case SECCAT_SRODATA:
 2294	  /* Fall through.  We do not put small, read-only
 2295	     data into the C_2 section because we do not
 2296	     use the C_2 section at all: it is located
 2297	     with the other read-only data sections, far
 2298	     away from the read-write data sections, and
 2299	     we only have one small data
 2300	     pointer (r13). */
2301 default:
2302 break;
2303 }
2304 }
2305
2306 /* If we are supporting the Renesas assembler
2307 we cannot use mergeable sections. */
2308 if (TARGET_AS100_SYNTAX)
2309 switch (categorize_decl_for_section (decl, reloc))
2310 {
2311 case SECCAT_RODATA_MERGE_CONST:
2312 case SECCAT_RODATA_MERGE_STR_INIT:
2313 case SECCAT_RODATA_MERGE_STR:
2314 return readonly_data_section;
2315
2316 default:
2317 break;
2318 }
2319
2320 return default_elf_select_section (decl, reloc, align);
2321}
2322\f
2323enum rx_builtin
2324{
2325 RX_BUILTIN_BRK,
2326 RX_BUILTIN_CLRPSW,
2327 RX_BUILTIN_INT,
2328 RX_BUILTIN_MACHI,
2329 RX_BUILTIN_MACLO,
2330 RX_BUILTIN_MULHI,
2331 RX_BUILTIN_MULLO,
2332 RX_BUILTIN_MVFACHI,
2333 RX_BUILTIN_MVFACMI,
2334 RX_BUILTIN_MVFC,
2335 RX_BUILTIN_MVTACHI,
2336 RX_BUILTIN_MVTACLO,
2337 RX_BUILTIN_MVTC,
9595a419 2338 RX_BUILTIN_MVTIPL,
65a324b4
NC
2339 RX_BUILTIN_RACW,
2340 RX_BUILTIN_REVW,
2341 RX_BUILTIN_RMPA,
2342 RX_BUILTIN_ROUND,
65a324b4
NC
2343 RX_BUILTIN_SETPSW,
2344 RX_BUILTIN_WAIT,
2345 RX_BUILTIN_max
2346};
2347
87e91fca
DD
2348static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2349
65a324b4
NC
2350static void
2351rx_init_builtins (void)
2352{
4bbd2ea8
DD
2353#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2354 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2355 add_builtin_function ("__builtin_rx_" LC_NAME, \
2356 build_function_type_list (RET_TYPE##_type_node, \
2357 NULL_TREE), \
2358 RX_BUILTIN_##UC_NAME, \
2359 BUILT_IN_MD, NULL, NULL_TREE)
2360
65a324b4 2361#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
87e91fca 2362 rx_builtins[RX_BUILTIN_##UC_NAME] = \
5f2f13fd 2363 add_builtin_function ("__builtin_rx_" LC_NAME, \
65a324b4
NC
2364 build_function_type_list (RET_TYPE##_type_node, \
2365 ARG_TYPE##_type_node, \
2366 NULL_TREE), \
2367 RX_BUILTIN_##UC_NAME, \
2368 BUILT_IN_MD, NULL, NULL_TREE)
2369
2370#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
87e91fca 2371 rx_builtins[RX_BUILTIN_##UC_NAME] = \
65a324b4
NC
2372 add_builtin_function ("__builtin_rx_" LC_NAME, \
2373 build_function_type_list (RET_TYPE##_type_node, \
2374 ARG_TYPE1##_type_node,\
2375 ARG_TYPE2##_type_node,\
2376 NULL_TREE), \
2377 RX_BUILTIN_##UC_NAME, \
2378 BUILT_IN_MD, NULL, NULL_TREE)
2379
2380#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
87e91fca 2381 rx_builtins[RX_BUILTIN_##UC_NAME] = \
65a324b4
NC
2382 add_builtin_function ("__builtin_rx_" LC_NAME, \
2383 build_function_type_list (RET_TYPE##_type_node, \
2384 ARG_TYPE1##_type_node,\
2385 ARG_TYPE2##_type_node,\
2386 ARG_TYPE3##_type_node,\
2387 NULL_TREE), \
2388 RX_BUILTIN_##UC_NAME, \
2389 BUILT_IN_MD, NULL, NULL_TREE)
2390
4bbd2ea8 2391 ADD_RX_BUILTIN0 (BRK, "brk", void);
65a324b4
NC
2392 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2393 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2394 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2395 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2396 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2397 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2398 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
4bbd2ea8
DD
2399 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2400 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
65a324b4
NC
2401 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2402 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
4bbd2ea8 2403 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
65a324b4
NC
2404 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2405 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
9595a419 2406 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
65a324b4
NC
2407 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2408 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2409 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
4bbd2ea8 2410 ADD_RX_BUILTIN0 (WAIT, "wait", void);
65a324b4
NC
2411}
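/* For reference (an illustrative sketch, not generated output): the macros
   above give each builtin a prototype derived from its argument list, so
   user code sees declarations roughly equivalent to

       void __builtin_rx_brk (void);
       void __builtin_rx_machi (int, int);
       int  __builtin_rx_mvfachi (void);
       int  __builtin_rx_round (float);

   where "int" stands for the 32-bit intSI/integer type nodes used above.  */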
2412
87e91fca
DD
2413/* Return the RX builtin for CODE. */
2414
2415static tree
2416rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2417{
2418 if (code >= RX_BUILTIN_max)
2419 return error_mark_node;
2420
2421 return rx_builtins[code];
2422}
2423
65a324b4
NC
2424static rtx
2425rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2426{
2427 if (reg && ! REG_P (arg))
2428 arg = force_reg (SImode, arg);
2429
2430 emit_insn (gen_func (arg));
2431
2432 return NULL_RTX;
2433}
2434
2435static rtx
2436rx_expand_builtin_mvtc (tree exp)
2437{
2438 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2439 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2440
2441 if (! CONST_INT_P (arg1))
2442 return NULL_RTX;
2443
2444 if (! REG_P (arg2))
2445 arg2 = force_reg (SImode, arg2);
2446
2447 emit_insn (gen_mvtc (arg1, arg2));
2448
2449 return NULL_RTX;
2450}
2451
2452static rtx
2453rx_expand_builtin_mvfc (tree t_arg, rtx target)
2454{
2455 rtx arg = expand_normal (t_arg);
2456
2457 if (! CONST_INT_P (arg))
2458 return NULL_RTX;
2459
dafcb54e
NC
2460 if (target == NULL_RTX)
2461 return NULL_RTX;
2462
65a324b4
NC
2463 if (! REG_P (target))
2464 target = force_reg (SImode, target);
2465
2466 emit_insn (gen_mvfc (target, arg));
2467
2468 return target;
2469}
2470
9595a419
NC
2471static rtx
2472rx_expand_builtin_mvtipl (rtx arg)
2473{
2474 /* The RX610 does not support the MVTIPL instruction. */
2475 if (rx_cpu_type == RX610)
2476 return NULL_RTX;
2477
bf9afb7d 2478 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
9595a419
NC
2479 return NULL_RTX;
2480
2481 emit_insn (gen_mvtipl (arg));
2482
2483 return NULL_RTX;
2484}
2485
65a324b4
NC
2486static rtx
2487rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2488{
2489 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2490 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2491
2492 if (! REG_P (arg1))
2493 arg1 = force_reg (SImode, arg1);
2494
2495 if (! REG_P (arg2))
2496 arg2 = force_reg (SImode, arg2);
2497
2498 emit_insn (gen_func (arg1, arg2));
2499
2500 return NULL_RTX;
2501}
2502
2503static rtx
2504rx_expand_int_builtin_1_arg (rtx arg,
2505 rtx target,
2506 rtx (* gen_func)(rtx, rtx),
2507 bool mem_ok)
2508{
2509 if (! REG_P (arg))
2510 if (!mem_ok || ! MEM_P (arg))
2511 arg = force_reg (SImode, arg);
2512
2513 if (target == NULL_RTX || ! REG_P (target))
2514 target = gen_reg_rtx (SImode);
2515
2516 emit_insn (gen_func (target, arg));
2517
2518 return target;
2519}
2520
2521static rtx
2522rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2523{
2524 if (target == NULL_RTX || ! REG_P (target))
2525 target = gen_reg_rtx (SImode);
2526
2527 emit_insn (gen_func (target));
2528
2529 return target;
2530}
2531
2532static rtx
2533rx_expand_builtin_round (rtx arg, rtx target)
2534{
2535 if ((! REG_P (arg) && ! MEM_P (arg))
2536 || GET_MODE (arg) != SFmode)
2537 arg = force_reg (SFmode, arg);
2538
2539 if (target == NULL_RTX || ! REG_P (target))
2540 target = gen_reg_rtx (SImode);
2541
2542 emit_insn (gen_lrintsf2 (target, arg));
2543
2544 return target;
2545}
2546
bf9afb7d 2547static int
197a830e 2548valid_psw_flag (rtx op, const char *which)
bf9afb7d
DD
2549{
2550 static int mvtc_inform_done = 0;
2551
2552 if (GET_CODE (op) == CONST_INT)
2553 switch (INTVAL (op))
2554 {
2555 case 0: case 'c': case 'C':
2556 case 1: case 'z': case 'Z':
2557 case 2: case 's': case 'S':
2558 case 3: case 'o': case 'O':
2559 case 8: case 'i': case 'I':
2560 case 9: case 'u': case 'U':
2561 return 1;
2562 }
2563
2564 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2565 if (!mvtc_inform_done)
2566 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2567 mvtc_inform_done = 1;
2568
2569 return 0;
2570}
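/* Usage sketch (illustrative): the PSW builtins accept either the small
   numeric bit positions or the corresponding flag letters.  For example,
   __builtin_rx_setpsw ('I') and __builtin_rx_clrpsw (0) are accepted,
   whereas __builtin_rx_setpsw (42) is rejected with the error above and
   a hint to use __builtin_rx_mvtc for arbitrary PSW values.  */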
2571
65a324b4
NC
2572static rtx
2573rx_expand_builtin (tree exp,
2574 rtx target,
2575 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 2576 machine_mode mode ATTRIBUTE_UNUSED,
65a324b4
NC
2577 int ignore ATTRIBUTE_UNUSED)
2578{
2579 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
58a11859 2580 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
65a324b4
NC
2581 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2582 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2583
2584 switch (fcode)
2585 {
2586 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
bf9afb7d
DD
2587 case RX_BUILTIN_CLRPSW:
2588 if (!valid_psw_flag (op, "clrpsw"))
2589 return NULL_RTX;
2590 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2591 case RX_BUILTIN_SETPSW:
2592 if (!valid_psw_flag (op, "setpsw"))
2593 return NULL_RTX;
2594 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
65a324b4
NC
2595 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2596 (op, gen_int, false);
2597 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2598 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2599 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2600 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2601 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2602 (target, gen_mvfachi);
2603 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2604 (target, gen_mvfacmi);
2605 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2606 (op, gen_mvtachi, true);
2607 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2608 (op, gen_mvtaclo, true);
2609 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2610 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2611 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
9595a419 2612 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
65a324b4
NC
2613 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2614 (op, gen_racw, false);
2615 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2616 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2617 (op, target, gen_revw, false);
65a324b4
NC
2618 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2619
2620 default:
2621 internal_error ("bad builtin code");
2622 break;
2623 }
2624
2625 return NULL_RTX;
2626}
2627\f
2628/* Place an element into a constructor or destructor section.
2629 Like default_ctor_section_asm_out_constructor in varasm.c
2630 except that it uses .init_array (or .fini_array) and it
2631 handles constructor priorities. */
2632
2633static void
2634rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2635{
2636 section * s;
2637
2638 if (priority != DEFAULT_INIT_PRIORITY)
2639 {
2640 char buf[18];
2641
2642 sprintf (buf, "%s.%.5u",
2643 is_ctor ? ".init_array" : ".fini_array",
2644 priority);
2645 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2646 }
2647 else if (is_ctor)
2648 s = ctors_section;
2649 else
2650 s = dtors_section;
2651
2652 switch_to_section (s);
2653 assemble_align (POINTER_SIZE);
2654 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2655}
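/* For illustration: a constructor registered with priority 101 is placed in
   a section named ".init_array.00101" (via the "%.5u" format above), while
   DEFAULT_INIT_PRIORITY entries fall back to the plain ctors/dtors
   sections.  */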
2656
2657static void
2658rx_elf_asm_constructor (rtx symbol, int priority)
2659{
2660 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2661}
2662
2663static void
2664rx_elf_asm_destructor (rtx symbol, int priority)
2665{
2666 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2667}
2668\f
9595a419 2669/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
65a324b4
NC
2670
2671static tree
2672rx_handle_func_attribute (tree * node,
2673 tree name,
2674 tree args,
2675 int flags ATTRIBUTE_UNUSED,
2676 bool * no_add_attrs)
2677{
2678 gcc_assert (DECL_P (* node));
65a324b4
NC
2679
2680 if (TREE_CODE (* node) != FUNCTION_DECL)
2681 {
2682 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2683 name);
2684 * no_add_attrs = true;
2685 }
2686
2687 /* FIXME: We ought to check for conflicting attributes. */
2688
2689 /* FIXME: We ought to check that the interrupt and exception
2690 handler attributes have been applied to void functions. */
2691 return NULL_TREE;
2692}
2693
69c7a374
DD
2694/* Check "vector" attribute. */
2695
2696static tree
2697rx_handle_vector_attribute (tree * node,
2698 tree name,
2699 tree args,
2700 int flags ATTRIBUTE_UNUSED,
2701 bool * no_add_attrs)
2702{
2703 gcc_assert (DECL_P (* node));
2704 gcc_assert (args != NULL_TREE);
2705
2706 if (TREE_CODE (* node) != FUNCTION_DECL)
2707 {
2708 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2709 name);
2710 * no_add_attrs = true;
2711 }
2712
2713 return NULL_TREE;
2714}
2715
65a324b4
NC
2716/* Table of RX specific attributes. */
2717const struct attribute_spec rx_attribute_table[] =
2718{
62d784f7
KT
2719 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2720 affects_type_identity. */
2721 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2722 false },
69c7a374 2723 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
62d784f7
KT
2724 false },
2725 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2726 false },
69c7a374
DD
2727 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2728 false },
62d784f7 2729 { NULL, 0, 0, false, false, false, NULL, false }
65a324b4
NC
2730};
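/* Illustrative user-level usage of the attributes above (the argument forms
   shown are examples consistent with the min/max lengths in the table, and
   the vector number is an arbitrary value):

       void timer_handler (void) __attribute__ ((interrupt, vector (5)));
       void fast_handler (void) __attribute__ ((fast_interrupt));
       void boot_stub (void) __attribute__ ((naked));
*/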
2731
a32b99ad 2732/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
fac0f722
JM
2733
2734static void
a32b99ad 2735rx_override_options_after_change (void)
5f75e477
NC
2736{
2737 static bool first_time = TRUE;
5f75e477
NC
2738
2739 if (first_time)
2740 {
2741 /* If this is the first time through and the user has not disabled
a32b99ad
JM
2742 the use of RX FPU hardware then enable -ffinite-math-only,
2743 since the FPU instructions do not support NaNs and infinities. */
5f75e477 2744 if (TARGET_USE_FPU)
a32b99ad 2745 flag_finite_math_only = 1;
5f75e477 2746
5f75e477
NC
2747 first_time = FALSE;
2748 }
2749 else
2750 {
2751 /* Alert the user if they are changing the optimization options
 2752	 to use IEEE-compliant floating-point arithmetic with RX FPU insns. */
2753 if (TARGET_USE_FPU
a32b99ad
JM
2754 && !flag_finite_math_only)
2755 warning (0, "RX FPU instructions do not support NaNs and infinities");
5f75e477
NC
2756 }
2757}
2758
0685e770
DD
2759static void
2760rx_option_override (void)
2761{
abd016e6
JM
2762 unsigned int i;
2763 cl_deferred_option *opt;
9771b263 2764 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
abd016e6 2765
9771b263
DN
2766 if (v)
2767 FOR_EACH_VEC_ELT (*v, i, opt)
2768 {
2769 switch (opt->opt_index)
2770 {
2771 case OPT_mint_register_:
2772 switch (opt->value)
2773 {
2774 case 4:
2775 fixed_regs[10] = call_used_regs [10] = 1;
2776 /* Fall through. */
2777 case 3:
2778 fixed_regs[11] = call_used_regs [11] = 1;
2779 /* Fall through. */
2780 case 2:
2781 fixed_regs[12] = call_used_regs [12] = 1;
2782 /* Fall through. */
2783 case 1:
2784 fixed_regs[13] = call_used_regs [13] = 1;
2785 /* Fall through. */
2786 case 0:
2787 rx_num_interrupt_regs = opt->value;
2788 break;
2789 default:
2790 rx_num_interrupt_regs = 0;
2791 /* Error message already given because rx_handle_option
2792 returned false. */
2793 break;
2794 }
2795 break;
abd016e6 2796
9771b263
DN
2797 default:
2798 gcc_unreachable ();
2799 }
2800 }
abd016e6 2801
0685e770 2802 /* This target defaults to strict volatile bitfields. */
36acc1a2 2803 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
0685e770 2804 flag_strict_volatile_bitfields = 1;
a32b99ad
JM
2805
2806 rx_override_options_after_change ();
662666e5 2807
3fad4d00 2808 /* These values are bytes, not log. */
662666e5 2809 if (align_jumps == 0 && ! optimize_size)
3fad4d00 2810 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
662666e5 2811 if (align_loops == 0 && ! optimize_size)
3fad4d00 2812 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
662666e5 2813 if (align_labels == 0 && ! optimize_size)
3fad4d00 2814 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
0685e770
DD
2815}
2816
5f75e477 2817\f
65a324b4
NC
2818static bool
2819rx_allocate_stack_slots_for_args (void)
2820{
2821 /* Naked functions should not allocate stack slots for arguments. */
2822 return ! is_naked_func (NULL_TREE);
2823}
2824
2825static bool
2826rx_func_attr_inlinable (const_tree decl)
2827{
2828 return ! is_fast_interrupt_func (decl)
9595a419 2829 && ! is_interrupt_func (decl)
65a324b4
NC
2830 && ! is_naked_func (decl);
2831}
2832
d45eae79
SL
2833static bool
2834rx_warn_func_return (tree decl)
2835{
2836 /* Naked functions are implemented entirely in assembly, including the
2837 return sequence, so suppress warnings about this. */
2838 return !is_naked_func (decl);
2839}
2840
0d8f38d3
NC
2841/* Return nonzero if it is OK to make a tail-call to DECL (a
2842   function_decl, or NULL if this is an indirect call), using EXP. */
2843
2844static bool
dafcb54e 2845rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
0d8f38d3
NC
2846{
2847 /* Do not allow indirect tailcalls. The
2848 sibcall patterns do not support them. */
2849 if (decl == NULL)
2850 return false;
2851
2852 /* Never tailcall from inside interrupt handlers or naked functions. */
2853 if (is_fast_interrupt_func (NULL_TREE)
2854 || is_interrupt_func (NULL_TREE)
2855 || is_naked_func (NULL_TREE))
2856 return false;
2857
2858 return true;
2859}
2860
65a324b4
NC
2861static void
2862rx_file_start (void)
2863{
2864 if (! TARGET_AS100_SYNTAX)
2865 default_file_start ();
2866}
2867
2868static bool
2869rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2870{
27128fc3
NC
2871 /* The packed attribute overrides the MS behaviour. */
2872 return ! TYPE_PACKED (record_type);
65a324b4 2873}
65a324b4
NC
2874\f
2875/* Returns true if X is a legitimate constant for an immediate
2876 operand on the RX. X is already known to satisfy CONSTANT_P. */
2877
2878bool
ef4bddc2 2879rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
65a324b4 2880{
65a324b4
NC
2881 switch (GET_CODE (x))
2882 {
2883 case CONST:
2884 x = XEXP (x, 0);
2885
2886 if (GET_CODE (x) == PLUS)
2887 {
2888 if (! CONST_INT_P (XEXP (x, 1)))
2889 return false;
2890
2891 /* GCC would not pass us CONST_INT + CONST_INT so we
2892 know that we have {SYMBOL|LABEL} + CONST_INT. */
2893 x = XEXP (x, 0);
2894 gcc_assert (! CONST_INT_P (x));
2895 }
2896
2897 switch (GET_CODE (x))
2898 {
2899 case LABEL_REF:
2900 case SYMBOL_REF:
2901 return true;
2902
15ba5696 2903 case UNSPEC:
878a9174 2904 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
15ba5696 2905
65a324b4
NC
2906 default:
2907 /* FIXME: Can this ever happen ? */
e9c0470a 2908 gcc_unreachable ();
65a324b4
NC
2909 }
2910 break;
2911
2912 case LABEL_REF:
2913 case SYMBOL_REF:
2914 return true;
2915 case CONST_DOUBLE:
c9c27b72 2916 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
65a324b4
NC
2917 case CONST_VECTOR:
2918 return false;
2919 default:
2920 gcc_assert (CONST_INT_P (x));
2921 break;
2922 }
2923
15ba5696 2924 return ok_for_max_constant (INTVAL (x));
65a324b4
NC
2925}
2926
65a324b4 2927static int
ef4bddc2 2928rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
b413068c 2929 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
65a324b4
NC
2930{
2931 rtx a, b;
2932
2933 if (GET_CODE (addr) != PLUS)
2934 return COSTS_N_INSNS (1);
2935
2936 a = XEXP (addr, 0);
2937 b = XEXP (addr, 1);
2938
2939 if (REG_P (a) && REG_P (b))
2940 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2941 return COSTS_N_INSNS (4);
2942
2943 if (speed)
2944 /* [REG+OFF] is just as fast as [REG]. */
2945 return COSTS_N_INSNS (1);
2946
2947 if (CONST_INT_P (b)
2948 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2949 /* Try to discourage REG + <large OFF> when optimizing for size. */
2950 return COSTS_N_INSNS (2);
2951
2952 return COSTS_N_INSNS (1);
2953}
2954
2955static bool
2956rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2957{
2958 /* We can always eliminate to the frame pointer.
2959 We can eliminate to the stack pointer unless a frame
2960 pointer is needed. */
2961
2962 return to == FRAME_POINTER_REGNUM
2963 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2964}
2965\f
2966
2967static void
2968rx_trampoline_template (FILE * file)
2969{
2970 /* Output assembler code for a block containing the constant
2971 part of a trampoline, leaving space for the variable parts.
2972
2973 On the RX, (where r8 is the static chain regnum) the trampoline
2974 looks like:
2975
2976 mov #<static chain value>, r8
2977 mov #<function's address>, r9
2978 jmp r9
2979
 2980     In big-endian-data-mode, however, instructions are read into the CPU
 2981     4 bytes at a time.  These bytes are then swapped around before being
 2982     passed to the decoder.  So we must partition our trampoline into
 2983     4-byte packets and swap these packets around so that the instruction
 2984     reader will reverse the process.  But, in order to avoid splitting
 2985     the 32-bit constants across these packet boundaries (which would make
 2986     inserting them into the constructed trampoline very difficult), we have
 2987     to pad the instruction sequence with NOP insns, i.e.:
2988
2989 nop
2990 nop
2991 mov.l #<...>, r8
2992 nop
2993 nop
2994 mov.l #<...>, r9
2995 jmp r9
2996 nop
2997 nop */
2998
2999 if (! TARGET_BIG_ENDIAN_DATA)
3000 {
3001 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3002 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3003 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3004 }
3005 else
3006 {
3007 char r8 = '0' + STATIC_CHAIN_REGNUM;
3008 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3009
3010 if (TARGET_AS100_SYNTAX)
3011 {
3012 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3013 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3014 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3015 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3016 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3017 }
3018 else
3019 {
3020 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3021 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3022 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3023 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3024 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3025 }
3026 }
3027}
3028
3029static void
3030rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3031{
3032 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3033
3034 emit_block_move (tramp, assemble_trampoline_template (),
3035 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3036
3037 if (TARGET_BIG_ENDIAN_DATA)
3038 {
3039 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3040 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3041 }
3042 else
3043 {
3044 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3045 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3046 }
3047}
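/* Layout sketch (little-endian case, illustrative): the template emitted
   above is

       mov.L   #<static chain>, r8      ; constant patched at offset 2
       mov.L   #<function addr>, r9     ; constant patched at offset 8
       jmp     r9

   and rx_trampoline_init overwrites the two 0xdeadbeef placeholders with
   the static chain value and the target function address, using the 4- and
   12-byte offsets instead when the TARGET_BIG_ENDIAN_DATA NOP padding
   applies.  */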
3048\f
e963cb1a 3049static int
ef4bddc2 3050rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
a5dfec9a
NC
3051 reg_class_t regclass ATTRIBUTE_UNUSED,
3052 bool in)
aea8fc97 3053{
a1d8754e 3054 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
aea8fc97
NC
3055}
3056
e963cb1a 3057/* Convert a CC_MODE to the set of flags that it represents. */
aea8fc97
NC
3058
3059static unsigned int
ef4bddc2 3060flags_from_mode (machine_mode mode)
aea8fc97 3061{
e963cb1a 3062 switch (mode)
aea8fc97 3063 {
e963cb1a
RH
3064 case CC_ZSmode:
3065 return CC_FLAG_S | CC_FLAG_Z;
3066 case CC_ZSOmode:
3067 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3068 case CC_ZSCmode:
3069 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3070 case CCmode:
3071 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3072 case CC_Fmode:
3073 return CC_FLAG_FP;
3074 default:
3075 gcc_unreachable ();
3076 }
3077}
aea8fc97 3078
e963cb1a 3079/* Convert a set of flags to a CC_MODE that can implement it. */
aea8fc97 3080
ef4bddc2 3081static machine_mode
e963cb1a
RH
3082mode_from_flags (unsigned int f)
3083{
3084 if (f & CC_FLAG_FP)
3085 return CC_Fmode;
3086 if (f & CC_FLAG_O)
3087 {
3088 if (f & CC_FLAG_C)
3089 return CCmode;
3090 else
3091 return CC_ZSOmode;
aea8fc97 3092 }
e963cb1a
RH
3093 else if (f & CC_FLAG_C)
3094 return CC_ZSCmode;
3095 else
3096 return CC_ZSmode;
aea8fc97
NC
3097}
3098
e963cb1a
RH
3099/* Convert an RTX_CODE to the set of flags needed to implement it.
3100 This assumes an integer comparison. */
3101
aea8fc97 3102static unsigned int
e963cb1a 3103flags_from_code (enum rtx_code code)
aea8fc97 3104{
e963cb1a 3105 switch (code)
aea8fc97 3106 {
e963cb1a
RH
3107 case LT:
3108 case GE:
72602cd1 3109 return CC_FLAG_S;
e963cb1a
RH
3110 case GT:
3111 case LE:
3112 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3113 case GEU:
3114 case LTU:
3115 return CC_FLAG_C;
3116 case GTU:
3117 case LEU:
3118 return CC_FLAG_C | CC_FLAG_Z;
3119 case EQ:
3120 case NE:
3121 return CC_FLAG_Z;
3122 default:
3123 gcc_unreachable ();
aea8fc97
NC
3124 }
3125}
3126
e963cb1a
RH
3127/* Return a CC_MODE of which both M1 and M2 are subsets. */
3128
ef4bddc2
RS
3129static machine_mode
3130rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
aea8fc97 3131{
e963cb1a
RH
3132 unsigned f;
3133
3134 /* Early out for identical modes. */
3135 if (m1 == m2)
3136 return m1;
3137
3138 /* There's no valid combination for FP vs non-FP. */
3139 f = flags_from_mode (m1) | flags_from_mode (m2);
3140 if (f & CC_FLAG_FP)
3141 return VOIDmode;
3142
3143 /* Otherwise, see what mode can implement all the flags. */
3144 return mode_from_flags (f);
aea8fc97 3145}
27bf36f3
RH
3146
3147/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3148
ef4bddc2 3149machine_mode
72602cd1 3150rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
27bf36f3
RH
3151{
3152 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3153 return CC_Fmode;
3154
72602cd1
RH
3155 if (y != const0_rtx)
3156 return CCmode;
3157
e963cb1a
RH
3158 return mode_from_flags (flags_from_code (cmp_code));
3159}
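/* Examples (illustrative): a signed "x > 0" comparison needs the S, O and Z
   flags, so rx_select_cc_mode returns CC_ZSOmode; any comparison against a
   non-zero Y falls back to full CCmode; and a floating-point comparison
   selects CC_Fmode regardless of the comparison code.  */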
3160
e963cb1a
RH
3161/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3162 CC_MODE, and use that in branches based on that compare. */
3163
3164void
ef4bddc2 3165rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
e963cb1a
RH
3166 rtx c1, rtx c2, rtx label)
3167{
3168 rtx flags, x;
3169
3170 flags = gen_rtx_REG (cc_mode, CC_REG);
3171 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3172 x = gen_rtx_SET (VOIDmode, flags, x);
3173 emit_insn (x);
3174
3175 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3176 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3177 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3178 emit_jump_insn (x);
27bf36f3
RH
3179}
3180
b4d83be3
RH
3181/* A helper function for matching parallels that set the flags. */
3182
3183bool
ef4bddc2 3184rx_match_ccmode (rtx insn, machine_mode cc_mode)
b4d83be3
RH
3185{
3186 rtx op1, flags;
ef4bddc2 3187 machine_mode flags_mode;
b4d83be3
RH
3188
3189 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3190
3191 op1 = XVECEXP (PATTERN (insn), 0, 1);
3192 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3193
3194 flags = SET_DEST (op1);
3195 flags_mode = GET_MODE (flags);
3196
3197 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3198 return false;
3199 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3200 return false;
3201
3202 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3203 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3204 return false;
3205
3206 return true;
3207}
662666e5
NC
3208\f
3209int
34cc3c86 3210rx_align_for_label (rtx lab, int uses_threshold)
662666e5 3211{
34cc3c86
DD
3212 /* This is a simple heuristic to guess when an alignment would not be useful
3213 because the delay due to the inserted NOPs would be greater than the delay
3214 due to the misaligned branch. If uses_threshold is zero then the alignment
3215 is always useful. */
5f2f13fd 3216 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
34cc3c86
DD
3217 return 0;
3218
1cf1574d
DD
3219 if (optimize_size)
3220 return 0;
3fad4d00 3221 /* These values are log, not bytes. */
1cf1574d 3222 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3fad4d00
DD
3223 return 2; /* 4 bytes */
3224 return 3; /* 8 bytes */
662666e5
NC
3225}
3226
3227static int
9158a0d8 3228rx_max_skip_for_label (rtx_insn *lab)
662666e5
NC
3229{
3230 int opsize;
9158a0d8 3231 rtx_insn *op;
662666e5 3232
1704a72b
DD
3233 if (optimize_size)
3234 return 0;
3235
9158a0d8 3236 if (lab == NULL)
662666e5 3237 return 0;
b4d83be3 3238
662666e5
NC
3239 op = lab;
3240 do
3241 {
3242 op = next_nonnote_nondebug_insn (op);
3243 }
3244 while (op && (LABEL_P (op)
3245 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3246 if (!op)
3247 return 0;
3248
3249 opsize = get_attr_length (op);
3250 if (opsize >= 0 && opsize < 8)
3251 return opsize - 1;
3252 return 0;
3253}
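/* For illustration: if the first real insn after the label is 6 bytes long,
   the hook above allows at most 5 bytes of alignment padding to be skipped;
   insns of 8 bytes or more (or no following insn at all) disable the skip
   entirely.  */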
e9c0470a
NC
3254
3255/* Compute the real length of the extending load-and-op instructions. */
3256
3257int
3df4ecc2 3258rx_adjust_insn_length (rtx_insn *insn, int current_length)
e9c0470a
NC
3259{
3260 rtx extend, mem, offset;
3261 bool zero;
3262 int factor;
3263
69c7a374
DD
3264 if (!INSN_P (insn))
3265 return current_length;
3266
e9c0470a
NC
3267 switch (INSN_CODE (insn))
3268 {
3269 default:
3270 return current_length;
3271
3272 case CODE_FOR_plussi3_zero_extendhi:
3273 case CODE_FOR_andsi3_zero_extendhi:
3274 case CODE_FOR_iorsi3_zero_extendhi:
3275 case CODE_FOR_xorsi3_zero_extendhi:
3276 case CODE_FOR_divsi3_zero_extendhi:
3277 case CODE_FOR_udivsi3_zero_extendhi:
3278 case CODE_FOR_minussi3_zero_extendhi:
3279 case CODE_FOR_smaxsi3_zero_extendhi:
3280 case CODE_FOR_sminsi3_zero_extendhi:
3281 case CODE_FOR_multsi3_zero_extendhi:
5f2f13fd 3282 case CODE_FOR_comparesi3_zero_extendhi:
e9c0470a
NC
3283 zero = true;
3284 factor = 2;
3285 break;
3286
3287 case CODE_FOR_plussi3_sign_extendhi:
3288 case CODE_FOR_andsi3_sign_extendhi:
3289 case CODE_FOR_iorsi3_sign_extendhi:
3290 case CODE_FOR_xorsi3_sign_extendhi:
3291 case CODE_FOR_divsi3_sign_extendhi:
3292 case CODE_FOR_udivsi3_sign_extendhi:
3293 case CODE_FOR_minussi3_sign_extendhi:
3294 case CODE_FOR_smaxsi3_sign_extendhi:
3295 case CODE_FOR_sminsi3_sign_extendhi:
3296 case CODE_FOR_multsi3_sign_extendhi:
5f2f13fd 3297 case CODE_FOR_comparesi3_sign_extendhi:
e9c0470a
NC
3298 zero = false;
3299 factor = 2;
3300 break;
3301
3302 case CODE_FOR_plussi3_zero_extendqi:
3303 case CODE_FOR_andsi3_zero_extendqi:
3304 case CODE_FOR_iorsi3_zero_extendqi:
3305 case CODE_FOR_xorsi3_zero_extendqi:
3306 case CODE_FOR_divsi3_zero_extendqi:
3307 case CODE_FOR_udivsi3_zero_extendqi:
3308 case CODE_FOR_minussi3_zero_extendqi:
3309 case CODE_FOR_smaxsi3_zero_extendqi:
3310 case CODE_FOR_sminsi3_zero_extendqi:
3311 case CODE_FOR_multsi3_zero_extendqi:
5f2f13fd 3312 case CODE_FOR_comparesi3_zero_extendqi:
e9c0470a
NC
3313 zero = true;
3314 factor = 1;
3315 break;
3316
3317 case CODE_FOR_plussi3_sign_extendqi:
3318 case CODE_FOR_andsi3_sign_extendqi:
3319 case CODE_FOR_iorsi3_sign_extendqi:
3320 case CODE_FOR_xorsi3_sign_extendqi:
3321 case CODE_FOR_divsi3_sign_extendqi:
3322 case CODE_FOR_udivsi3_sign_extendqi:
3323 case CODE_FOR_minussi3_sign_extendqi:
3324 case CODE_FOR_smaxsi3_sign_extendqi:
3325 case CODE_FOR_sminsi3_sign_extendqi:
3326 case CODE_FOR_multsi3_sign_extendqi:
5f2f13fd 3327 case CODE_FOR_comparesi3_sign_extendqi:
e9c0470a
NC
3328 zero = false;
3329 factor = 1;
3330 break;
3331 }
3332
3333 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3334 extend = single_set (insn);
3335 gcc_assert (extend != NULL_RTX);
3336
3337 extend = SET_SRC (extend);
3338 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3339 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3340 extend = XEXP (extend, 0);
3341 else
3342 extend = XEXP (extend, 1);
3343
3344 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3345 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3346
3347 mem = XEXP (extend, 0);
3348 gcc_checking_assert (MEM_P (mem));
3349 if (REG_P (XEXP (mem, 0)))
3350 return (zero && factor == 1) ? 2 : 3;
3351
3352 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3353 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3354 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3355
3356 offset = XEXP (XEXP (mem, 0), 1);
3357 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3358
3359 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3360 return (zero && factor == 1) ? 3 : 4;
3361
3362 return (zero && factor == 1) ? 4 : 5;
3363}
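/* Worked example (illustrative): for a zero-extending byte form such as
   CODE_FOR_andsi3_zero_extendqi (ZERO == true, FACTOR == 1), a plain
   register address gives a 2-byte insn, a displacement in the range 0..255
   gives 3 bytes, and anything larger gives 4 bytes; the sign-extending and
   halfword variants are one byte longer in each case.  */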
47c9ac72
NC
3364
3365static bool
3366rx_narrow_volatile_bitfield (void)
3367{
3368 return true;
3369}
3370
3371static bool
3372rx_ok_to_inline (tree caller, tree callee)
3373{
3374 /* Do not inline functions with local variables
 3375     into a naked CALLER - naked functions have no stack frame and
3376 locals need a frame in order to have somewhere to live.
3377
3378 Unfortunately we have no way to determine the presence of
3379 local variables in CALLEE, so we have to be cautious and
3380 assume that there might be some there.
3381
3382 We do allow inlining when CALLEE has the "inline" type
3383 modifier or the "always_inline" or "gnu_inline" attributes. */
3384 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3385 || DECL_DECLARED_INLINE_P (callee)
3386 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3387 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3388}
3389
69f5aa9b
SKS
3390static bool
3391rx_enable_lra (void)
3392{
da02a644 3393 return TARGET_ENABLE_LRA;
69f5aa9b
SKS
3394}
3395
aea8fc97 3396\f
47c9ac72
NC
3397#undef TARGET_NARROW_VOLATILE_BITFIELD
3398#define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3399
3400#undef TARGET_CAN_INLINE_P
3401#define TARGET_CAN_INLINE_P rx_ok_to_inline
3402
662666e5
NC
3403#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3404#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3405#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3406#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3407#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3408#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3409#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3410#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3411
65a324b4
NC
3412#undef TARGET_FUNCTION_VALUE
3413#define TARGET_FUNCTION_VALUE rx_function_value
3414
3415#undef TARGET_RETURN_IN_MSB
3416#define TARGET_RETURN_IN_MSB rx_return_in_msb
3417
3418#undef TARGET_IN_SMALL_DATA_P
3419#define TARGET_IN_SMALL_DATA_P rx_in_small_data
3420
3421#undef TARGET_RETURN_IN_MEMORY
3422#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3423
3424#undef TARGET_HAVE_SRODATA_SECTION
3425#define TARGET_HAVE_SRODATA_SECTION true
3426
3427#undef TARGET_ASM_SELECT_RTX_SECTION
3428#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3429
3430#undef TARGET_ASM_SELECT_SECTION
3431#define TARGET_ASM_SELECT_SECTION rx_select_section
3432
3433#undef TARGET_INIT_BUILTINS
3434#define TARGET_INIT_BUILTINS rx_init_builtins
3435
87e91fca
DD
3436#undef TARGET_BUILTIN_DECL
3437#define TARGET_BUILTIN_DECL rx_builtin_decl
3438
65a324b4
NC
3439#undef TARGET_EXPAND_BUILTIN
3440#define TARGET_EXPAND_BUILTIN rx_expand_builtin
3441
3442#undef TARGET_ASM_CONSTRUCTOR
3443#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3444
3445#undef TARGET_ASM_DESTRUCTOR
3446#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3447
3448#undef TARGET_STRUCT_VALUE_RTX
3449#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3450
3451#undef TARGET_ATTRIBUTE_TABLE
3452#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3453
3454#undef TARGET_ASM_FILE_START
3455#define TARGET_ASM_FILE_START rx_file_start
3456
3457#undef TARGET_MS_BITFIELD_LAYOUT_P
3458#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3459
3460#undef TARGET_LEGITIMATE_ADDRESS_P
3461#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3462
b09c3081
AS
3463#undef TARGET_MODE_DEPENDENT_ADDRESS_P
3464#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3465
65a324b4
NC
3466#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3467#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3468
3469#undef TARGET_ASM_FUNCTION_PROLOGUE
3470#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3471
3472#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3473#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3474
0d8f38d3
NC
3475#undef TARGET_FUNCTION_OK_FOR_SIBCALL
3476#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3477
3968a1c0
NF
3478#undef TARGET_FUNCTION_ARG
3479#define TARGET_FUNCTION_ARG rx_function_arg
3480
3481#undef TARGET_FUNCTION_ARG_ADVANCE
3482#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3483
c2ed6cf8
NF
3484#undef TARGET_FUNCTION_ARG_BOUNDARY
3485#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3486
65a324b4
NC
3487#undef TARGET_SET_CURRENT_FUNCTION
3488#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3489
65a324b4
NC
3490#undef TARGET_ASM_INTEGER
3491#define TARGET_ASM_INTEGER rx_assemble_integer
3492
3493#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3494#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3495
3496#undef TARGET_MAX_ANCHOR_OFFSET
3497#define TARGET_MAX_ANCHOR_OFFSET 32
3498
3499#undef TARGET_ADDRESS_COST
3500#define TARGET_ADDRESS_COST rx_address_cost
3501
3502#undef TARGET_CAN_ELIMINATE
3503#define TARGET_CAN_ELIMINATE rx_can_eliminate
3504
5efd84c5
NF
3505#undef TARGET_CONDITIONAL_REGISTER_USAGE
3506#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3507
65a324b4
NC
3508#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3509#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3510
3511#undef TARGET_TRAMPOLINE_INIT
3512#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3513
31e727b0
NC
3514#undef TARGET_PRINT_OPERAND
3515#define TARGET_PRINT_OPERAND rx_print_operand
3516
3517#undef TARGET_PRINT_OPERAND_ADDRESS
3518#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3519
aea8fc97
NC
3520#undef TARGET_CC_MODES_COMPATIBLE
3521#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3522
3523#undef TARGET_MEMORY_MOVE_COST
3524#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3525
0685e770
DD
3526#undef TARGET_OPTION_OVERRIDE
3527#define TARGET_OPTION_OVERRIDE rx_option_override
3528
e2f289f3
NC
3529#undef TARGET_PROMOTE_FUNCTION_MODE
3530#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3531
a32b99ad
JM
3532#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3533#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
fac0f722 3534
1b7ae0b7
RH
3535#undef TARGET_FLAGS_REGNUM
3536#define TARGET_FLAGS_REGNUM CC_REG
3537
1a627b35 3538#undef TARGET_LEGITIMATE_CONSTANT_P
5f2f13fd 3539#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
1a627b35 3540
878a9174
DD
3541#undef TARGET_LEGITIMIZE_ADDRESS
3542#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3543
47c9ac72
NC
3544#undef TARGET_WARN_FUNC_RETURN
3545#define TARGET_WARN_FUNC_RETURN rx_warn_func_return
d45eae79 3546
69f5aa9b
SKS
3547#undef TARGET_LRA_P
3548#define TARGET_LRA_P rx_enable_lra
3549
65a324b4
NC
3550struct gcc_target targetm = TARGET_INITIALIZER;
3551
87e91fca 3552#include "gt-rx.h"