1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "regs.h"
26#include "machmode.h"
27#include "hard-reg-set.h"
28#include "basic-block.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
43#include "toplev.h"
44#include "optabs.h"
 45#include "output.h"
 46#include "libfuncs.h"
 47#include "ggc.h"
 48#include "target.h"
 49#include "target-def.h"
 50#include "langhooks.h"
51
52/* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
54 of EQ, NE, etc. */
55
56enum internal_test {
57 ITEST_EQ,
58 ITEST_NE,
59 ITEST_GT,
60 ITEST_GE,
61 ITEST_LT,
62 ITEST_LE,
63 ITEST_GTU,
64 ITEST_GEU,
65 ITEST_LTU,
66 ITEST_LEU,
67 ITEST_MAX
68 };
69
70/* Cached operands, and operator to compare for use in set/branch on
71 condition codes. */
72rtx branch_cmp[2];
73
74/* what type of branch to use */
75enum cmp_type branch_type;
76
77/* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
80
81/* Current frame size calculated by compute_frame_size. */
82unsigned xtensa_current_frame_size;
83
84/* Tables of ld/st opcode names for block moves */
85const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
86const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
87#define LARGEST_MOVE_RATIO 15
88
89/* Define the structure for the machine field in struct function. */
 90struct machine_function GTY(())
91{
92 int accesses_prev_frame;
93};
94
95/* Vector, indexed by hard register number, which contains 1 for a
96 register that is allowable in a candidate for leaf function
97 treatment. */
98
99const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
100{
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1, 1, 1,
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1
105};
106
107/* Map hard register number to register class */
108const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
109{
110 GR_REGS, SP_REG, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
113 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
114 AR_REGS, AR_REGS, BR_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 ACC_REG,
120};
121
122/* Map register constraint character to register class. */
123enum reg_class xtensa_char_to_class[256] =
124{
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189};
190
191static int b4const_or_zero PARAMS ((int));
192static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
193static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
194static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
195static rtx gen_conditional_move PARAMS ((rtx));
196static rtx fixup_subreg_mem PARAMS ((rtx x));
197static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
 198static struct machine_function * xtensa_init_machine_status PARAMS ((void));
199static void printx PARAMS ((FILE *, signed int));
200static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
201 unsigned HOST_WIDE_INT));
 202static void xtensa_encode_section_info PARAMS ((tree, int));
203
204static rtx frame_size_const;
205static int current_function_arg_words;
206static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
207 REG_ALLOC_ORDER;
208\f
209/* This macro generates the assembly code for function entry.
210 FILE is a stdio stream to output the code to.
211 SIZE is an int: how many units of temporary storage to allocate.
212 Refer to the array 'regs_ever_live' to determine which registers
213 to save; 'regs_ever_live[I]' is nonzero if register number I
214 is ever used in the function. This macro is responsible for
215 knowing which registers should not be saved even if used. */
216
217#undef TARGET_ASM_FUNCTION_PROLOGUE
218#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
219
220/* This macro generates the assembly code for function exit,
221 on machines that need it. If FUNCTION_EPILOGUE is not defined
222 then individual return instructions are generated for each
223 return statement. Args are same as for FUNCTION_PROLOGUE. */
224
225#undef TARGET_ASM_FUNCTION_EPILOGUE
226#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
227
228/* These hooks specify assembly directives for creating certain kinds
229 of integer object. */
230
231#undef TARGET_ASM_ALIGNED_SI_OP
232#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
233
234#undef TARGET_ASM_SELECT_RTX_SECTION
235#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236#undef TARGET_ENCODE_SECTION_INFO
237#define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
 238
239struct gcc_target targetm = TARGET_INITIALIZER;
240\f
241
242/*
243 * Functions to test Xtensa immediate operand validity.
244 */
245
246int
247xtensa_b4constu (v)
248 int v;
249{
250 switch (v)
251 {
252 case 32768:
253 case 65536:
254 case 2:
255 case 3:
256 case 4:
257 case 5:
258 case 6:
259 case 7:
260 case 8:
261 case 10:
262 case 12:
263 case 16:
264 case 32:
265 case 64:
266 case 128:
267 case 256:
268 return 1;
269 }
270 return 0;
271}
272
273int
274xtensa_simm8x256 (v)
275 int v;
276{
277 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
278}
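/* Illustrative examples (editorial note): the check above accepts exactly
   the multiples of 256 in the range [-32768, 32512], i.e. an 8-bit signed
   immediate scaled by 256.  So 768 (3 * 256) is accepted, 896 is rejected
   because (896 & 255) != 0, and 33024 (129 * 256) is rejected for being
   larger than 32512.  */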
279
280int
281xtensa_ai4const (v)
282 int v;
283{
284 return (v == -1 || (v >= 1 && v <= 15));
285}
286
287int
288xtensa_simm7 (v)
289 int v;
290{
291 return v >= -32 && v <= 95;
292}
293
294int
295xtensa_b4const (v)
296 int v;
297{
298 switch (v)
299 {
300 case -1:
301 case 1:
302 case 2:
303 case 3:
304 case 4:
305 case 5:
306 case 6:
307 case 7:
308 case 8:
309 case 10:
310 case 12:
311 case 16:
312 case 32:
313 case 64:
314 case 128:
315 case 256:
316 return 1;
317 }
318 return 0;
319}
320
321int
322xtensa_simm8 (v)
323 int v;
324{
325 return v >= -128 && v <= 127;
326}
327
328int
329xtensa_tp7 (v)
330 int v;
331{
332 return (v >= 7 && v <= 22);
333}
334
335int
336xtensa_lsi4x4 (v)
337 int v;
338{
339 return (v & 3) == 0 && (v >= 0 && v <= 60);
340}
341
342int
343xtensa_simm12b (v)
344 int v;
345{
346 return v >= -2048 && v <= 2047;
347}
348
349int
350xtensa_uimm8 (v)
351 int v;
352{
353 return v >= 0 && v <= 255;
354}
355
356int
357xtensa_uimm8x2 (v)
358 int v;
359{
360 return (v & 1) == 0 && (v >= 0 && v <= 510);
361}
362
363int
364xtensa_uimm8x4 (v)
365 int v;
366{
367 return (v & 3) == 0 && (v >= 0 && v <= 1020);
368}
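/* Illustrative summary of the three unsigned-offset checks above:
   xtensa_uimm8 allows byte offsets 0..255, xtensa_uimm8x2 allows even
   offsets 0..510, and xtensa_uimm8x4 allows multiples of 4 up to 1020.
   These correspond to an 8-bit unsigned offset field scaled by the access
   size, as used by the narrow byte/halfword/word load and store forms.  */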
369
370
371/* This is just like the standard true_regnum() function except that it
372 works even when reg_renumber is not initialized. */
373
374int
375xt_true_regnum (x)
376 rtx x;
377{
378 if (GET_CODE (x) == REG)
379 {
380 if (reg_renumber
381 && REGNO (x) >= FIRST_PSEUDO_REGISTER
382 && reg_renumber[REGNO (x)] >= 0)
383 return reg_renumber[REGNO (x)];
384 return REGNO (x);
385 }
386 if (GET_CODE (x) == SUBREG)
387 {
388 int base = xt_true_regnum (SUBREG_REG (x));
389 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
390 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
391 GET_MODE (SUBREG_REG (x)),
392 SUBREG_BYTE (x), GET_MODE (x));
393 }
394 return -1;
395}
396
397
398int
399add_operand (op, mode)
400 rtx op;
401 enum machine_mode mode;
402{
403 if (GET_CODE (op) == CONST_INT)
404 return (xtensa_simm8 (INTVAL (op)) ||
405 xtensa_simm8x256 (INTVAL (op)));
406
407 return register_operand (op, mode);
408}
409
410
411int
412arith_operand (op, mode)
413 rtx op;
414 enum machine_mode mode;
415{
416 if (GET_CODE (op) == CONST_INT)
417 return xtensa_simm8 (INTVAL (op));
418
419 return register_operand (op, mode);
420}
421
422
423int
424nonimmed_operand (op, mode)
425 rtx op;
426 enum machine_mode mode;
427{
428 /* We cannot use the standard nonimmediate_operand() predicate because
429 it includes constant pool memory operands. */
430
431 if (memory_operand (op, mode))
432 return !constantpool_address_p (XEXP (op, 0));
433
434 return register_operand (op, mode);
435}
436
437
438int
439mem_operand (op, mode)
440 rtx op;
441 enum machine_mode mode;
442{
443 /* We cannot use the standard memory_operand() predicate because
444 it includes constant pool memory operands. */
445
446 if (memory_operand (op, mode))
447 return !constantpool_address_p (XEXP (op, 0));
448
449 return FALSE;
450}
451
452
453int
 454xtensa_valid_move (mode, operands)
 455 enum machine_mode mode;
 456 rtx *operands;
 457{
458 /* Either the destination or source must be a register, and the
459 MAC16 accumulator doesn't count. */
460
461 if (register_operand (operands[0], mode))
462 {
463 int dst_regnum = xt_true_regnum (operands[0]);
464
465 /* The stack pointer can only be assigned with a MOVSP opcode. */
466 if (dst_regnum == STACK_POINTER_REGNUM)
467 return (mode == SImode
468 && register_operand (operands[1], mode)
469 && !ACC_REG_P (xt_true_regnum (operands[1])));
470
471 if (!ACC_REG_P (dst_regnum))
472 return true;
473 }
 474 if (register_operand (operands[1], mode))
475 {
476 int src_regnum = xt_true_regnum (operands[1]);
477 if (!ACC_REG_P (src_regnum))
478 return true;
479 }
480 return FALSE;
481}
482
483
484int
485mask_operand (op, mode)
486 rtx op;
487 enum machine_mode mode;
488{
489 if (GET_CODE (op) == CONST_INT)
490 return xtensa_mask_immediate (INTVAL (op));
491
492 return register_operand (op, mode);
493}
494
495
496int
497extui_fldsz_operand (op, mode)
498 rtx op;
499 enum machine_mode mode ATTRIBUTE_UNUSED;
500{
501 return ((GET_CODE (op) == CONST_INT)
502 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
503}
504
505
506int
507sext_operand (op, mode)
508 rtx op;
509 enum machine_mode mode;
510{
511 if (TARGET_SEXT)
512 return nonimmed_operand (op, mode);
513 return mem_operand (op, mode);
514}
515
516
517int
518sext_fldsz_operand (op, mode)
519 rtx op;
520 enum machine_mode mode ATTRIBUTE_UNUSED;
521{
522 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
523}
524
525
526int
527lsbitnum_operand (op, mode)
528 rtx op;
529 enum machine_mode mode ATTRIBUTE_UNUSED;
530{
531 if (GET_CODE (op) == CONST_INT)
532 {
533 return (BITS_BIG_ENDIAN
534 ? (INTVAL (op) == BITS_PER_WORD-1)
535 : (INTVAL (op) == 0));
536 }
537 return FALSE;
538}
539
540
541static int
542b4const_or_zero (v)
543 int v;
544{
545 if (v == 0)
546 return TRUE;
547 return xtensa_b4const (v);
548}
549
550
551int
552branch_operand (op, mode)
553 rtx op;
554 enum machine_mode mode;
555{
556 if (GET_CODE (op) == CONST_INT)
557 return b4const_or_zero (INTVAL (op));
558
559 return register_operand (op, mode);
560}
561
562
563int
564ubranch_operand (op, mode)
565 rtx op;
566 enum machine_mode mode;
567{
568 if (GET_CODE (op) == CONST_INT)
569 return xtensa_b4constu (INTVAL (op));
570
571 return register_operand (op, mode);
572}
573
574
575int
576call_insn_operand (op, mode)
577 rtx op;
578 enum machine_mode mode ATTRIBUTE_UNUSED;
579{
580 if ((GET_CODE (op) == REG)
581 && (op != arg_pointer_rtx)
582 && ((REGNO (op) < FRAME_POINTER_REGNUM)
583 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
584 return TRUE;
585
586 if (CONSTANT_ADDRESS_P (op))
587 {
588 /* Direct calls only allowed to static functions with PIC. */
589 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
590 && SYMBOL_REF_FLAG (op)));
591 }
592
593 return FALSE;
594}
595
596
597int
598move_operand (op, mode)
599 rtx op;
600 enum machine_mode mode;
601{
602 if (register_operand (op, mode))
603 return TRUE;
604
605 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
606 result in 0/1. */
607 if (GET_CODE (op) == CONSTANT_P_RTX)
608 return TRUE;
609
610 if (GET_CODE (op) == CONST_INT)
611 return xtensa_simm12b (INTVAL (op));
612
613 if (GET_CODE (op) == MEM)
614 return memory_address_p (mode, XEXP (op, 0));
615
616 return FALSE;
617}
618
619
620int
621smalloffset_mem_p (op)
622 rtx op;
623{
624 if (GET_CODE (op) == MEM)
625 {
626 rtx addr = XEXP (op, 0);
627 if (GET_CODE (addr) == REG)
628 return REG_OK_FOR_BASE_P (addr);
629 if (GET_CODE (addr) == PLUS)
630 {
631 rtx offset = XEXP (addr, 0);
632 if (GET_CODE (offset) != CONST_INT)
633 offset = XEXP (addr, 1);
634 if (GET_CODE (offset) != CONST_INT)
635 return FALSE;
636 return xtensa_lsi4x4 (INTVAL (offset));
637 }
638 }
639 return FALSE;
640}
641
642
643int
644smalloffset_double_mem_p (op)
645 rtx op;
646{
647 if (!smalloffset_mem_p (op))
648 return FALSE;
649 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
650}
651
652
653int
654constantpool_address_p (addr)
655 rtx addr;
656{
657 rtx sym = addr;
658
659 if (GET_CODE (addr) == CONST)
660 {
661 rtx offset;
662
663 /* only handle (PLUS (SYM, OFFSET)) form */
664 addr = XEXP (addr, 0);
665 if (GET_CODE (addr) != PLUS)
666 return FALSE;
667
668 /* make sure the address is word aligned */
669 offset = XEXP (addr, 1);
670 if ((GET_CODE (offset) != CONST_INT)
671 || ((INTVAL (offset) & 3) != 0))
672 return FALSE;
673
674 sym = XEXP (addr, 0);
675 }
676
677 if ((GET_CODE (sym) == SYMBOL_REF)
678 && CONSTANT_POOL_ADDRESS_P (sym))
679 return TRUE;
680 return FALSE;
681}
682
683
684int
685constantpool_mem_p (op)
686 rtx op;
687{
688 if (GET_CODE (op) == MEM)
689 return constantpool_address_p (XEXP (op, 0));
690 return FALSE;
691}
692
693
694int
695non_const_move_operand (op, mode)
696 rtx op;
697 enum machine_mode mode;
698{
699 if (register_operand (op, mode))
700 return 1;
701 if (GET_CODE (op) == SUBREG)
702 op = SUBREG_REG (op);
703 if (GET_CODE (op) == MEM)
704 return memory_address_p (mode, XEXP (op, 0));
705 return FALSE;
706}
707
708
709/* Accept the floating point constant 1 in the appropriate mode. */
710
711int
712const_float_1_operand (op, mode)
713 rtx op;
714 enum machine_mode mode;
715{
716 REAL_VALUE_TYPE d;
717 static REAL_VALUE_TYPE onedf;
718 static REAL_VALUE_TYPE onesf;
719 static int one_initialized;
720
721 if ((GET_CODE (op) != CONST_DOUBLE)
722 || (mode != GET_MODE (op))
723 || (mode != DFmode && mode != SFmode))
724 return FALSE;
725
726 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
727
728 if (! one_initialized)
729 {
730 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
731 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
732 one_initialized = TRUE;
733 }
734
735 if (mode == DFmode)
736 return REAL_VALUES_EQUAL (d, onedf);
737 else
738 return REAL_VALUES_EQUAL (d, onesf);
739}
740
741
742int
743fpmem_offset_operand (op, mode)
744 rtx op;
745 enum machine_mode mode ATTRIBUTE_UNUSED;
746{
747 if (GET_CODE (op) == CONST_INT)
748 return xtensa_mem_offset (INTVAL (op), SFmode);
749 return 0;
750}
751
752
753void
754xtensa_extend_reg (dst, src)
755 rtx dst;
756 rtx src;
757{
758 rtx temp = gen_reg_rtx (SImode);
759 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
760
761 /* generate paradoxical subregs as needed so that the modes match */
762 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
763 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
764
765 emit_insn (gen_ashlsi3 (temp, src, shift));
766 emit_insn (gen_ashrsi3 (dst, temp, shift));
767}
768
769
770void
771xtensa_load_constant (dst, src)
772 rtx dst;
773 rtx src;
774{
775 enum machine_mode mode = GET_MODE (dst);
776 src = force_const_mem (SImode, src);
777
778 /* PC-relative loads are always SImode so we have to add a SUBREG if that
779 is not the desired mode */
780
781 if (mode != SImode)
782 {
783 if (register_operand (dst, mode))
784 dst = simplify_gen_subreg (SImode, dst, mode, 0);
785 else
786 {
787 src = force_reg (SImode, src);
788 src = gen_lowpart_SUBREG (mode, src);
789 }
790 }
791
792 emit_move_insn (dst, src);
793}
794
795
796int
797branch_operator (x, mode)
798 rtx x;
799 enum machine_mode mode;
800{
801 if (GET_MODE (x) != mode)
802 return FALSE;
803
804 switch (GET_CODE (x))
805 {
806 case EQ:
807 case NE:
808 case LT:
809 case GE:
810 return TRUE;
811 default:
812 break;
813 }
814 return FALSE;
815}
816
817
818int
819ubranch_operator (x, mode)
820 rtx x;
821 enum machine_mode mode;
822{
823 if (GET_MODE (x) != mode)
824 return FALSE;
825
826 switch (GET_CODE (x))
827 {
828 case LTU:
829 case GEU:
830 return TRUE;
831 default:
832 break;
833 }
834 return FALSE;
835}
836
837
838int
839boolean_operator (x, mode)
840 rtx x;
841 enum machine_mode mode;
842{
843 if (GET_MODE (x) != mode)
844 return FALSE;
845
846 switch (GET_CODE (x))
847 {
848 case EQ:
849 case NE:
850 return TRUE;
851 default:
852 break;
853 }
854 return FALSE;
855}
856
857
858int
859xtensa_mask_immediate (v)
860 int v;
861{
862#define MAX_MASK_SIZE 16
863 int mask_size;
864
865 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
866 {
867 if ((v & 1) == 0)
868 return FALSE;
869 v = v >> 1;
870 if (v == 0)
871 return TRUE;
872 }
873
874 return FALSE;
875}
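/* Illustrative examples: 0x00ff and 0xffff are accepted above (a
   contiguous run of ones starting at bit 0, at most 16 bits long), while
   0xfe is rejected because bit 0 is clear and 0x1ffff is rejected because
   the run is 17 bits long.  */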
876
877
878int
879xtensa_mem_offset (v, mode)
880 unsigned v;
881 enum machine_mode mode;
882{
883 switch (mode)
884 {
885 case BLKmode:
886 /* Handle the worst case for block moves. See xtensa_expand_block_move
887 where we emit an optimized block move operation if the block can be
888 moved in < "move_ratio" pieces. The worst case is when the block is
889 aligned but has a size of (3 mod 4) (does this happen?) so that the
890 last piece requires a byte load/store. */
891 return (xtensa_uimm8 (v) &&
892 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
893
894 case QImode:
895 return xtensa_uimm8 (v);
896
897 case HImode:
898 return xtensa_uimm8x2 (v);
899
900 case DFmode:
901 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
902
903 default:
904 break;
905 }
906
907 return xtensa_uimm8x4 (v);
908}
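/* Worked example for the BLKmode case above, assuming MOVE_MAX is 4
   (one word): MOVE_MAX * LARGEST_MOVE_RATIO is 60, so an offset V is
   valid only if both V and V + 60 fit in an unsigned 8-bit field.
   V = 180 passes (180 and 240 are both <= 255); V = 200 fails because
   260 does not fit.  */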
909
910
911/* Make normal rtx_code into something we can index from an array */
912
913static enum internal_test
914map_test_to_internal_test (test_code)
915 enum rtx_code test_code;
916{
917 enum internal_test test = ITEST_MAX;
918
919 switch (test_code)
920 {
921 default: break;
922 case EQ: test = ITEST_EQ; break;
923 case NE: test = ITEST_NE; break;
924 case GT: test = ITEST_GT; break;
925 case GE: test = ITEST_GE; break;
926 case LT: test = ITEST_LT; break;
927 case LE: test = ITEST_LE; break;
928 case GTU: test = ITEST_GTU; break;
929 case GEU: test = ITEST_GEU; break;
930 case LTU: test = ITEST_LTU; break;
931 case LEU: test = ITEST_LEU; break;
932 }
933
934 return test;
935}
936
937
938/* Generate the code to compare two integer values. The return value is
939 the comparison expression. */
940
941static rtx
942gen_int_relational (test_code, cmp0, cmp1, p_invert)
943 enum rtx_code test_code; /* relational test (EQ, etc) */
944 rtx cmp0; /* first operand to compare */
945 rtx cmp1; /* second operand to compare */
946 int *p_invert; /* whether branch needs to reverse its test */
947{
948 struct cmp_info {
949 enum rtx_code test_code; /* test code to use in insn */
950 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
951 int const_add; /* constant to add (convert LE -> LT) */
952 int reverse_regs; /* reverse registers in test */
953 int invert_const; /* != 0 if invert value if cmp1 is constant */
954 int invert_reg; /* != 0 if invert value if cmp1 is register */
955 int unsignedp; /* != 0 for unsigned comparisons. */
956 };
957
958 static struct cmp_info info[ (int)ITEST_MAX ] = {
959
960 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
961 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
962
963 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
964 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
965 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
966 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
967
968 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
969 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
970 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
971 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
972 };
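  /* Worked example (illustrative): for (GT x 7), the GT entry selects
     LT with const_add = 1 and invert_const = 1, so the constant becomes
     8 and the branch tests !(x < 8), which is x > 7.  For (GT x y) with
     y in a register, reverse_regs swaps the operands and the test
     becomes (LT y x).  */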
973
974 enum internal_test test;
975 enum machine_mode mode;
976 struct cmp_info *p_info;
977
978 test = map_test_to_internal_test (test_code);
979 if (test == ITEST_MAX)
980 abort ();
981
982 p_info = &info[ (int)test ];
983
984 mode = GET_MODE (cmp0);
985 if (mode == VOIDmode)
986 mode = GET_MODE (cmp1);
987
988 /* Make sure we can handle any constants given to us. */
989 if (GET_CODE (cmp1) == CONST_INT)
990 {
991 HOST_WIDE_INT value = INTVAL (cmp1);
992 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
993
994 /* if the immediate overflows or does not fit in the immediate field,
995 spill it to a register */
996
997 if ((p_info->unsignedp ?
998 (uvalue + p_info->const_add > uvalue) :
999 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1000 {
1001 cmp1 = force_reg (mode, cmp1);
1002 }
1003 else if (!(p_info->const_range_p) (value + p_info->const_add))
1004 {
1005 cmp1 = force_reg (mode, cmp1);
1006 }
1007 }
1008 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1009 {
1010 cmp1 = force_reg (mode, cmp1);
1011 }
1012
1013 /* See if we need to invert the result. */
1014 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1015 ? p_info->invert_const
1016 : p_info->invert_reg);
1017
1018 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1019 Comparison between two registers, may involve switching operands. */
1020 if (GET_CODE (cmp1) == CONST_INT)
1021 {
1022 if (p_info->const_add != 0)
1023 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1024
1025 }
1026 else if (p_info->reverse_regs)
1027 {
1028 rtx temp = cmp0;
1029 cmp0 = cmp1;
1030 cmp1 = temp;
1031 }
1032
1033 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1034}
1035
1036
1037/* Generate the code to compare two float values. The return value is
1038 the comparison expression. */
1039
1040static rtx
1041gen_float_relational (test_code, cmp0, cmp1)
1042 enum rtx_code test_code; /* relational test (EQ, etc) */
1043 rtx cmp0; /* first operand to compare */
1044 rtx cmp1; /* second operand to compare */
1045{
1046 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1047 rtx brtmp;
1048 int reverse_regs, invert;
1049
1050 switch (test_code)
1051 {
1052 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1053 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1054 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1055 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1056 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1057 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1058 default:
1059 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1060 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1061 }
1062
1063 if (reverse_regs)
1064 {
1065 rtx temp = cmp0;
1066 cmp0 = cmp1;
1067 cmp1 = temp;
1068 }
1069
1070 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1071 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1072
1073 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1074}
1075
1076
1077void
1078xtensa_expand_conditional_branch (operands, test_code)
1079 rtx *operands;
1080 enum rtx_code test_code;
1081{
1082 enum cmp_type type = branch_type;
1083 rtx cmp0 = branch_cmp[0];
1084 rtx cmp1 = branch_cmp[1];
1085 rtx cmp;
1086 int invert;
1087 rtx label1, label2;
1088
1089 switch (type)
1090 {
1091 case CMP_DF:
1092 default:
1093 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1094
1095 case CMP_SI:
1096 invert = FALSE;
1097 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1098 break;
1099
1100 case CMP_SF:
1101 if (!TARGET_HARD_FLOAT)
1102 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1103 invert = FALSE;
1104 cmp = gen_float_relational (test_code, cmp0, cmp1);
1105 break;
1106 }
1107
1108 /* Generate the branch. */
1109
1110 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1111 label2 = pc_rtx;
1112
1113 if (invert)
1114 {
1115 label2 = label1;
1116 label1 = pc_rtx;
1117 }
1118
1119 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1120 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1121 label1,
1122 label2)));
1123}
1124
1125
1126static rtx
1127gen_conditional_move (cmp)
1128 rtx cmp;
1129{
1130 enum rtx_code code = GET_CODE (cmp);
1131 rtx op0 = branch_cmp[0];
1132 rtx op1 = branch_cmp[1];
1133
1134 if (branch_type == CMP_SI)
1135 {
1136 /* Jump optimization calls get_condition() which canonicalizes
1137 comparisons like (GE x <const>) to (GT x <const-1>).
1138 Transform those comparisons back to GE, since that is the
1139 comparison supported in Xtensa. We shouldn't have to
1140 transform <LE x const> comparisons, because neither
1141 xtensa_expand_conditional_branch() nor get_condition() will
1142 produce them. */
1143
1144 if ((code == GT) && (op1 == constm1_rtx))
1145 {
1146 code = GE;
1147 op1 = const0_rtx;
1148 }
1149 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1150
1151 if (boolean_operator (cmp, VOIDmode))
1152 {
1153 /* swap the operands to make const0 second */
1154 if (op0 == const0_rtx)
1155 {
1156 op0 = op1;
1157 op1 = const0_rtx;
1158 }
1159
1160 /* if not comparing against zero, emit a comparison (subtract) */
1161 if (op1 != const0_rtx)
1162 {
1163 op0 = expand_binop (SImode, sub_optab, op0, op1,
1164 0, 0, OPTAB_LIB_WIDEN);
1165 op1 = const0_rtx;
1166 }
1167 }
1168 else if (branch_operator (cmp, VOIDmode))
1169 {
1170 /* swap the operands to make const0 second */
1171 if (op0 == const0_rtx)
1172 {
1173 op0 = op1;
1174 op1 = const0_rtx;
1175
1176 switch (code)
1177 {
1178 case LT: code = GE; break;
1179 case GE: code = LT; break;
1180 default: abort ();
1181 }
1182 }
1183
1184 if (op1 != const0_rtx)
1185 return 0;
1186 }
1187 else
1188 return 0;
1189
1190 return gen_rtx (code, VOIDmode, op0, op1);
1191 }
1192
1193 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1194 return gen_float_relational (code, op0, op1);
1195
1196 return 0;
1197}
1198
1199
1200int
1201xtensa_expand_conditional_move (operands, isflt)
1202 rtx *operands;
1203 int isflt;
1204{
1205 rtx cmp;
1206 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1207
1208 if (!(cmp = gen_conditional_move (operands[1])))
1209 return 0;
1210
1211 if (isflt)
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsfcc_internal0
1214 : gen_movsfcc_internal1);
1215 else
1216 gen_fn = (branch_type == CMP_SI
1217 ? gen_movsicc_internal0
1218 : gen_movsicc_internal1);
1219
1220 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1221 operands[2], operands[3], cmp));
1222 return 1;
1223}
1224
1225
1226int
1227xtensa_expand_scc (operands)
1228 rtx *operands;
1229{
1230 rtx dest = operands[0];
1231 rtx cmp = operands[1];
1232 rtx one_tmp, zero_tmp;
1233 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1234
1235 if (!(cmp = gen_conditional_move (cmp)))
1236 return 0;
1237
1238 one_tmp = gen_reg_rtx (SImode);
1239 zero_tmp = gen_reg_rtx (SImode);
1240 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1241 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1242
1243 gen_fn = (branch_type == CMP_SI
1244 ? gen_movsicc_internal0
1245 : gen_movsicc_internal1);
1246 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1247 return 1;
1248}
1249
1250
1251/* Emit insns to move operands[1] into operands[0].
1252
1253 Return 1 if we have written out everything that needs to be done to
1254 do the move. Otherwise, return 0 and the caller will emit the move
1255 normally. */
1256
1257int
1258xtensa_emit_move_sequence (operands, mode)
1259 rtx *operands;
1260 enum machine_mode mode;
1261{
1262 if (CONSTANT_P (operands[1])
1263 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1264 && (GET_CODE (operands[1]) != CONST_INT
1265 || !xtensa_simm12b (INTVAL (operands[1]))))
1266 {
1267 xtensa_load_constant (operands[0], operands[1]);
1268 return 1;
1269 }
1270
1271 if (!(reload_in_progress | reload_completed))
1272 {
 1273 if (!xtensa_valid_move (mode, operands))
1274 operands[1] = force_reg (mode, operands[1]);
1275
1276 /* Check if this move is copying an incoming argument in a7. If
1277 so, emit the move, followed by the special "set_frame_ptr"
1278 unspec_volatile insn, at the very beginning of the function.
1279 This is necessary because the register allocator will ignore
1280 conflicts with a7 and may assign some other pseudo to a7. If
1281 that pseudo was assigned prior to this move, it would clobber
1282 the incoming argument in a7. By copying the argument out of
1283 a7 as the very first thing, and then immediately following
1284 that with an unspec_volatile to keep the scheduler away, we
1285 should avoid any problems. */
1286
1287 if (a7_overlap_mentioned_p (operands[1]))
1288 {
1289 rtx mov;
1290 switch (mode)
1291 {
1292 case SImode:
1293 mov = gen_movsi_internal (operands[0], operands[1]);
1294 break;
1295 case HImode:
1296 mov = gen_movhi_internal (operands[0], operands[1]);
1297 break;
1298 case QImode:
1299 mov = gen_movqi_internal (operands[0], operands[1]);
1300 break;
1301 default:
1302 abort ();
1303 }
1304
1305 /* Insert the instructions before any other argument copies.
1306 (The set_frame_ptr insn comes _after_ the move, so push it
1307 out first.) */
1308 push_topmost_sequence ();
1309 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1310 emit_insn_after (mov, get_insns ());
1311 pop_topmost_sequence ();
1312
1313 return 1;
1314 }
1315 }
1316
1317 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1318 instruction won't be recognized after reload. So we remove the
1319 subreg and adjust mem accordingly. */
1320 if (reload_in_progress)
1321 {
1322 operands[0] = fixup_subreg_mem (operands[0]);
1323 operands[1] = fixup_subreg_mem (operands[1]);
1324 }
1325 return 0;
1326}
1327
1328static rtx
1329fixup_subreg_mem (x)
1330 rtx x;
1331{
1332 if (GET_CODE (x) == SUBREG
1333 && GET_CODE (SUBREG_REG (x)) == REG
1334 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1335 {
1336 rtx temp =
1337 gen_rtx_SUBREG (GET_MODE (x),
1338 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1339 SUBREG_BYTE (x));
1340 x = alter_subreg (&temp);
1341 }
1342 return x;
1343}
1344
1345
1346/* Try to expand a block move operation to an RTL block move instruction.
1347 If not optimizing or if the block size is not a constant or if the
1348 block is small, the expansion fails and GCC falls back to calling
1349 memcpy().
1350
1351 operands[0] is the destination
1352 operands[1] is the source
1353 operands[2] is the length
1354 operands[3] is the alignment */
1355
1356int
1357xtensa_expand_block_move (operands)
1358 rtx *operands;
1359{
1360 rtx dest = operands[0];
1361 rtx src = operands[1];
1362 int bytes = INTVAL (operands[2]);
1363 int align = XINT (operands[3], 0);
1364 int num_pieces, move_ratio;
1365
1366 /* If this is not a fixed size move, just call memcpy */
1367 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1368 return 0;
1369
1370 /* Anything to move? */
1371 if (bytes <= 0)
1372 return 1;
1373
1374 if (align > MOVE_MAX)
1375 align = MOVE_MAX;
1376
1377 /* decide whether to expand inline based on the optimization level */
1378 move_ratio = 4;
1379 if (optimize > 2)
1380 move_ratio = LARGEST_MOVE_RATIO;
1381 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1382 if (num_pieces >= move_ratio)
1383 return 0;
1384
 1385 /* make sure the memory addresses are valid */
1386 operands[0] = validize_mem (dest);
1387 operands[1] = validize_mem (src);
1388
1389 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1390 operands[2], operands[3]));
1391 return 1;
1392}
1393
1394
1395/* Emit a sequence of instructions to implement a block move, trying
1396 to hide load delay slots as much as possible. Load N values into
1397 temporary registers, store those N values, and repeat until the
1398 complete block has been moved. N=delay_slots+1 */
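/* Illustrative schedule (register numbers are arbitrary): with one load
   delay slot, N = 2, so a word-aligned 16-byte copy is emitted roughly as

	l32i	a9, a2, 0
	l32i	a10, a2, 4
	s32i	a9, a3, 0
	s32i	a10, a3, 4
	l32i	a9, a2, 8
	l32i	a10, a2, 12
	s32i	a9, a3, 8
	s32i	a10, a3, 12

   so that every load is separated from the store that consumes its
   result.  */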
1399
1400struct meminsnbuf {
1401 char template[30];
1402 rtx operands[2];
1403};
1404
1405void
1406xtensa_emit_block_move (operands, tmpregs, delay_slots)
1407 rtx *operands;
1408 rtx *tmpregs;
1409 int delay_slots;
1410{
1411 rtx dest = operands[0];
1412 rtx src = operands[1];
1413 int bytes = INTVAL (operands[2]);
1414 int align = XINT (operands[3], 0);
1415 rtx from_addr = XEXP (src, 0);
1416 rtx to_addr = XEXP (dest, 0);
1417 int from_struct = MEM_IN_STRUCT_P (src);
1418 int to_struct = MEM_IN_STRUCT_P (dest);
1419 int offset = 0;
1420 int chunk_size, item_size;
1421 struct meminsnbuf *ldinsns, *stinsns;
1422 const char *ldname, *stname;
1423 enum machine_mode mode;
1424
1425 if (align > MOVE_MAX)
1426 align = MOVE_MAX;
1427 item_size = align;
1428 chunk_size = delay_slots + 1;
1429
1430 ldinsns = (struct meminsnbuf *)
1431 alloca (chunk_size * sizeof (struct meminsnbuf));
1432 stinsns = (struct meminsnbuf *)
1433 alloca (chunk_size * sizeof (struct meminsnbuf));
1434
1435 mode = xtensa_find_mode_for_size (item_size);
1436 item_size = GET_MODE_SIZE (mode);
1437 ldname = xtensa_ld_opcodes[(int) mode];
1438 stname = xtensa_st_opcodes[(int) mode];
1439
1440 while (bytes > 0)
1441 {
1442 int n;
1443
1444 for (n = 0; n < chunk_size; n++)
1445 {
1446 rtx addr, mem;
1447
1448 if (bytes == 0)
1449 {
1450 chunk_size = n;
1451 break;
1452 }
1453
1454 if (bytes < item_size)
1455 {
1456 /* find a smaller item_size which we can load & store */
1457 item_size = bytes;
1458 mode = xtensa_find_mode_for_size (item_size);
1459 item_size = GET_MODE_SIZE (mode);
1460 ldname = xtensa_ld_opcodes[(int) mode];
1461 stname = xtensa_st_opcodes[(int) mode];
1462 }
1463
1464 /* record the load instruction opcode and operands */
1465 addr = plus_constant (from_addr, offset);
1466 mem = gen_rtx_MEM (mode, addr);
1467 if (! memory_address_p (mode, addr))
1468 abort ();
1469 MEM_IN_STRUCT_P (mem) = from_struct;
1470 ldinsns[n].operands[0] = tmpregs[n];
1471 ldinsns[n].operands[1] = mem;
1472 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1473
1474 /* record the store instruction opcode and operands */
1475 addr = plus_constant (to_addr, offset);
1476 mem = gen_rtx_MEM (mode, addr);
1477 if (! memory_address_p (mode, addr))
1478 abort ();
1479 MEM_IN_STRUCT_P (mem) = to_struct;
1480 stinsns[n].operands[0] = tmpregs[n];
1481 stinsns[n].operands[1] = mem;
1482 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1483
1484 offset += item_size;
1485 bytes -= item_size;
1486 }
1487
1488 /* now output the loads followed by the stores */
1489 for (n = 0; n < chunk_size; n++)
1490 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1491 for (n = 0; n < chunk_size; n++)
1492 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1493 }
1494}
1495
1496
1497static enum machine_mode
1498xtensa_find_mode_for_size (item_size)
1499 unsigned item_size;
1500{
1501 enum machine_mode mode, tmode;
1502
1503 while (1)
1504 {
1505 mode = VOIDmode;
1506
1507 /* find mode closest to but not bigger than item_size */
1508 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1509 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1510 if (GET_MODE_SIZE (tmode) <= item_size)
1511 mode = tmode;
1512 if (mode == VOIDmode)
1513 abort ();
1514
1515 item_size = GET_MODE_SIZE (mode);
1516
1517 if (xtensa_ld_opcodes[(int) mode]
1518 && xtensa_st_opcodes[(int) mode])
1519 break;
1520
1521 /* cannot load & store this mode; try something smaller */
1522 item_size -= 1;
1523 }
1524
1525 return mode;
1526}
1527
1528
1529void
1530xtensa_expand_nonlocal_goto (operands)
1531 rtx *operands;
1532{
1533 rtx goto_handler = operands[1];
1534 rtx containing_fp = operands[3];
1535
1536 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1537 is too big to generate in-line */
1538
1539 if (GET_CODE (containing_fp) != REG)
1540 containing_fp = force_reg (Pmode, containing_fp);
1541
1542 goto_handler = replace_rtx (copy_rtx (goto_handler),
1543 virtual_stack_vars_rtx,
1544 containing_fp);
1545
1546 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1547 0, VOIDmode, 2,
1548 containing_fp, Pmode,
1549 goto_handler, Pmode);
1550}
1551
1552
1553static struct machine_function *
1554xtensa_init_machine_status ()
 1555{
 1556 return ggc_alloc_cleared (sizeof (struct machine_function));
1557}
1558
1559
1560void
1561xtensa_setup_frame_addresses ()
1562{
1563 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1564 cfun->machine->accesses_prev_frame = 1;
1565
1566 emit_library_call
1567 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1568 0, VOIDmode, 0);
1569}
1570
1571
1572/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
 1576 targeting that label will target the nop (and thus remain in the loop),
 1577 instead of targeting the instruction after the loop (and thus exiting
 1578 the loop). If the loop ends with a branch, we need the nop in case the
 1579 branch is targeting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1584
1585void
1586xtensa_emit_loop_end (insn, operands)
1587 rtx insn;
1588 rtx *operands;
1589{
1590 char done = 0;
1591
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1593 {
1594 switch (GET_CODE (insn))
1595 {
1596 case NOTE:
1597 case BARRIER:
1598 break;
1599
1600 case CODE_LABEL:
1601 output_asm_insn ("nop.n", operands);
1602 done = 1;
1603 break;
1604
1605 default:
1606 {
1607 rtx body = PATTERN (insn);
1608
1609 if (GET_CODE (body) == JUMP_INSN)
1610 {
1611 output_asm_insn ("nop.n", operands);
1612 done = 1;
1613 }
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1616 done = 1;
1617 }
1618 break;
1619 }
1620 }
1621
1622 output_asm_insn ("# loop end for %0", operands);
1623}
1624
1625
1626char *
1627xtensa_emit_call (callop, operands)
1628 int callop;
1629 rtx *operands;
1630{
 1631 static char result[64];
1632 rtx tgt = operands[callop];
1633
1634 if (GET_CODE (tgt) == CONST_INT)
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1638 else
1639 sprintf (result, "call8\t%%%d", callop);
1640
1641 return result;
1642}
1643
1644
1645/* Return the stabs register number to use for 'regno'. */
1646
1647int
1648xtensa_dbx_register_number (regno)
1649 int regno;
1650{
1651 int first = -1;
1652
1653 if (GP_REG_P (regno)) {
1654 regno -= GP_REG_FIRST;
1655 first = 0;
1656 }
1657 else if (BR_REG_P (regno)) {
1658 regno -= BR_REG_FIRST;
1659 first = 16;
1660 }
1661 else if (FP_REG_P (regno)) {
1662 regno -= FP_REG_FIRST;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1668 first = 256;
1669 }
1670 else if (ACC_REG_P (regno))
1671 {
1672 first = 0;
1673 regno = -1;
1674 }
1675
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1678 if (first == -1)
1679 return 0;
1680
1681 return first + regno;
1682}
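/* Illustrative mapping (derived from the code above): address register
   a5 maps to 5, boolean register b2 maps to 18 (16 + 2), and
   floating-point register f0 maps to 256 under the TIE numbering
   convention mentioned in the comment.  */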
1683
1684
1685/* Argument support functions. */
1686
1687/* Initialize CUMULATIVE_ARGS for a function. */
1688
1689void
1690init_cumulative_args (cum, fntype, libname)
1691 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1692 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1693 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1694{
1695 cum->arg_words = 0;
1696}
1697
1698/* Advance the argument to the next argument position. */
1699
1700void
1701function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1705{
1706 int words, max;
1707 int *arg_words;
1708
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
1711
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1715
1716 if ((*arg_words + words > max) && (*arg_words < max))
1717 *arg_words = max;
1718
1719 *arg_words += words;
1720}
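/* Worked example (assuming MAX_ARGS_IN_REGISTERS is 6): if five words of
   arguments have already been assigned and the next argument needs two
   words, it cannot be split between registers and memory, so *arg_words
   is first bumped to 6 and then advanced to 8; the argument is passed
   entirely on the stack.  */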
1721
1722
1723/* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
1725
1726rtx
1727function_arg (cum, mode, type, incoming_p)
1728 CUMULATIVE_ARGS *cum; /* current arg information */
1729 enum machine_mode mode; /* current arg mode */
1730 tree type; /* type of the argument or 0 if lib support */
1731 int incoming_p; /* computing the incoming registers? */
1732{
1733 int regbase, words, max;
1734 int *arg_words;
1735 int regno;
1736 enum machine_mode result_mode;
1737
1738 arg_words = &cum->arg_words;
1739 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1740 max = MAX_ARGS_IN_REGISTERS;
1741
1742 words = (((mode != BLKmode)
1743 ? (int) GET_MODE_SIZE (mode)
1744 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1745
1746 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1747 *arg_words += (*arg_words & 1);
1748
1749 if (*arg_words + words > max)
1750 return (rtx)0;
1751
1752 regno = regbase + *arg_words;
1753 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1754
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1757 modes bigger than 2 words (because we only have patterns for
1758 modes of 2 words or smaller), we can't control the expansion
1759 unless we explicitly list the individual registers in a PARALLEL. */
1760
1761 if ((mode == BLKmode || words > 2)
1762 && regno < A7_REG
1763 && regno + words > A7_REG)
1764 {
1765 rtx result;
1766 int n;
1767
1768 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1769 for (n = 0; n < words; n++)
1770 {
1771 XVECEXP (result, 0, n) =
1772 gen_rtx_EXPR_LIST (VOIDmode,
1773 gen_raw_REG (SImode, regno + n),
1774 GEN_INT (n * UNITS_PER_WORD));
1775 }
1776 return result;
1777 }
1778
1779 return gen_raw_REG (result_mode, regno);
1780}
1781
1782
1783void
1784override_options ()
1785{
1786 int regno;
1787 enum machine_mode mode;
1788
1789 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1790 error ("boolean registers required for the floating-point option");
1791
1792 /* set up the tables of ld/st opcode names for block moves */
1793 xtensa_ld_opcodes[(int) SImode] = "l32i";
1794 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1795 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1796 xtensa_st_opcodes[(int) SImode] = "s32i";
1797 xtensa_st_opcodes[(int) HImode] = "s16i";
1798 xtensa_st_opcodes[(int) QImode] = "s8i";
1799
1800 xtensa_char_to_class['q'] = SP_REG;
1801 xtensa_char_to_class['a'] = GR_REGS;
1802 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1803 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1804 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1805 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1806 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1809
1810 /* Set up array giving whether a given register can hold a given mode. */
1811 for (mode = VOIDmode;
1812 mode != MAX_MACHINE_MODE;
1813 mode = (enum machine_mode) ((int) mode + 1))
1814 {
1815 int size = GET_MODE_SIZE (mode);
1816 enum mode_class class = GET_MODE_CLASS (mode);
1817
1818 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1819 {
1820 int temp;
1821
1822 if (ACC_REG_P (regno))
1823 temp = (TARGET_MAC16 &&
1824 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1825 else if (GP_REG_P (regno))
1826 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1827 else if (FP_REG_P (regno))
1828 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1829 else if (BR_REG_P (regno))
1830 temp = (TARGET_BOOLEANS && (mode == CCmode));
1831 else
1832 temp = FALSE;
1833
1834 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1835 }
1836 }
1837
1838 init_machine_status = xtensa_init_machine_status;
1839
1840 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1841 some targets need to always use PIC. */
 1842 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1843 flag_pic = 1;
1844}
1845
1846
1847/* A C compound statement to output to stdio stream STREAM the
1848 assembler syntax for an instruction operand X. X is an RTL
1849 expression.
1850
1851 CODE is a value that can be used to specify one of several ways
1852 of printing the operand. It is used when identical operands
1853 must be printed differently depending on the context. CODE
1854 comes from the '%' specification that was used to request
1855 printing of the operand. If the specification was just '%DIGIT'
1856 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1857 is the ASCII code for LTR.
1858
1859 If X is a register, this macro should print the register's name.
1860 The names can be found in an array 'reg_names' whose type is
1861 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1862
1863 When the machine description has a specification '%PUNCT' (a '%'
1864 followed by a punctuation character), this macro is called with
1865 a null pointer for X and the punctuation character for CODE.
1866
1867 'a', 'c', 'l', and 'n' are reserved.
1868
1869 The Xtensa specific codes are:
1870
1871 'd' CONST_INT, print as signed decimal
1872 'x' CONST_INT, print as signed hexadecimal
1873 'K' CONST_INT, print number of bits in mask for EXTUI
1874 'R' CONST_INT, print (X & 0x1f)
1875 'L' CONST_INT, print ((32 - X) & 0x1f)
1876 'D' REG, print second register of double-word register operand
1877 'N' MEM, print address of next word following a memory operand
1878 'v' MEM, if memory reference is volatile, output a MEMW before it
1879*/
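/* Worked examples for the codes above (illustrative): for the constant
   20, '%R' prints 20 (20 & 0x1f) and '%L' prints 12 ((32 - 20) & 0x1f);
   for the mask 0xff, '%K' prints 8, the number of low-order one bits.
   '%D' on a double-word register operand names its second register, '%N'
   on a DImode or DFmode memory operand prints the address of its second
   word, and '%v' emits a MEMW before a volatile memory reference when
   volatile accesses are being serialized.  */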
1880
1881static void
1882printx (file, val)
1883 FILE *file;
1884 signed int val;
1885{
1886 /* print a hexadecimal value in a nice way */
1887 if ((val > -0xa) && (val < 0xa))
1888 fprintf (file, "%d", val);
1889 else if (val < 0)
1890 fprintf (file, "-0x%x", -val);
1891 else
1892 fprintf (file, "0x%x", val);
1893}
1894
1895
1896void
1897print_operand (file, op, letter)
1898 FILE *file; /* file to write to */
1899 rtx op; /* operand to print */
1900 int letter; /* %<letter> or 0 */
1901{
1902 enum rtx_code code;
1903
1904 if (! op)
1905 error ("PRINT_OPERAND null pointer");
1906
1907 code = GET_CODE (op);
1908 switch (code)
1909 {
1910 case REG:
1911 case SUBREG:
1912 {
1913 int regnum = xt_true_regnum (op);
1914 if (letter == 'D')
1915 regnum++;
1916 fprintf (file, "%s", reg_names[regnum]);
1917 break;
1918 }
1919
1920 case MEM:
1921 /* For a volatile memory reference, emit a MEMW before the
1922 load or store. */
1923 if (letter == 'v')
1924 {
1925 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1926 fprintf (file, "memw\n\t");
1927 break;
1928 }
1929 else if (letter == 'N')
1930 {
1931 enum machine_mode mode;
1932 switch (GET_MODE (op))
1933 {
1934 case DFmode: mode = SFmode; break;
1935 case DImode: mode = SImode; break;
1936 default: abort ();
1937 }
1938 op = adjust_address (op, mode, 4);
1939 }
1940
1941 output_address (XEXP (op, 0));
1942 break;
1943
1944 case CONST_INT:
1945 switch (letter)
1946 {
1947 case 'K':
1948 {
1949 int num_bits = 0;
1950 unsigned val = INTVAL (op);
1951 while (val & 1)
1952 {
1953 num_bits += 1;
1954 val = val >> 1;
1955 }
1956 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1957 fatal_insn ("invalid mask", op);
1958
1959 fprintf (file, "%d", num_bits);
1960 break;
1961 }
1962
1963 case 'L':
1964 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1965 break;
1966
1967 case 'R':
1968 fprintf (file, "%d", INTVAL (op) & 0x1f);
1969 break;
1970
1971 case 'x':
1972 printx (file, INTVAL (op));
1973 break;
1974
1975 case 'd':
1976 default:
1977 fprintf (file, "%d", INTVAL (op));
1978 break;
1979
1980 }
1981 break;
1982
1983 default:
1984 output_addr_const (file, op);
1985 }
1986}
1987
1988
1989/* A C compound statement to output to stdio stream STREAM the
1990 assembler syntax for an instruction operand that is a memory
 1991 reference whose address is ADDR. ADDR is an RTL expression. */
1992
1993void
1994print_operand_address (file, addr)
1995 FILE *file;
1996 rtx addr;
1997{
1998 if (!addr)
1999 error ("PRINT_OPERAND_ADDRESS, null pointer");
2000
2001 switch (GET_CODE (addr))
2002 {
2003 default:
2004 fatal_insn ("invalid address", addr);
2005 break;
2006
2007 case REG:
2008 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2009 break;
2010
2011 case PLUS:
2012 {
2013 rtx reg = (rtx)0;
2014 rtx offset = (rtx)0;
2015 rtx arg0 = XEXP (addr, 0);
2016 rtx arg1 = XEXP (addr, 1);
2017
2018 if (GET_CODE (arg0) == REG)
2019 {
2020 reg = arg0;
2021 offset = arg1;
2022 }
2023 else if (GET_CODE (arg1) == REG)
2024 {
2025 reg = arg1;
2026 offset = arg0;
2027 }
2028 else
2029 fatal_insn ("no register in address", addr);
2030
2031 if (CONSTANT_P (offset))
2032 {
2033 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2034 output_addr_const (file, offset);
2035 }
2036 else
2037 fatal_insn ("address offset not a constant", addr);
2038 }
2039 break;
2040
2041 case LABEL_REF:
2042 case SYMBOL_REF:
2043 case CONST_INT:
2044 case CONST:
2045 output_addr_const (file, addr);
2046 break;
2047 }
2048}
2049
2050
2051/* Emit either a label, .comm, or .lcomm directive. */
2052
2053void
2054xtensa_declare_object (file, name, init_string, final_string, size)
2055 FILE *file;
2056 char *name;
2057 char *init_string;
2058 char *final_string;
2059 int size;
2060{
2061 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2062 assemble_name (file, name);
2063 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2064}
2065
2066
2067void
2068xtensa_output_literal (file, x, mode, labelno)
2069 FILE *file;
2070 rtx x;
2071 enum machine_mode mode;
2072 int labelno;
2073{
2074 long value_long[2];
 2075 REAL_VALUE_TYPE r;
2076 int size;
2077
2078 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2079
2080 switch (GET_MODE_CLASS (mode))
2081 {
2082 case MODE_FLOAT:
2083 if (GET_CODE (x) != CONST_DOUBLE)
2084 abort ();
2085
 2086 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2087 switch (mode)
2088 {
2089 case SFmode:
2090 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2091 fprintf (file, "0x%08lx\n", value_long[0]);
2092 break;
2093
2094 case DFmode:
2095 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2096 fprintf (file, "0x%08lx, 0x%08lx\n",
2097 value_long[0], value_long[1]);
2098 break;
2099
2100 default:
2101 abort ();
2102 }
2103
2104 break;
2105
2106 case MODE_INT:
2107 case MODE_PARTIAL_INT:
2108 size = GET_MODE_SIZE (mode);
2109 if (size == 4)
2110 {
2111 output_addr_const (file, x);
2112 fputs ("\n", file);
2113 }
2114 else if (size == 8)
2115 {
2116 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2117 fputs (", ", file);
2118 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2119 fputs ("\n", file);
2120 }
2121 else
2122 abort ();
2123 break;
2124
2125 default:
2126 abort ();
2127 }
2128}
2129
2130
2131/* Return the bytes needed to compute the frame pointer from the current
2132 stack pointer. */
2133
2134#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2135#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
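/* Worked example (illustrative, assuming a 128-bit STACK_BOUNDARY):
   STACK_BYTES is then 16, so XTENSA_STACK_ALIGN (100) rounds up to 112
   and XTENSA_STACK_ALIGN (112) is left at 112.  */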
2136
2137long
2138compute_frame_size (size)
2139 int size; /* # of var. bytes allocated */
2140{
2141 /* add space for the incoming static chain value */
2142 if (current_function_needs_context)
2143 size += (1 * UNITS_PER_WORD);
2144
2145 xtensa_current_frame_size =
2146 XTENSA_STACK_ALIGN (size
2147 + current_function_outgoing_args_size
2148 + (WINDOW_SIZE * UNITS_PER_WORD));
2149 return xtensa_current_frame_size;
2150}
2151
2152
2153int
2154xtensa_frame_pointer_required ()
2155{
2156 /* The code to expand builtin_frame_addr and builtin_return_addr
2157 currently uses the hard_frame_pointer instead of frame_pointer.
2158 This seems wrong but maybe it's necessary for other architectures.
2159 This function is derived from the i386 code. */
2160
2161 if (cfun->machine->accesses_prev_frame)
2162 return 1;
2163
2164 return 0;
2165}
2166
2167
2168void
2169xtensa_reorg (first)
2170 rtx first;
2171{
2172 rtx insn, set_frame_ptr_insn = 0;
2173
2174 unsigned long tsize = compute_frame_size (get_frame_size ());
2175 if (tsize < (1 << (12+3)))
2176 frame_size_const = 0;
2177 else
2178 {
2179       frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2180
2181 /* make sure the constant is used so it doesn't get eliminated
2182 from the constant pool */
2183 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2184 }
2185
2186 if (!frame_pointer_needed)
2187 return;
2188
2189 /* Search all instructions, looking for the insn that sets up the
2190 frame pointer. This search will fail if the function does not
2191 have an incoming argument in $a7, but in that case, we can just
2192 set up the frame pointer at the very beginning of the
2193 function. */
2194
2195 for (insn = first; insn; insn = NEXT_INSN (insn))
2196 {
2197 rtx pat;
2198
2199 if (!INSN_P (insn))
2200 continue;
2201
2202 pat = PATTERN (insn);
2203 if (GET_CODE (pat) == UNSPEC_VOLATILE
2204 && (XINT (pat, 1) == UNSPECV_SET_FP))
2205 {
2206 set_frame_ptr_insn = insn;
2207 break;
2208 }
2209 }
2210
2211 if (set_frame_ptr_insn)
2212 {
2213 /* for all instructions prior to set_frame_ptr_insn, replace
2214 hard_frame_pointer references with stack_pointer */
2215 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2216 {
2217 if (INSN_P (insn))
2218 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2219 hard_frame_pointer_rtx,
2220 stack_pointer_rtx);
2221 }
2222 }
2223 else
2224 {
2225 /* emit the frame pointer move immediately after the NOTE that starts
2226 the function */
2227 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2228 stack_pointer_rtx), first);
2229 }
2230}
2231
2232
2233/* Set up the stack and frame (if desired) for the function. */
2234
2235void
2236xtensa_function_prologue (file, size)
2237 FILE *file;
2238 int size ATTRIBUTE_UNUSED;
2239{
2240 unsigned long tsize = compute_frame_size (get_frame_size ());
2241
2242 if (frame_pointer_needed)
2243 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2244 else
2245 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2246
2247
2248 if (tsize < (1 << (12+3)))
2249 {
2250 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2251 }
2252 else
2253 {
2254 fprintf (file, "\tentry\tsp, 16\n");
2255
2256 /* use a8 as a temporary since a0-a7 may be live */
2257 fprintf (file, "\tl32r\ta8, ");
2258 print_operand (file, frame_size_const, 0);
2259 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2260 fprintf (file, "\tmovsp\tsp, a8\n");
2261 }
2262}
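/* For illustration, a function without a frame pointer and a 64-byte
   frame gets the straightforward prologue

       .frame  sp, 64
       entry   sp, 64

   whereas a frame of 32768 bytes or more does not fit in the entry
   instruction's immediate field and is set up roughly as

       entry   sp, 16
       l32r    a8, .LC0
       sub     a8, sp, a8
       movsp   sp, a8

   where .LC0 stands for the constant-pool literal holding tsize - 16
   that xtensa_reorg created (the label name is only an example).  */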
2263
2264
2265/* Do any necessary cleanup after a function to restore
2266 stack, frame, and regs. */
2267
2268void
2269xtensa_function_epilogue (file, size)
2270 FILE *file;
2271 int size ATTRIBUTE_UNUSED;
2272{
2273 rtx insn = get_last_insn ();
2274 /* If the last insn was a BARRIER, we don't have to write anything. */
2275 if (GET_CODE (insn) == NOTE)
2276 insn = prev_nonnote_insn (insn);
2277 if (insn == 0 || GET_CODE (insn) != BARRIER)
2278 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2279
2280 xtensa_current_frame_size = 0;
2281}
2282
2283
2284/* Create the va_list data type.
2285 This structure is set up by __builtin_saveregs. The __va_reg
2286 field points to a stack-allocated region holding the contents of the
2287 incoming argument registers. The __va_ndx field is an index initialized
2288 to the position of the first unnamed (variable) argument. This same index
2289 is also used to address the arguments passed in memory. Thus, the
2290 __va_stk field is initialized to point to the position of the first
2291   argument in memory, offset to account for the arguments passed in
2292   registers.  E.g., if there are 6 argument registers, and each register is
2293 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2294 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2295 argument word N for N >= 6. */
2296
2297tree
2298xtensa_build_va_list (void)
2299{
2300  tree f_stk, f_reg, f_ndx, record, type_decl;
2301
2302 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2303 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2304
2305 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2306 ptr_type_node);
2307 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2308 ptr_type_node);
2309 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2310 integer_type_node);
2311
2312 DECL_FIELD_CONTEXT (f_stk) = record;
2313 DECL_FIELD_CONTEXT (f_reg) = record;
2314 DECL_FIELD_CONTEXT (f_ndx) = record;
2315
2316 TREE_CHAIN (record) = type_decl;
2317 TYPE_NAME (record) = type_decl;
2318 TYPE_FIELDS (record) = f_stk;
2319 TREE_CHAIN (f_stk) = f_reg;
2320 TREE_CHAIN (f_reg) = f_ndx;
2321
2322 layout_type (record);
2323 return record;
2324}
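/* Expressed as a C declaration, the record built above is roughly the
   following (a sketch; the field names match the get_identifier calls,
   while the typedef name is an assumption and is not created here):

       typedef struct __va_list_tag
       {
         void *__va_stk;
         void *__va_reg;
         int __va_ndx;
       } __gnuc_va_list;

   The meaning of each field is described in the comment preceding
   xtensa_build_va_list.  */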
2325
2326
2327/* Save the incoming argument registers on the stack. Returns the
2328 address of the saved registers. */
2329
2330rtx
2331xtensa_builtin_saveregs ()
2332{
2333 rtx gp_regs, dest;
2334 int arg_words = current_function_arg_words;
2335 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2336 int i;
2337
2338 if (gp_left == 0)
2339 return const0_rtx;
2340
2341 /* allocate the general-purpose register space */
2342 gp_regs = assign_stack_local
2343 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2344  set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2345
2346 /* Now store the incoming registers. */
2347 dest = change_address (gp_regs, SImode,
2348 plus_constant (XEXP (gp_regs, 0),
2349 arg_words * UNITS_PER_WORD));
2350
2351 /* Note: Don't use move_block_from_reg() here because the incoming
2352 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2353 Instead, call gen_raw_REG() directly so that we get a distinct
2354 instance of (REG:SI 7). */
2355 for (i = 0; i < gp_left; i++)
2356 {
2357 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2358 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2359 }
2360
2361 return XEXP (gp_regs, 0);
2362}
2363
2364
2365/* Implement `va_start' for varargs and stdarg. We look at the
2366 current function to fill in an initial va_list. */
2367
2368void
2369xtensa_va_start (stdarg_p, valist, nextarg)
2370 int stdarg_p ATTRIBUTE_UNUSED;
2371 tree valist;
2372 rtx nextarg ATTRIBUTE_UNUSED;
2373{
2374 tree f_stk, stk;
2375 tree f_reg, reg;
2376 tree f_ndx, ndx;
2377 tree t, u;
2378 int arg_words;
2379
2380 arg_words = current_function_args_info.arg_words;
2381
2382 f_stk = TYPE_FIELDS (va_list_type_node);
2383 f_reg = TREE_CHAIN (f_stk);
2384 f_ndx = TREE_CHAIN (f_reg);
2385
2386 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2387 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2388 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2389
2390 /* Call __builtin_saveregs; save the result in __va_reg */
2391 current_function_arg_words = arg_words;
2392 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2393 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2394 TREE_SIDE_EFFECTS (t) = 1;
2395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2396
2397 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2398 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2399 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2400 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2401 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2402 TREE_SIDE_EFFECTS (t) = 1;
2403 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2404
2405 /* Set the __va_ndx member. */
2406 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2407 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2408 TREE_SIDE_EFFECTS (t) = 1;
2409 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2410}
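/* The expansion above behaves like this C sketch (illustrative only;
   __arg_ptr stands for virtual_incoming_args_rtx):

       (AP).__va_reg = __builtin_saveregs ();
       (AP).__va_stk = __arg_ptr - __MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD;
       (AP).__va_ndx = arg_words * UNITS_PER_WORD;
   */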
2411
2412
2413/* Implement `va_arg'. */
2414
2415rtx
2416xtensa_va_arg (valist, type)
2417 tree valist, type;
2418{
2419 tree f_stk, stk;
2420 tree f_reg, reg;
2421 tree f_ndx, ndx;
2422 tree tmp, addr_tree, type_size;
2423 rtx array, orig_ndx, r, addr, size, va_size;
2424 rtx lab_false, lab_over, lab_false2;
2425
2426 f_stk = TYPE_FIELDS (va_list_type_node);
2427 f_reg = TREE_CHAIN (f_stk);
2428 f_ndx = TREE_CHAIN (f_reg);
2429
2430 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2431 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2432 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2433
2434 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2435
2436 va_size = gen_reg_rtx (SImode);
2437 tmp = fold (build (MULT_EXPR, sizetype,
2438 fold (build (TRUNC_DIV_EXPR, sizetype,
2439 fold (build (PLUS_EXPR, sizetype,
2440 type_size,
2441 size_int (UNITS_PER_WORD - 1))),
2442 size_int (UNITS_PER_WORD))),
2443 size_int (UNITS_PER_WORD)));
2444 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2445 if (r != va_size)
2446 emit_move_insn (va_size, r);
2447
2448
2449 /* First align __va_ndx to a double word boundary if necessary for this arg:
2450
2451 if (__alignof__ (TYPE) > 4)
2452 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2453 */
2454
2455 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2456 {
2457 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2458 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2459 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2460 build_int_2 (-2 * UNITS_PER_WORD, -1));
2461 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2462 TREE_SIDE_EFFECTS (tmp) = 1;
2463 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2464 }
2465
2466
2467 /* Increment __va_ndx to point past the argument:
2468
2469 orig_ndx = (AP).__va_ndx;
2470 (AP).__va_ndx += __va_size (TYPE);
2471 */
2472
2473 orig_ndx = gen_reg_rtx (SImode);
2474 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2475 if (r != orig_ndx)
2476 emit_move_insn (orig_ndx, r);
2477
2478 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2479 make_tree (intSI_type_node, va_size));
2480 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2481 TREE_SIDE_EFFECTS (tmp) = 1;
2482 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2483
2484
2485 /* Check if the argument is in registers:
2486
2487 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2488 && !MUST_PASS_IN_STACK (type))
2489 __array = (AP).__va_reg;
2490 */
2491
2492 array = gen_reg_rtx (Pmode);
2493
2494  lab_over = NULL_RTX;
2495 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2496 {
2497 lab_false = gen_label_rtx ();
2498 lab_over = gen_label_rtx ();
2499
2500 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2501 EXPAND_NORMAL),
2502 GEN_INT (MAX_ARGS_IN_REGISTERS
2503 * UNITS_PER_WORD),
2504 GT, const1_rtx, SImode, 0, lab_false);
2505
2506 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2507 if (r != array)
2508 emit_move_insn (array, r);
2509
2510 emit_jump_insn (gen_jump (lab_over));
2511 emit_barrier ();
2512 emit_label (lab_false);
2513 }
2514
2515 /* ...otherwise, the argument is on the stack (never split between
2516 registers and the stack -- change __va_ndx if necessary):
2517
2518 else
2519 {
2520 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2521 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2522 __array = (AP).__va_stk;
2523 }
2524 */
2525
2526 lab_false2 = gen_label_rtx ();
2527 emit_cmp_and_jump_insns (orig_ndx,
2528 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2529 GE, const1_rtx, SImode, 0, lab_false2);
2530
2531 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2532 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2533 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2534 TREE_SIDE_EFFECTS (tmp) = 1;
2535 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2536
2537 emit_label (lab_false2);
2538
2539 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2540 if (r != array)
2541 emit_move_insn (array, r);
2542
2543   if (lab_over != NULL_RTX)
2544     emit_label (lab_over);
2545
2546
2547 /* Given the base array pointer (__array) and index to the subsequent
2548 argument (__va_ndx), find the address:
2549
2550 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2551 ? sizeof (TYPE)
2552 : __va_size (TYPE))
2553
2554 The results are endian-dependent because values smaller than one word
2555 are aligned differently.
2556 */
2557
2558 size = gen_reg_rtx (SImode);
2559 emit_move_insn (size, va_size);
2560
2561 if (BYTES_BIG_ENDIAN)
2562 {
2563 rtx lab_use_va_size = gen_label_rtx ();
2564
2565 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2566 EXPAND_NORMAL),
2567 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2568 GE, const1_rtx, SImode, 0, lab_use_va_size);
2569
2570 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2571 if (r != size)
2572 emit_move_insn (size, r);
2573
2574 emit_label (lab_use_va_size);
2575 }
2576
2577 addr_tree = build (PLUS_EXPR, ptr_type_node,
2578 make_tree (ptr_type_node, array),
2579 ndx);
2580 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2581 make_tree (intSI_type_node, size));
2582 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2583 addr = copy_to_reg (addr);
2584 return addr;
2585}
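/* Putting the pseudo-code fragments in the comments above together,
   xtensa_va_arg expands code that behaves roughly like this C sketch
   (illustrative only; __va_size stands for the round-up-to-words
   computation performed into va_size above):

       if (__alignof__ (TYPE) > 4)
         (AP).__va_ndx = ((AP).__va_ndx + 7) & -8;

       orig_ndx = (AP).__va_ndx;
       (AP).__va_ndx += __va_size (TYPE);

       if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
           && !MUST_PASS_IN_STACK (TYPE))
         __array = (AP).__va_reg;
       else
         {
           if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
             (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
           __array = (AP).__va_stk;
         }

       addr = __array + (AP).__va_ndx
              - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
                 ? sizeof (TYPE) : __va_size (TYPE));
   */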
2586
2587
2588enum reg_class
2589xtensa_preferred_reload_class (x, class)
2590 rtx x;
2591 enum reg_class class;
2592{
2593 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2594 return NO_REGS;
2595
2596 /* Don't use sp for reloads! */
2597 if (class == AR_REGS)
2598 return GR_REGS;
2599
2600 return class;
2601}
2602
2603
2604enum reg_class
2605xtensa_secondary_reload_class (class, mode, x, isoutput)
2606 enum reg_class class;
2607 enum machine_mode mode ATTRIBUTE_UNUSED;
2608 rtx x;
2609 int isoutput;
2610{
2611 int regno;
2612
2613 if (GET_CODE (x) == SIGN_EXTEND)
2614 x = XEXP (x, 0);
2615 regno = xt_true_regnum (x);
2616
2617 if (!isoutput)
2618 {
2619 if (class == FP_REGS && constantpool_mem_p (x))
2620 return GR_REGS;
2621 }
2622
2623 if (ACC_REG_P (regno))
2624 return (class == GR_REGS ? NO_REGS : GR_REGS);
2625 if (class == ACC_REG)
2626 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2627
2628 return NO_REGS;
2629}
2630
2631
2632void
2633order_regs_for_local_alloc ()
2634{
2635 if (!leaf_function_p ())
2636 {
2637 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2638 FIRST_PSEUDO_REGISTER * sizeof (int));
2639 }
2640 else
2641 {
2642 int i, num_arg_regs;
2643 int nxt = 0;
2644
2645 /* use the AR registers in increasing order (skipping a0 and a1)
2646 but save the incoming argument registers for a last resort */
2647 num_arg_regs = current_function_args_info.arg_words;
2648 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2649 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2650 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2651 reg_alloc_order[nxt++] = i + num_arg_regs;
2652 for (i = 0; i < num_arg_regs; i++)
2653 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2654
2655 /* list the FP registers in order for now */
2656 for (i = 0; i < 16; i++)
2657 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2658
2659 /* GCC requires that we list *all* the registers.... */
2660 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2661 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2662 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2663 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2664
2665 /* list the coprocessor registers in order */
2666 for (i = 0; i < BR_REG_NUM; i++)
2667 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2668
2669 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2670 }
2671}
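/* For illustration, in a leaf function whose incoming arguments occupy
   two words (and assuming GP_ARG_FIRST is a2 and MAX_ARGS_IN_REGISTERS
   is 6, as defined in xtensa.h), the AR portion of the resulting order
   is

       a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a2 a3

   i.e. the free registers first and the two argument registers last,
   followed by the FP, bookkeeping, boolean, and MAC16 registers as
   listed above.  */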
2672
2673
2674/* A customized version of reg_overlap_mentioned_p that only looks for
2675 references to a7 (as opposed to hard_frame_pointer_rtx). */
2676
2677int
2678a7_overlap_mentioned_p (x)
2679 rtx x;
2680{
2681 int i, j;
2682 unsigned int x_regno;
2683 const char *fmt;
2684
2685 if (GET_CODE (x) == REG)
2686 {
2687 x_regno = REGNO (x);
2688 return (x != hard_frame_pointer_rtx
2689 && x_regno < A7_REG + 1
2690 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2691 }
2692
2693 if (GET_CODE (x) == SUBREG
2694 && GET_CODE (SUBREG_REG (x)) == REG
2695 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2696 {
2697 x_regno = subreg_regno (x);
2698 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2699 && x_regno < A7_REG + 1
2700 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2701 }
2702
2703 /* X does not match, so try its subexpressions. */
2704 fmt = GET_RTX_FORMAT (GET_CODE (x));
2705 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2706 {
2707 if (fmt[i] == 'e')
2708 {
2709 if (a7_overlap_mentioned_p (XEXP (x, i)))
2710 return 1;
2711 }
2712 else if (fmt[i] == 'E')
2713 {
2714 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2715 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2716 return 1;
2717 }
2718 }
2719
2720 return 0;
2721}
2722
2723/* The literal pool stays with the function. */
2724
2725static void
2726xtensa_select_rtx_section (mode, x, align)
2727 enum machine_mode mode ATTRIBUTE_UNUSED;
2728 rtx x ATTRIBUTE_UNUSED;
2729 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2730{
2731 function_section (current_function_decl);
2732}
2733
2734/* If we are referencing a function that is static, make the SYMBOL_REF
2735 special so that we can generate direct calls to it even with -fpic. */
2736
2737static void
2738xtensa_encode_section_info (decl, first)
2739 tree decl;
2740 int first ATTRIBUTE_UNUSED;
2741{
2742 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2743 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2744}
2745
2746#include "gt-xtensa.h"