]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/xtensa/xtensa.c
2002-03-22 Eric Blake <ebb9@email.byu.edu>
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
CommitLineData
f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "regs.h"
26#include "machmode.h"
27#include "hard-reg-set.h"
28#include "basic-block.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
43#include "toplev.h"
44#include "optabs.h"
45#include "libfuncs.h"
46#include "target.h"
47#include "target-def.h"
48
/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  Used as the index into the 'info' table in
   gen_int_relational().  */

enum internal_test {
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX	/* number of tests; also used as the "unrecognized" sentinel */
  };
66
/* Cached operands, and operator to compare for use in set/branch on
   condition codes.  Set by the cmpsi/cmpsf expanders and consumed by
   the branch/scc/conditional-move expanders below.  */
rtx branch_cmp[2];

/* what type of branch to use (CMP_SI, CMP_SF, ...) */
enum cmp_type branch_type;

/* Array giving truth value on whether or not a given hard register
   can support a given mode.  */
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Current frame size calculated by compute_frame_size.  */
unsigned xtensa_current_frame_size;

/* Tables of ld/st opcode names for block moves */
const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
#define LARGEST_MOVE_RATIO 15

/* Define the structure for the machine field in struct function.  */
struct machine_function
{
  int accesses_prev_frame;	/* NOTE(review): presumably nonzero when the
				   function reads its caller's frame (e.g.
				   __builtin_frame_address) — confirm at the
				   point where it is set.  */
};
91
/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  All registers are marked allowable here; groups below
   appear to mirror the layout of xtensa_regno_to_class.  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};
103
/* Map hard register number to register class.  Layout: a0-a15 address
   registers (a1 is the stack pointer), two AR registers, the boolean
   register, sixteen floating-point registers, and the MAC16
   accumulator.  */
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
{
  GR_REGS,	SP_REG,		GR_REGS,	GR_REGS,
  GR_REGS,	GR_REGS,	GR_REGS,	GR_REGS,
  GR_REGS,	GR_REGS,	GR_REGS,	GR_REGS,
  GR_REGS,	GR_REGS,	GR_REGS,	GR_REGS,
  AR_REGS,	AR_REGS,	BR_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  ACC_REG,
};
118
/* Map register constraint character to register class.  Statically
   initialized to NO_REGS for all 256 characters; NOTE(review): the
   meaningful entries are presumably filled in at runtime (not visible
   in this chunk) — confirm in the target's option-override code.  */
enum reg_class xtensa_char_to_class[256] =
{
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
};
187
/* This macro generates the assembly code for function entry.
   FILE is a stdio stream to output the code to.
   SIZE is an int: how many units of temporary storage to allocate.
   Refer to the array 'regs_ever_live' to determine which registers
   to save; 'regs_ever_live[I]' is nonzero if register number I
   is ever used in the function.  This macro is responsible for
   knowing which registers should not be saved even if used.  */

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue

/* This macro generates the assembly code for function exit,
   on machines that need it.  If FUNCTION_EPILOGUE is not defined
   then individual return instructions are generated for each
   return statement.  Args are same as for FUNCTION_PROLOGUE.  */

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue

/* These hooks specify assembly directives for creating certain kinds
   of integer object.  */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

/* The target hook vector; must follow the hook #defines above so the
   initializer picks them up.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Forward declarations for the static helpers defined later in this file.  */
static int b4const_or_zero PARAMS ((int));
static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
static rtx gen_conditional_move PARAMS ((rtx));
static rtx fixup_subreg_mem PARAMS ((rtx x));
static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
static void xtensa_init_machine_status PARAMS ((struct function *p));
static void xtensa_free_machine_status PARAMS ((struct function *p));
static void printx PARAMS ((FILE *, signed int));

/* File-scope state used by the prologue/epilogue and argument code.  */
static rtx frame_size_const;
static int current_function_arg_words;
static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
  REG_ALLOC_ORDER;
229
230
231/*
232 * Functions to test Xtensa immediate operand validity.
233 */
234
/* Return nonzero if V is one of the unsigned branch-immediate constants
   encodable in a 4-bit B4CONSTU field.  */

int
xtensa_b4constu (v)
     int v;
{
  static const int b4constu_table[] = {
    32768, 65536, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256
  };
  unsigned i;

  for (i = 0; i < sizeof (b4constu_table) / sizeof (b4constu_table[0]); i++)
    if (v == b4constu_table[i])
      return 1;
  return 0;
}
261
/* Return nonzero if V is a multiple of 256 in the signed 8-bit-scaled
   range [-32768, 32512] (the ADDMI immediate range).  */

int
xtensa_simm8x256 (v)
     int v;
{
  if ((v & 255) != 0)
    return 0;
  return v >= -32768 && v <= 32512;
}
268
/* Return nonzero if V is encodable as an ADDI.N 4-bit immediate:
   -1 or any value from 1 through 15.  */

int
xtensa_ai4const (v)
     int v;
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
275
/* Return nonzero if V fits the MOVI.N signed 7-bit immediate range
   [-32, 95].  */

int
xtensa_simm7 (v)
     int v;
{
  return !(v < -32 || v > 95);
}
282
/* Return nonzero if V is one of the signed branch-immediate constants
   encodable in a 4-bit B4CONST field.  */

int
xtensa_b4const (v)
     int v;
{
  static const int b4const_table[] = {
    -1, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256
  };
  unsigned i;

  for (i = 0; i < sizeof (b4const_table) / sizeof (b4const_table[0]); i++)
    if (v == b4const_table[i])
      return 1;
  return 0;
}
309
/* Return nonzero if V fits in a signed 8-bit immediate field.  */

int
xtensa_simm8 (v)
     int v;
{
  return !(v < -128 || v > 127);
}
316
/* Return nonzero if V is in the range [7, 22] (the encodable SEXT
   field values, biased).  */

int
xtensa_tp7 (v)
     int v;
{
  if (v < 7)
    return 0;
  return v <= 22;
}
323
/* Return nonzero if V is a word-aligned offset in [0, 60], i.e. valid
   for the narrow L32I.N/S32I.N 4-bit scaled offset field.  */

int
xtensa_lsi4x4 (v)
     int v;
{
  if (v < 0 || v > 60)
    return 0;
  return (v & 3) == 0;
}
330
/* Return nonzero if V fits the MOVI signed 12-bit immediate range.  */

int
xtensa_simm12b (v)
     int v;
{
  return !(v < -2048 || v > 2047);
}
337
/* Return nonzero if V fits in an unsigned 8-bit offset field.  */

int
xtensa_uimm8 (v)
     int v;
{
  if (v < 0)
    return 0;
  return v <= 255;
}
344
/* Return nonzero if V is an even offset in [0, 510] (unsigned 8-bit
   field scaled by 2, for 16-bit loads/stores).  */

int
xtensa_uimm8x2 (v)
     int v;
{
  if (v & 1)
    return 0;
  return v >= 0 && v <= 510;
}
351
/* Return nonzero if V is a word-aligned offset in [0, 1020] (unsigned
   8-bit field scaled by 4, for 32-bit loads/stores).  */

int
xtensa_uimm8x4 (v)
     int v;
{
  if (v & 3)
    return 0;
  return v >= 0 && v <= 1020;
}
358
359
/* This is just like the standard true_regnum() function except that it
   works even when reg_renumber is not initialized.  Returns the hard
   register number for X (a REG or SUBREG), the pseudo number if X is an
   unallocated pseudo, or -1 if X is not a register at all.  */

int
xt_true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      /* Map an allocated pseudo to its assigned hard register; hard
	 registers and unallocated pseudos map to themselves.  */
      if (reg_renumber
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      /* Resolve the inner register first; only adjust by the subreg
	 offset when the result is a hard register.  */
      int base = xt_true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
385
386
387int
388add_operand (op, mode)
389 rtx op;
390 enum machine_mode mode;
391{
392 if (GET_CODE (op) == CONST_INT)
393 return (xtensa_simm8 (INTVAL (op)) ||
394 xtensa_simm8x256 (INTVAL (op)));
395
396 return register_operand (op, mode);
397}
398
399
400int
401arith_operand (op, mode)
402 rtx op;
403 enum machine_mode mode;
404{
405 if (GET_CODE (op) == CONST_INT)
406 return xtensa_simm8 (INTVAL (op));
407
408 return register_operand (op, mode);
409}
410
411
412int
413nonimmed_operand (op, mode)
414 rtx op;
415 enum machine_mode mode;
416{
417 /* We cannot use the standard nonimmediate_operand() predicate because
418 it includes constant pool memory operands. */
419
420 if (memory_operand (op, mode))
421 return !constantpool_address_p (XEXP (op, 0));
422
423 return register_operand (op, mode);
424}
425
426
427int
428mem_operand (op, mode)
429 rtx op;
430 enum machine_mode mode;
431{
432 /* We cannot use the standard memory_operand() predicate because
433 it includes constant pool memory operands. */
434
435 if (memory_operand (op, mode))
436 return !constantpool_address_p (XEXP (op, 0));
437
438 return FALSE;
439}
440
441
442int
fc12fa10 443xtensa_valid_move (mode, operands)
f6b7ba2b 444 enum machine_mode mode;
fc12fa10 445 rtx *operands;
f6b7ba2b 446{
fc12fa10 447 /* Either the destination or source must be a register, and the
448 MAC16 accumulator doesn't count. */
449
450 if (register_operand (operands[0], mode))
451 {
452 int dst_regnum = xt_true_regnum (operands[0]);
453
454 /* The stack pointer can only be assigned with a MOVSP opcode. */
455 if (dst_regnum == STACK_POINTER_REGNUM)
456 return (mode == SImode
457 && register_operand (operands[1], mode)
458 && !ACC_REG_P (xt_true_regnum (operands[1])));
459
460 if (!ACC_REG_P (dst_regnum))
461 return true;
462 }
463 else if (register_operand (operands[1], mode))
464 {
465 int src_regnum = xt_true_regnum (operands[1]);
466 if (!ACC_REG_P (src_regnum))
467 return true;
468 }
f6b7ba2b 469 return FALSE;
470}
471
472
473int
474mask_operand (op, mode)
475 rtx op;
476 enum machine_mode mode;
477{
478 if (GET_CODE (op) == CONST_INT)
479 return xtensa_mask_immediate (INTVAL (op));
480
481 return register_operand (op, mode);
482}
483
484
485int
486extui_fldsz_operand (op, mode)
487 rtx op;
488 enum machine_mode mode ATTRIBUTE_UNUSED;
489{
490 return ((GET_CODE (op) == CONST_INT)
491 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
492}
493
494
495int
496sext_operand (op, mode)
497 rtx op;
498 enum machine_mode mode;
499{
500 if (TARGET_SEXT)
501 return nonimmed_operand (op, mode);
502 return mem_operand (op, mode);
503}
504
505
506int
507sext_fldsz_operand (op, mode)
508 rtx op;
509 enum machine_mode mode ATTRIBUTE_UNUSED;
510{
511 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
512}
513
514
515int
516lsbitnum_operand (op, mode)
517 rtx op;
518 enum machine_mode mode ATTRIBUTE_UNUSED;
519{
520 if (GET_CODE (op) == CONST_INT)
521 {
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
525 }
526 return FALSE;
527}
528
529
530static int
531b4const_or_zero (v)
532 int v;
533{
534 if (v == 0)
535 return TRUE;
536 return xtensa_b4const (v);
537}
538
539
540int
541branch_operand (op, mode)
542 rtx op;
543 enum machine_mode mode;
544{
545 if (GET_CODE (op) == CONST_INT)
546 return b4const_or_zero (INTVAL (op));
547
548 return register_operand (op, mode);
549}
550
551
552int
553ubranch_operand (op, mode)
554 rtx op;
555 enum machine_mode mode;
556{
557 if (GET_CODE (op) == CONST_INT)
558 return xtensa_b4constu (INTVAL (op));
559
560 return register_operand (op, mode);
561}
562
563
/* Predicate: OP is a valid call target — a register (other than the
   arg pointer or the frame/virtual-register range) for indirect calls,
   or a constant address for direct calls.  With PIC, direct calls are
   only allowed to static (file-local) functions.  */

int
call_insn_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Reject the arg pointer and the frame-pointer..virtual-register
     range, which cannot survive to a real call instruction.  */
  if ((GET_CODE (op) == REG)
      && (op != arg_pointer_rtx)
      && ((REGNO (op) < FRAME_POINTER_REGNUM)
	  || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
    return TRUE;

  if (CONSTANT_ADDRESS_P (op))
    {
      /* Direct calls only allowed to static functions with PIC.  */
      return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
			    && SYMBOL_REF_FLAG (op)));
    }

  return FALSE;
}
584
585
586int
587move_operand (op, mode)
588 rtx op;
589 enum machine_mode mode;
590{
591 if (register_operand (op, mode))
592 return TRUE;
593
594 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
595 result in 0/1. */
596 if (GET_CODE (op) == CONSTANT_P_RTX)
597 return TRUE;
598
599 if (GET_CODE (op) == CONST_INT)
600 return xtensa_simm12b (INTVAL (op));
601
602 if (GET_CODE (op) == MEM)
603 return memory_address_p (mode, XEXP (op, 0));
604
605 return FALSE;
606}
607
608
609int
610smalloffset_mem_p (op)
611 rtx op;
612{
613 if (GET_CODE (op) == MEM)
614 {
615 rtx addr = XEXP (op, 0);
616 if (GET_CODE (addr) == REG)
617 return REG_OK_FOR_BASE_P (addr);
618 if (GET_CODE (addr) == PLUS)
619 {
620 rtx offset = XEXP (addr, 0);
621 if (GET_CODE (offset) != CONST_INT)
622 offset = XEXP (addr, 1);
623 if (GET_CODE (offset) != CONST_INT)
624 return FALSE;
625 return xtensa_lsi4x4 (INTVAL (offset));
626 }
627 }
628 return FALSE;
629}
630
631
632int
633smalloffset_double_mem_p (op)
634 rtx op;
635{
636 if (!smalloffset_mem_p (op))
637 return FALSE;
638 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
639}
640
641
/* Return nonzero if ADDR refers into the constant pool: either a
   constant-pool SYMBOL_REF directly, or (CONST (PLUS sym offset))
   with a word-aligned constant offset.  */

int
constantpool_address_p (addr)
     rtx addr;
{
  rtx sym = addr;

  if (GET_CODE (addr) == CONST)
    {
      rtx offset;

      /* only handle (PLUS (SYM, OFFSET)) form */
      addr = XEXP (addr, 0);
      if (GET_CODE (addr) != PLUS)
	return FALSE;

      /* make sure the address is word aligned */
      offset = XEXP (addr, 1);
      if ((GET_CODE (offset) != CONST_INT)
	  || ((INTVAL (offset) & 3) != 0))
	return FALSE;

      sym = XEXP (addr, 0);
    }

  if ((GET_CODE (sym) == SYMBOL_REF)
      && CONSTANT_POOL_ADDRESS_P (sym))
    return TRUE;
  return FALSE;
}
671
672
673int
674constantpool_mem_p (op)
675 rtx op;
676{
677 if (GET_CODE (op) == MEM)
678 return constantpool_address_p (XEXP (op, 0));
679 return FALSE;
680}
681
682
683int
684non_const_move_operand (op, mode)
685 rtx op;
686 enum machine_mode mode;
687{
688 if (register_operand (op, mode))
689 return 1;
690 if (GET_CODE (op) == SUBREG)
691 op = SUBREG_REG (op);
692 if (GET_CODE (op) == MEM)
693 return memory_address_p (mode, XEXP (op, 0));
694 return FALSE;
695}
696
697
/* Accept the floating point constant 1 in the appropriate mode
   (SFmode or DFmode only).  The 1.0 comparison values are computed
   once and cached in function-static variables.  */

int
const_float_1_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE d;
  static REAL_VALUE_TYPE onedf;	/* cached 1.0 in DFmode */
  static REAL_VALUE_TYPE onesf;	/* cached 1.0 in SFmode */
  static int one_initialized;

  if ((GET_CODE (op) != CONST_DOUBLE)
      || (mode != GET_MODE (op))
      || (mode != DFmode && mode != SFmode))
    return FALSE;

  REAL_VALUE_FROM_CONST_DOUBLE (d, op);

  /* Lazily initialize the cached constants on first use.  */
  if (! one_initialized)
    {
      onedf = REAL_VALUE_ATOF ("1.0", DFmode);
      onesf = REAL_VALUE_ATOF ("1.0", SFmode);
      one_initialized = TRUE;
    }

  if (mode == DFmode)
    return REAL_VALUES_EQUAL (d, onedf);
  else
    return REAL_VALUES_EQUAL (d, onesf);
}
729
730
731int
732fpmem_offset_operand (op, mode)
733 rtx op;
734 enum machine_mode mode ATTRIBUTE_UNUSED;
735{
736 if (GET_CODE (op) == CONST_INT)
737 return xtensa_mem_offset (INTVAL (op), SFmode);
738 return 0;
739}
740
741
/* Emit a sign-extension of SRC into DST by shifting left then
   arithmetic-right by (word size - source width) bits.  */

void
xtensa_extend_reg (dst, src)
     rtx dst;
     rtx src;
{
  rtx temp = gen_reg_rtx (SImode);
  /* Shift amount that moves the source's sign bit to the word's
     sign bit and back.  */
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));

  /* generate paradoxical subregs as needed so that the modes match */
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);

  emit_insn (gen_ashlsi3 (temp, src, shift));
  emit_insn (gen_ashrsi3 (dst, temp, shift));
}
757
758
/* Emit a load of constant SRC into DST by spilling the constant to
   the constant pool and loading it PC-relative.  */

void
xtensa_load_constant (dst, src)
     rtx dst;
     rtx src;
{
  enum machine_mode mode = GET_MODE (dst);
  src = force_const_mem (SImode, src);

  /* PC-relative loads are always SImode so we have to add a SUBREG if that
     is not the desired mode */

  if (mode != SImode)
    {
      if (register_operand (dst, mode))
	dst = simplify_gen_subreg (SImode, dst, mode, 0);
      else
	{
	  /* Destination is memory: load into a temporary register and
	     narrow that instead.  */
	  src = force_reg (SImode, src);
	  src = gen_lowpart_SUBREG (mode, src);
	}
    }

  emit_move_insn (dst, src);
}
783
784
785int
786branch_operator (x, mode)
787 rtx x;
788 enum machine_mode mode;
789{
790 if (GET_MODE (x) != mode)
791 return FALSE;
792
793 switch (GET_CODE (x))
794 {
795 case EQ:
796 case NE:
797 case LT:
798 case GE:
799 return TRUE;
800 default:
801 break;
802 }
803 return FALSE;
804}
805
806
807int
808ubranch_operator (x, mode)
809 rtx x;
810 enum machine_mode mode;
811{
812 if (GET_MODE (x) != mode)
813 return FALSE;
814
815 switch (GET_CODE (x))
816 {
817 case LTU:
818 case GEU:
819 return TRUE;
820 default:
821 break;
822 }
823 return FALSE;
824}
825
826
827int
828boolean_operator (x, mode)
829 rtx x;
830 enum machine_mode mode;
831{
832 if (GET_MODE (x) != mode)
833 return FALSE;
834
835 switch (GET_CODE (x))
836 {
837 case EQ:
838 case NE:
839 return TRUE;
840 default:
841 break;
842 }
843 return FALSE;
844}
845
846
847int
848xtensa_mask_immediate (v)
849 int v;
850{
851#define MAX_MASK_SIZE 16
852 int mask_size;
853
854 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
855 {
856 if ((v & 1) == 0)
857 return FALSE;
858 v = v >> 1;
859 if (v == 0)
860 return TRUE;
861 }
862
863 return FALSE;
864}
865
866
/* Return nonzero if V is a valid load/store offset for an access of
   mode MODE.  The offset field width and scaling depend on the access
   size.  */

int
xtensa_mem_offset (v, mode)
     unsigned v;
     enum machine_mode mode;
{
  switch (mode)
    {
    case BLKmode:
      /* Handle the worst case for block moves.  See xtensa_expand_block_move
	 where we emit an optimized block move operation if the block can be
	 moved in < "move_ratio" pieces.  The worst case is when the block is
	 aligned but has a size of (3 mod 4) (does this happen?) so that the
	 last piece requires a byte load/store.  */
      return (xtensa_uimm8 (v) &&
	      xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));

    case QImode:
      return xtensa_uimm8 (v);

    case HImode:
      return xtensa_uimm8x2 (v);

    case DFmode:
      /* A double is accessed as two words; both offsets must encode.  */
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));

    default:
      break;
    }

  /* SImode, SFmode, and anything else: word access.  */
  return xtensa_uimm8x4 (v);
}
898
899
900/* Make normal rtx_code into something we can index from an array */
901
902static enum internal_test
903map_test_to_internal_test (test_code)
904 enum rtx_code test_code;
905{
906 enum internal_test test = ITEST_MAX;
907
908 switch (test_code)
909 {
910 default: break;
911 case EQ: test = ITEST_EQ; break;
912 case NE: test = ITEST_NE; break;
913 case GT: test = ITEST_GT; break;
914 case GE: test = ITEST_GE; break;
915 case LT: test = ITEST_LT; break;
916 case LE: test = ITEST_LE; break;
917 case GTU: test = ITEST_GTU; break;
918 case GEU: test = ITEST_GEU; break;
919 case LTU: test = ITEST_LTU; break;
920 case LEU: test = ITEST_LEU; break;
921 }
922
923 return test;
924}
925
926
/* Generate the code to compare two integer values.  The return value is
   the comparison expression.  CMP1 may be rewritten (spilled to a
   register, or biased by const_add), and *P_INVERT is set when the
   caller must reverse the sense of the branch.  */

static rtx
gen_int_relational (test_code, cmp0, cmp1, p_invert)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
     int *p_invert;		/* whether branch needs to reverse its test */
{
  struct cmp_info {
    enum rtx_code test_code;	/* test code to use in insn */
    int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  /* Indexed by enum internal_test; only EQ/NE/LT/GE (and the unsigned
     LTU/GEU) have direct branches, so GT/LE/GTU/LEU are synthesized by
     biasing the constant or swapping/inverting.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* EQ  */
    { NE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* NE  */

    { LT,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* GT  */
    { GE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* GE  */
    { LT,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* LT  */
    { GE,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* LE  */

    { LTU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* GTU */
    { GEU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* GEU */
    { LTU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* LTU */
    { GEU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  if (test == ITEST_MAX)
    abort ();

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      /* NOTE(review): the signed arm of this overflow check relies on
	 signed wraparound, which is undefined behavior in ISO C — it
	 works only by the grace of the host compiler.  */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
}
1024
1025
/* Generate the code to compare two float values.  The return value is
   the comparison expression.  Emits the FP compare into the boolean
   register and returns an EQ/NE test of that register, reversing
   operands or inverting the sense for codes without a direct FP
   compare instruction.  */

static rtx
gen_float_relational (test_code, cmp0, cmp1)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
{
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
  rtx brtmp;
  int reverse_regs, invert;

  /* Only OEQ/OLT/OLE compares exist; NE inverts EQ, and GT/GE swap
     the operands of LT/LE.  */
  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  /* Compare into the boolean (FP condition) register, then test it.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}
1064
1065
/* Expand a conditional branch insn.  OPERANDS[0] is the branch label;
   the compared values were stashed in branch_cmp[] / branch_type by
   the preceding compare expander.  */

void
xtensa_expand_conditional_branch (operands, test_code)
     rtx *operands;
     enum rtx_code test_code;
{
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (type)
    {
    case CMP_DF:
    default:
      /* DFmode compares are not supported in hardware.  */
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));

    case CMP_SI:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case CMP_SF:
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch.  */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  label2 = pc_rtx;

  /* An inverted test falls through on true and branches on false.  */
  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
						     label1,
						     label2)));
}
1113
1114
/* Build the condition rtx for a conditional move from the cached
   branch_cmp[] operands.  Returns 0 when the comparison cannot be
   expressed with the available conditional-move forms.  */

static rtx
gen_conditional_move (cmp)
     rtx cmp;
{
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];

  if (branch_type == CMP_SI)
    {
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      /* Dummy rtx used only to classify CODE via the predicates below.  */
      cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* if not comparing against zero, emit a comparison (subtract) */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      /* Swapping operands of an ordered compare flips its sense.  */
	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: abort ();
		}
	    }

	  /* Ordered compares only support comparison against zero.  */
	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);

  return 0;
}
1187
1188
/* Expand a conditional move.  OPERANDS are (dest, condition, true-val,
   false-val); ISFLT selects the SFmode variant.  Returns 1 on success,
   0 if the condition cannot be handled (caller falls back).  */

int
xtensa_expand_conditional_move (operands, isflt)
    rtx *operands;
    int isflt;
{
  rtx cmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (operands[1])))
    return 0;

  /* Select the insn pattern by value mode and comparison type
     (integer condition vs. boolean-register condition).  */
  if (isflt)
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsfcc_internal0
	      : gen_movsfcc_internal1);
  else
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsicc_internal0
	      : gen_movsicc_internal1);

  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
		     operands[2], operands[3], cmp));
  return 1;
}
1213
1214
/* Expand a set-on-condition (scc) insn as a conditional move between
   constant 1 and 0.  Returns 1 on success, 0 if the condition cannot
   be handled.  */

int
xtensa_expand_scc (operands)
     rtx *operands;
{
  rtx dest = operands[0];
  rtx cmp = operands[1];
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (cmp)))
    return 0;

  /* Materialize the two possible results, then select with cmov.  */
  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));

  gen_fn = (branch_type == CMP_SI
	    ? gen_movsicc_internal0
	    : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
  return 1;
}
1238
1239
/* Emit insns to move operands[1] into operands[0].

   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Constants that cannot be encoded as a MOVI immediate must come
     from the constant pool.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);

      /* Check if this move is copying an incoming argument in a7.  If
	 so, emit the move, followed by the special "set_frame_ptr"
	 unspec_volatile insn, at the very beginning of the function.
	 This is necessary because the register allocator will ignore
	 conflicts with a7 and may assign some other pseudo to a7.  If
	 that pseudo was assigned prior to this move, it would clobber
	 the incoming argument in a7.  By copying the argument out of
	 a7 as the very first thing, and then immediately following
	 that with an unspec_volatile to keep the scheduler away, we
	 should avoid any problems.  */

      if (a7_overlap_mentioned_p (operands[1]))
	{
	  rtx mov;
	  switch (mode)
	    {
	    case SImode:
	      mov = gen_movsi_internal (operands[0], operands[1]);
	      break;
	    case HImode:
	      mov = gen_movhi_internal (operands[0], operands[1]);
	      break;
	    case QImode:
	      mov = gen_movqi_internal (operands[0], operands[1]);
	      break;
	    default:
	      abort ();
	    }

	  /* Insert the instructions before any other argument copies.
	     (The set_frame_ptr insn comes _after_ the move, so push it
	     out first.)  */
	  push_topmost_sequence ();
	  emit_insn_after (gen_set_frame_ptr (), get_insns ());
	  emit_insn_after (mov, get_insns ());
	  pop_topmost_sequence ();

	  return 1;
	}
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload.  So we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1316
1317static rtx
1318fixup_subreg_mem (x)
1319 rtx x;
1320{
1321 if (GET_CODE (x) == SUBREG
1322 && GET_CODE (SUBREG_REG (x)) == REG
1323 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1324 {
1325 rtx temp =
1326 gen_rtx_SUBREG (GET_MODE (x),
1327 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1328 SUBREG_BYTE (x));
1329 x = alter_subreg (&temp);
1330 }
1331 return x;
1332}
1333
1334
1335/* Try to expand a block move operation to an RTL block move instruction.
1336 If not optimizing or if the block size is not a constant or if the
1337 block is small, the expansion fails and GCC falls back to calling
1338 memcpy().
1339
1340 operands[0] is the destination
1341 operands[1] is the source
1342 operands[2] is the length
1343 operands[3] is the alignment */
1344
1345int
1346xtensa_expand_block_move (operands)
1347 rtx *operands;
1348{
1349 rtx dest = operands[0];
1350 rtx src = operands[1];
1351 int bytes = INTVAL (operands[2]);
1352 int align = XINT (operands[3], 0);
1353 int num_pieces, move_ratio;
1354
1355 /* If this is not a fixed size move, just call memcpy */
1356 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1357 return 0;
1358
1359 /* Anything to move? */
1360 if (bytes <= 0)
1361 return 1;
1362
1363 if (align > MOVE_MAX)
1364 align = MOVE_MAX;
1365
1366 /* decide whether to expand inline based on the optimization level */
1367 move_ratio = 4;
1368 if (optimize > 2)
1369 move_ratio = LARGEST_MOVE_RATIO;
1370 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1371 if (num_pieces >= move_ratio)
1372 return 0;
1373
1374 /* make sure the memory addresses are valid */
9c56a8c5 1375 operands[0] = validize_mem (dest);
1376 operands[1] = validize_mem (src);
f6b7ba2b 1377
1378 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1379 operands[2], operands[3]));
1380 return 1;
1381}
1382
1383
1384/* Emit a sequence of instructions to implement a block move, trying
1385 to hide load delay slots as much as possible. Load N values into
1386 temporary registers, store those N values, and repeat until the
1387 complete block has been moved. N=delay_slots+1 */
1388
/* One buffered load or store: the assembler template (an opcode with
   %0/%1 operand placeholders) and its two operands, a temporary
   register and a memory reference.  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1393
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;
     rtx *tmpregs;
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  /* Each chunk is N = delay_slots + 1 loads followed by N stores, so
     each load's delay slots are filled by the subsequent loads.  */
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest mode (and matching ld/st opcodes) usable for the
     given alignment.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  /* Ran out of data mid-chunk: shrink the chunk so the output
	     loops below only emit what was recorded.  */
	  if (bytes == 0)
	    {
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1484
1485
1486static enum machine_mode
1487xtensa_find_mode_for_size (item_size)
1488 unsigned item_size;
1489{
1490 enum machine_mode mode, tmode;
1491
1492 while (1)
1493 {
1494 mode = VOIDmode;
1495
1496 /* find mode closest to but not bigger than item_size */
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) <= item_size)
1500 mode = tmode;
1501 if (mode == VOIDmode)
1502 abort ();
1503
1504 item_size = GET_MODE_SIZE (mode);
1505
1506 if (xtensa_ld_opcodes[(int) mode]
1507 && xtensa_st_opcodes[(int) mode])
1508 break;
1509
1510 /* cannot load & store this mode; try something smaller */
1511 item_size -= 1;
1512 }
1513
1514 return mode;
1515}
1516
1517
1518void
1519xtensa_expand_nonlocal_goto (operands)
1520 rtx *operands;
1521{
1522 rtx goto_handler = operands[1];
1523 rtx containing_fp = operands[3];
1524
1525 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1526 is too big to generate in-line */
1527
1528 if (GET_CODE (containing_fp) != REG)
1529 containing_fp = force_reg (Pmode, containing_fp);
1530
1531 goto_handler = replace_rtx (copy_rtx (goto_handler),
1532 virtual_stack_vars_rtx,
1533 containing_fp);
1534
1535 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1536 0, VOIDmode, 2,
1537 containing_fp, Pmode,
1538 goto_handler, Pmode);
1539}
1540
1541
1542static void
1543xtensa_init_machine_status (p)
1544 struct function *p;
1545{
1546 p->machine = (struct machine_function *)
1547 xcalloc (1, sizeof (struct machine_function));
1548}
1549
1550
static void
xtensa_free_machine_status (p)
     struct function *p;
{
  /* Release the machine-specific data attached to function P and
     clear the pointer to guard against stale references.  */
  free (p->machine);
  p->machine = NULL;
}
1558
1559
void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
  cfun->machine->accesses_prev_frame = 1;

  /* Call the libgcc window-spill routine (defined in libgcc) so that
     previous frames are in memory when this function looks at them.  */
  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1570
1571
1572/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop). If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1584
1585void
1586xtensa_emit_loop_end (insn, operands)
1587 rtx insn;
1588 rtx *operands;
1589{
1590 char done = 0;
1591
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1593 {
1594 switch (GET_CODE (insn))
1595 {
1596 case NOTE:
1597 case BARRIER:
1598 break;
1599
1600 case CODE_LABEL:
1601 output_asm_insn ("nop.n", operands);
1602 done = 1;
1603 break;
1604
1605 default:
1606 {
1607 rtx body = PATTERN (insn);
1608
1609 if (GET_CODE (body) == JUMP_INSN)
1610 {
1611 output_asm_insn ("nop.n", operands);
1612 done = 1;
1613 }
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1616 done = 1;
1617 }
1618 break;
1619 }
1620 }
1621
1622 output_asm_insn ("# loop end for %0", operands);
1623}
1624
1625
1626char *
1627xtensa_emit_call (callop, operands)
1628 int callop;
1629 rtx *operands;
1630{
1631 char *result = (char *) malloc (64);
1632 rtx tgt = operands[callop];
1633
1634 if (GET_CODE (tgt) == CONST_INT)
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1638 else
1639 sprintf (result, "call8\t%%%d", callop);
1640
1641 return result;
1642}
1643
1644
1645/* Return the stabs register number to use for 'regno'. */
1646
1647int
1648xtensa_dbx_register_number (regno)
1649 int regno;
1650{
1651 int first = -1;
1652
1653 if (GP_REG_P (regno)) {
1654 regno -= GP_REG_FIRST;
1655 first = 0;
1656 }
1657 else if (BR_REG_P (regno)) {
1658 regno -= BR_REG_FIRST;
1659 first = 16;
1660 }
1661 else if (FP_REG_P (regno)) {
1662 regno -= FP_REG_FIRST;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1668 first = 256;
1669 }
1670 else if (ACC_REG_P (regno))
1671 {
1672 first = 0;
1673 regno = -1;
1674 }
1675
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1678 if (first == -1)
1679 return 0;
1680
1681 return first + regno;
1682}
1683
1684
1685/* Argument support functions. */
1686
1687/* Initialize CUMULATIVE_ARGS for a function. */
1688
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  /* Start with no argument words consumed.  */
  cum->arg_words = 0;
}
1697
1698/* Advance the argument to the next argument position. */
1699
1700void
1701function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1705{
1706 int words, max;
1707 int *arg_words;
1708
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
1711
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1715
1716 if ((*arg_words + words > max) && (*arg_words < max))
1717 *arg_words = max;
1718
1719 *arg_words += words;
1720}
1721
1722
1723/* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
1725
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Argument size in words, rounded up.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Types aligned beyond one word start at an even word index.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* Arguments that do not fit in the remaining registers are passed
     on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL.  */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      /* One EXPR_LIST entry per word, each naming a distinct raw REG
	 so a7 is never the shared hard_frame_pointer_rtx instance.  */
      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
1781
1782
void
override_options ()
{
  int regno;
  enum machine_mode mode;

  /* The floating-point option relies on the boolean registers.  */
  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map operand-constraint letters to register classes; letters for
     optional features map to NO_REGS when the feature is disabled.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    /* MAC16 accumulator: integer modes up to a word.  */
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start on an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  /* Hooks for allocating/freeing per-function machine data.  */
  init_machine_status = xtensa_init_machine_status;
  free_machine_status = xtensa_free_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (XTENSA_ALWAYS_PIC)
    {
      if (flag_pic)
	warning ("-f%s ignored (all code is position independent)",
		 (flag_pic > 1 ? "PIC" : "pic"));
      flag_pic = 1;
    }
  if (flag_pic > 1)
    flag_pic = 1;
}
1853
1854
1855/* A C compound statement to output to stdio stream STREAM the
1856 assembler syntax for an instruction operand X. X is an RTL
1857 expression.
1858
1859 CODE is a value that can be used to specify one of several ways
1860 of printing the operand. It is used when identical operands
1861 must be printed differently depending on the context. CODE
1862 comes from the '%' specification that was used to request
1863 printing of the operand. If the specification was just '%DIGIT'
1864 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1865 is the ASCII code for LTR.
1866
1867 If X is a register, this macro should print the register's name.
1868 The names can be found in an array 'reg_names' whose type is
1869 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1870
1871 When the machine description has a specification '%PUNCT' (a '%'
1872 followed by a punctuation character), this macro is called with
1873 a null pointer for X and the punctuation character for CODE.
1874
1875 'a', 'c', 'l', and 'n' are reserved.
1876
1877 The Xtensa specific codes are:
1878
1879 'd' CONST_INT, print as signed decimal
1880 'x' CONST_INT, print as signed hexadecimal
1881 'K' CONST_INT, print number of bits in mask for EXTUI
1882 'R' CONST_INT, print (X & 0x1f)
1883 'L' CONST_INT, print ((32 - X) & 0x1f)
1884 'D' REG, print second register of double-word register operand
1885 'N' MEM, print address of next word following a memory operand
1886 'v' MEM, if memory reference is volatile, output a MEMW before it
1887*/
1888
/* Print VAL to FILE as a hexadecimal value, using plain decimal for
   small magnitudes and a leading minus sign for negative values.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: the old "-val" overflows (UB)
       when val == INT_MIN, and "%x" expects an unsigned argument.  */
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1902
1903
void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;			/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D': second register of a double-word register operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /*
       * For a volatile memory reference, emit a MEMW before the
       * load or store.
       */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	/* 'N': address of the next word after this memory operand.  */
	op = adjust_address (op, GET_MODE (op), 4);

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  /* 'K': number of bits in an EXTUI mask; the value must be a
	     contiguous run of 1 to 16 low-order one bits.  */
	  {
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    /* Any bits left after the low run mean the mask was not
	       contiguous.  */
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  /* 'L': print ((32 - X) & 0x1f).  */
	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  /* 'R': print (X & 0x1f).  */
	  fprintf (file, "%d", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  /* 'x': signed hexadecimal.  */
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  /* 'd' (and no letter): signed decimal.  */
	  fprintf (file, "%d", INTVAL (op));
	  break;

	}
      break;

    default:
      /* Anything else prints as a symbolic constant.  */
      output_addr_const (file, op);
    }
}
1988
1989
1990/* A C compound statement to output to stdio stream STREAM the
1991 assembler syntax for an instruction operand that is a memory
1992 reference whose address is ADDR. ADDR is an RTL expression.
1993
1994 On some machines, the syntax for a symbolic address depends on
1995 the section that the address refers to. On these machines,
1996 define the macro 'ENCODE_SECTION_INFO' to store the information
1997 into the 'symbol_ref', and then check for it here. */
1998
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      /* Bare register: base register with a zero offset.  */
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	/* Determine which side of the PLUS is the base register; the
	   other side must be a constant offset.  */
	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      /* Absolute addresses print as the constant itself.  */
      output_addr_const (file, addr);
      break;
    }
}
2055
2056
2057/* Emit either a label, .comm, or .lcomm directive. */
2058
void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;
     char *final_string;
     int size;
{
  /* The caller supplies the directive text; this routine just writes
     INIT_STRING, the assembler-mangled NAME, then FINAL_STRING (a
     printf format consuming SIZE).  */
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2071
2072
/* Emit a ".literal" directive defining constant X of mode MODE,
   labeled .LC<LABELNO>, to FILE.  */

void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  union real_extract u;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Extract the REAL_VALUE_TYPE stored in the CONST_DOUBLE and
	 emit it in the target's floating-point format.  */
      memcpy ((char *) &u, (char *) &CONST_DOUBLE_LOW (x), sizeof u);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (u.d, value_long[0]);
	  fprintf (file, "0x%08lx\t\t# %.12g (float)\n", value_long[0], u.d);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (u.d, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx # %.20g (double)\n",
		   value_long[0], value_long[1], u.d);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      /* Integer literals are a single word (4 bytes) or a pair of
	 words (8 bytes).  */
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2135
2136
2137/* Return the bytes needed to compute the frame pointer from the current
2138 stack pointer. */
2139
/* Bytes per stack-alignment unit.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round LOC up to the next multiple of STACK_BYTES.  */
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))

long
compute_frame_size (size)
     int size;			/* # of var. bytes allocated */
{
  /* add space for the incoming static chain value */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  /* Total frame = variable space + outgoing argument space +
     WINDOW_SIZE words, rounded to the stack alignment.  The result is
     also cached in xtensa_current_frame_size.  */
  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2157
2158
2159int
2160xtensa_frame_pointer_required ()
2161{
2162 /* The code to expand builtin_frame_addr and builtin_return_addr
2163 currently uses the hard_frame_pointer instead of frame_pointer.
2164 This seems wrong but maybe it's necessary for other architectures.
2165 This function is derived from the i386 code. */
2166
2167 if (cfun->machine->accesses_prev_frame)
2168 return 1;
2169
2170 return 0;
2171}
2172
2173
2174void
2175xtensa_reorg (first)
2176 rtx first;
2177{
2178 rtx insn, set_frame_ptr_insn = 0;
2179
2180 unsigned long tsize = compute_frame_size (get_frame_size ());
2181 if (tsize < (1 << (12+3)))
2182 frame_size_const = 0;
2183 else
2184 {
2185 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2186
2187 /* make sure the constant is used so it doesn't get eliminated
2188 from the constant pool */
2189 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2190 }
2191
2192 if (!frame_pointer_needed)
2193 return;
2194
2195 /* Search all instructions, looking for the insn that sets up the
2196 frame pointer. This search will fail if the function does not
2197 have an incoming argument in $a7, but in that case, we can just
2198 set up the frame pointer at the very beginning of the
2199 function. */
2200
2201 for (insn = first; insn; insn = NEXT_INSN (insn))
2202 {
2203 rtx pat;
2204
2205 if (!INSN_P (insn))
2206 continue;
2207
2208 pat = PATTERN (insn);
2209 if (GET_CODE (pat) == UNSPEC_VOLATILE
2210 && (XINT (pat, 1) == UNSPECV_SET_FP))
2211 {
2212 set_frame_ptr_insn = insn;
2213 break;
2214 }
2215 }
2216
2217 if (set_frame_ptr_insn)
2218 {
2219 /* for all instructions prior to set_frame_ptr_insn, replace
2220 hard_frame_pointer references with stack_pointer */
2221 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2222 {
2223 if (INSN_P (insn))
2224 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2225 hard_frame_pointer_rtx,
2226 stack_pointer_rtx);
2227 }
2228 }
2229 else
2230 {
2231 /* emit the frame pointer move immediately after the NOTE that starts
2232 the function */
2233 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2234 stack_pointer_rtx), first);
2235 }
2236}
2237
2238
2239/* Set up the stack and frame (if desired) for the function. */
2240
void
xtensa_function_prologue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  /* Emit the .frame directive naming the register used as the frame
     base for this function.  */
  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  if (tsize < (1 << (12+3)))
    {
      /* The frame is small enough to allocate with a single entry
	 instruction.  */
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      /* Allocate a minimal 16-byte frame with entry, then extend it
	 by the remainder (frame_size_const = tsize - 16, set up in
	 xtensa_reorg) using an explicit movsp.  */
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2269
2270
2271/* Do any necessary cleanup after a function to restore
2272 stack, frame, and regs. */
2273
2274void
2275xtensa_function_epilogue (file, size)
2276 FILE *file;
2277 int size ATTRIBUTE_UNUSED;
2278{
2279 rtx insn = get_last_insn ();
2280 /* If the last insn was a BARRIER, we don't have to write anything. */
2281 if (GET_CODE (insn) == NOTE)
2282 insn = prev_nonnote_insn (insn);
2283 if (insn == 0 || GET_CODE (insn) != BARRIER)
2284 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2285
2286 xtensa_current_frame_size = 0;
2287}
2288
2289
2290/* Create the va_list data type.
2291 This structure is set up by __builtin_saveregs. The __va_reg
2292 field points to a stack-allocated region holding the contents of the
2293 incoming argument registers. The __va_ndx field is an index initialized
2294 to the position of the first unnamed (variable) argument. This same index
2295 is also used to address the arguments passed in memory. Thus, the
2296 __va_stk field is initialized to point to the position of the first
2297 argument in memory offset to account for the arguments passed in
   registers. E.g., if there are 6 argument registers, and each register is
2299 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2300 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2301 argument word N for N >= 6. */
2302
2303tree
2304xtensa_build_va_list (void)
2305{
2306 tree f_stk, f_reg, f_ndx, record;
2307
2308 record = make_node (RECORD_TYPE);
2309
2310 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2311 ptr_type_node);
2312 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2313 ptr_type_node);
2314 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2315 integer_type_node);
2316
2317 DECL_FIELD_CONTEXT (f_stk) = record;
2318 DECL_FIELD_CONTEXT (f_reg) = record;
2319 DECL_FIELD_CONTEXT (f_ndx) = record;
2320
2321 TYPE_FIELDS (record) = f_stk;
2322 TREE_CHAIN (f_stk) = f_reg;
2323 TREE_CHAIN (f_reg) = f_ndx;
2324
2325 layout_type (record);
2326 return record;
2327}
2328
2329
2330/* Save the incoming argument registers on the stack. Returns the
2331 address of the saved registers. */
2332
rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  /* Nothing to save when every argument register holds a named
     argument.  */
  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  MEM_IN_STRUCT_P (gp_regs) = 1;
  RTX_UNCHANGING_P (gp_regs) = 1;
  RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;

  /* Now store the incoming registers.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7).  */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  return XEXP (gp_regs, 0);
}
2368
2369
2370/* Implement `va_start' for varargs and stdarg. We look at the
2371 current function to fill in an initial va_list. */
2372
void
xtensa_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  /* Number of argument words consumed by the named arguments.  */
  arg_words = current_function_args_info.arg_words;

  /* Fetch the three va_list fields in the order they were chained by
     xtensa_build_va_list.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member to the index of the first unnamed arg.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2416
2417
/* Implement `va_arg'.  Emit RTL that fetches the next anonymous
   argument of type TYPE from the va_list VALIST and return an rtx
   holding the argument's address.  Arguments come from the register
   save area (__va_reg) while __va_ndx is within the first
   MAX_ARGS_IN_REGISTERS words, and from the stack overflow area
   (__va_stk) afterwards; an argument is never split between the two
   areas.  */

rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree;
  rtx array, orig_ndx, r, addr;
  HOST_WIDE_INT size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* va_size is the argument size rounded up to a whole number of
     words -- the "__va_size (TYPE)" of the pseudo-code comments.  */
  size = int_size_in_bytes (type);
  va_size = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  /* The three va_list fields, in declaration order.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  /* Save the pre-increment index; it is needed below to detect an
     argument that would straddle the register/stack boundary.  */
  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx, build_int_2 (va_size, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
       __array = (AP).__va_reg;
  */

  lab_false = gen_label_rtx ();		/* taken: argument is on the stack */
  lab_over = gen_label_rtx ();		/* join point once __array is set */
  array = gen_reg_rtx (Pmode);

  emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode, EXPAND_NORMAL),
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GT, const1_rtx, SImode, 0, lab_false);

  r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);


  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	   (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  tmp = build_int_2 ((MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD) + va_size, 0);
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     Big-endian:
     __array + (AP).__va_ndx - sizeof (TYPE)

     Little-endian:
     __array + (AP).__va_ndx - __va_size (TYPE)

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  emit_label (lab_over);

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     addr_tree,
		     build_int_2 (BYTES_BIG_ENDIAN
				  && size < (PARM_BOUNDARY / BITS_PER_UNIT)
				  ? -size
				  : -va_size, -1));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  /* copy_to_reg yields a fresh pseudo holding the computed address.  */
  addr = copy_to_reg (addr);
  return addr;
}
2558
2559
fc12fa10 2560enum reg_class
2561xtensa_preferred_reload_class (x, class)
2562 rtx x;
2563 enum reg_class class;
2564{
2565 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2566 return NO_REGS;
2567
2568 /* Don't use sp for reloads! */
2569 if (class == AR_REGS)
2570 return GR_REGS;
2571
2572 return class;
2573}
2574
2575
f6b7ba2b 2576enum reg_class
2577xtensa_secondary_reload_class (class, mode, x, isoutput)
2578 enum reg_class class;
2579 enum machine_mode mode ATTRIBUTE_UNUSED;
2580 rtx x;
2581 int isoutput;
2582{
2583 int regno;
2584
2585 if (GET_CODE (x) == SIGN_EXTEND)
2586 x = XEXP (x, 0);
2587 regno = xt_true_regnum (x);
2588
2589 if (!isoutput)
2590 {
2591 if (class == FP_REGS && constantpool_mem_p (x))
2592 return GR_REGS;
2593 }
2594
2595 if (ACC_REG_P (regno))
2596 return (class == GR_REGS ? NO_REGS : GR_REGS);
2597 if (class == ACC_REG)
2598 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2599
2600 return NO_REGS;
2601}
2602
2603
2604void
2605order_regs_for_local_alloc ()
2606{
2607 if (!leaf_function_p ())
2608 {
2609 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2610 FIRST_PSEUDO_REGISTER * sizeof (int));
2611 }
2612 else
2613 {
2614 int i, num_arg_regs;
2615 int nxt = 0;
2616
2617 /* use the AR registers in increasing order (skipping a0 and a1)
2618 but save the incoming argument registers for a last resort */
2619 num_arg_regs = current_function_args_info.arg_words;
2620 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2621 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2622 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2623 reg_alloc_order[nxt++] = i + num_arg_regs;
2624 for (i = 0; i < num_arg_regs; i++)
2625 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2626
2627 /* list the FP registers in order for now */
2628 for (i = 0; i < 16; i++)
2629 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2630
2631 /* GCC requires that we list *all* the registers.... */
2632 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2633 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2634 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2635 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2636
2637 /* list the coprocessor registers in order */
2638 for (i = 0; i < BR_REG_NUM; i++)
2639 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2640
2641 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2642 }
2643}
2644
2645
2646/* A customized version of reg_overlap_mentioned_p that only looks for
2647 references to a7 (as opposed to hard_frame_pointer_rtx). */
2648
2649int
2650a7_overlap_mentioned_p (x)
2651 rtx x;
2652{
2653 int i, j;
2654 unsigned int x_regno;
2655 const char *fmt;
2656
2657 if (GET_CODE (x) == REG)
2658 {
2659 x_regno = REGNO (x);
2660 return (x != hard_frame_pointer_rtx
2661 && x_regno < A7_REG + 1
2662 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2663 }
2664
2665 if (GET_CODE (x) == SUBREG
2666 && GET_CODE (SUBREG_REG (x)) == REG
2667 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2668 {
2669 x_regno = subreg_regno (x);
2670 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2671 && x_regno < A7_REG + 1
2672 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2673 }
2674
2675 /* X does not match, so try its subexpressions. */
2676 fmt = GET_RTX_FORMAT (GET_CODE (x));
2677 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2678 {
2679 if (fmt[i] == 'e')
2680 {
2681 if (a7_overlap_mentioned_p (XEXP (x, i)))
2682 return 1;
2683 }
2684 else if (fmt[i] == 'E')
2685 {
2686 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2687 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2688 return 1;
2689 }
2690 }
2691
2692 return 0;
2693}