/* Source: gcc/config/xtensa/xtensa.c (thirdparty/gcc.git, merge of
   basic-improvements-branch to trunk).  */
f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
a8332086 2 Copyright 2001,2002 Free Software Foundation, Inc.
f6b7ba2b 3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
f6b7ba2b 26#include "rtl.h"
27#include "regs.h"
28#include "machmode.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "real.h"
32#include "insn-config.h"
33#include "conditions.h"
34#include "insn-flags.h"
35#include "insn-attr.h"
36#include "insn-codes.h"
37#include "recog.h"
38#include "output.h"
39#include "tree.h"
40#include "expr.h"
41#include "flags.h"
42#include "reload.h"
43#include "tm_p.h"
44#include "function.h"
45#include "toplev.h"
46#include "optabs.h"
bbfbe351 47#include "output.h"
f6b7ba2b 48#include "libfuncs.h"
160b2123 49#include "ggc.h"
f6b7ba2b 50#include "target.h"
51#include "target-def.h"
049d6666 52#include "langhooks.h"
f6b7ba2b 53
54/* Enumeration for all of the relational tests, so that we can build
55 arrays indexed by the test type, and not worry about the order
56 of EQ, NE, etc. */
57
58enum internal_test {
59 ITEST_EQ,
60 ITEST_NE,
61 ITEST_GT,
62 ITEST_GE,
63 ITEST_LT,
64 ITEST_LE,
65 ITEST_GTU,
66 ITEST_GEU,
67 ITEST_LTU,
68 ITEST_LEU,
69 ITEST_MAX
70 };
71
72/* Cached operands, and operator to compare for use in set/branch on
73 condition codes. */
74rtx branch_cmp[2];
75
76/* what type of branch to use */
77enum cmp_type branch_type;
78
79/* Array giving truth value on whether or not a given hard register
80 can support a given mode. */
81char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82
83/* Current frame size calculated by compute_frame_size. */
84unsigned xtensa_current_frame_size;
85
86/* Tables of ld/st opcode names for block moves */
87const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
88const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
89#define LARGEST_MOVE_RATIO 15
90
91/* Define the structure for the machine field in struct function. */
1f3233d1 92struct machine_function GTY(())
f6b7ba2b 93{
94 int accesses_prev_frame;
78d6a4ed 95 bool incoming_a7_copied;
f6b7ba2b 96};
97
98/* Vector, indexed by hard register number, which contains 1 for a
99 register that is allowable in a candidate for leaf function
100 treatment. */
101
102const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103{
104 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1,
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 1
108};
109
110/* Map hard register number to register class */
111const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112{
a8332086 113 RL_REGS, SP_REG, RL_REGS, RL_REGS,
114 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
f6b7ba2b 117 AR_REGS, AR_REGS, BR_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 ACC_REG,
123};
124
125/* Map register constraint character to register class. */
126enum reg_class xtensa_char_to_class[256] =
127{
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
192};
193
bbfbe351 194static int b4const_or_zero PARAMS ((int));
195static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
196static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
197static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
198static rtx gen_conditional_move PARAMS ((rtx));
199static rtx fixup_subreg_mem PARAMS ((rtx x));
200static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
160b2123 201static struct machine_function * xtensa_init_machine_status PARAMS ((void));
bbfbe351 202static void printx PARAMS ((FILE *, signed int));
5f4442bc 203static unsigned int xtensa_multibss_section_type_flags
204 PARAMS ((tree, const char *, int));
205static void xtensa_select_rtx_section
206 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
7811991d 207static void xtensa_encode_section_info PARAMS ((tree, int));
bbfbe351 208
209static rtx frame_size_const;
210static int current_function_arg_words;
211static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
212 REG_ALLOC_ORDER;
213\f
f6b7ba2b 214/* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
221
222#undef TARGET_ASM_FUNCTION_PROLOGUE
223#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
224
225/* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
229
230#undef TARGET_ASM_FUNCTION_EPILOGUE
231#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
232
233/* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
235
236#undef TARGET_ASM_ALIGNED_SI_OP
237#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
238
bbfbe351 239#undef TARGET_ASM_SELECT_RTX_SECTION
240#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
7811991d 241#undef TARGET_ENCODE_SECTION_INFO
242#define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
f6b7ba2b 243
bbfbe351 244struct gcc_target targetm = TARGET_INITIALIZER;
245\f
f6b7ba2b 246
247/*
248 * Functions to test Xtensa immediate operand validity.
249 */
250
251int
252xtensa_b4constu (v)
253 int v;
254{
255 switch (v)
256 {
257 case 32768:
258 case 65536:
259 case 2:
260 case 3:
261 case 4:
262 case 5:
263 case 6:
264 case 7:
265 case 8:
266 case 10:
267 case 12:
268 case 16:
269 case 32:
270 case 64:
271 case 128:
272 case 256:
273 return 1;
274 }
275 return 0;
276}
277
278int
279xtensa_simm8x256 (v)
280 int v;
281{
282 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
283}
284
285int
286xtensa_ai4const (v)
287 int v;
288{
289 return (v == -1 || (v >= 1 && v <= 15));
290}
291
292int
293xtensa_simm7 (v)
294 int v;
295{
296 return v >= -32 && v <= 95;
297}
298
299int
300xtensa_b4const (v)
301 int v;
302{
303 switch (v)
304 {
305 case -1:
306 case 1:
307 case 2:
308 case 3:
309 case 4:
310 case 5:
311 case 6:
312 case 7:
313 case 8:
314 case 10:
315 case 12:
316 case 16:
317 case 32:
318 case 64:
319 case 128:
320 case 256:
321 return 1;
322 }
323 return 0;
324}
325
326int
327xtensa_simm8 (v)
328 int v;
329{
330 return v >= -128 && v <= 127;
331}
332
333int
334xtensa_tp7 (v)
335 int v;
336{
337 return (v >= 7 && v <= 22);
338}
339
340int
341xtensa_lsi4x4 (v)
342 int v;
343{
344 return (v & 3) == 0 && (v >= 0 && v <= 60);
345}
346
347int
348xtensa_simm12b (v)
349 int v;
350{
351 return v >= -2048 && v <= 2047;
352}
353
354int
355xtensa_uimm8 (v)
356 int v;
357{
358 return v >= 0 && v <= 255;
359}
360
361int
362xtensa_uimm8x2 (v)
363 int v;
364{
365 return (v & 1) == 0 && (v >= 0 && v <= 510);
366}
367
368int
369xtensa_uimm8x4 (v)
370 int v;
371{
372 return (v & 3) == 0 && (v >= 0 && v <= 1020);
373}
374
375
376/* This is just like the standard true_regnum() function except that it
377 works even when reg_renumber is not initialized. */
378
379int
380xt_true_regnum (x)
381 rtx x;
382{
383 if (GET_CODE (x) == REG)
384 {
385 if (reg_renumber
386 && REGNO (x) >= FIRST_PSEUDO_REGISTER
387 && reg_renumber[REGNO (x)] >= 0)
388 return reg_renumber[REGNO (x)];
389 return REGNO (x);
390 }
391 if (GET_CODE (x) == SUBREG)
392 {
393 int base = xt_true_regnum (SUBREG_REG (x));
394 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
395 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
396 GET_MODE (SUBREG_REG (x)),
397 SUBREG_BYTE (x), GET_MODE (x));
398 }
399 return -1;
400}
401
402
403int
404add_operand (op, mode)
405 rtx op;
406 enum machine_mode mode;
407{
408 if (GET_CODE (op) == CONST_INT)
409 return (xtensa_simm8 (INTVAL (op)) ||
410 xtensa_simm8x256 (INTVAL (op)));
411
412 return register_operand (op, mode);
413}
414
415
416int
417arith_operand (op, mode)
418 rtx op;
419 enum machine_mode mode;
420{
421 if (GET_CODE (op) == CONST_INT)
422 return xtensa_simm8 (INTVAL (op));
423
424 return register_operand (op, mode);
425}
426
427
428int
429nonimmed_operand (op, mode)
430 rtx op;
431 enum machine_mode mode;
432{
433 /* We cannot use the standard nonimmediate_operand() predicate because
434 it includes constant pool memory operands. */
435
436 if (memory_operand (op, mode))
437 return !constantpool_address_p (XEXP (op, 0));
438
439 return register_operand (op, mode);
440}
441
442
443int
444mem_operand (op, mode)
445 rtx op;
446 enum machine_mode mode;
447{
448 /* We cannot use the standard memory_operand() predicate because
449 it includes constant pool memory operands. */
450
451 if (memory_operand (op, mode))
452 return !constantpool_address_p (XEXP (op, 0));
453
454 return FALSE;
455}
456
457
458int
fc12fa10 459xtensa_valid_move (mode, operands)
f6b7ba2b 460 enum machine_mode mode;
fc12fa10 461 rtx *operands;
f6b7ba2b 462{
fc12fa10 463 /* Either the destination or source must be a register, and the
464 MAC16 accumulator doesn't count. */
465
466 if (register_operand (operands[0], mode))
467 {
468 int dst_regnum = xt_true_regnum (operands[0]);
469
470 /* The stack pointer can only be assigned with a MOVSP opcode. */
471 if (dst_regnum == STACK_POINTER_REGNUM)
472 return (mode == SImode
473 && register_operand (operands[1], mode)
474 && !ACC_REG_P (xt_true_regnum (operands[1])));
475
476 if (!ACC_REG_P (dst_regnum))
477 return true;
478 }
141e2ef6 479 if (register_operand (operands[1], mode))
fc12fa10 480 {
481 int src_regnum = xt_true_regnum (operands[1]);
482 if (!ACC_REG_P (src_regnum))
483 return true;
484 }
f6b7ba2b 485 return FALSE;
486}
487
488
489int
490mask_operand (op, mode)
491 rtx op;
492 enum machine_mode mode;
493{
494 if (GET_CODE (op) == CONST_INT)
495 return xtensa_mask_immediate (INTVAL (op));
496
497 return register_operand (op, mode);
498}
499
500
501int
502extui_fldsz_operand (op, mode)
503 rtx op;
504 enum machine_mode mode ATTRIBUTE_UNUSED;
505{
506 return ((GET_CODE (op) == CONST_INT)
507 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
508}
509
510
511int
512sext_operand (op, mode)
513 rtx op;
514 enum machine_mode mode;
515{
516 if (TARGET_SEXT)
517 return nonimmed_operand (op, mode);
518 return mem_operand (op, mode);
519}
520
521
522int
523sext_fldsz_operand (op, mode)
524 rtx op;
525 enum machine_mode mode ATTRIBUTE_UNUSED;
526{
527 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
528}
529
530
531int
532lsbitnum_operand (op, mode)
533 rtx op;
534 enum machine_mode mode ATTRIBUTE_UNUSED;
535{
536 if (GET_CODE (op) == CONST_INT)
537 {
538 return (BITS_BIG_ENDIAN
539 ? (INTVAL (op) == BITS_PER_WORD-1)
540 : (INTVAL (op) == 0));
541 }
542 return FALSE;
543}
544
545
546static int
547b4const_or_zero (v)
548 int v;
549{
550 if (v == 0)
551 return TRUE;
552 return xtensa_b4const (v);
553}
554
555
556int
557branch_operand (op, mode)
558 rtx op;
559 enum machine_mode mode;
560{
561 if (GET_CODE (op) == CONST_INT)
562 return b4const_or_zero (INTVAL (op));
563
564 return register_operand (op, mode);
565}
566
567
568int
569ubranch_operand (op, mode)
570 rtx op;
571 enum machine_mode mode;
572{
573 if (GET_CODE (op) == CONST_INT)
574 return xtensa_b4constu (INTVAL (op));
575
576 return register_operand (op, mode);
577}
578
579
580int
581call_insn_operand (op, mode)
582 rtx op;
583 enum machine_mode mode ATTRIBUTE_UNUSED;
584{
585 if ((GET_CODE (op) == REG)
586 && (op != arg_pointer_rtx)
587 && ((REGNO (op) < FRAME_POINTER_REGNUM)
588 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
589 return TRUE;
590
591 if (CONSTANT_ADDRESS_P (op))
592 {
593 /* Direct calls only allowed to static functions with PIC. */
594 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
595 && SYMBOL_REF_FLAG (op)));
596 }
597
598 return FALSE;
599}
600
601
602int
603move_operand (op, mode)
604 rtx op;
605 enum machine_mode mode;
606{
607 if (register_operand (op, mode))
608 return TRUE;
609
610 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
611 result in 0/1. */
612 if (GET_CODE (op) == CONSTANT_P_RTX)
613 return TRUE;
614
615 if (GET_CODE (op) == CONST_INT)
616 return xtensa_simm12b (INTVAL (op));
617
618 if (GET_CODE (op) == MEM)
619 return memory_address_p (mode, XEXP (op, 0));
620
621 return FALSE;
622}
623
624
625int
626smalloffset_mem_p (op)
627 rtx op;
628{
629 if (GET_CODE (op) == MEM)
630 {
631 rtx addr = XEXP (op, 0);
632 if (GET_CODE (addr) == REG)
633 return REG_OK_FOR_BASE_P (addr);
634 if (GET_CODE (addr) == PLUS)
635 {
636 rtx offset = XEXP (addr, 0);
637 if (GET_CODE (offset) != CONST_INT)
638 offset = XEXP (addr, 1);
639 if (GET_CODE (offset) != CONST_INT)
640 return FALSE;
641 return xtensa_lsi4x4 (INTVAL (offset));
642 }
643 }
644 return FALSE;
645}
646
647
648int
649smalloffset_double_mem_p (op)
650 rtx op;
651{
652 if (!smalloffset_mem_p (op))
653 return FALSE;
654 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
655}
656
657
658int
659constantpool_address_p (addr)
660 rtx addr;
661{
662 rtx sym = addr;
663
664 if (GET_CODE (addr) == CONST)
665 {
666 rtx offset;
667
668 /* only handle (PLUS (SYM, OFFSET)) form */
669 addr = XEXP (addr, 0);
670 if (GET_CODE (addr) != PLUS)
671 return FALSE;
672
673 /* make sure the address is word aligned */
674 offset = XEXP (addr, 1);
675 if ((GET_CODE (offset) != CONST_INT)
676 || ((INTVAL (offset) & 3) != 0))
677 return FALSE;
678
679 sym = XEXP (addr, 0);
680 }
681
682 if ((GET_CODE (sym) == SYMBOL_REF)
683 && CONSTANT_POOL_ADDRESS_P (sym))
684 return TRUE;
685 return FALSE;
686}
687
688
689int
690constantpool_mem_p (op)
691 rtx op;
692{
693 if (GET_CODE (op) == MEM)
694 return constantpool_address_p (XEXP (op, 0));
695 return FALSE;
696}
697
698
699int
700non_const_move_operand (op, mode)
701 rtx op;
702 enum machine_mode mode;
703{
704 if (register_operand (op, mode))
705 return 1;
706 if (GET_CODE (op) == SUBREG)
707 op = SUBREG_REG (op);
708 if (GET_CODE (op) == MEM)
709 return memory_address_p (mode, XEXP (op, 0));
710 return FALSE;
711}
712
713
714/* Accept the floating point constant 1 in the appropriate mode. */
715
716int
717const_float_1_operand (op, mode)
718 rtx op;
719 enum machine_mode mode;
720{
721 REAL_VALUE_TYPE d;
722 static REAL_VALUE_TYPE onedf;
723 static REAL_VALUE_TYPE onesf;
724 static int one_initialized;
725
726 if ((GET_CODE (op) != CONST_DOUBLE)
727 || (mode != GET_MODE (op))
728 || (mode != DFmode && mode != SFmode))
729 return FALSE;
730
731 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
732
733 if (! one_initialized)
734 {
735 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
736 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
737 one_initialized = TRUE;
738 }
739
740 if (mode == DFmode)
741 return REAL_VALUES_EQUAL (d, onedf);
742 else
743 return REAL_VALUES_EQUAL (d, onesf);
744}
745
746
747int
748fpmem_offset_operand (op, mode)
749 rtx op;
750 enum machine_mode mode ATTRIBUTE_UNUSED;
751{
752 if (GET_CODE (op) == CONST_INT)
753 return xtensa_mem_offset (INTVAL (op), SFmode);
754 return 0;
755}
756
757
758void
759xtensa_extend_reg (dst, src)
760 rtx dst;
761 rtx src;
762{
763 rtx temp = gen_reg_rtx (SImode);
764 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
765
766 /* generate paradoxical subregs as needed so that the modes match */
767 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
768 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
769
770 emit_insn (gen_ashlsi3 (temp, src, shift));
771 emit_insn (gen_ashrsi3 (dst, temp, shift));
772}
773
774
775void
776xtensa_load_constant (dst, src)
777 rtx dst;
778 rtx src;
779{
780 enum machine_mode mode = GET_MODE (dst);
781 src = force_const_mem (SImode, src);
782
783 /* PC-relative loads are always SImode so we have to add a SUBREG if that
784 is not the desired mode */
785
786 if (mode != SImode)
787 {
788 if (register_operand (dst, mode))
789 dst = simplify_gen_subreg (SImode, dst, mode, 0);
790 else
791 {
792 src = force_reg (SImode, src);
793 src = gen_lowpart_SUBREG (mode, src);
794 }
795 }
796
797 emit_move_insn (dst, src);
798}
799
800
801int
802branch_operator (x, mode)
803 rtx x;
804 enum machine_mode mode;
805{
806 if (GET_MODE (x) != mode)
807 return FALSE;
808
809 switch (GET_CODE (x))
810 {
811 case EQ:
812 case NE:
813 case LT:
814 case GE:
815 return TRUE;
816 default:
817 break;
818 }
819 return FALSE;
820}
821
822
823int
824ubranch_operator (x, mode)
825 rtx x;
826 enum machine_mode mode;
827{
828 if (GET_MODE (x) != mode)
829 return FALSE;
830
831 switch (GET_CODE (x))
832 {
833 case LTU:
834 case GEU:
835 return TRUE;
836 default:
837 break;
838 }
839 return FALSE;
840}
841
842
843int
844boolean_operator (x, mode)
845 rtx x;
846 enum machine_mode mode;
847{
848 if (GET_MODE (x) != mode)
849 return FALSE;
850
851 switch (GET_CODE (x))
852 {
853 case EQ:
854 case NE:
855 return TRUE;
856 default:
857 break;
858 }
859 return FALSE;
860}
861
862
863int
864xtensa_mask_immediate (v)
865 int v;
866{
867#define MAX_MASK_SIZE 16
868 int mask_size;
869
870 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
871 {
872 if ((v & 1) == 0)
873 return FALSE;
874 v = v >> 1;
875 if (v == 0)
876 return TRUE;
877 }
878
879 return FALSE;
880}
881
882
883int
884xtensa_mem_offset (v, mode)
885 unsigned v;
886 enum machine_mode mode;
887{
888 switch (mode)
889 {
890 case BLKmode:
891 /* Handle the worst case for block moves. See xtensa_expand_block_move
892 where we emit an optimized block move operation if the block can be
893 moved in < "move_ratio" pieces. The worst case is when the block is
894 aligned but has a size of (3 mod 4) (does this happen?) so that the
895 last piece requires a byte load/store. */
896 return (xtensa_uimm8 (v) &&
897 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
898
899 case QImode:
900 return xtensa_uimm8 (v);
901
902 case HImode:
903 return xtensa_uimm8x2 (v);
904
905 case DFmode:
906 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
907
908 default:
909 break;
910 }
911
912 return xtensa_uimm8x4 (v);
913}
914
915
916/* Make normal rtx_code into something we can index from an array */
917
918static enum internal_test
919map_test_to_internal_test (test_code)
920 enum rtx_code test_code;
921{
922 enum internal_test test = ITEST_MAX;
923
924 switch (test_code)
925 {
926 default: break;
927 case EQ: test = ITEST_EQ; break;
928 case NE: test = ITEST_NE; break;
929 case GT: test = ITEST_GT; break;
930 case GE: test = ITEST_GE; break;
931 case LT: test = ITEST_LT; break;
932 case LE: test = ITEST_LE; break;
933 case GTU: test = ITEST_GTU; break;
934 case GEU: test = ITEST_GEU; break;
935 case LTU: test = ITEST_LTU; break;
936 case LEU: test = ITEST_LEU; break;
937 }
938
939 return test;
940}
941
942
943/* Generate the code to compare two integer values. The return value is
944 the comparison expression. */
945
946static rtx
947gen_int_relational (test_code, cmp0, cmp1, p_invert)
948 enum rtx_code test_code; /* relational test (EQ, etc) */
949 rtx cmp0; /* first operand to compare */
950 rtx cmp1; /* second operand to compare */
951 int *p_invert; /* whether branch needs to reverse its test */
952{
953 struct cmp_info {
954 enum rtx_code test_code; /* test code to use in insn */
955 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
956 int const_add; /* constant to add (convert LE -> LT) */
957 int reverse_regs; /* reverse registers in test */
958 int invert_const; /* != 0 if invert value if cmp1 is constant */
959 int invert_reg; /* != 0 if invert value if cmp1 is register */
960 int unsignedp; /* != 0 for unsigned comparisons. */
961 };
962
963 static struct cmp_info info[ (int)ITEST_MAX ] = {
964
965 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
966 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
967
968 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
969 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
970 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
971 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
972
973 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
974 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
975 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
976 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
977 };
978
979 enum internal_test test;
980 enum machine_mode mode;
981 struct cmp_info *p_info;
982
983 test = map_test_to_internal_test (test_code);
984 if (test == ITEST_MAX)
985 abort ();
986
987 p_info = &info[ (int)test ];
988
989 mode = GET_MODE (cmp0);
990 if (mode == VOIDmode)
991 mode = GET_MODE (cmp1);
992
993 /* Make sure we can handle any constants given to us. */
994 if (GET_CODE (cmp1) == CONST_INT)
995 {
996 HOST_WIDE_INT value = INTVAL (cmp1);
997 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
998
999 /* if the immediate overflows or does not fit in the immediate field,
1000 spill it to a register */
1001
1002 if ((p_info->unsignedp ?
1003 (uvalue + p_info->const_add > uvalue) :
1004 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1005 {
1006 cmp1 = force_reg (mode, cmp1);
1007 }
1008 else if (!(p_info->const_range_p) (value + p_info->const_add))
1009 {
1010 cmp1 = force_reg (mode, cmp1);
1011 }
1012 }
1013 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1014 {
1015 cmp1 = force_reg (mode, cmp1);
1016 }
1017
1018 /* See if we need to invert the result. */
1019 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1020 ? p_info->invert_const
1021 : p_info->invert_reg);
1022
1023 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1024 Comparison between two registers, may involve switching operands. */
1025 if (GET_CODE (cmp1) == CONST_INT)
1026 {
1027 if (p_info->const_add != 0)
1028 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1029
1030 }
1031 else if (p_info->reverse_regs)
1032 {
1033 rtx temp = cmp0;
1034 cmp0 = cmp1;
1035 cmp1 = temp;
1036 }
1037
1038 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1039}
1040
1041
1042/* Generate the code to compare two float values. The return value is
1043 the comparison expression. */
1044
1045static rtx
1046gen_float_relational (test_code, cmp0, cmp1)
1047 enum rtx_code test_code; /* relational test (EQ, etc) */
1048 rtx cmp0; /* first operand to compare */
1049 rtx cmp1; /* second operand to compare */
1050{
1051 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1052 rtx brtmp;
1053 int reverse_regs, invert;
1054
1055 switch (test_code)
1056 {
1057 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1058 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1059 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1060 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1061 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1062 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1063 default:
1064 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1065 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1066 }
1067
1068 if (reverse_regs)
1069 {
1070 rtx temp = cmp0;
1071 cmp0 = cmp1;
1072 cmp1 = temp;
1073 }
1074
1075 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1076 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1077
1078 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1079}
1080
1081
1082void
1083xtensa_expand_conditional_branch (operands, test_code)
1084 rtx *operands;
1085 enum rtx_code test_code;
1086{
1087 enum cmp_type type = branch_type;
1088 rtx cmp0 = branch_cmp[0];
1089 rtx cmp1 = branch_cmp[1];
1090 rtx cmp;
1091 int invert;
1092 rtx label1, label2;
1093
1094 switch (type)
1095 {
1096 case CMP_DF:
1097 default:
1098 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1099
1100 case CMP_SI:
1101 invert = FALSE;
1102 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1103 break;
1104
1105 case CMP_SF:
1106 if (!TARGET_HARD_FLOAT)
1107 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1108 invert = FALSE;
1109 cmp = gen_float_relational (test_code, cmp0, cmp1);
1110 break;
1111 }
1112
1113 /* Generate the branch. */
1114
1115 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1116 label2 = pc_rtx;
1117
1118 if (invert)
1119 {
1120 label2 = label1;
1121 label1 = pc_rtx;
1122 }
1123
1124 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1125 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1126 label1,
1127 label2)));
1128}
1129
1130
1131static rtx
1132gen_conditional_move (cmp)
1133 rtx cmp;
1134{
1135 enum rtx_code code = GET_CODE (cmp);
1136 rtx op0 = branch_cmp[0];
1137 rtx op1 = branch_cmp[1];
1138
1139 if (branch_type == CMP_SI)
1140 {
1141 /* Jump optimization calls get_condition() which canonicalizes
1142 comparisons like (GE x <const>) to (GT x <const-1>).
1143 Transform those comparisons back to GE, since that is the
1144 comparison supported in Xtensa. We shouldn't have to
1145 transform <LE x const> comparisons, because neither
1146 xtensa_expand_conditional_branch() nor get_condition() will
1147 produce them. */
1148
1149 if ((code == GT) && (op1 == constm1_rtx))
1150 {
1151 code = GE;
1152 op1 = const0_rtx;
1153 }
1154 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1155
1156 if (boolean_operator (cmp, VOIDmode))
1157 {
1158 /* swap the operands to make const0 second */
1159 if (op0 == const0_rtx)
1160 {
1161 op0 = op1;
1162 op1 = const0_rtx;
1163 }
1164
1165 /* if not comparing against zero, emit a comparison (subtract) */
1166 if (op1 != const0_rtx)
1167 {
1168 op0 = expand_binop (SImode, sub_optab, op0, op1,
1169 0, 0, OPTAB_LIB_WIDEN);
1170 op1 = const0_rtx;
1171 }
1172 }
1173 else if (branch_operator (cmp, VOIDmode))
1174 {
1175 /* swap the operands to make const0 second */
1176 if (op0 == const0_rtx)
1177 {
1178 op0 = op1;
1179 op1 = const0_rtx;
1180
1181 switch (code)
1182 {
1183 case LT: code = GE; break;
1184 case GE: code = LT; break;
1185 default: abort ();
1186 }
1187 }
1188
1189 if (op1 != const0_rtx)
1190 return 0;
1191 }
1192 else
1193 return 0;
1194
1195 return gen_rtx (code, VOIDmode, op0, op1);
1196 }
1197
1198 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1199 return gen_float_relational (code, op0, op1);
1200
1201 return 0;
1202}
1203
1204
1205int
1206xtensa_expand_conditional_move (operands, isflt)
1207 rtx *operands;
1208 int isflt;
1209{
1210 rtx cmp;
1211 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1212
1213 if (!(cmp = gen_conditional_move (operands[1])))
1214 return 0;
1215
1216 if (isflt)
1217 gen_fn = (branch_type == CMP_SI
1218 ? gen_movsfcc_internal0
1219 : gen_movsfcc_internal1);
1220 else
1221 gen_fn = (branch_type == CMP_SI
1222 ? gen_movsicc_internal0
1223 : gen_movsicc_internal1);
1224
1225 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1226 operands[2], operands[3], cmp));
1227 return 1;
1228}
1229
1230
1231int
1232xtensa_expand_scc (operands)
1233 rtx *operands;
1234{
1235 rtx dest = operands[0];
1236 rtx cmp = operands[1];
1237 rtx one_tmp, zero_tmp;
1238 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1239
1240 if (!(cmp = gen_conditional_move (cmp)))
1241 return 0;
1242
1243 one_tmp = gen_reg_rtx (SImode);
1244 zero_tmp = gen_reg_rtx (SImode);
1245 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1246 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1247
1248 gen_fn = (branch_type == CMP_SI
1249 ? gen_movsicc_internal0
1250 : gen_movsicc_internal1);
1251 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1252 return 1;
1253}
1254
1255
/* Emit insns to move operands[1] into operands[0].

   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Constants other than CONST_INTs that fit a signed 12-bit
     immediate must be loaded from the constant pool.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      /* Force operand combinations the move patterns cannot handle
	 into a register.  */
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);

      /* The first copy out of the incoming argument register a7 needs
	 special placement; see xtensa_copy_incoming_a7.  */
      if (xtensa_copy_incoming_a7 (operands, mode))
	return 1;
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload. So we remove the
     subreg and adjust mem accordingly. */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1295
1296static rtx
1297fixup_subreg_mem (x)
1298 rtx x;
1299{
1300 if (GET_CODE (x) == SUBREG
1301 && GET_CODE (SUBREG_REG (x)) == REG
1302 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1303 {
1304 rtx temp =
1305 gen_rtx_SUBREG (GET_MODE (x),
1306 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1307 SUBREG_BYTE (x));
1308 x = alter_subreg (&temp);
1309 }
1310 return x;
1311}
1312
1313
78d6a4ed 1314/* Check if this move is copying an incoming argument in a7. If so,
1315 emit the move, followed by the special "set_frame_ptr"
1316 unspec_volatile insn, at the very beginning of the function. This
1317 is necessary because the register allocator will ignore conflicts
1318 with a7 and may assign some other pseudo to a7. If that pseudo was
1319 assigned prior to this move, it would clobber the incoming argument
1320 in a7. By copying the argument out of a7 as the very first thing,
1321 and then immediately following that with an unspec_volatile to keep
1322 the scheduler away, we should avoid any problems. */
1323
1324bool
1325xtensa_copy_incoming_a7 (operands, mode)
1326 rtx *operands;
1327 enum machine_mode mode;
1328{
1329 if (a7_overlap_mentioned_p (operands[1])
1330 && !cfun->machine->incoming_a7_copied)
1331 {
1332 rtx mov;
1333 switch (mode)
1334 {
1335 case DFmode:
1336 mov = gen_movdf_internal (operands[0], operands[1]);
1337 break;
1338 case SFmode:
1339 mov = gen_movsf_internal (operands[0], operands[1]);
1340 break;
1341 case DImode:
1342 mov = gen_movdi_internal (operands[0], operands[1]);
1343 break;
1344 case SImode:
1345 mov = gen_movsi_internal (operands[0], operands[1]);
1346 break;
1347 case HImode:
1348 mov = gen_movhi_internal (operands[0], operands[1]);
1349 break;
1350 case QImode:
1351 mov = gen_movqi_internal (operands[0], operands[1]);
1352 break;
1353 default:
1354 abort ();
1355 }
1356
1357 /* Insert the instructions before any other argument copies.
1358 (The set_frame_ptr insn comes _after_ the move, so push it
1359 out first.) */
1360 push_topmost_sequence ();
1361 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1362 emit_insn_after (mov, get_insns ());
1363 pop_topmost_sequence ();
1364
1365 /* Ideally the incoming argument in a7 would only be copied
1366 once, since propagating a7 into the body of a function
1367 will almost certainly lead to errors. However, there is
1368 at least one harmless case (in GCSE) where the original
1369 copy from a7 is changed to copy into a new pseudo. Thus,
1370 we use a flag to only do this special treatment for the
1371 first copy of a7. */
1372
1373 cfun->machine->incoming_a7_copied = true;
1374
1375 return 1;
1376 }
1377
1378 return 0;
1379}
1380
1381
f6b7ba2b 1382/* Try to expand a block move operation to an RTL block move instruction.
1383 If not optimizing or if the block size is not a constant or if the
1384 block is small, the expansion fails and GCC falls back to calling
1385 memcpy().
1386
1387 operands[0] is the destination
1388 operands[1] is the source
1389 operands[2] is the length
1390 operands[3] is the alignment */
1391
1392int
1393xtensa_expand_block_move (operands)
1394 rtx *operands;
1395{
1396 rtx dest = operands[0];
1397 rtx src = operands[1];
1398 int bytes = INTVAL (operands[2]);
1399 int align = XINT (operands[3], 0);
1400 int num_pieces, move_ratio;
1401
1402 /* If this is not a fixed size move, just call memcpy */
1403 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1404 return 0;
1405
1406 /* Anything to move? */
1407 if (bytes <= 0)
1408 return 1;
1409
1410 if (align > MOVE_MAX)
1411 align = MOVE_MAX;
1412
1413 /* decide whether to expand inline based on the optimization level */
1414 move_ratio = 4;
1415 if (optimize > 2)
1416 move_ratio = LARGEST_MOVE_RATIO;
1417 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1418 if (num_pieces >= move_ratio)
1419 return 0;
1420
160b2123 1421 /* make sure the memory addresses are valid */
9c56a8c5 1422 operands[0] = validize_mem (dest);
1423 operands[1] = validize_mem (src);
f6b7ba2b 1424
1425 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1426 operands[2], operands[3]));
1427 return 1;
1428}
1429
1430
/* Emit a sequence of instructions to implement a block move, trying
   to hide load delay slots as much as possible.  Load N values into
   temporary registers, store those N values, and repeat until the
   complete block has been moved.  N=delay_slots+1 */

/* One buffered load or store: the formatted assembly template and the
   two operands (register, memory) to substitute into it.  (The member
   name "template" is valid C, though it would clash with the C++
   keyword.)  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1440
/* Output assembly for a block move of BYTES bytes (operands[2]) from
   SRC to DEST, interleaving chunks of DELAY_SLOTS+1 loads into
   TMPREGS with the corresponding stores so that load delay slots are
   hidden.  Called at final-output time: emits text, not RTL.  */

void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;
     rtx *tmpregs;
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  chunk_size = delay_slots + 1;

  /* Per-chunk buffers for the pending load and store instructions.  */
  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest loadable/storable mode for the alignment.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      /* Buffer up to chunk_size load/store pairs.  */
      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1531
1532
1533static enum machine_mode
1534xtensa_find_mode_for_size (item_size)
1535 unsigned item_size;
1536{
1537 enum machine_mode mode, tmode;
1538
1539 while (1)
1540 {
1541 mode = VOIDmode;
1542
1543 /* find mode closest to but not bigger than item_size */
1544 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1545 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1546 if (GET_MODE_SIZE (tmode) <= item_size)
1547 mode = tmode;
1548 if (mode == VOIDmode)
1549 abort ();
1550
1551 item_size = GET_MODE_SIZE (mode);
1552
1553 if (xtensa_ld_opcodes[(int) mode]
1554 && xtensa_st_opcodes[(int) mode])
1555 break;
1556
1557 /* cannot load & store this mode; try something smaller */
1558 item_size -= 1;
1559 }
1560
1561 return mode;
1562}
1563
1564
/* Expand a nonlocal goto by calling the "__xtensa_nonlocal_goto"
   libgcc helper with the containing function's frame pointer
   (operands[3]) and the handler address (operands[1]).  */

void
xtensa_expand_nonlocal_goto (operands)
     rtx *operands;
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  /* Rewrite references to the virtual frame pointer in the handler
     address in terms of the containing function's frame pointer —
     presumably the handler address was expressed relative to it;
     confirm against the nonlocal_goto pattern.  */
  goto_handler = replace_rtx (copy_rtx (goto_handler),
			      virtual_stack_vars_rtx,
			      containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
		     0, VOIDmode, 2,
		     containing_fp, Pmode,
		     goto_handler, Pmode);
}
1587
1588
1f3233d1 1589static struct machine_function *
1590xtensa_init_machine_status ()
f6b7ba2b 1591{
1f3233d1 1592 return ggc_alloc_cleared (sizeof (struct machine_function));
f6b7ba2b 1593}
1594
1595
/* Called when the current function accesses an outer frame
   (__builtin_frame_address / __builtin_return_address with a nonzero
   level): force a frame pointer and call the libgcc window-spill
   helper so outer frames are actually in memory.  */

void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
  cfun->machine->accesses_prev_frame = 1;

  /* NOTE(review): the helper's name suggests it spills live register
     windows to the stack — confirm against libgcc's implementation.  */
  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1606
1607
/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
   a comment showing where the end of the loop is.  However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there.  If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop).  If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop.  When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need to
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken.  */

void
xtensa_emit_loop_end (insn, operands)
     rtx insn;
     rtx *operands;
{
  char done = 0;

  /* Walk backwards from the loop end, skipping notes and barriers,
     until the last "real" element of the loop body is found.  */
  for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	case BARRIER:
	  break;

	case CODE_LABEL:
	  output_asm_insn ("nop.n", operands);
	  done = 1;
	  break;

	default:
	  {
	    rtx body = PATTERN (insn);

	    /* NOTE(review): this compares the insn's PATTERN against
	       JUMP_INSN, which is an insn code, not a pattern code;
	       it looks like GET_CODE (insn) == JUMP_INSN was intended
	       — confirm whether a trailing branch ever gets its nop.  */
	    if (GET_CODE (body) == JUMP_INSN)
	      {
		output_asm_insn ("nop.n", operands);
		done = 1;
	      }
	    else if ((GET_CODE (body) != USE)
		     && (GET_CODE (body) != CLOBBER))
	      done = 1;
	  }
	  break;
	}
    }

  output_asm_insn ("# loop end for %0", operands);
}
1660
1661
1662char *
1663xtensa_emit_call (callop, operands)
1664 int callop;
1665 rtx *operands;
1666{
bbfbe351 1667 static char result[64];
f6b7ba2b 1668 rtx tgt = operands[callop];
1669
1670 if (GET_CODE (tgt) == CONST_INT)
1671 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1672 else if (register_operand (tgt, VOIDmode))
1673 sprintf (result, "callx8\t%%%d", callop);
1674 else
1675 sprintf (result, "call8\t%%%d", callop);
1676
1677 return result;
1678}
1679
1680
1681/* Return the stabs register number to use for 'regno'. */
1682
1683int
1684xtensa_dbx_register_number (regno)
1685 int regno;
1686{
1687 int first = -1;
1688
1689 if (GP_REG_P (regno)) {
1690 regno -= GP_REG_FIRST;
1691 first = 0;
1692 }
1693 else if (BR_REG_P (regno)) {
1694 regno -= BR_REG_FIRST;
1695 first = 16;
1696 }
1697 else if (FP_REG_P (regno)) {
1698 regno -= FP_REG_FIRST;
1699 /* The current numbering convention is that TIE registers are
1700 numbered in libcc order beginning with 256. We can't guarantee
1701 that the FP registers will come first, so the following is just
1702 a guess. It seems like we should make a special case for FP
1703 registers and give them fixed numbers < 256. */
1704 first = 256;
1705 }
1706 else if (ACC_REG_P (regno))
1707 {
1708 first = 0;
1709 regno = -1;
1710 }
1711
1712 /* When optimizing, we sometimes get asked about pseudo-registers
1713 that don't represent hard registers. Return 0 for these. */
1714 if (first == -1)
1715 return 0;
1716
1717 return first + regno;
1718}
1719
1720
1721/* Argument support functions. */
1722
/* Initialize CUMULATIVE_ARGS for a function.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  /* No argument words have been consumed yet.  */
  cum->arg_words = 0;
}
1733
1734/* Advance the argument to the next argument position. */
1735
1736void
1737function_arg_advance (cum, mode, type)
1738 CUMULATIVE_ARGS *cum; /* current arg information */
1739 enum machine_mode mode; /* current arg mode */
1740 tree type; /* type of the argument or 0 if lib support */
1741{
1742 int words, max;
1743 int *arg_words;
1744
1745 arg_words = &cum->arg_words;
1746 max = MAX_ARGS_IN_REGISTERS;
1747
1748 words = (((mode != BLKmode)
1749 ? (int) GET_MODE_SIZE (mode)
1750 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1751
1752 if ((*arg_words + words > max) && (*arg_words < max))
1753 *arg_words = max;
1754
1755 *arg_words += words;
1756}
1757
1758
/* Return an RTL expression containing the register for the given mode,
   or 0 if the argument is to be passed on the stack.  */

rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Argument size in words, rounding up.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Types aligned beyond a word start at an even word index.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* Arguments that do not fit entirely in registers go on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL.  */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      /* One (reg, byte-offset) pair per word of the argument.  */
      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
1817
1818
/* Validate the target option combination and initialize the backend's
   option-dependent tables: block-move opcode names, constraint-letter
   classes, the hard-regno/mode table, and the machine_status hook.  */

void
override_options ()
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; optional features map
     to NO_REGS when the corresponding option is off.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start on an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
    flag_pic = 1;
}
1881
1882
1883/* A C compound statement to output to stdio stream STREAM the
1884 assembler syntax for an instruction operand X. X is an RTL
1885 expression.
1886
1887 CODE is a value that can be used to specify one of several ways
1888 of printing the operand. It is used when identical operands
1889 must be printed differently depending on the context. CODE
1890 comes from the '%' specification that was used to request
1891 printing of the operand. If the specification was just '%DIGIT'
1892 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1893 is the ASCII code for LTR.
1894
1895 If X is a register, this macro should print the register's name.
1896 The names can be found in an array 'reg_names' whose type is
1897 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1898
1899 When the machine description has a specification '%PUNCT' (a '%'
1900 followed by a punctuation character), this macro is called with
1901 a null pointer for X and the punctuation character for CODE.
1902
1903 'a', 'c', 'l', and 'n' are reserved.
1904
1905 The Xtensa specific codes are:
1906
1907 'd' CONST_INT, print as signed decimal
1908 'x' CONST_INT, print as signed hexadecimal
1909 'K' CONST_INT, print number of bits in mask for EXTUI
1910 'R' CONST_INT, print (X & 0x1f)
1911 'L' CONST_INT, print ((32 - X) & 0x1f)
1912 'D' REG, print second register of double-word register operand
1913 'N' MEM, print address of next word following a memory operand
1914 'v' MEM, if memory reference is volatile, output a MEMW before it
1915*/
1916
/* Print VAL to FILE as a hexadecimal constant: small magnitudes
   (|VAL| <= 9) as a bare decimal digit, and negative values with a
   leading "-" rather than in two's-complement form.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: plain -val has undefined
       behavior (signed overflow) when val == INT_MIN.  */
    fprintf (file, "-0x%x", - (unsigned) val);
  else
    fprintf (file, "0x%x", val);
}
1930
1931
/* Output operand OP to FILE for assembly, honoring the modifier
   LETTER ('D', 'N', 'v', 'K', 'L', 'R', 'x', 'd' — see the comment
   block above for their meanings).  */

void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;		/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D': second register of a double-word register operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /* For a volatile memory reference, emit a MEMW before the
	 load or store.  */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	{
	  /* 'N': address of the second word of a two-word operand.  */
	  enum machine_mode mode;
	  switch (GET_MODE (op))
	    {
	    case DFmode: mode = SFmode; break;
	    case DImode: mode = SImode; break;
	    default: abort ();
	    }
	  op = adjust_address (op, mode, 4);
	}

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  {
	    /* Count the contiguous low-order one bits; the value must
	       be a mask of 1-16 bits for EXTUI.  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  fprintf (file, "%d", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  /* NOTE(review): "%d" with INTVAL (a HOST_WIDE_INT) is a
	     format mismatch on hosts where HOST_WIDE_INT is wider
	     than int — confirm and consider a cast or "%ld".  */
	  fprintf (file, "%d", INTVAL (op));
	  break;

	}
      break;

    default:
      output_addr_const (file, op);
    }
}
2023
2024
2025/* A C compound statement to output to stdio stream STREAM the
2026 assembler syntax for an instruction operand that is a memory
7811991d 2027 reference whose address is ADDR. ADDR is an RTL expression. */
f6b7ba2b 2028
2029void
2030print_operand_address (file, addr)
2031 FILE *file;
2032 rtx addr;
2033{
2034 if (!addr)
2035 error ("PRINT_OPERAND_ADDRESS, null pointer");
2036
2037 switch (GET_CODE (addr))
2038 {
2039 default:
2040 fatal_insn ("invalid address", addr);
2041 break;
2042
2043 case REG:
2044 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2045 break;
2046
2047 case PLUS:
2048 {
2049 rtx reg = (rtx)0;
2050 rtx offset = (rtx)0;
2051 rtx arg0 = XEXP (addr, 0);
2052 rtx arg1 = XEXP (addr, 1);
2053
2054 if (GET_CODE (arg0) == REG)
2055 {
2056 reg = arg0;
2057 offset = arg1;
2058 }
2059 else if (GET_CODE (arg1) == REG)
2060 {
2061 reg = arg1;
2062 offset = arg0;
2063 }
2064 else
2065 fatal_insn ("no register in address", addr);
2066
2067 if (CONSTANT_P (offset))
2068 {
2069 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2070 output_addr_const (file, offset);
2071 }
2072 else
2073 fatal_insn ("address offset not a constant", addr);
2074 }
2075 break;
2076
2077 case LABEL_REF:
2078 case SYMBOL_REF:
2079 case CONST_INT:
2080 case CONST:
2081 output_addr_const (file, addr);
2082 break;
2083 }
2084}
2085
2086
/* Emit either a label, .comm, or .lcomm directive.  INIT_STRING is
   emitted before the name and FINAL_STRING after it; FINAL_STRING is
   a printf format that consumes SIZE.  */

void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;
     char *final_string;
     int size;
{
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2101
2102
/* Output a ".literal" directive for constant X of mode MODE, labeled
   .LC<LABELNO>.  Floats are emitted as their target bit patterns;
   4- and 8-byte integer constants are emitted symbolically.  */

void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Convert to the target's floating-point representation.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Emit the two words of a DImode constant separately.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2165
2166
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */

/* Stack alignment in bytes, and a helper that rounds LOC up to the
   next multiple of it.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2172
2173long
2174compute_frame_size (size)
2175 int size; /* # of var. bytes allocated */
2176{
2177 /* add space for the incoming static chain value */
2178 if (current_function_needs_context)
2179 size += (1 * UNITS_PER_WORD);
2180
2181 xtensa_current_frame_size =
2182 XTENSA_STACK_ALIGN (size
2183 + current_function_outgoing_args_size
2184 + (WINDOW_SIZE * UNITS_PER_WORD));
2185 return xtensa_current_frame_size;
2186}
2187
2188
2189int
2190xtensa_frame_pointer_required ()
2191{
2192 /* The code to expand builtin_frame_addr and builtin_return_addr
2193 currently uses the hard_frame_pointer instead of frame_pointer.
2194 This seems wrong but maybe it's necessary for other architectures.
2195 This function is derived from the i386 code. */
2196
2197 if (cfun->machine->accesses_prev_frame)
2198 return 1;
2199
2200 return 0;
2201}
2202
2203
2204void
2205xtensa_reorg (first)
2206 rtx first;
2207{
2208 rtx insn, set_frame_ptr_insn = 0;
2209
2210 unsigned long tsize = compute_frame_size (get_frame_size ());
2211 if (tsize < (1 << (12+3)))
2212 frame_size_const = 0;
2213 else
2214 {
2215 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2216
2217 /* make sure the constant is used so it doesn't get eliminated
2218 from the constant pool */
2219 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2220 }
2221
2222 if (!frame_pointer_needed)
2223 return;
2224
2225 /* Search all instructions, looking for the insn that sets up the
2226 frame pointer. This search will fail if the function does not
2227 have an incoming argument in $a7, but in that case, we can just
2228 set up the frame pointer at the very beginning of the
2229 function. */
2230
2231 for (insn = first; insn; insn = NEXT_INSN (insn))
2232 {
2233 rtx pat;
2234
2235 if (!INSN_P (insn))
2236 continue;
2237
2238 pat = PATTERN (insn);
2239 if (GET_CODE (pat) == UNSPEC_VOLATILE
2240 && (XINT (pat, 1) == UNSPECV_SET_FP))
2241 {
2242 set_frame_ptr_insn = insn;
2243 break;
2244 }
2245 }
2246
2247 if (set_frame_ptr_insn)
2248 {
2249 /* for all instructions prior to set_frame_ptr_insn, replace
2250 hard_frame_pointer references with stack_pointer */
2251 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2252 {
2253 if (INSN_P (insn))
2254 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2255 hard_frame_pointer_rtx,
2256 stack_pointer_rtx);
2257 }
2258 }
2259 else
2260 {
2261 /* emit the frame pointer move immediately after the NOTE that starts
2262 the function */
2263 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2264 stack_pointer_rtx), first);
2265 }
2266}
2267
2268
/* Set up the stack and frame (if desired) for the function.  */

void
xtensa_function_prologue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  /* Frames below 1 << (12+3) bytes fit the "entry" immediate
     (apparently a 12-bit field scaled by 8 — confirm against the
     Xtensa ISA); larger frames load the size from the constant pool
     set up in xtensa_reorg.  */
  if (tsize < (1 << (12+3)))
    {
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2299
2300
/* Do any necessary cleanup after a function to restore
   stack, frame, and regs.  */

void
xtensa_function_epilogue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  rtx insn = get_last_insn ();
  /* If the last insn was a BARRIER, we don't have to write anything.
     Skip trailing notes first so the check sees the last real insn.  */
  if (GET_CODE (insn) == NOTE)
    insn = prev_nonnote_insn (insn);
  if (insn == 0 || GET_CODE (insn) != BARRIER)
    fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");

  /* Reset the cached frame size for the next function.  */
  xtensa_current_frame_size = 0;
}
2318
2319
43326cf7 2320rtx
2321xtensa_return_addr (count, frame)
2322 int count;
2323 rtx frame;
2324{
2325 rtx result, retaddr;
2326
2327 if (count == -1)
2328 retaddr = gen_rtx_REG (Pmode, 0);
2329 else
2330 {
2331 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2332 addr = memory_address (Pmode, addr);
2333 retaddr = gen_reg_rtx (Pmode);
2334 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2335 }
2336
2337 /* The 2 most-significant bits of the return address on Xtensa hold
2338 the register window size. To get the real return address, these
2339 bits must be replaced with the high bits from the current PC. */
2340
2341 result = gen_reg_rtx (Pmode);
2342 emit_insn (gen_fix_return_addr (result, retaddr));
2343 return result;
2344}
2345
2346
f6b7ba2b 2347/* Create the va_list data type.
2348 This structure is set up by __builtin_saveregs. The __va_reg
2349 field points to a stack-allocated region holding the contents of the
2350 incoming argument registers. The __va_ndx field is an index initialized
2351 to the position of the first unnamed (variable) argument. This same index
2352 is also used to address the arguments passed in memory. Thus, the
2353 __va_stk field is initialized to point to the position of the first
2354 argument in memory offset to account for the arguments passed in
2355 registers. E.G., if there are 6 argument registers, and each register is
2356 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2357 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2358 argument word N for N >= 6. */
2359
2360tree
908e141d 2361xtensa_build_va_list ()
f6b7ba2b 2362{
049d6666 2363 tree f_stk, f_reg, f_ndx, record, type_decl;
f6b7ba2b 2364
049d6666 2365 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2366 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
f6b7ba2b 2367
2368 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2369 ptr_type_node);
2370 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2371 ptr_type_node);
2372 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2373 integer_type_node);
2374
2375 DECL_FIELD_CONTEXT (f_stk) = record;
2376 DECL_FIELD_CONTEXT (f_reg) = record;
2377 DECL_FIELD_CONTEXT (f_ndx) = record;
2378
049d6666 2379 TREE_CHAIN (record) = type_decl;
2380 TYPE_NAME (record) = type_decl;
f6b7ba2b 2381 TYPE_FIELDS (record) = f_stk;
2382 TREE_CHAIN (f_stk) = f_reg;
2383 TREE_CHAIN (f_reg) = f_ndx;
2384
2385 layout_type (record);
2386 return record;
2387}
2388
2389
/* Save the incoming argument registers on the stack.  Returns the
   address of the saved registers.  */

rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;
  /* Registers not consumed by named arguments.  */
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  set_mem_alias_set (gp_regs, get_varargs_alias_set ());

  /* Now store the incoming registers.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7).  */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  return XEXP (gp_regs, 0);
}
2426
2427
/* Implement `va_start' for varargs and stdarg.  We look at the
   current function to fill in an initial va_list.  VALIST is the
   va_list tree to initialize; NEXTARG is unused on this target.  */

void
xtensa_va_start (valist, nextarg)
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  /* Number of argument words used by named parameters; unnamed args
     start right after them.  */
  arg_words = current_function_args_info.arg_words;

  /* The three fields of the record built by xtensa_build_va_list, in
     declaration order: __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  /* COMPONENT_REFs for each field of VALIST.  */
  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member: byte index of the first unnamed argument.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2473
2474
/* Implement `va_arg'.  VALIST is the va_list expression; TYPE is the
   type of the argument to fetch.  Emits RTL that advances __va_ndx past
   the argument and returns an rtx holding the argument's address.  */

rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* Fields of the va_list record, in order: __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
		     fold (build (TRUNC_DIV_EXPR, sizetype,
				  fold (build (PLUS_EXPR, sizetype,
					       type_size,
					       size_int (UNITS_PER_WORD - 1))),
				  size_int (UNITS_PER_WORD))),
		     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
	       make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
         && !MUST_PASS_IN_STACK (type))
        __array = (AP).__va_reg;
  */

  array = gen_reg_rtx (Pmode);

  /* lab_over stays NULL_RTX when the type must always go on the stack,
     so the register branch (and its label) is never emitted.  */
  lab_over = NULL_RTX;
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (MAX_ARGS_IN_REGISTERS
					* UNITS_PER_WORD),
			       GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
	emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      /* For big-endian, sub-word values sit at the high end of the slot,
	 so use the real type size instead of the rounded-up size.  */
      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
			       GE, const1_rtx, SImode, 0, lab_use_va_size);

      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
	emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
		     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2648
2649
fc12fa10 2650enum reg_class
a8332086 2651xtensa_preferred_reload_class (x, class, isoutput)
fc12fa10 2652 rtx x;
2653 enum reg_class class;
a8332086 2654 int isoutput;
fc12fa10 2655{
a8332086 2656 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
fc12fa10 2657 return NO_REGS;
2658
a8332086 2659 /* Don't use the stack pointer or hard frame pointer for reloads!
2660 The hard frame pointer would normally be OK except that it may
2661 briefly hold an incoming argument in the prologue, and reload
2662 won't know that it is live because the hard frame pointer is
2663 treated specially. */
2664
2665 if (class == AR_REGS || class == GR_REGS)
2666 return RL_REGS;
fc12fa10 2667
2668 return class;
2669}
2670
2671
f6b7ba2b 2672enum reg_class
2673xtensa_secondary_reload_class (class, mode, x, isoutput)
2674 enum reg_class class;
2675 enum machine_mode mode ATTRIBUTE_UNUSED;
2676 rtx x;
2677 int isoutput;
2678{
2679 int regno;
2680
2681 if (GET_CODE (x) == SIGN_EXTEND)
2682 x = XEXP (x, 0);
2683 regno = xt_true_regnum (x);
2684
2685 if (!isoutput)
2686 {
2687 if (class == FP_REGS && constantpool_mem_p (x))
a8332086 2688 return RL_REGS;
f6b7ba2b 2689 }
2690
2691 if (ACC_REG_P (regno))
a8332086 2692 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
f6b7ba2b 2693 if (class == ACC_REG)
a8332086 2694 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
f6b7ba2b 2695
2696 return NO_REGS;
2697}
2698
2699
2700void
2701order_regs_for_local_alloc ()
2702{
2703 if (!leaf_function_p ())
2704 {
2705 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2706 FIRST_PSEUDO_REGISTER * sizeof (int));
2707 }
2708 else
2709 {
2710 int i, num_arg_regs;
2711 int nxt = 0;
2712
2713 /* use the AR registers in increasing order (skipping a0 and a1)
2714 but save the incoming argument registers for a last resort */
2715 num_arg_regs = current_function_args_info.arg_words;
2716 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2717 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2718 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2719 reg_alloc_order[nxt++] = i + num_arg_regs;
2720 for (i = 0; i < num_arg_regs; i++)
2721 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2722
2723 /* list the FP registers in order for now */
2724 for (i = 0; i < 16; i++)
2725 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2726
2727 /* GCC requires that we list *all* the registers.... */
2728 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2729 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2730 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2731 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2732
2733 /* list the coprocessor registers in order */
2734 for (i = 0; i < BR_REG_NUM; i++)
2735 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2736
2737 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2738 }
2739}
2740
2741
2742/* A customized version of reg_overlap_mentioned_p that only looks for
2743 references to a7 (as opposed to hard_frame_pointer_rtx). */
2744
2745int
2746a7_overlap_mentioned_p (x)
2747 rtx x;
2748{
2749 int i, j;
2750 unsigned int x_regno;
2751 const char *fmt;
2752
2753 if (GET_CODE (x) == REG)
2754 {
2755 x_regno = REGNO (x);
2756 return (x != hard_frame_pointer_rtx
2757 && x_regno < A7_REG + 1
2758 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2759 }
2760
2761 if (GET_CODE (x) == SUBREG
2762 && GET_CODE (SUBREG_REG (x)) == REG
2763 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2764 {
2765 x_regno = subreg_regno (x);
2766 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2767 && x_regno < A7_REG + 1
2768 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2769 }
2770
2771 /* X does not match, so try its subexpressions. */
2772 fmt = GET_RTX_FORMAT (GET_CODE (x));
2773 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2774 {
2775 if (fmt[i] == 'e')
2776 {
2777 if (a7_overlap_mentioned_p (XEXP (x, i)))
2778 return 1;
2779 }
2780 else if (fmt[i] == 'E')
2781 {
2782 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2783 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2784 return 1;
2785 }
2786 }
2787
2788 return 0;
2789}
bbfbe351 2790
5f4442bc 2791
2792/* Some Xtensa targets support multiple bss sections. If the section
2793 name ends with ".bss", add SECTION_BSS to the flags. */
2794
2795static unsigned int
2796xtensa_multibss_section_type_flags (decl, name, reloc)
2797 tree decl;
2798 const char *name;
2799 int reloc;
2800{
2801 unsigned int flags = default_section_type_flags (decl, name, reloc);
2802 const char *suffix;
2803
2804 suffix = strrchr (name, '.');
2805 if (suffix && strcmp (suffix, ".bss") == 0)
2806 {
2807 if (!decl || (TREE_CODE (decl) == VAR_DECL
2808 && DECL_INITIAL (decl) == NULL_TREE))
2809 flags |= SECTION_BSS; /* @nobits */
2810 else
2811 warning ("only uninitialized variables can be placed in a "
2812 ".bss section");
2813 }
2814
2815 return flags;
2816}
2817
2818
/* The literal pool stays with the function.  Select the section for a
   constant-pool entry: always the current function's own section, since
   Xtensa literals must be reachable from the code that loads them.  */

static void
xtensa_select_rtx_section (mode, x, align)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx x ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  function_section (current_function_decl);
}
7811991d 2829
/* If we are referencing a function that is static, make the SYMBOL_REF
   special so that we can generate direct calls to it even with -fpic.
   DECL is the declaration being encoded; FIRST is unused here.  */

static void
xtensa_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  /* Mark non-public functions so call expansion can skip the PLT.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
}
1f3233d1 2841
2842#include "gt-xtensa.h"