1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
70 /* Cached operands, and operator to compare for use in set/branch on
74 /* what type of branch to use */
75 enum cmp_type branch_type
;
77 /* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79 char xtensa_hard_regno_mode_ok
[(int) MAX_MACHINE_MODE
][FIRST_PSEUDO_REGISTER
];
81 /* Current frame size calculated by compute_frame_size. */
82 unsigned xtensa_current_frame_size
;
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes
[(int) MAX_MACHINE_MODE
];
86 const char *xtensa_st_opcodes
[(int) MAX_MACHINE_MODE
];
87 #define LARGEST_MOVE_RATIO 15
89 /* Define the structure for the machine field in struct function. */
90 struct machine_function
GTY(())
92 int accesses_prev_frame
;
93 bool incoming_a7_copied
;
96 /* Vector, indexed by hard register number, which contains 1 for a
97 register that is allowable in a candidate for leaf function
100 const char xtensa_leaf_regs
[FIRST_PSEUDO_REGISTER
] =
102 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
108 /* Map hard register number to register class */
109 const enum reg_class xtensa_regno_to_class
[FIRST_PSEUDO_REGISTER
] =
111 RL_REGS
, SP_REG
, RL_REGS
, RL_REGS
,
112 RL_REGS
, RL_REGS
, RL_REGS
, GR_REGS
,
113 RL_REGS
, RL_REGS
, RL_REGS
, RL_REGS
,
114 RL_REGS
, RL_REGS
, RL_REGS
, RL_REGS
,
115 AR_REGS
, AR_REGS
, BR_REGS
,
116 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
117 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
118 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
119 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
123 /* Map register constraint character to register class. */
124 enum reg_class xtensa_char_to_class
[256] =
126 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
127 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
128 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
129 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
130 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
131 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
132 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
133 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
134 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
135 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
136 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
137 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
138 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
139 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
140 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
141 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
142 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
143 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
144 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
145 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
146 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
147 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
148 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
149 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
150 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
151 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
152 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
153 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
154 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
155 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
156 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
157 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
158 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
159 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
160 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
161 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
162 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
163 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
164 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
165 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
166 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
167 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
168 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
169 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
170 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
171 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
172 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
173 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
174 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
175 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
176 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
177 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
178 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
179 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
180 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
181 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
182 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
183 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
184 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
185 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
186 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
187 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
188 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
189 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
192 static int b4const_or_zero
PARAMS ((int));
193 static enum internal_test map_test_to_internal_test
PARAMS ((enum rtx_code
));
194 static rtx gen_int_relational
PARAMS ((enum rtx_code
, rtx
, rtx
, int *));
195 static rtx gen_float_relational
PARAMS ((enum rtx_code
, rtx
, rtx
));
196 static rtx gen_conditional_move
PARAMS ((rtx
));
197 static rtx fixup_subreg_mem
PARAMS ((rtx x
));
198 static enum machine_mode xtensa_find_mode_for_size
PARAMS ((unsigned));
199 static struct machine_function
* xtensa_init_machine_status
PARAMS ((void));
200 static void printx
PARAMS ((FILE *, signed int));
201 static unsigned int xtensa_multibss_section_type_flags
202 PARAMS ((tree
, const char *, int));
203 static void xtensa_select_rtx_section
204 PARAMS ((enum machine_mode
, rtx
, unsigned HOST_WIDE_INT
));
205 static void xtensa_encode_section_info
PARAMS ((tree
, int));
207 static rtx frame_size_const
;
208 static int current_function_arg_words
;
209 static const int reg_nonleaf_alloc_order
[FIRST_PSEUDO_REGISTER
] =
212 /* This macro generates the assembly code for function entry.
213 FILE is a stdio stream to output the code to.
214 SIZE is an int: how many units of temporary storage to allocate.
215 Refer to the array 'regs_ever_live' to determine which registers
216 to save; 'regs_ever_live[I]' is nonzero if register number I
217 is ever used in the function. This macro is responsible for
218 knowing which registers should not be saved even if used. */
220 #undef TARGET_ASM_FUNCTION_PROLOGUE
221 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
223 /* This macro generates the assembly code for function exit,
224 on machines that need it. If FUNCTION_EPILOGUE is not defined
225 then individual return instructions are generated for each
226 return statement. Args are same as for FUNCTION_PROLOGUE. */
228 #undef TARGET_ASM_FUNCTION_EPILOGUE
229 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
231 /* These hooks specify assembly directives for creating certain kinds
232 of integer object. */
234 #undef TARGET_ASM_ALIGNED_SI_OP
235 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
237 #undef TARGET_ASM_SELECT_RTX_SECTION
238 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
239 #undef TARGET_ENCODE_SECTION_INFO
240 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
242 struct gcc_target targetm
= TARGET_INITIALIZER
;
246 * Functions to test Xtensa immediate operand validity.
/* Return nonzero if V is a signed multiple of 256 in the range
   -32768..32512 (i.e., an 8-bit signed value shifted left by 8).
   Used by add_operand to accept wide add immediates.  */

int
xtensa_simm8x256 (v)
     int v;
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
/* Return nonzero if V is -1 or in the range 1..15.
   NOTE(review): the function header was lost in this fragment; the
   name was reconstructed — confirm against the original file.  */

int
xtensa_ai4const (v)
     int v;
{
  return (v == -1 || (v >= 1 && v <= 15));
}
/* Return nonzero if V is in the range -32..95.
   NOTE(review): the function header was lost in this fragment; the
   name was reconstructed — confirm against the original file.  */

int
xtensa_simm7 (v)
     int v;
{
  return v >= -32 && v <= 95;
}
/* Return nonzero if V fits in a signed 8-bit immediate (-128..127).
   Used by arith_operand and add_operand.  */

int
xtensa_simm8 (v)
     int v;
{
  return v >= -128 && v <= 127;
}
/* Return nonzero if V is in the range 7..22.  Used by
   sext_fldsz_operand (which passes field size minus one).  */

int
xtensa_tp7 (v)
     int v;
{
  return (v >= 7 && v <= 22);
}
/* Return nonzero if V is a multiple of 4 in the range 0..60.
   Used by smalloffset_mem_p to validate short load/store offsets.  */

int
xtensa_lsi4x4 (v)
     int v;
{
  return (v & 3) == 0 && (v >= 0 && v <= 60);
}
/* Return nonzero if V fits in a signed 12-bit immediate
   (-2048..2047).  Used by move_operand for constant moves.  */

int
xtensa_simm12b (v)
     int v;
{
  return v >= -2048 && v <= 2047;
}
/* Return nonzero if V fits in an unsigned 8-bit immediate (0..255).
   Used by xtensa_mem_offset for byte-sized accesses.  */

int
xtensa_uimm8 (v)
     int v;
{
  return v >= 0 && v <= 255;
}
/* Return nonzero if V is an even value in the range 0..510
   (an unsigned 8-bit immediate scaled by 2).  Used by
   xtensa_mem_offset for 16-bit accesses.  */

int
xtensa_uimm8x2 (v)
     int v;
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}
/* Return nonzero if V is a multiple of 4 in the range 0..1020
   (an unsigned 8-bit immediate scaled by 4).  Used by
   xtensa_mem_offset for word-sized accesses.  */

int
xtensa_uimm8x4 (v)
     int v;
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}
374 /* This is just like the standard true_regnum() function except that it
375 works even when reg_renumber is not initialized. */
381 if (GET_CODE (x
) == REG
)
384 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
385 && reg_renumber
[REGNO (x
)] >= 0)
386 return reg_renumber
[REGNO (x
)];
389 if (GET_CODE (x
) == SUBREG
)
391 int base
= xt_true_regnum (SUBREG_REG (x
));
392 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
393 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
394 GET_MODE (SUBREG_REG (x
)),
395 SUBREG_BYTE (x
), GET_MODE (x
));
402 add_operand (op
, mode
)
404 enum machine_mode mode
;
406 if (GET_CODE (op
) == CONST_INT
)
407 return (xtensa_simm8 (INTVAL (op
)) ||
408 xtensa_simm8x256 (INTVAL (op
)));
410 return register_operand (op
, mode
);
415 arith_operand (op
, mode
)
417 enum machine_mode mode
;
419 if (GET_CODE (op
) == CONST_INT
)
420 return xtensa_simm8 (INTVAL (op
));
422 return register_operand (op
, mode
);
427 nonimmed_operand (op
, mode
)
429 enum machine_mode mode
;
431 /* We cannot use the standard nonimmediate_operand() predicate because
432 it includes constant pool memory operands. */
434 if (memory_operand (op
, mode
))
435 return !constantpool_address_p (XEXP (op
, 0));
437 return register_operand (op
, mode
);
442 mem_operand (op
, mode
)
444 enum machine_mode mode
;
446 /* We cannot use the standard memory_operand() predicate because
447 it includes constant pool memory operands. */
449 if (memory_operand (op
, mode
))
450 return !constantpool_address_p (XEXP (op
, 0));
457 xtensa_valid_move (mode
, operands
)
458 enum machine_mode mode
;
461 /* Either the destination or source must be a register, and the
462 MAC16 accumulator doesn't count. */
464 if (register_operand (operands
[0], mode
))
466 int dst_regnum
= xt_true_regnum (operands
[0]);
468 /* The stack pointer can only be assigned with a MOVSP opcode. */
469 if (dst_regnum
== STACK_POINTER_REGNUM
)
470 return (mode
== SImode
471 && register_operand (operands
[1], mode
)
472 && !ACC_REG_P (xt_true_regnum (operands
[1])));
474 if (!ACC_REG_P (dst_regnum
))
477 if (register_operand (operands
[1], mode
))
479 int src_regnum
= xt_true_regnum (operands
[1]);
480 if (!ACC_REG_P (src_regnum
))
488 mask_operand (op
, mode
)
490 enum machine_mode mode
;
492 if (GET_CODE (op
) == CONST_INT
)
493 return xtensa_mask_immediate (INTVAL (op
));
495 return register_operand (op
, mode
);
500 extui_fldsz_operand (op
, mode
)
502 enum machine_mode mode ATTRIBUTE_UNUSED
;
504 return ((GET_CODE (op
) == CONST_INT
)
505 && xtensa_mask_immediate ((1 << INTVAL (op
)) - 1));
510 sext_operand (op
, mode
)
512 enum machine_mode mode
;
515 return nonimmed_operand (op
, mode
);
516 return mem_operand (op
, mode
);
521 sext_fldsz_operand (op
, mode
)
523 enum machine_mode mode ATTRIBUTE_UNUSED
;
525 return ((GET_CODE (op
) == CONST_INT
) && xtensa_tp7 (INTVAL (op
) - 1));
530 lsbitnum_operand (op
, mode
)
532 enum machine_mode mode ATTRIBUTE_UNUSED
;
534 if (GET_CODE (op
) == CONST_INT
)
536 return (BITS_BIG_ENDIAN
537 ? (INTVAL (op
) == BITS_PER_WORD
-1)
538 : (INTVAL (op
) == 0));
550 return xtensa_b4const (v
);
555 branch_operand (op
, mode
)
557 enum machine_mode mode
;
559 if (GET_CODE (op
) == CONST_INT
)
560 return b4const_or_zero (INTVAL (op
));
562 return register_operand (op
, mode
);
567 ubranch_operand (op
, mode
)
569 enum machine_mode mode
;
571 if (GET_CODE (op
) == CONST_INT
)
572 return xtensa_b4constu (INTVAL (op
));
574 return register_operand (op
, mode
);
579 call_insn_operand (op
, mode
)
581 enum machine_mode mode ATTRIBUTE_UNUSED
;
583 if ((GET_CODE (op
) == REG
)
584 && (op
!= arg_pointer_rtx
)
585 && ((REGNO (op
) < FRAME_POINTER_REGNUM
)
586 || (REGNO (op
) > LAST_VIRTUAL_REGISTER
)))
589 if (CONSTANT_ADDRESS_P (op
))
591 /* Direct calls only allowed to static functions with PIC. */
592 return (!flag_pic
|| (GET_CODE (op
) == SYMBOL_REF
593 && SYMBOL_REF_FLAG (op
)));
601 move_operand (op
, mode
)
603 enum machine_mode mode
;
605 if (register_operand (op
, mode
))
608 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
610 if (GET_CODE (op
) == CONSTANT_P_RTX
)
613 if (GET_CODE (op
) == CONST_INT
)
614 return xtensa_simm12b (INTVAL (op
));
616 if (GET_CODE (op
) == MEM
)
617 return memory_address_p (mode
, XEXP (op
, 0));
624 smalloffset_mem_p (op
)
627 if (GET_CODE (op
) == MEM
)
629 rtx addr
= XEXP (op
, 0);
630 if (GET_CODE (addr
) == REG
)
631 return REG_OK_FOR_BASE_P (addr
);
632 if (GET_CODE (addr
) == PLUS
)
634 rtx offset
= XEXP (addr
, 0);
635 if (GET_CODE (offset
) != CONST_INT
)
636 offset
= XEXP (addr
, 1);
637 if (GET_CODE (offset
) != CONST_INT
)
639 return xtensa_lsi4x4 (INTVAL (offset
));
647 smalloffset_double_mem_p (op
)
650 if (!smalloffset_mem_p (op
))
652 return smalloffset_mem_p (adjust_address (op
, GET_MODE (op
), 4));
657 constantpool_address_p (addr
)
662 if (GET_CODE (addr
) == CONST
)
666 /* only handle (PLUS (SYM, OFFSET)) form */
667 addr
= XEXP (addr
, 0);
668 if (GET_CODE (addr
) != PLUS
)
671 /* make sure the address is word aligned */
672 offset
= XEXP (addr
, 1);
673 if ((GET_CODE (offset
) != CONST_INT
)
674 || ((INTVAL (offset
) & 3) != 0))
677 sym
= XEXP (addr
, 0);
680 if ((GET_CODE (sym
) == SYMBOL_REF
)
681 && CONSTANT_POOL_ADDRESS_P (sym
))
688 constantpool_mem_p (op
)
691 if (GET_CODE (op
) == MEM
)
692 return constantpool_address_p (XEXP (op
, 0));
698 non_const_move_operand (op
, mode
)
700 enum machine_mode mode
;
702 if (register_operand (op
, mode
))
704 if (GET_CODE (op
) == SUBREG
)
705 op
= SUBREG_REG (op
);
706 if (GET_CODE (op
) == MEM
)
707 return memory_address_p (mode
, XEXP (op
, 0));
712 /* Accept the floating point constant 1 in the appropriate mode. */
715 const_float_1_operand (op
, mode
)
717 enum machine_mode mode
;
720 static REAL_VALUE_TYPE onedf
;
721 static REAL_VALUE_TYPE onesf
;
722 static int one_initialized
;
724 if ((GET_CODE (op
) != CONST_DOUBLE
)
725 || (mode
!= GET_MODE (op
))
726 || (mode
!= DFmode
&& mode
!= SFmode
))
729 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
731 if (! one_initialized
)
733 onedf
= REAL_VALUE_ATOF ("1.0", DFmode
);
734 onesf
= REAL_VALUE_ATOF ("1.0", SFmode
);
735 one_initialized
= TRUE
;
739 return REAL_VALUES_EQUAL (d
, onedf
);
741 return REAL_VALUES_EQUAL (d
, onesf
);
746 fpmem_offset_operand (op
, mode
)
748 enum machine_mode mode ATTRIBUTE_UNUSED
;
750 if (GET_CODE (op
) == CONST_INT
)
751 return xtensa_mem_offset (INTVAL (op
), SFmode
);
757 xtensa_extend_reg (dst
, src
)
761 rtx temp
= gen_reg_rtx (SImode
);
762 rtx shift
= GEN_INT (BITS_PER_WORD
- GET_MODE_BITSIZE (GET_MODE (src
)));
764 /* generate paradoxical subregs as needed so that the modes match */
765 src
= simplify_gen_subreg (SImode
, src
, GET_MODE (src
), 0);
766 dst
= simplify_gen_subreg (SImode
, dst
, GET_MODE (dst
), 0);
768 emit_insn (gen_ashlsi3 (temp
, src
, shift
));
769 emit_insn (gen_ashrsi3 (dst
, temp
, shift
));
774 xtensa_load_constant (dst
, src
)
778 enum machine_mode mode
= GET_MODE (dst
);
779 src
= force_const_mem (SImode
, src
);
781 /* PC-relative loads are always SImode so we have to add a SUBREG if that
782 is not the desired mode */
786 if (register_operand (dst
, mode
))
787 dst
= simplify_gen_subreg (SImode
, dst
, mode
, 0);
790 src
= force_reg (SImode
, src
);
791 src
= gen_lowpart_SUBREG (mode
, src
);
795 emit_move_insn (dst
, src
);
800 branch_operator (x
, mode
)
802 enum machine_mode mode
;
804 if (GET_MODE (x
) != mode
)
807 switch (GET_CODE (x
))
822 ubranch_operator (x
, mode
)
824 enum machine_mode mode
;
826 if (GET_MODE (x
) != mode
)
829 switch (GET_CODE (x
))
842 boolean_operator (x
, mode
)
844 enum machine_mode mode
;
846 if (GET_MODE (x
) != mode
)
849 switch (GET_CODE (x
))
862 xtensa_mask_immediate (v
)
865 #define MAX_MASK_SIZE 16
868 for (mask_size
= 1; mask_size
<= MAX_MASK_SIZE
; mask_size
++)
882 xtensa_mem_offset (v
, mode
)
884 enum machine_mode mode
;
889 /* Handle the worst case for block moves. See xtensa_expand_block_move
890 where we emit an optimized block move operation if the block can be
891 moved in < "move_ratio" pieces. The worst case is when the block is
892 aligned but has a size of (3 mod 4) (does this happen?) so that the
893 last piece requires a byte load/store. */
894 return (xtensa_uimm8 (v
) &&
895 xtensa_uimm8 (v
+ MOVE_MAX
* LARGEST_MOVE_RATIO
));
898 return xtensa_uimm8 (v
);
901 return xtensa_uimm8x2 (v
);
904 return (xtensa_uimm8x4 (v
) && xtensa_uimm8x4 (v
+ 4));
910 return xtensa_uimm8x4 (v
);
914 /* Make normal rtx_code into something we can index from an array */
916 static enum internal_test
917 map_test_to_internal_test (test_code
)
918 enum rtx_code test_code
;
920 enum internal_test test
= ITEST_MAX
;
925 case EQ
: test
= ITEST_EQ
; break;
926 case NE
: test
= ITEST_NE
; break;
927 case GT
: test
= ITEST_GT
; break;
928 case GE
: test
= ITEST_GE
; break;
929 case LT
: test
= ITEST_LT
; break;
930 case LE
: test
= ITEST_LE
; break;
931 case GTU
: test
= ITEST_GTU
; break;
932 case GEU
: test
= ITEST_GEU
; break;
933 case LTU
: test
= ITEST_LTU
; break;
934 case LEU
: test
= ITEST_LEU
; break;
941 /* Generate the code to compare two integer values. The return value is
942 the comparison expression. */
945 gen_int_relational (test_code
, cmp0
, cmp1
, p_invert
)
946 enum rtx_code test_code
; /* relational test (EQ, etc) */
947 rtx cmp0
; /* first operand to compare */
948 rtx cmp1
; /* second operand to compare */
949 int *p_invert
; /* whether branch needs to reverse its test */
952 enum rtx_code test_code
; /* test code to use in insn */
953 int (*const_range_p
) PARAMS ((int)); /* predicate function to check range */
954 int const_add
; /* constant to add (convert LE -> LT) */
955 int reverse_regs
; /* reverse registers in test */
956 int invert_const
; /* != 0 if invert value if cmp1 is constant */
957 int invert_reg
; /* != 0 if invert value if cmp1 is register */
958 int unsignedp
; /* != 0 for unsigned comparisons. */
961 static struct cmp_info info
[ (int)ITEST_MAX
] = {
963 { EQ
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* EQ */
964 { NE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* NE */
966 { LT
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* GT */
967 { GE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* GE */
968 { LT
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* LT */
969 { GE
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* LE */
971 { LTU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* GTU */
972 { GEU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* GEU */
973 { LTU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* LTU */
974 { GEU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* LEU */
977 enum internal_test test
;
978 enum machine_mode mode
;
979 struct cmp_info
*p_info
;
981 test
= map_test_to_internal_test (test_code
);
982 if (test
== ITEST_MAX
)
985 p_info
= &info
[ (int)test
];
987 mode
= GET_MODE (cmp0
);
988 if (mode
== VOIDmode
)
989 mode
= GET_MODE (cmp1
);
991 /* Make sure we can handle any constants given to us. */
992 if (GET_CODE (cmp1
) == CONST_INT
)
994 HOST_WIDE_INT value
= INTVAL (cmp1
);
995 unsigned HOST_WIDE_INT uvalue
= (unsigned HOST_WIDE_INT
)value
;
997 /* if the immediate overflows or does not fit in the immediate field,
998 spill it to a register */
1000 if ((p_info
->unsignedp
?
1001 (uvalue
+ p_info
->const_add
> uvalue
) :
1002 (value
+ p_info
->const_add
> value
)) != (p_info
->const_add
> 0))
1004 cmp1
= force_reg (mode
, cmp1
);
1006 else if (!(p_info
->const_range_p
) (value
+ p_info
->const_add
))
1008 cmp1
= force_reg (mode
, cmp1
);
1011 else if ((GET_CODE (cmp1
) != REG
) && (GET_CODE (cmp1
) != SUBREG
))
1013 cmp1
= force_reg (mode
, cmp1
);
1016 /* See if we need to invert the result. */
1017 *p_invert
= ((GET_CODE (cmp1
) == CONST_INT
)
1018 ? p_info
->invert_const
1019 : p_info
->invert_reg
);
1021 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1022 Comparison between two registers, may involve switching operands. */
1023 if (GET_CODE (cmp1
) == CONST_INT
)
1025 if (p_info
->const_add
!= 0)
1026 cmp1
= GEN_INT (INTVAL (cmp1
) + p_info
->const_add
);
1029 else if (p_info
->reverse_regs
)
1036 return gen_rtx (p_info
->test_code
, VOIDmode
, cmp0
, cmp1
);
1040 /* Generate the code to compare two float values. The return value is
1041 the comparison expression. */
1044 gen_float_relational (test_code
, cmp0
, cmp1
)
1045 enum rtx_code test_code
; /* relational test (EQ, etc) */
1046 rtx cmp0
; /* first operand to compare */
1047 rtx cmp1
; /* second operand to compare */
1049 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
));
1051 int reverse_regs
, invert
;
1055 case EQ
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_seq_sf
; break;
1056 case NE
: reverse_regs
= 0; invert
= 1; gen_fn
= gen_seq_sf
; break;
1057 case LE
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_sle_sf
; break;
1058 case GT
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_slt_sf
; break;
1059 case LT
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_slt_sf
; break;
1060 case GE
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_sle_sf
; break;
1062 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1063 reverse_regs
= 0; invert
= 0; gen_fn
= 0; /* avoid compiler warnings */
1073 brtmp
= gen_rtx_REG (CCmode
, FPCC_REGNUM
);
1074 emit_insn (gen_fn (brtmp
, cmp0
, cmp1
));
1076 return gen_rtx (invert
? EQ
: NE
, VOIDmode
, brtmp
, const0_rtx
);
1081 xtensa_expand_conditional_branch (operands
, test_code
)
1083 enum rtx_code test_code
;
1085 enum cmp_type type
= branch_type
;
1086 rtx cmp0
= branch_cmp
[0];
1087 rtx cmp1
= branch_cmp
[1];
1096 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1100 cmp
= gen_int_relational (test_code
, cmp0
, cmp1
, &invert
);
1104 if (!TARGET_HARD_FLOAT
)
1105 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1107 cmp
= gen_float_relational (test_code
, cmp0
, cmp1
);
1111 /* Generate the branch. */
1113 label1
= gen_rtx_LABEL_REF (VOIDmode
, operands
[0]);
1122 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
1123 gen_rtx_IF_THEN_ELSE (VOIDmode
, cmp
,
1130 gen_conditional_move (cmp
)
1133 enum rtx_code code
= GET_CODE (cmp
);
1134 rtx op0
= branch_cmp
[0];
1135 rtx op1
= branch_cmp
[1];
1137 if (branch_type
== CMP_SI
)
1139 /* Jump optimization calls get_condition() which canonicalizes
1140 comparisons like (GE x <const>) to (GT x <const-1>).
1141 Transform those comparisons back to GE, since that is the
1142 comparison supported in Xtensa. We shouldn't have to
1143 transform <LE x const> comparisons, because neither
1144 xtensa_expand_conditional_branch() nor get_condition() will
1147 if ((code
== GT
) && (op1
== constm1_rtx
))
1152 cmp
= gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
1154 if (boolean_operator (cmp
, VOIDmode
))
1156 /* swap the operands to make const0 second */
1157 if (op0
== const0_rtx
)
1163 /* if not comparing against zero, emit a comparison (subtract) */
1164 if (op1
!= const0_rtx
)
1166 op0
= expand_binop (SImode
, sub_optab
, op0
, op1
,
1167 0, 0, OPTAB_LIB_WIDEN
);
1171 else if (branch_operator (cmp
, VOIDmode
))
1173 /* swap the operands to make const0 second */
1174 if (op0
== const0_rtx
)
1181 case LT
: code
= GE
; break;
1182 case GE
: code
= LT
; break;
1187 if (op1
!= const0_rtx
)
1193 return gen_rtx (code
, VOIDmode
, op0
, op1
);
1196 if (TARGET_HARD_FLOAT
&& (branch_type
== CMP_SF
))
1197 return gen_float_relational (code
, op0
, op1
);
1204 xtensa_expand_conditional_move (operands
, isflt
)
1209 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
, rtx
, rtx
));
1211 if (!(cmp
= gen_conditional_move (operands
[1])))
1215 gen_fn
= (branch_type
== CMP_SI
1216 ? gen_movsfcc_internal0
1217 : gen_movsfcc_internal1
);
1219 gen_fn
= (branch_type
== CMP_SI
1220 ? gen_movsicc_internal0
1221 : gen_movsicc_internal1
);
1223 emit_insn (gen_fn (operands
[0], XEXP (cmp
, 0),
1224 operands
[2], operands
[3], cmp
));
1230 xtensa_expand_scc (operands
)
1233 rtx dest
= operands
[0];
1234 rtx cmp
= operands
[1];
1235 rtx one_tmp
, zero_tmp
;
1236 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
, rtx
, rtx
));
1238 if (!(cmp
= gen_conditional_move (cmp
)))
1241 one_tmp
= gen_reg_rtx (SImode
);
1242 zero_tmp
= gen_reg_rtx (SImode
);
1243 emit_insn (gen_movsi (one_tmp
, const_true_rtx
));
1244 emit_insn (gen_movsi (zero_tmp
, const0_rtx
));
1246 gen_fn
= (branch_type
== CMP_SI
1247 ? gen_movsicc_internal0
1248 : gen_movsicc_internal1
);
1249 emit_insn (gen_fn (dest
, XEXP (cmp
, 0), one_tmp
, zero_tmp
, cmp
));
1254 /* Emit insns to move operands[1] into operands[0].
1256 Return 1 if we have written out everything that needs to be done to
1257 do the move. Otherwise, return 0 and the caller will emit the move
1261 xtensa_emit_move_sequence (operands
, mode
)
1263 enum machine_mode mode
;
1265 if (CONSTANT_P (operands
[1])
1266 && GET_CODE (operands
[1]) != CONSTANT_P_RTX
1267 && (GET_CODE (operands
[1]) != CONST_INT
1268 || !xtensa_simm12b (INTVAL (operands
[1]))))
1270 xtensa_load_constant (operands
[0], operands
[1]);
1274 if (!(reload_in_progress
| reload_completed
))
1276 if (!xtensa_valid_move (mode
, operands
))
1277 operands
[1] = force_reg (mode
, operands
[1]);
1279 if (xtensa_copy_incoming_a7 (operands
, mode
))
1283 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1284 instruction won't be recognized after reload. So we remove the
1285 subreg and adjust mem accordingly. */
1286 if (reload_in_progress
)
1288 operands
[0] = fixup_subreg_mem (operands
[0]);
1289 operands
[1] = fixup_subreg_mem (operands
[1]);
1295 fixup_subreg_mem (x
)
1298 if (GET_CODE (x
) == SUBREG
1299 && GET_CODE (SUBREG_REG (x
)) == REG
1300 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
)
1303 gen_rtx_SUBREG (GET_MODE (x
),
1304 reg_equiv_mem
[REGNO (SUBREG_REG (x
))],
1306 x
= alter_subreg (&temp
);
1312 /* Check if this move is copying an incoming argument in a7. If so,
1313 emit the move, followed by the special "set_frame_ptr"
1314 unspec_volatile insn, at the very beginning of the function. This
1315 is necessary because the register allocator will ignore conflicts
1316 with a7 and may assign some other pseudo to a7. If that pseudo was
1317 assigned prior to this move, it would clobber the incoming argument
1318 in a7. By copying the argument out of a7 as the very first thing,
1319 and then immediately following that with an unspec_volatile to keep
1320 the scheduler away, we should avoid any problems. */
1323 xtensa_copy_incoming_a7 (operands
, mode
)
1325 enum machine_mode mode
;
1327 if (a7_overlap_mentioned_p (operands
[1])
1328 && !cfun
->machine
->incoming_a7_copied
)
1334 mov
= gen_movdf_internal (operands
[0], operands
[1]);
1337 mov
= gen_movsf_internal (operands
[0], operands
[1]);
1340 mov
= gen_movdi_internal (operands
[0], operands
[1]);
1343 mov
= gen_movsi_internal (operands
[0], operands
[1]);
1346 mov
= gen_movhi_internal (operands
[0], operands
[1]);
1349 mov
= gen_movqi_internal (operands
[0], operands
[1]);
1355 /* Insert the instructions before any other argument copies.
1356 (The set_frame_ptr insn comes _after_ the move, so push it
1358 push_topmost_sequence ();
1359 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1360 emit_insn_after (mov
, get_insns ());
1361 pop_topmost_sequence ();
1363 /* Ideally the incoming argument in a7 would only be copied
1364 once, since propagating a7 into the body of a function
1365 will almost certainly lead to errors. However, there is
1366 at least one harmless case (in GCSE) where the original
1367 copy from a7 is changed to copy into a new pseudo. Thus,
1368 we use a flag to only do this special treatment for the
1369 first copy of a7. */
1371 cfun
->machine
->incoming_a7_copied
= true;
1380 /* Try to expand a block move operation to an RTL block move instruction.
1381 If not optimizing or if the block size is not a constant or if the
1382 block is small, the expansion fails and GCC falls back to calling
1385 operands[0] is the destination
1386 operands[1] is the source
1387 operands[2] is the length
1388 operands[3] is the alignment */
1391 xtensa_expand_block_move (operands
)
1394 rtx dest
= operands
[0];
1395 rtx src
= operands
[1];
1396 int bytes
= INTVAL (operands
[2]);
1397 int align
= XINT (operands
[3], 0);
1398 int num_pieces
, move_ratio
;
1400 /* If this is not a fixed size move, just call memcpy */
1401 if (!optimize
|| (GET_CODE (operands
[2]) != CONST_INT
))
1404 /* Anything to move? */
1408 if (align
> MOVE_MAX
)
1411 /* decide whether to expand inline based on the optimization level */
1414 move_ratio
= LARGEST_MOVE_RATIO
;
1415 num_pieces
= (bytes
/ align
) + (bytes
% align
); /* close enough anyway */
1416 if (num_pieces
>= move_ratio
)
1419 /* make sure the memory addresses are valid */
1420 operands
[0] = validize_mem (dest
);
1421 operands
[1] = validize_mem (src
);
1423 emit_insn (gen_movstrsi_internal (operands
[0], operands
[1],
1424 operands
[2], operands
[3]));
1429 /* Emit a sequence of instructions to implement a block move, trying
1430 to hide load delay slots as much as possible. Load N values into
1431 temporary registers, store those N values, and repeat until the
1432 complete block has been moved. N=delay_slots+1 */
1440 xtensa_emit_block_move (operands
, tmpregs
, delay_slots
)
1445 rtx dest
= operands
[0];
1446 rtx src
= operands
[1];
1447 int bytes
= INTVAL (operands
[2]);
1448 int align
= XINT (operands
[3], 0);
1449 rtx from_addr
= XEXP (src
, 0);
1450 rtx to_addr
= XEXP (dest
, 0);
1451 int from_struct
= MEM_IN_STRUCT_P (src
);
1452 int to_struct
= MEM_IN_STRUCT_P (dest
);
1454 int chunk_size
, item_size
;
1455 struct meminsnbuf
*ldinsns
, *stinsns
;
1456 const char *ldname
, *stname
;
1457 enum machine_mode mode
;
1459 if (align
> MOVE_MAX
)
1462 chunk_size
= delay_slots
+ 1;
1464 ldinsns
= (struct meminsnbuf
*)
1465 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1466 stinsns
= (struct meminsnbuf
*)
1467 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1469 mode
= xtensa_find_mode_for_size (item_size
);
1470 item_size
= GET_MODE_SIZE (mode
);
1471 ldname
= xtensa_ld_opcodes
[(int) mode
];
1472 stname
= xtensa_st_opcodes
[(int) mode
];
1478 for (n
= 0; n
< chunk_size
; n
++)
1488 if (bytes
< item_size
)
1490 /* find a smaller item_size which we can load & store */
1492 mode
= xtensa_find_mode_for_size (item_size
);
1493 item_size
= GET_MODE_SIZE (mode
);
1494 ldname
= xtensa_ld_opcodes
[(int) mode
];
1495 stname
= xtensa_st_opcodes
[(int) mode
];
1498 /* record the load instruction opcode and operands */
1499 addr
= plus_constant (from_addr
, offset
);
1500 mem
= gen_rtx_MEM (mode
, addr
);
1501 if (! memory_address_p (mode
, addr
))
1503 MEM_IN_STRUCT_P (mem
) = from_struct
;
1504 ldinsns
[n
].operands
[0] = tmpregs
[n
];
1505 ldinsns
[n
].operands
[1] = mem
;
1506 sprintf (ldinsns
[n
].template, "%s\t%%0, %%1", ldname
);
1508 /* record the store instruction opcode and operands */
1509 addr
= plus_constant (to_addr
, offset
);
1510 mem
= gen_rtx_MEM (mode
, addr
);
1511 if (! memory_address_p (mode
, addr
))
1513 MEM_IN_STRUCT_P (mem
) = to_struct
;
1514 stinsns
[n
].operands
[0] = tmpregs
[n
];
1515 stinsns
[n
].operands
[1] = mem
;
1516 sprintf (stinsns
[n
].template, "%s\t%%0, %%1", stname
);
1518 offset
+= item_size
;
1522 /* now output the loads followed by the stores */
1523 for (n
= 0; n
< chunk_size
; n
++)
1524 output_asm_insn (ldinsns
[n
].template, ldinsns
[n
].operands
);
1525 for (n
= 0; n
< chunk_size
; n
++)
1526 output_asm_insn (stinsns
[n
].template, stinsns
[n
].operands
);
1531 static enum machine_mode
1532 xtensa_find_mode_for_size (item_size
)
1535 enum machine_mode mode
, tmode
;
1541 /* find mode closest to but not bigger than item_size */
1542 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1543 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1544 if (GET_MODE_SIZE (tmode
) <= item_size
)
1546 if (mode
== VOIDmode
)
1549 item_size
= GET_MODE_SIZE (mode
);
1551 if (xtensa_ld_opcodes
[(int) mode
]
1552 && xtensa_st_opcodes
[(int) mode
])
1555 /* cannot load & store this mode; try something smaller */
1564 xtensa_expand_nonlocal_goto (operands
)
1567 rtx goto_handler
= operands
[1];
1568 rtx containing_fp
= operands
[3];
1570 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1571 is too big to generate in-line */
1573 if (GET_CODE (containing_fp
) != REG
)
1574 containing_fp
= force_reg (Pmode
, containing_fp
);
1576 goto_handler
= replace_rtx (copy_rtx (goto_handler
),
1577 virtual_stack_vars_rtx
,
1580 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_nonlocal_goto"),
1582 containing_fp
, Pmode
,
1583 goto_handler
, Pmode
);
1587 static struct machine_function
*
1588 xtensa_init_machine_status ()
1590 return ggc_alloc_cleared (sizeof (struct machine_function
));
1595 xtensa_setup_frame_addresses ()
1597 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1598 cfun
->machine
->accesses_prev_frame
= 1;
1601 (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_libgcc_window_spill"),
1606 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1607 a comment showing where the end of the loop is. However, if there is a
1608 label or a branch at the end of the loop then we need to place a nop
1609 there. If the loop ends with a label we need the nop so that branches
1610 targetting that label will target the nop (and thus remain in the loop),
1611 instead of targetting the instruction after the loop (and thus exiting
1612 the loop). If the loop ends with a branch, we need the nop in case the
1613 branch is targetting a location inside the loop. When the branch
1614 executes it will cause the loop count to be decremented even if it is
1615 taken (because it is the last instruction in the loop), so we need to
1616 nop after the branch to prevent the loop count from being decremented
1617 when the branch is taken. */
1620 xtensa_emit_loop_end (insn
, operands
)
1626 for (insn
= PREV_INSN (insn
); insn
&& !done
; insn
= PREV_INSN (insn
))
1628 switch (GET_CODE (insn
))
1635 output_asm_insn ("nop.n", operands
);
1641 rtx body
= PATTERN (insn
);
1643 if (GET_CODE (body
) == JUMP_INSN
)
1645 output_asm_insn ("nop.n", operands
);
1648 else if ((GET_CODE (body
) != USE
)
1649 && (GET_CODE (body
) != CLOBBER
))
1656 output_asm_insn ("# loop end for %0", operands
);
1661 xtensa_emit_call (callop
, operands
)
1665 static char result
[64];
1666 rtx tgt
= operands
[callop
];
1668 if (GET_CODE (tgt
) == CONST_INT
)
1669 sprintf (result
, "call8\t0x%x", INTVAL (tgt
));
1670 else if (register_operand (tgt
, VOIDmode
))
1671 sprintf (result
, "callx8\t%%%d", callop
);
1673 sprintf (result
, "call8\t%%%d", callop
);
1679 /* Return the stabs register number to use for 'regno'. */
1682 xtensa_dbx_register_number (regno
)
1687 if (GP_REG_P (regno
)) {
1688 regno
-= GP_REG_FIRST
;
1691 else if (BR_REG_P (regno
)) {
1692 regno
-= BR_REG_FIRST
;
1695 else if (FP_REG_P (regno
)) {
1696 regno
-= FP_REG_FIRST
;
1697 /* The current numbering convention is that TIE registers are
1698 numbered in libcc order beginning with 256. We can't guarantee
1699 that the FP registers will come first, so the following is just
1700 a guess. It seems like we should make a special case for FP
1701 registers and give them fixed numbers < 256. */
1704 else if (ACC_REG_P (regno
))
1710 /* When optimizing, we sometimes get asked about pseudo-registers
1711 that don't represent hard registers. Return 0 for these. */
1715 return first
+ regno
;
1719 /* Argument support functions. */
1721 /* Initialize CUMULATIVE_ARGS for a function. */
1724 init_cumulative_args (cum
, fntype
, libname
)
1725 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
1726 tree fntype ATTRIBUTE_UNUSED
; /* tree ptr for function decl */
1727 rtx libname ATTRIBUTE_UNUSED
; /* SYMBOL_REF of library name or 0 */
1732 /* Advance the argument to the next argument position. */
1735 function_arg_advance (cum
, mode
, type
)
1736 CUMULATIVE_ARGS
*cum
; /* current arg information */
1737 enum machine_mode mode
; /* current arg mode */
1738 tree type
; /* type of the argument or 0 if lib support */
1743 arg_words
= &cum
->arg_words
;
1744 max
= MAX_ARGS_IN_REGISTERS
;
1746 words
= (((mode
!= BLKmode
)
1747 ? (int) GET_MODE_SIZE (mode
)
1748 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1750 if ((*arg_words
+ words
> max
) && (*arg_words
< max
))
1753 *arg_words
+= words
;
1757 /* Return an RTL expression containing the register for the given mode,
1758 or 0 if the argument is to be passed on the stack. */
1761 function_arg (cum
, mode
, type
, incoming_p
)
1762 CUMULATIVE_ARGS
*cum
; /* current arg information */
1763 enum machine_mode mode
; /* current arg mode */
1764 tree type
; /* type of the argument or 0 if lib support */
1765 int incoming_p
; /* computing the incoming registers? */
1767 int regbase
, words
, max
;
1770 enum machine_mode result_mode
;
1772 arg_words
= &cum
->arg_words
;
1773 regbase
= (incoming_p
? GP_ARG_FIRST
: GP_OUTGOING_ARG_FIRST
);
1774 max
= MAX_ARGS_IN_REGISTERS
;
1776 words
= (((mode
!= BLKmode
)
1777 ? (int) GET_MODE_SIZE (mode
)
1778 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1780 if (type
&& (TYPE_ALIGN (type
) > BITS_PER_WORD
))
1781 *arg_words
+= (*arg_words
& 1);
1783 if (*arg_words
+ words
> max
)
1786 regno
= regbase
+ *arg_words
;
1787 result_mode
= (mode
== BLKmode
? TYPE_MODE (type
) : mode
);
1789 /* We need to make sure that references to a7 are represented with
1790 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1791 modes bigger than 2 words (because we only have patterns for
1792 modes of 2 words or smaller), we can't control the expansion
1793 unless we explicitly list the individual registers in a PARALLEL. */
1795 if ((mode
== BLKmode
|| words
> 2)
1797 && regno
+ words
> A7_REG
)
1802 result
= gen_rtx_PARALLEL (result_mode
, rtvec_alloc (words
));
1803 for (n
= 0; n
< words
; n
++)
1805 XVECEXP (result
, 0, n
) =
1806 gen_rtx_EXPR_LIST (VOIDmode
,
1807 gen_raw_REG (SImode
, regno
+ n
),
1808 GEN_INT (n
* UNITS_PER_WORD
));
1813 return gen_raw_REG (result_mode
, regno
);
1821 enum machine_mode mode
;
1823 if (!TARGET_BOOLEANS
&& TARGET_HARD_FLOAT
)
1824 error ("boolean registers required for the floating-point option");
1826 /* set up the tables of ld/st opcode names for block moves */
1827 xtensa_ld_opcodes
[(int) SImode
] = "l32i";
1828 xtensa_ld_opcodes
[(int) HImode
] = "l16ui";
1829 xtensa_ld_opcodes
[(int) QImode
] = "l8ui";
1830 xtensa_st_opcodes
[(int) SImode
] = "s32i";
1831 xtensa_st_opcodes
[(int) HImode
] = "s16i";
1832 xtensa_st_opcodes
[(int) QImode
] = "s8i";
1834 xtensa_char_to_class
['q'] = SP_REG
;
1835 xtensa_char_to_class
['a'] = GR_REGS
;
1836 xtensa_char_to_class
['b'] = ((TARGET_BOOLEANS
) ? BR_REGS
: NO_REGS
);
1837 xtensa_char_to_class
['f'] = ((TARGET_HARD_FLOAT
) ? FP_REGS
: NO_REGS
);
1838 xtensa_char_to_class
['A'] = ((TARGET_MAC16
) ? ACC_REG
: NO_REGS
);
1839 xtensa_char_to_class
['B'] = ((TARGET_SEXT
) ? GR_REGS
: NO_REGS
);
1840 xtensa_char_to_class
['C'] = ((TARGET_MUL16
) ? GR_REGS
: NO_REGS
);
1841 xtensa_char_to_class
['D'] = ((TARGET_DENSITY
) ? GR_REGS
: NO_REGS
);
1842 xtensa_char_to_class
['d'] = ((TARGET_DENSITY
) ? AR_REGS
: NO_REGS
);
1844 /* Set up array giving whether a given register can hold a given mode. */
1845 for (mode
= VOIDmode
;
1846 mode
!= MAX_MACHINE_MODE
;
1847 mode
= (enum machine_mode
) ((int) mode
+ 1))
1849 int size
= GET_MODE_SIZE (mode
);
1850 enum mode_class
class = GET_MODE_CLASS (mode
);
1852 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1856 if (ACC_REG_P (regno
))
1857 temp
= (TARGET_MAC16
&&
1858 (class == MODE_INT
) && (size
<= UNITS_PER_WORD
));
1859 else if (GP_REG_P (regno
))
1860 temp
= ((regno
& 1) == 0 || (size
<= UNITS_PER_WORD
));
1861 else if (FP_REG_P (regno
))
1862 temp
= (TARGET_HARD_FLOAT
&& (mode
== SFmode
));
1863 else if (BR_REG_P (regno
))
1864 temp
= (TARGET_BOOLEANS
&& (mode
== CCmode
));
1868 xtensa_hard_regno_mode_ok
[(int) mode
][regno
] = temp
;
1872 init_machine_status
= xtensa_init_machine_status
;
1874 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1875 some targets need to always use PIC. */
1876 if (flag_pic
> 1 || (XTENSA_ALWAYS_PIC
))
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand X. X is an RTL
   expression.

   CODE is a value that can be used to specify one of several ways
   of printing the operand. It is used when identical operands
   must be printed differently depending on the context. CODE
   comes from the '%' specification that was used to request
   printing of the operand. If the specification was just '%DIGIT'
   then CODE is 0; if the specification was '%LTR DIGIT' then CODE
   is the ASCII code for LTR.

   If X is a register, this macro should print the register's name.
   The names can be found in an array 'reg_names' whose type is
   'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.

   When the machine description has a specification '%PUNCT' (a '%'
   followed by a punctuation character), this macro is called with
   a null pointer for X and the punctuation character for CODE.

   'a', 'c', 'l', and 'n' are reserved.

   The Xtensa specific codes are:

   'd'  CONST_INT, print as signed decimal
   'x'  CONST_INT, print as signed hexadecimal
   'K'  CONST_INT, print number of bits in mask for EXTUI
   'R'  CONST_INT, print (X & 0x1f)
   'L'  CONST_INT, print ((32 - X) & 0x1f)
   'D'  REG, print second register of double-word register operand
   'N'  MEM, print address of next word following a memory operand
   'v'  MEM, if memory reference is volatile, output a MEMW before it
*/

/* Print VAL as a small signed decimal when near zero, otherwise as a
   signed hexadecimal constant.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    fprintf (file, "-0x%x", -val);
  else
    fprintf (file, "0x%x", val);
}
1931 print_operand (file
, op
, letter
)
1932 FILE *file
; /* file to write to */
1933 rtx op
; /* operand to print */
1934 int letter
; /* %<letter> or 0 */
1939 error ("PRINT_OPERAND null pointer");
1941 code
= GET_CODE (op
);
1947 int regnum
= xt_true_regnum (op
);
1950 fprintf (file
, "%s", reg_names
[regnum
]);
1955 /* For a volatile memory reference, emit a MEMW before the
1959 if (MEM_VOLATILE_P (op
) && TARGET_SERIALIZE_VOLATILE
)
1960 fprintf (file
, "memw\n\t");
1963 else if (letter
== 'N')
1965 enum machine_mode mode
;
1966 switch (GET_MODE (op
))
1968 case DFmode
: mode
= SFmode
; break;
1969 case DImode
: mode
= SImode
; break;
1972 op
= adjust_address (op
, mode
, 4);
1975 output_address (XEXP (op
, 0));
1984 unsigned val
= INTVAL (op
);
1990 if ((val
!= 0) || (num_bits
== 0) || (num_bits
> 16))
1991 fatal_insn ("invalid mask", op
);
1993 fprintf (file
, "%d", num_bits
);
1998 fprintf (file
, "%d", (32 - INTVAL (op
)) & 0x1f);
2002 fprintf (file
, "%d", INTVAL (op
) & 0x1f);
2006 printx (file
, INTVAL (op
));
2011 fprintf (file
, "%d", INTVAL (op
));
2018 output_addr_const (file
, op
);
2023 /* A C compound statement to output to stdio stream STREAM the
2024 assembler syntax for an instruction operand that is a memory
2025 reference whose address is ADDR. ADDR is an RTL expression. */
2028 print_operand_address (file
, addr
)
2033 error ("PRINT_OPERAND_ADDRESS, null pointer");
2035 switch (GET_CODE (addr
))
2038 fatal_insn ("invalid address", addr
);
2042 fprintf (file
, "%s, 0", reg_names
[REGNO (addr
)]);
2048 rtx offset
= (rtx
)0;
2049 rtx arg0
= XEXP (addr
, 0);
2050 rtx arg1
= XEXP (addr
, 1);
2052 if (GET_CODE (arg0
) == REG
)
2057 else if (GET_CODE (arg1
) == REG
)
2063 fatal_insn ("no register in address", addr
);
2065 if (CONSTANT_P (offset
))
2067 fprintf (file
, "%s, ", reg_names
[REGNO (reg
)]);
2068 output_addr_const (file
, offset
);
2071 fatal_insn ("address offset not a constant", addr
);
2079 output_addr_const (file
, addr
);
/* Emit either a label, .comm, or .lcomm directive. */

void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;
     char *final_string;
     int size;
{
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2102 xtensa_output_literal (file
, x
, mode
, labelno
)
2105 enum machine_mode mode
;
2112 fprintf (file
, "\t.literal .LC%u, ", (unsigned) labelno
);
2114 switch (GET_MODE_CLASS (mode
))
2117 if (GET_CODE (x
) != CONST_DOUBLE
)
2120 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2124 REAL_VALUE_TO_TARGET_SINGLE (r
, value_long
[0]);
2125 fprintf (file
, "0x%08lx\n", value_long
[0]);
2129 REAL_VALUE_TO_TARGET_DOUBLE (r
, value_long
);
2130 fprintf (file
, "0x%08lx, 0x%08lx\n",
2131 value_long
[0], value_long
[1]);
2141 case MODE_PARTIAL_INT
:
2142 size
= GET_MODE_SIZE (mode
);
2145 output_addr_const (file
, x
);
2150 output_addr_const (file
, operand_subword (x
, 0, 0, DImode
));
2152 output_addr_const (file
, operand_subword (x
, 1, 0, DImode
));
2165 /* Return the bytes needed to compute the frame pointer from the current
2168 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2169 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2172 compute_frame_size (size
)
2173 int size
; /* # of var. bytes allocated */
2175 /* add space for the incoming static chain value */
2176 if (current_function_needs_context
)
2177 size
+= (1 * UNITS_PER_WORD
);
2179 xtensa_current_frame_size
=
2180 XTENSA_STACK_ALIGN (size
2181 + current_function_outgoing_args_size
2182 + (WINDOW_SIZE
* UNITS_PER_WORD
));
2183 return xtensa_current_frame_size
;
2188 xtensa_frame_pointer_required ()
2190 /* The code to expand builtin_frame_addr and builtin_return_addr
2191 currently uses the hard_frame_pointer instead of frame_pointer.
2192 This seems wrong but maybe it's necessary for other architectures.
2193 This function is derived from the i386 code. */
2195 if (cfun
->machine
->accesses_prev_frame
)
2203 xtensa_reorg (first
)
2206 rtx insn
, set_frame_ptr_insn
= 0;
2208 unsigned long tsize
= compute_frame_size (get_frame_size ());
2209 if (tsize
< (1 << (12+3)))
2210 frame_size_const
= 0;
2213 frame_size_const
= force_const_mem (SImode
, GEN_INT (tsize
- 16));;
2215 /* make sure the constant is used so it doesn't get eliminated
2216 from the constant pool */
2217 emit_insn_before (gen_rtx_USE (SImode
, frame_size_const
), first
);
2220 if (!frame_pointer_needed
)
2223 /* Search all instructions, looking for the insn that sets up the
2224 frame pointer. This search will fail if the function does not
2225 have an incoming argument in $a7, but in that case, we can just
2226 set up the frame pointer at the very beginning of the
2229 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2236 pat
= PATTERN (insn
);
2237 if (GET_CODE (pat
) == UNSPEC_VOLATILE
2238 && (XINT (pat
, 1) == UNSPECV_SET_FP
))
2240 set_frame_ptr_insn
= insn
;
2245 if (set_frame_ptr_insn
)
2247 /* for all instructions prior to set_frame_ptr_insn, replace
2248 hard_frame_pointer references with stack_pointer */
2249 for (insn
= first
; insn
!= set_frame_ptr_insn
; insn
= NEXT_INSN (insn
))
2252 PATTERN (insn
) = replace_rtx (copy_rtx (PATTERN (insn
)),
2253 hard_frame_pointer_rtx
,
2259 /* emit the frame pointer move immediately after the NOTE that starts
2261 emit_insn_after (gen_movsi (hard_frame_pointer_rtx
,
2262 stack_pointer_rtx
), first
);
2267 /* Set up the stack and frame (if desired) for the function. */
2270 xtensa_function_prologue (file
, size
)
2272 int size ATTRIBUTE_UNUSED
;
2274 unsigned long tsize
= compute_frame_size (get_frame_size ());
2276 if (frame_pointer_needed
)
2277 fprintf (file
, "\t.frame\ta7, %ld\n", tsize
);
2279 fprintf (file
, "\t.frame\tsp, %ld\n", tsize
);
2282 if (tsize
< (1 << (12+3)))
2284 fprintf (file
, "\tentry\tsp, %ld\n", tsize
);
2288 fprintf (file
, "\tentry\tsp, 16\n");
2290 /* use a8 as a temporary since a0-a7 may be live */
2291 fprintf (file
, "\tl32r\ta8, ");
2292 print_operand (file
, frame_size_const
, 0);
2293 fprintf (file
, "\n\tsub\ta8, sp, a8\n");
2294 fprintf (file
, "\tmovsp\tsp, a8\n");
2299 /* Do any necessary cleanup after a function to restore
2300 stack, frame, and regs. */
2303 xtensa_function_epilogue (file
, size
)
2305 int size ATTRIBUTE_UNUSED
;
2307 rtx insn
= get_last_insn ();
2308 /* If the last insn was a BARRIER, we don't have to write anything. */
2309 if (GET_CODE (insn
) == NOTE
)
2310 insn
= prev_nonnote_insn (insn
);
2311 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
2312 fprintf (file
, TARGET_DENSITY
? "\tretw.n\n" : "\tretw\n");
2314 xtensa_current_frame_size
= 0;
2319 xtensa_return_addr (count
, frame
)
2323 rtx result
, retaddr
;
2326 retaddr
= gen_rtx_REG (Pmode
, 0);
2329 rtx addr
= plus_constant (frame
, -4 * UNITS_PER_WORD
);
2330 addr
= memory_address (Pmode
, addr
);
2331 retaddr
= gen_reg_rtx (Pmode
);
2332 emit_move_insn (retaddr
, gen_rtx_MEM (Pmode
, addr
));
2335 /* The 2 most-significant bits of the return address on Xtensa hold
2336 the register window size. To get the real return address, these
2337 bits must be replaced with the high bits from the current PC. */
2339 result
= gen_reg_rtx (Pmode
);
2340 emit_insn (gen_fix_return_addr (result
, retaddr
));
2345 /* Create the va_list data type.
2346 This structure is set up by __builtin_saveregs. The __va_reg
2347 field points to a stack-allocated region holding the contents of the
2348 incoming argument registers. The __va_ndx field is an index initialized
2349 to the position of the first unnamed (variable) argument. This same index
2350 is also used to address the arguments passed in memory. Thus, the
2351 __va_stk field is initialized to point to the position of the first
2352 argument in memory offset to account for the arguments passed in
2353 registers. E.G., if there are 6 argument registers, and each register is
2354 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2355 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2356 argument word N for N >= 6. */
2359 xtensa_build_va_list ()
2361 tree f_stk
, f_reg
, f_ndx
, record
, type_decl
;
2363 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
2364 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
2366 f_stk
= build_decl (FIELD_DECL
, get_identifier ("__va_stk"),
2368 f_reg
= build_decl (FIELD_DECL
, get_identifier ("__va_reg"),
2370 f_ndx
= build_decl (FIELD_DECL
, get_identifier ("__va_ndx"),
2373 DECL_FIELD_CONTEXT (f_stk
) = record
;
2374 DECL_FIELD_CONTEXT (f_reg
) = record
;
2375 DECL_FIELD_CONTEXT (f_ndx
) = record
;
2377 TREE_CHAIN (record
) = type_decl
;
2378 TYPE_NAME (record
) = type_decl
;
2379 TYPE_FIELDS (record
) = f_stk
;
2380 TREE_CHAIN (f_stk
) = f_reg
;
2381 TREE_CHAIN (f_reg
) = f_ndx
;
2383 layout_type (record
);
2388 /* Save the incoming argument registers on the stack. Returns the
2389 address of the saved registers. */
2392 xtensa_builtin_saveregs ()
2395 int arg_words
= current_function_arg_words
;
2396 int gp_left
= MAX_ARGS_IN_REGISTERS
- arg_words
;
2402 /* allocate the general-purpose register space */
2403 gp_regs
= assign_stack_local
2404 (BLKmode
, MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1);
2405 set_mem_alias_set (gp_regs
, get_varargs_alias_set ());
2407 /* Now store the incoming registers. */
2408 dest
= change_address (gp_regs
, SImode
,
2409 plus_constant (XEXP (gp_regs
, 0),
2410 arg_words
* UNITS_PER_WORD
));
2412 /* Note: Don't use move_block_from_reg() here because the incoming
2413 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2414 Instead, call gen_raw_REG() directly so that we get a distinct
2415 instance of (REG:SI 7). */
2416 for (i
= 0; i
< gp_left
; i
++)
2418 emit_move_insn (operand_subword (dest
, i
, 1, BLKmode
),
2419 gen_raw_REG (SImode
, GP_ARG_FIRST
+ arg_words
+ i
));
2422 return XEXP (gp_regs
, 0);
2426 /* Implement `va_start' for varargs and stdarg. We look at the
2427 current function to fill in an initial va_list. */
2430 xtensa_va_start (valist
, nextarg
)
2432 rtx nextarg ATTRIBUTE_UNUSED
;
2440 arg_words
= current_function_args_info
.arg_words
;
2442 f_stk
= TYPE_FIELDS (va_list_type_node
);
2443 f_reg
= TREE_CHAIN (f_stk
);
2444 f_ndx
= TREE_CHAIN (f_reg
);
2446 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2447 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2448 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2450 /* Call __builtin_saveregs; save the result in __va_reg */
2451 current_function_arg_words
= arg_words
;
2452 u
= make_tree (ptr_type_node
, expand_builtin_saveregs ());
2453 t
= build (MODIFY_EXPR
, ptr_type_node
, reg
, u
);
2454 TREE_SIDE_EFFECTS (t
) = 1;
2455 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2457 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2458 u
= make_tree (ptr_type_node
, virtual_incoming_args_rtx
);
2459 u
= fold (build (PLUS_EXPR
, ptr_type_node
, u
,
2460 build_int_2 (-MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1)));
2461 t
= build (MODIFY_EXPR
, ptr_type_node
, stk
, u
);
2462 TREE_SIDE_EFFECTS (t
) = 1;
2463 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2465 /* Set the __va_ndx member. */
2466 u
= build_int_2 (arg_words
* UNITS_PER_WORD
, 0);
2467 t
= build (MODIFY_EXPR
, integer_type_node
, ndx
, u
);
2468 TREE_SIDE_EFFECTS (t
) = 1;
2469 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2473 /* Implement `va_arg'. */
2476 xtensa_va_arg (valist
, type
)
2482 tree tmp
, addr_tree
, type_size
;
2483 rtx array
, orig_ndx
, r
, addr
, size
, va_size
;
2484 rtx lab_false
, lab_over
, lab_false2
;
2486 f_stk
= TYPE_FIELDS (va_list_type_node
);
2487 f_reg
= TREE_CHAIN (f_stk
);
2488 f_ndx
= TREE_CHAIN (f_reg
);
2490 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2491 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2492 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2494 type_size
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
2496 va_size
= gen_reg_rtx (SImode
);
2497 tmp
= fold (build (MULT_EXPR
, sizetype
,
2498 fold (build (TRUNC_DIV_EXPR
, sizetype
,
2499 fold (build (PLUS_EXPR
, sizetype
,
2501 size_int (UNITS_PER_WORD
- 1))),
2502 size_int (UNITS_PER_WORD
))),
2503 size_int (UNITS_PER_WORD
)));
2504 r
= expand_expr (tmp
, va_size
, SImode
, EXPAND_NORMAL
);
2506 emit_move_insn (va_size
, r
);
2509 /* First align __va_ndx to a double word boundary if necessary for this arg:
2511 if (__alignof__ (TYPE) > 4)
2512 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2515 if (TYPE_ALIGN (type
) > BITS_PER_WORD
)
2517 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2518 build_int_2 ((2 * UNITS_PER_WORD
) - 1, 0));
2519 tmp
= build (BIT_AND_EXPR
, integer_type_node
, tmp
,
2520 build_int_2 (-2 * UNITS_PER_WORD
, -1));
2521 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2522 TREE_SIDE_EFFECTS (tmp
) = 1;
2523 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2527 /* Increment __va_ndx to point past the argument:
2529 orig_ndx = (AP).__va_ndx;
2530 (AP).__va_ndx += __va_size (TYPE);
2533 orig_ndx
= gen_reg_rtx (SImode
);
2534 r
= expand_expr (ndx
, orig_ndx
, SImode
, EXPAND_NORMAL
);
2536 emit_move_insn (orig_ndx
, r
);
2538 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2539 make_tree (intSI_type_node
, va_size
));
2540 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2541 TREE_SIDE_EFFECTS (tmp
) = 1;
2542 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2545 /* Check if the argument is in registers:
2547 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2548 && !MUST_PASS_IN_STACK (type))
2549 __array = (AP).__va_reg;
2552 array
= gen_reg_rtx (Pmode
);
2554 lab_over
= NULL_RTX
;
2555 if (!MUST_PASS_IN_STACK (VOIDmode
, type
))
2557 lab_false
= gen_label_rtx ();
2558 lab_over
= gen_label_rtx ();
2560 emit_cmp_and_jump_insns (expand_expr (ndx
, NULL_RTX
, SImode
,
2562 GEN_INT (MAX_ARGS_IN_REGISTERS
2564 GT
, const1_rtx
, SImode
, 0, lab_false
);
2566 r
= expand_expr (reg
, array
, Pmode
, EXPAND_NORMAL
);
2568 emit_move_insn (array
, r
);
2570 emit_jump_insn (gen_jump (lab_over
));
2572 emit_label (lab_false
);
2575 /* ...otherwise, the argument is on the stack (never split between
2576 registers and the stack -- change __va_ndx if necessary):
2580 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2581 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2582 __array = (AP).__va_stk;
2586 lab_false2
= gen_label_rtx ();
2587 emit_cmp_and_jump_insns (orig_ndx
,
2588 GEN_INT (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
),
2589 GE
, const1_rtx
, SImode
, 0, lab_false2
);
2591 tmp
= build (PLUS_EXPR
, sizetype
, make_tree (intSI_type_node
, va_size
),
2592 build_int_2 (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, 0));
2593 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2594 TREE_SIDE_EFFECTS (tmp
) = 1;
2595 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2597 emit_label (lab_false2
);
2599 r
= expand_expr (stk
, array
, Pmode
, EXPAND_NORMAL
);
2601 emit_move_insn (array
, r
);
2603 if (lab_over
!= NULL_RTX
)
2604 emit_label (lab_over
);
2607 /* Given the base array pointer (__array) and index to the subsequent
2608 argument (__va_ndx), find the address:
2610 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2614 The results are endian-dependent because values smaller than one word
2615 are aligned differently.
2618 size
= gen_reg_rtx (SImode
);
2619 emit_move_insn (size
, va_size
);
2621 if (BYTES_BIG_ENDIAN
)
2623 rtx lab_use_va_size
= gen_label_rtx ();
2625 emit_cmp_and_jump_insns (expand_expr (type_size
, NULL_RTX
, SImode
,
2627 GEN_INT (PARM_BOUNDARY
/ BITS_PER_UNIT
),
2628 GE
, const1_rtx
, SImode
, 0, lab_use_va_size
);
2630 r
= expand_expr (type_size
, size
, SImode
, EXPAND_NORMAL
);
2632 emit_move_insn (size
, r
);
2634 emit_label (lab_use_va_size
);
2637 addr_tree
= build (PLUS_EXPR
, ptr_type_node
,
2638 make_tree (ptr_type_node
, array
),
2640 addr_tree
= build (MINUS_EXPR
, ptr_type_node
, addr_tree
,
2641 make_tree (intSI_type_node
, size
));
2642 addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
2643 addr
= copy_to_reg (addr
);
2649 xtensa_preferred_reload_class (x
, class, isoutput
)
2651 enum reg_class
class;
2654 if (!isoutput
&& CONSTANT_P (x
) && GET_CODE (x
) == CONST_DOUBLE
)
2657 /* Don't use the stack pointer or hard frame pointer for reloads!
2658 The hard frame pointer would normally be OK except that it may
2659 briefly hold an incoming argument in the prologue, and reload
2660 won't know that it is live because the hard frame pointer is
2661 treated specially. */
2663 if (class == AR_REGS
|| class == GR_REGS
)
2671 xtensa_secondary_reload_class (class, mode
, x
, isoutput
)
2672 enum reg_class
class;
2673 enum machine_mode mode ATTRIBUTE_UNUSED
;
2679 if (GET_CODE (x
) == SIGN_EXTEND
)
2681 regno
= xt_true_regnum (x
);
2685 if (class == FP_REGS
&& constantpool_mem_p (x
))
2689 if (ACC_REG_P (regno
))
2690 return ((class == GR_REGS
|| class == RL_REGS
) ? NO_REGS
: RL_REGS
);
2691 if (class == ACC_REG
)
2692 return (GP_REG_P (regno
) ? NO_REGS
: RL_REGS
);
2699 order_regs_for_local_alloc ()
2701 if (!leaf_function_p ())
2703 memcpy (reg_alloc_order
, reg_nonleaf_alloc_order
,
2704 FIRST_PSEUDO_REGISTER
* sizeof (int));
2708 int i
, num_arg_regs
;
2711 /* use the AR registers in increasing order (skipping a0 and a1)
2712 but save the incoming argument registers for a last resort */
2713 num_arg_regs
= current_function_args_info
.arg_words
;
2714 if (num_arg_regs
> MAX_ARGS_IN_REGISTERS
)
2715 num_arg_regs
= MAX_ARGS_IN_REGISTERS
;
2716 for (i
= GP_ARG_FIRST
; i
< 16 - num_arg_regs
; i
++)
2717 reg_alloc_order
[nxt
++] = i
+ num_arg_regs
;
2718 for (i
= 0; i
< num_arg_regs
; i
++)
2719 reg_alloc_order
[nxt
++] = GP_ARG_FIRST
+ i
;
2721 /* list the FP registers in order for now */
2722 for (i
= 0; i
< 16; i
++)
2723 reg_alloc_order
[nxt
++] = FP_REG_FIRST
+ i
;
2725 /* GCC requires that we list *all* the registers.... */
2726 reg_alloc_order
[nxt
++] = 0; /* a0 = return address */
2727 reg_alloc_order
[nxt
++] = 1; /* a1 = stack pointer */
2728 reg_alloc_order
[nxt
++] = 16; /* pseudo frame pointer */
2729 reg_alloc_order
[nxt
++] = 17; /* pseudo arg pointer */
2731 /* list the coprocessor registers in order */
2732 for (i
= 0; i
< BR_REG_NUM
; i
++)
2733 reg_alloc_order
[nxt
++] = BR_REG_FIRST
+ i
;
2735 reg_alloc_order
[nxt
++] = ACC_REG_FIRST
; /* MAC16 accumulator */
2740 /* A customized version of reg_overlap_mentioned_p that only looks for
2741 references to a7 (as opposed to hard_frame_pointer_rtx). */
2744 a7_overlap_mentioned_p (x
)
2748 unsigned int x_regno
;
2751 if (GET_CODE (x
) == REG
)
2753 x_regno
= REGNO (x
);
2754 return (x
!= hard_frame_pointer_rtx
2755 && x_regno
< A7_REG
+ 1
2756 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2759 if (GET_CODE (x
) == SUBREG
2760 && GET_CODE (SUBREG_REG (x
)) == REG
2761 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2763 x_regno
= subreg_regno (x
);
2764 return (SUBREG_REG (x
) != hard_frame_pointer_rtx
2765 && x_regno
< A7_REG
+ 1
2766 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2769 /* X does not match, so try its subexpressions. */
2770 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2771 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2775 if (a7_overlap_mentioned_p (XEXP (x
, i
)))
2778 else if (fmt
[i
] == 'E')
2780 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
2781 if (a7_overlap_mentioned_p (XVECEXP (x
, i
, j
)))
2790 /* Some Xtensa targets support multiple bss sections. If the section
2791 name ends with ".bss", add SECTION_BSS to the flags. */
2794 xtensa_multibss_section_type_flags (decl
, name
, reloc
)
2799 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
2802 suffix
= strrchr (name
, '.');
2803 if (suffix
&& strcmp (suffix
, ".bss") == 0)
2805 if (!decl
|| (TREE_CODE (decl
) == VAR_DECL
2806 && DECL_INITIAL (decl
) == NULL_TREE
))
2807 flags
|= SECTION_BSS
; /* @nobits */
2809 warning ("only uninitialized variables can be placed in a "
2817 /* The literal pool stays with the function. */
2820 xtensa_select_rtx_section (mode
, x
, align
)
2821 enum machine_mode mode ATTRIBUTE_UNUSED
;
2822 rtx x ATTRIBUTE_UNUSED
;
2823 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
2825 function_section (current_function_decl
);
2828 /* If we are referencing a function that is static, make the SYMBOL_REF
2829 special so that we can generate direct calls to it even with -fpic. */
2832 xtensa_encode_section_info (decl
, first
)
2834 int first ATTRIBUTE_UNUSED
;
2836 if (TREE_CODE (decl
) == FUNCTION_DECL
&& ! TREE_PUBLIC (decl
))
2837 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
2840 #include "gt-xtensa.h"