]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/xtensa/xtensa.c
re PR fortran/47189 ([OOP] calling STORAGE_SIZE on a NULL-initialized class pointer)
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
CommitLineData
03984308 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
a87cf97e 2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
da1f39e4 3 Free Software Foundation, Inc.
03984308
BW
4 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
2f83c7d6 10Software Foundation; either version 3, or (at your option) any later
03984308
BW
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
2f83c7d6
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
03984308
BW
21
22#include "config.h"
23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
03984308
BW
26#include "rtl.h"
27#include "regs.h"
03984308
BW
28#include "hard-reg-set.h"
29#include "basic-block.h"
03984308
BW
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
718f9c0f 43#include "diagnostic-core.h"
03984308
BW
44#include "optabs.h"
45#include "libfuncs.h"
07232638 46#include "ggc.h"
03984308
BW
47#include "target.h"
48#include "target-def.h"
540eaea8 49#include "langhooks.h"
726a989a 50#include "gimple.h"
e70312d4 51#include "df.h"
85d53c1d 52
03984308
BW
53
54/* Enumeration for all of the relational tests, so that we can build
55 arrays indexed by the test type, and not worry about the order
638db43e 56 of EQ, NE, etc. */
03984308 57
ffbc8796
BW
58enum internal_test
59{
60 ITEST_EQ,
61 ITEST_NE,
62 ITEST_GT,
63 ITEST_GE,
64 ITEST_LT,
65 ITEST_LE,
66 ITEST_GTU,
67 ITEST_GEU,
68 ITEST_LTU,
69 ITEST_LEU,
70 ITEST_MAX
71};
03984308 72
03984308
BW
73/* Array giving truth value on whether or not a given hard register
74 can support a given mode. */
75char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
76
77/* Current frame size calculated by compute_frame_size. */
78unsigned xtensa_current_frame_size;
79
a46bbb5a 80/* Largest block move to handle in-line. */
03984308
BW
81#define LARGEST_MOVE_RATIO 15
82
83/* Define the structure for the machine field in struct function. */
d1b38208 84struct GTY(()) machine_function
03984308
BW
85{
86 int accesses_prev_frame;
997b8b4d
BW
87 bool need_a7_copy;
88 bool vararg_a7;
0d8442b8 89 rtx vararg_a7_copy;
997b8b4d 90 rtx set_frame_ptr_insn;
03984308
BW
91};
92
93/* Vector, indexed by hard register number, which contains 1 for a
94 register that is allowable in a candidate for leaf function
638db43e 95 treatment. */
03984308
BW
96
97const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
98{
99 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
100 1, 1, 1,
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1
103};
104
105/* Map hard register number to register class */
106const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107{
89f6025d
BW
108 RL_REGS, SP_REG, RL_REGS, RL_REGS,
109 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
110 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
111 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
03984308
BW
112 AR_REGS, AR_REGS, BR_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 ACC_REG,
118};
119
c5387660 120static void xtensa_option_override (void);
ffbc8796
BW
121static enum internal_test map_test_to_internal_test (enum rtx_code);
122static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
123static rtx gen_float_relational (enum rtx_code, rtx, rtx);
f90b7a5a 124static rtx gen_conditional_move (enum rtx_code, enum machine_mode, rtx, rtx);
ffbc8796 125static rtx fixup_subreg_mem (rtx);
ffbc8796 126static struct machine_function * xtensa_init_machine_status (void);
6a7a462c 127static rtx xtensa_legitimize_tls_address (rtx);
506d7b68 128static rtx xtensa_legitimize_address (rtx, rtx, enum machine_mode);
586de218 129static bool xtensa_return_in_msb (const_tree);
ffbc8796
BW
130static void printx (FILE *, signed int);
131static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
4c45af42 132static rtx xtensa_builtin_saveregs (void);
c6c3dba9 133static bool xtensa_legitimate_address_p (enum machine_mode, rtx, bool);
ffbc8796
BW
134static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
135 int) ATTRIBUTE_UNUSED;
d6b5193b
RS
136static section *xtensa_select_rtx_section (enum machine_mode, rtx,
137 unsigned HOST_WIDE_INT);
f40751dd 138static bool xtensa_rtx_costs (rtx, int, int, int *, bool);
c35d187f 139static tree xtensa_build_builtin_va_list (void);
586de218 140static bool xtensa_return_in_memory (const_tree, const_tree);
726a989a
RB
141static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
142 gimple_seq *);
626a4b31
NF
143static void xtensa_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
144 const_tree, bool);
145static rtx xtensa_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
146 const_tree, bool);
147static rtx xtensa_function_incoming_arg (CUMULATIVE_ARGS *,
148 enum machine_mode, const_tree, bool);
e2b2d01e 149static rtx xtensa_function_value (const_tree, const_tree, bool);
c2ed6cf8
NF
150static unsigned int xtensa_function_arg_boundary (enum machine_mode,
151 const_tree);
09fa8841 152static void xtensa_init_builtins (void);
f311c3b4 153static tree xtensa_fold_builtin (tree, int, tree *, bool);
09fa8841 154static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
9d0b1619 155static void xtensa_va_start (tree, rtx);
b52b1749 156static bool xtensa_frame_pointer_required (void);
2b4fa409 157static rtx xtensa_static_chain (const_tree, bool);
3c1229cb
RH
158static void xtensa_asm_trampoline_template (FILE *);
159static void xtensa_trampoline_init (rtx, tree, rtx);
b64a1b53 160
b64a1b53
RH
161static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
162 REG_ALLOC_ORDER;
3020190e
JM
163
164/* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
165
166static const struct default_options xtensa_option_optimization_table[] =
167 {
168 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
169 /* Reordering blocks for Xtensa is not a good idea unless the
170 compiler understands the range of conditional branches.
171 Currently all branch relaxation for Xtensa is handled in the
172 assembler, so GCC cannot do a good job of reordering blocks.
173 Do not enable reordering unless it is explicitly requested. */
174 { OPT_LEVELS_ALL, OPT_freorder_blocks, NULL, 0 },
175 { OPT_LEVELS_NONE, 0, NULL, 0 }
176 };
b64a1b53 177\f
03984308
BW
178
179/* This macro generates the assembly code for function exit,
180 on machines that need it. If FUNCTION_EPILOGUE is not defined
181 then individual return instructions are generated for each
182 return statement. Args are same as for FUNCTION_PROLOGUE. */
183
184#undef TARGET_ASM_FUNCTION_EPILOGUE
185#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
186
187/* These hooks specify assembly directives for creating certain kinds
188 of integer object. */
189
190#undef TARGET_ASM_ALIGNED_SI_OP
191#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
192
b64a1b53
RH
193#undef TARGET_ASM_SELECT_RTX_SECTION
194#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
03984308 195
66beb87a 196#undef TARGET_DEFAULT_TARGET_FLAGS
47c21725 197#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT)
66beb87a 198
506d7b68
PB
199#undef TARGET_LEGITIMIZE_ADDRESS
200#define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address
201
3c50106f
RH
202#undef TARGET_RTX_COSTS
203#define TARGET_RTX_COSTS xtensa_rtx_costs
dcefdf67 204#undef TARGET_ADDRESS_COST
f40751dd 205#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3c50106f 206
c35d187f
RH
207#undef TARGET_BUILD_BUILTIN_VA_LIST
208#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
209
d7bd8aeb
JJ
210#undef TARGET_EXPAND_BUILTIN_VA_START
211#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start
212
cde0f3fd
PB
213#undef TARGET_PROMOTE_FUNCTION_MODE
214#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
4c45af42 215#undef TARGET_PROMOTE_PROTOTYPES
586de218 216#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
4c45af42 217
4c45af42
KH
218#undef TARGET_RETURN_IN_MEMORY
219#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
e2b2d01e
AS
220#undef TARGET_FUNCTION_VALUE
221#define TARGET_FUNCTION_VALUE xtensa_function_value
42ba5130 222#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 223#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
224#undef TARGET_MUST_PASS_IN_STACK
225#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
626a4b31
NF
226#undef TARGET_FUNCTION_ARG_ADVANCE
227#define TARGET_FUNCTION_ARG_ADVANCE xtensa_function_arg_advance
228#undef TARGET_FUNCTION_ARG
229#define TARGET_FUNCTION_ARG xtensa_function_arg
230#undef TARGET_FUNCTION_INCOMING_ARG
231#define TARGET_FUNCTION_INCOMING_ARG xtensa_function_incoming_arg
c2ed6cf8
NF
232#undef TARGET_FUNCTION_ARG_BOUNDARY
233#define TARGET_FUNCTION_ARG_BOUNDARY xtensa_function_arg_boundary
4c45af42
KH
234
235#undef TARGET_EXPAND_BUILTIN_SAVEREGS
236#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
85d53c1d
RH
237#undef TARGET_GIMPLIFY_VA_ARG_EXPR
238#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
4c45af42 239
6e5ff6e7
BW
240#undef TARGET_RETURN_IN_MSB
241#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
242
09fa8841
BW
243#undef TARGET_INIT_BUILTINS
244#define TARGET_INIT_BUILTINS xtensa_init_builtins
245#undef TARGET_FOLD_BUILTIN
246#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
247#undef TARGET_EXPAND_BUILTIN
248#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
249
37fbe8a3
BW
250#undef TARGET_SECONDARY_RELOAD
251#define TARGET_SECONDARY_RELOAD xtensa_secondary_reload
252
6a7a462c
BW
253#undef TARGET_HAVE_TLS
254#define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS)
255
256#undef TARGET_CANNOT_FORCE_CONST_MEM
257#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_tls_referenced_p
258
c6c3dba9
PB
259#undef TARGET_LEGITIMATE_ADDRESS_P
260#define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p
261
b52b1749
AS
262#undef TARGET_FRAME_POINTER_REQUIRED
263#define TARGET_FRAME_POINTER_REQUIRED xtensa_frame_pointer_required
264
2b4fa409
RH
265#undef TARGET_STATIC_CHAIN
266#define TARGET_STATIC_CHAIN xtensa_static_chain
3c1229cb
RH
267#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
268#define TARGET_ASM_TRAMPOLINE_TEMPLATE xtensa_asm_trampoline_template
269#undef TARGET_TRAMPOLINE_INIT
270#define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init
271
c5387660
JM
272#undef TARGET_OPTION_OVERRIDE
273#define TARGET_OPTION_OVERRIDE xtensa_option_override
3020190e
JM
274#undef TARGET_OPTION_OPTIMIZATION_TABLE
275#define TARGET_OPTION_OPTIMIZATION_TABLE xtensa_option_optimization_table
c5387660 276
b64a1b53 277struct gcc_target targetm = TARGET_INITIALIZER;
03984308 278
887af464
BW
279\f
280/* Functions to test Xtensa immediate operand validity. */
03984308 281
8eb1bc5c
BW
282bool
283xtensa_simm8 (HOST_WIDE_INT v)
284{
285 return v >= -128 && v <= 127;
286}
287
288
289bool
290xtensa_simm8x256 (HOST_WIDE_INT v)
291{
292 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
293}
294
295
296bool
297xtensa_simm12b (HOST_WIDE_INT v)
298{
299 return v >= -2048 && v <= 2047;
300}
301
302
303static bool
304xtensa_uimm8 (HOST_WIDE_INT v)
305{
306 return v >= 0 && v <= 255;
307}
308
309
310static bool
311xtensa_uimm8x2 (HOST_WIDE_INT v)
312{
313 return (v & 1) == 0 && (v >= 0 && v <= 510);
314}
315
316
317static bool
318xtensa_uimm8x4 (HOST_WIDE_INT v)
319{
320 return (v & 3) == 0 && (v >= 0 && v <= 1020);
321}
322
323
324static bool
325xtensa_b4const (HOST_WIDE_INT v)
03984308
BW
326{
327 switch (v)
328 {
8eb1bc5c
BW
329 case -1:
330 case 1:
03984308
BW
331 case 2:
332 case 3:
333 case 4:
334 case 5:
335 case 6:
336 case 7:
337 case 8:
338 case 10:
339 case 12:
340 case 16:
341 case 32:
342 case 64:
343 case 128:
344 case 256:
8eb1bc5c 345 return true;
03984308 346 }
8eb1bc5c 347 return false;
03984308
BW
348}
349
03984308 350
8eb1bc5c
BW
351bool
352xtensa_b4const_or_zero (HOST_WIDE_INT v)
03984308 353{
8eb1bc5c
BW
354 if (v == 0)
355 return true;
356 return xtensa_b4const (v);
03984308
BW
357}
358
03984308 359
8eb1bc5c
BW
360bool
361xtensa_b4constu (HOST_WIDE_INT v)
03984308
BW
362{
363 switch (v)
364 {
8eb1bc5c
BW
365 case 32768:
366 case 65536:
03984308
BW
367 case 2:
368 case 3:
369 case 4:
370 case 5:
371 case 6:
372 case 7:
373 case 8:
374 case 10:
375 case 12:
376 case 16:
377 case 32:
378 case 64:
379 case 128:
380 case 256:
8eb1bc5c 381 return true;
03984308 382 }
8eb1bc5c 383 return false;
03984308
BW
384}
385
03984308 386
8eb1bc5c
BW
387bool
388xtensa_mask_immediate (HOST_WIDE_INT v)
03984308 389{
8eb1bc5c
BW
390#define MAX_MASK_SIZE 16
391 int mask_size;
03984308 392
8eb1bc5c
BW
393 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
394 {
395 if ((v & 1) == 0)
396 return false;
397 v = v >> 1;
398 if (v == 0)
399 return true;
400 }
03984308 401
8eb1bc5c 402 return false;
03984308
BW
403}
404
03984308 405
03984308 406/* This is just like the standard true_regnum() function except that it
638db43e 407 works even when reg_renumber is not initialized. */
03984308
BW
408
409int
ffbc8796 410xt_true_regnum (rtx x)
03984308
BW
411{
412 if (GET_CODE (x) == REG)
413 {
414 if (reg_renumber
415 && REGNO (x) >= FIRST_PSEUDO_REGISTER
416 && reg_renumber[REGNO (x)] >= 0)
417 return reg_renumber[REGNO (x)];
418 return REGNO (x);
419 }
420 if (GET_CODE (x) == SUBREG)
421 {
422 int base = xt_true_regnum (SUBREG_REG (x));
423 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
424 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
425 GET_MODE (SUBREG_REG (x)),
426 SUBREG_BYTE (x), GET_MODE (x));
427 }
428 return -1;
429}
430
431
03984308 432int
ffbc8796 433xtensa_valid_move (enum machine_mode mode, rtx *operands)
03984308 434{
a8cacfd2
BW
435 /* Either the destination or source must be a register, and the
436 MAC16 accumulator doesn't count. */
437
438 if (register_operand (operands[0], mode))
439 {
440 int dst_regnum = xt_true_regnum (operands[0]);
441
638db43e 442 /* The stack pointer can only be assigned with a MOVSP opcode. */
a8cacfd2
BW
443 if (dst_regnum == STACK_POINTER_REGNUM)
444 return (mode == SImode
445 && register_operand (operands[1], mode)
446 && !ACC_REG_P (xt_true_regnum (operands[1])));
447
448 if (!ACC_REG_P (dst_regnum))
449 return true;
450 }
3437320b 451 if (register_operand (operands[1], mode))
a8cacfd2
BW
452 {
453 int src_regnum = xt_true_regnum (operands[1]);
454 if (!ACC_REG_P (src_regnum))
455 return true;
456 }
03984308
BW
457 return FALSE;
458}
459
460
03984308 461int
ffbc8796 462smalloffset_mem_p (rtx op)
03984308
BW
463{
464 if (GET_CODE (op) == MEM)
465 {
466 rtx addr = XEXP (op, 0);
467 if (GET_CODE (addr) == REG)
da1f39e4 468 return BASE_REG_P (addr, 0);
03984308
BW
469 if (GET_CODE (addr) == PLUS)
470 {
471 rtx offset = XEXP (addr, 0);
8eb1bc5c 472 HOST_WIDE_INT val;
03984308
BW
473 if (GET_CODE (offset) != CONST_INT)
474 offset = XEXP (addr, 1);
475 if (GET_CODE (offset) != CONST_INT)
476 return FALSE;
8eb1bc5c
BW
477
478 val = INTVAL (offset);
479 return (val & 3) == 0 && (val >= 0 && val <= 60);
03984308
BW
480 }
481 }
482 return FALSE;
483}
484
485
03984308 486int
ffbc8796 487constantpool_address_p (rtx addr)
03984308
BW
488{
489 rtx sym = addr;
490
491 if (GET_CODE (addr) == CONST)
492 {
493 rtx offset;
494
3bbc2af6 495 /* Only handle (PLUS (SYM, OFFSET)) form. */
03984308
BW
496 addr = XEXP (addr, 0);
497 if (GET_CODE (addr) != PLUS)
498 return FALSE;
499
3bbc2af6 500 /* Make sure the address is word aligned. */
03984308
BW
501 offset = XEXP (addr, 1);
502 if ((GET_CODE (offset) != CONST_INT)
503 || ((INTVAL (offset) & 3) != 0))
504 return FALSE;
505
506 sym = XEXP (addr, 0);
507 }
508
509 if ((GET_CODE (sym) == SYMBOL_REF)
510 && CONSTANT_POOL_ADDRESS_P (sym))
511 return TRUE;
512 return FALSE;
513}
514
515
516int
ffbc8796 517constantpool_mem_p (rtx op)
03984308 518{
63694bdd
BW
519 if (GET_CODE (op) == SUBREG)
520 op = SUBREG_REG (op);
03984308
BW
521 if (GET_CODE (op) == MEM)
522 return constantpool_address_p (XEXP (op, 0));
523 return FALSE;
524}
525
526
6a7a462c
BW
527/* Return TRUE if X is a thread-local symbol. */
528
529static bool
530xtensa_tls_symbol_p (rtx x)
531{
532 if (! TARGET_HAVE_TLS)
533 return false;
534
535 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
536}
537
538
03984308 539void
ffbc8796 540xtensa_extend_reg (rtx dst, rtx src)
03984308
BW
541{
542 rtx temp = gen_reg_rtx (SImode);
543 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
544
3bbc2af6 545 /* Generate paradoxical subregs as needed so that the modes match. */
03984308
BW
546 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
547 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
548
549 emit_insn (gen_ashlsi3 (temp, src, shift));
550 emit_insn (gen_ashrsi3 (dst, temp, shift));
551}
552
553
8eb1bc5c 554bool
ffbc8796 555xtensa_mem_offset (unsigned v, enum machine_mode mode)
03984308
BW
556{
557 switch (mode)
558 {
559 case BLKmode:
560 /* Handle the worst case for block moves. See xtensa_expand_block_move
561 where we emit an optimized block move operation if the block can be
562 moved in < "move_ratio" pieces. The worst case is when the block is
563 aligned but has a size of (3 mod 4) (does this happen?) so that the
638db43e 564 last piece requires a byte load/store. */
f42f5a1b
BW
565 return (xtensa_uimm8 (v)
566 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
03984308
BW
567
568 case QImode:
569 return xtensa_uimm8 (v);
570
571 case HImode:
572 return xtensa_uimm8x2 (v);
573
574 case DFmode:
575 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
576
577 default:
578 break;
579 }
580
581 return xtensa_uimm8x4 (v);
582}
583
584
ffbc8796 585/* Make normal rtx_code into something we can index from an array. */
03984308
BW
586
587static enum internal_test
ffbc8796 588map_test_to_internal_test (enum rtx_code test_code)
03984308
BW
589{
590 enum internal_test test = ITEST_MAX;
591
592 switch (test_code)
593 {
594 default: break;
595 case EQ: test = ITEST_EQ; break;
596 case NE: test = ITEST_NE; break;
597 case GT: test = ITEST_GT; break;
598 case GE: test = ITEST_GE; break;
599 case LT: test = ITEST_LT; break;
600 case LE: test = ITEST_LE; break;
601 case GTU: test = ITEST_GTU; break;
602 case GEU: test = ITEST_GEU; break;
603 case LTU: test = ITEST_LTU; break;
604 case LEU: test = ITEST_LEU; break;
605 }
606
607 return test;
608}
609
610
611/* Generate the code to compare two integer values. The return value is
638db43e 612 the comparison expression. */
03984308
BW
613
614static rtx
ffbc8796
BW
615gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
616 rtx cmp0, /* first operand to compare */
617 rtx cmp1, /* second operand to compare */
618 int *p_invert /* whether branch needs to reverse test */)
03984308 619{
ffbc8796
BW
620 struct cmp_info
621 {
03984308 622 enum rtx_code test_code; /* test code to use in insn */
8eb1bc5c 623 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
03984308
BW
624 int const_add; /* constant to add (convert LE -> LT) */
625 int reverse_regs; /* reverse registers in test */
626 int invert_const; /* != 0 if invert value if cmp1 is constant */
627 int invert_reg; /* != 0 if invert value if cmp1 is register */
628 int unsignedp; /* != 0 for unsigned comparisons. */
629 };
630
631 static struct cmp_info info[ (int)ITEST_MAX ] = {
632
8eb1bc5c
BW
633 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
634 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
03984308 635
8eb1bc5c
BW
636 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
637 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
638 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
639 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
03984308
BW
640
641 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
642 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
643 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
644 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
645 };
646
647 enum internal_test test;
648 enum machine_mode mode;
649 struct cmp_info *p_info;
650
651 test = map_test_to_internal_test (test_code);
177b6be0 652 gcc_assert (test != ITEST_MAX);
03984308
BW
653
654 p_info = &info[ (int)test ];
655
656 mode = GET_MODE (cmp0);
657 if (mode == VOIDmode)
658 mode = GET_MODE (cmp1);
659
660 /* Make sure we can handle any constants given to us. */
661 if (GET_CODE (cmp1) == CONST_INT)
662 {
663 HOST_WIDE_INT value = INTVAL (cmp1);
664 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
665
666 /* if the immediate overflows or does not fit in the immediate field,
667 spill it to a register */
668
669 if ((p_info->unsignedp ?
670 (uvalue + p_info->const_add > uvalue) :
671 (value + p_info->const_add > value)) != (p_info->const_add > 0))
672 {
673 cmp1 = force_reg (mode, cmp1);
674 }
675 else if (!(p_info->const_range_p) (value + p_info->const_add))
676 {
677 cmp1 = force_reg (mode, cmp1);
678 }
679 }
680 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
681 {
682 cmp1 = force_reg (mode, cmp1);
683 }
684
685 /* See if we need to invert the result. */
686 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
687 ? p_info->invert_const
688 : p_info->invert_reg);
689
690 /* Comparison to constants, may involve adding 1 to change a LT into LE.
691 Comparison between two registers, may involve switching operands. */
692 if (GET_CODE (cmp1) == CONST_INT)
693 {
694 if (p_info->const_add != 0)
695 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
696
697 }
698 else if (p_info->reverse_regs)
699 {
700 rtx temp = cmp0;
701 cmp0 = cmp1;
702 cmp1 = temp;
703 }
704
1c563bed 705 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
03984308
BW
706}
707
708
709/* Generate the code to compare two float values. The return value is
638db43e 710 the comparison expression. */
03984308
BW
711
712static rtx
ffbc8796
BW
713gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
714 rtx cmp0, /* first operand to compare */
715 rtx cmp1 /* second operand to compare */)
03984308 716{
ffbc8796 717 rtx (*gen_fn) (rtx, rtx, rtx);
03984308
BW
718 rtx brtmp;
719 int reverse_regs, invert;
720
721 switch (test_code)
722 {
723 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
724 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
725 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
726 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
727 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
728 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
ff779f98
BW
729 case UNEQ: reverse_regs = 0; invert = 0; gen_fn = gen_suneq_sf; break;
730 case LTGT: reverse_regs = 0; invert = 1; gen_fn = gen_suneq_sf; break;
731 case UNLE: reverse_regs = 0; invert = 0; gen_fn = gen_sunle_sf; break;
732 case UNGT: reverse_regs = 1; invert = 0; gen_fn = gen_sunlt_sf; break;
733 case UNLT: reverse_regs = 0; invert = 0; gen_fn = gen_sunlt_sf; break;
734 case UNGE: reverse_regs = 1; invert = 0; gen_fn = gen_sunle_sf; break;
735 case UNORDERED:
736 reverse_regs = 0; invert = 0; gen_fn = gen_sunordered_sf; break;
737 case ORDERED:
738 reverse_regs = 0; invert = 1; gen_fn = gen_sunordered_sf; break;
633e4eb4 739 default:
1c563bed 740 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
03984308
BW
741 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
742 }
743
744 if (reverse_regs)
745 {
746 rtx temp = cmp0;
747 cmp0 = cmp1;
748 cmp1 = temp;
749 }
750
751 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
752 emit_insn (gen_fn (brtmp, cmp0, cmp1));
753
1c563bed 754 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
03984308
BW
755}
756
757
758void
f90b7a5a 759xtensa_expand_conditional_branch (rtx *operands, enum machine_mode mode)
03984308 760{
f90b7a5a
PB
761 enum rtx_code test_code = GET_CODE (operands[0]);
762 rtx cmp0 = operands[1];
763 rtx cmp1 = operands[2];
03984308
BW
764 rtx cmp;
765 int invert;
766 rtx label1, label2;
767
f90b7a5a 768 switch (mode)
03984308 769 {
f90b7a5a 770 case DFmode:
03984308 771 default:
1c563bed 772 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
03984308 773
f90b7a5a 774 case SImode:
03984308
BW
775 invert = FALSE;
776 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
777 break;
778
f90b7a5a 779 case SFmode:
03984308 780 if (!TARGET_HARD_FLOAT)
da1f39e4
BW
781 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
782 cmp0, cmp1));
03984308
BW
783 invert = FALSE;
784 cmp = gen_float_relational (test_code, cmp0, cmp1);
785 break;
786 }
787
788 /* Generate the branch. */
789
f90b7a5a 790 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
03984308
BW
791 label2 = pc_rtx;
792
793 if (invert)
794 {
795 label2 = label1;
796 label1 = pc_rtx;
797 }
798
799 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
800 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
801 label1,
802 label2)));
803}
804
805
806static rtx
f90b7a5a
PB
807gen_conditional_move (enum rtx_code code, enum machine_mode mode,
808 rtx op0, rtx op1)
03984308 809{
f90b7a5a 810 if (mode == SImode)
03984308 811 {
f90b7a5a
PB
812 rtx cmp;
813
03984308
BW
814 /* Jump optimization calls get_condition() which canonicalizes
815 comparisons like (GE x <const>) to (GT x <const-1>).
816 Transform those comparisons back to GE, since that is the
817 comparison supported in Xtensa. We shouldn't have to
818 transform <LE x const> comparisons, because neither
819 xtensa_expand_conditional_branch() nor get_condition() will
638db43e 820 produce them. */
03984308
BW
821
822 if ((code == GT) && (op1 == constm1_rtx))
823 {
824 code = GE;
825 op1 = const0_rtx;
826 }
1c563bed 827 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
03984308
BW
828
829 if (boolean_operator (cmp, VOIDmode))
830 {
3bbc2af6 831 /* Swap the operands to make const0 second. */
03984308
BW
832 if (op0 == const0_rtx)
833 {
834 op0 = op1;
835 op1 = const0_rtx;
836 }
837
3bbc2af6 838 /* If not comparing against zero, emit a comparison (subtract). */
03984308
BW
839 if (op1 != const0_rtx)
840 {
841 op0 = expand_binop (SImode, sub_optab, op0, op1,
842 0, 0, OPTAB_LIB_WIDEN);
843 op1 = const0_rtx;
844 }
845 }
846 else if (branch_operator (cmp, VOIDmode))
847 {
3bbc2af6 848 /* Swap the operands to make const0 second. */
03984308
BW
849 if (op0 == const0_rtx)
850 {
851 op0 = op1;
852 op1 = const0_rtx;
853
854 switch (code)
855 {
856 case LT: code = GE; break;
857 case GE: code = LT; break;
177b6be0 858 default: gcc_unreachable ();
03984308
BW
859 }
860 }
861
862 if (op1 != const0_rtx)
863 return 0;
864 }
865 else
866 return 0;
867
1c563bed 868 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
03984308
BW
869 }
870
f90b7a5a 871 if (TARGET_HARD_FLOAT && mode == SFmode)
03984308
BW
872 return gen_float_relational (code, op0, op1);
873
874 return 0;
875}
876
877
878int
ffbc8796 879xtensa_expand_conditional_move (rtx *operands, int isflt)
03984308 880{
f90b7a5a
PB
881 rtx dest = operands[0];
882 rtx cmp = operands[1];
883 enum machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
ffbc8796 884 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
03984308 885
f90b7a5a
PB
886 if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
887 XEXP (cmp, 0), XEXP (cmp, 1))))
03984308
BW
888 return 0;
889
890 if (isflt)
f90b7a5a 891 gen_fn = (cmp_mode == SImode
03984308
BW
892 ? gen_movsfcc_internal0
893 : gen_movsfcc_internal1);
894 else
f90b7a5a 895 gen_fn = (cmp_mode == SImode
03984308
BW
896 ? gen_movsicc_internal0
897 : gen_movsicc_internal1);
898
f90b7a5a 899 emit_insn (gen_fn (dest, XEXP (cmp, 0), operands[2], operands[3], cmp));
03984308
BW
900 return 1;
901}
902
903
904int
f90b7a5a 905xtensa_expand_scc (rtx operands[4], enum machine_mode cmp_mode)
03984308
BW
906{
907 rtx dest = operands[0];
f90b7a5a 908 rtx cmp;
03984308 909 rtx one_tmp, zero_tmp;
ffbc8796 910 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
03984308 911
f90b7a5a
PB
912 if (!(cmp = gen_conditional_move (GET_CODE (operands[1]), cmp_mode,
913 operands[2], operands[3])))
03984308
BW
914 return 0;
915
916 one_tmp = gen_reg_rtx (SImode);
917 zero_tmp = gen_reg_rtx (SImode);
918 emit_insn (gen_movsi (one_tmp, const_true_rtx));
919 emit_insn (gen_movsi (zero_tmp, const0_rtx));
920
f90b7a5a 921 gen_fn = (cmp_mode == SImode
03984308
BW
922 ? gen_movsicc_internal0
923 : gen_movsicc_internal1);
924 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
925 return 1;
926}
927
928
633e4eb4
BW
929/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
930 for the output, i.e., the input operands are twice as big as MODE. */
931
932void
ffbc8796 933xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
633e4eb4
BW
934{
935 switch (GET_CODE (operands[1]))
936 {
937 case REG:
938 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
939 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
940 break;
941
942 case MEM:
943 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
944 operands[2] = adjust_address (operands[1], mode, 0);
945 break;
946
947 case CONST_INT:
948 case CONST_DOUBLE:
949 split_double (operands[1], &operands[2], &operands[3]);
950 break;
951
952 default:
177b6be0 953 gcc_unreachable ();
633e4eb4
BW
954 }
955
956 switch (GET_CODE (operands[0]))
957 {
958 case REG:
959 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
960 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
961 break;
962
963 case MEM:
964 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
965 operands[0] = adjust_address (operands[0], mode, 0);
966 break;
967
968 default:
177b6be0 969 gcc_unreachable ();
633e4eb4
BW
970 }
971}
972
973
03984308 974/* Emit insns to move operands[1] into operands[0].
03984308
BW
975 Return 1 if we have written out everything that needs to be done to
976 do the move. Otherwise, return 0 and the caller will emit the move
977 normally. */
978
979int
ffbc8796 980xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
03984308 981{
6a7a462c
BW
982 rtx src = operands[1];
983
984 if (CONSTANT_P (src)
985 && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
03984308 986 {
6a7a462c
BW
987 rtx dst = operands[0];
988
989 if (xtensa_tls_referenced_p (src))
990 {
991 rtx addend = NULL;
992
993 if (GET_CODE (src) == CONST && GET_CODE (XEXP (src, 0)) == PLUS)
994 {
995 addend = XEXP (XEXP (src, 0), 1);
996 src = XEXP (XEXP (src, 0), 0);
997 }
998
999 src = xtensa_legitimize_tls_address (src);
1000 if (addend)
1001 {
1002 src = gen_rtx_PLUS (mode, src, addend);
1003 src = force_operand (src, dst);
1004 }
1005 emit_move_insn (dst, src);
1006 return 1;
1007 }
1008
1009 if (! TARGET_CONST16)
1010 {
1011 src = force_const_mem (SImode, src);
1012 operands[1] = src;
1013 }
f42f5a1b
BW
1014
1015 /* PC-relative loads are always SImode, and CONST16 is only
1016 supported in the movsi pattern, so add a SUBREG for any other
1017 (smaller) mode. */
1018
1019 if (mode != SImode)
1020 {
6a7a462c 1021 if (register_operand (dst, mode))
f42f5a1b 1022 {
6a7a462c 1023 emit_move_insn (simplify_gen_subreg (SImode, dst, mode, 0), src);
f42f5a1b
BW
1024 return 1;
1025 }
1026 else
1027 {
6a7a462c
BW
1028 src = force_reg (SImode, src);
1029 src = gen_lowpart_SUBREG (mode, src);
1030 operands[1] = src;
f42f5a1b
BW
1031 }
1032 }
03984308
BW
1033 }
1034
997b8b4d
BW
1035 if (!(reload_in_progress | reload_completed)
1036 && !xtensa_valid_move (mode, operands))
1037 operands[1] = force_reg (mode, operands[1]);
03984308 1038
997b8b4d 1039 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
03984308
BW
1040
1041 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
638db43e
BW
1042 instruction won't be recognized after reload, so we remove the
1043 subreg and adjust mem accordingly. */
03984308
BW
1044 if (reload_in_progress)
1045 {
1046 operands[0] = fixup_subreg_mem (operands[0]);
1047 operands[1] = fixup_subreg_mem (operands[1]);
1048 }
1049 return 0;
1050}
1051
f42f5a1b 1052
03984308 1053static rtx
ffbc8796 1054fixup_subreg_mem (rtx x)
03984308
BW
1055{
1056 if (GET_CODE (x) == SUBREG
1057 && GET_CODE (SUBREG_REG (x)) == REG
1058 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1059 {
1060 rtx temp =
1061 gen_rtx_SUBREG (GET_MODE (x),
1062 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1063 SUBREG_BYTE (x));
1064 x = alter_subreg (&temp);
1065 }
1066 return x;
1067}
1068
1069
997b8b4d
BW
1070/* Check if an incoming argument in a7 is expected to be used soon and
1071 if OPND is a register or register pair that includes a7. If so,
1072 create a new pseudo and copy a7 into that pseudo at the very
1073 beginning of the function, followed by the special "set_frame_ptr"
1074 unspec_volatile insn. The return value is either the original
1075 operand, if it is not a7, or the new pseudo containing a copy of
1076 the incoming argument. This is necessary because the register
1077 allocator will ignore conflicts with a7 and may either assign some
1078 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1079 the incoming argument in a7. By copying the argument out of a7 as
1080 the very first thing, and then immediately following that with an
1081 unspec_volatile to keep the scheduler away, we should avoid any
1082 problems. Putting the set_frame_ptr insn at the beginning, with
1083 only the a7 copy before it, also makes it easier for the prologue
1084 expander to initialize the frame pointer after the a7 copy and to
1085 fix up the a7 copy to use the stack pointer instead of the frame
1086 pointer. */
58db834b 1087
997b8b4d
BW
1088rtx
1089xtensa_copy_incoming_a7 (rtx opnd)
58db834b 1090{
997b8b4d
BW
1091 rtx entry_insns = 0;
1092 rtx reg, tmp;
1093 enum machine_mode mode;
1094
1095 if (!cfun->machine->need_a7_copy)
1096 return opnd;
1097
1098 /* This function should never be called again once a7 has been copied. */
177b6be0 1099 gcc_assert (!cfun->machine->set_frame_ptr_insn);
997b8b4d
BW
1100
1101 mode = GET_MODE (opnd);
1102
1103 /* The operand using a7 may come in a later instruction, so just return
1104 the original operand if it doesn't use a7. */
1105 reg = opnd;
1106 if (GET_CODE (reg) == SUBREG)
58db834b 1107 {
177b6be0 1108 gcc_assert (SUBREG_BYTE (reg) == 0);
997b8b4d
BW
1109 reg = SUBREG_REG (reg);
1110 }
1111 if (GET_CODE (reg) != REG
1112 || REGNO (reg) > A7_REG
1113 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1114 return opnd;
e6aecf8e 1115
997b8b4d 1116 /* 1-word args will always be in a7; 2-word args in a6/a7. */
177b6be0 1117 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
58db834b 1118
997b8b4d 1119 cfun->machine->need_a7_copy = false;
58db834b 1120
997b8b4d
BW
1121 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1122 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
58db834b 1123
0d8442b8 1124 start_sequence ();
997b8b4d 1125 tmp = gen_reg_rtx (mode);
58db834b 1126
997b8b4d
BW
1127 switch (mode)
1128 {
1129 case DFmode:
1130 case DImode:
b412869c
BW
1131 /* Copy the value out of A7 here but keep the first word in A6 until
1132 after the set_frame_ptr insn. Otherwise, the register allocator
1133 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
1134 value. */
997b8b4d
BW
1135 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1136 gen_raw_REG (SImode, A7_REG)));
1137 break;
1138 case SFmode:
1139 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1140 break;
1141 case SImode:
1142 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1143 break;
1144 case HImode:
1145 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1146 break;
1147 case QImode:
1148 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1149 break;
1150 default:
177b6be0 1151 gcc_unreachable ();
58db834b
BW
1152 }
1153
997b8b4d 1154 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
b412869c
BW
1155
1156 /* For DF and DI mode arguments, copy the incoming value in A6 now. */
1157 if (mode == DFmode || mode == DImode)
1158 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1159 gen_rtx_REG (SImode, A7_REG - 1)));
997b8b4d
BW
1160 entry_insns = get_insns ();
1161 end_sequence ();
1162
1163 if (cfun->machine->vararg_a7)
1164 {
0d8442b8
BW
1165 /* This is called from within builtin_saveregs, which will insert the
1166 saveregs code at the function entry, ahead of anything placed at
1167 the function entry now. Instead, save the sequence to be inserted
1168 at the beginning of the saveregs code. */
1169 cfun->machine->vararg_a7_copy = entry_insns;
997b8b4d
BW
1170 }
1171 else
1172 {
1173 /* Put entry_insns after the NOTE that starts the function. If
1174 this is inside a start_sequence, make the outer-level insn
1175 chain current, so the code is placed at the start of the
1176 function. */
1177 push_topmost_sequence ();
0d8442b8
BW
1178 /* Do not use entry_of_function() here. This is called from within
1179 expand_function_start, when the CFG still holds GIMPLE. */
997b8b4d
BW
1180 emit_insn_after (entry_insns, get_insns ());
1181 pop_topmost_sequence ();
1182 }
1183
1184 return tmp;
58db834b
BW
1185}
1186
1187
a46bbb5a
BW
1188/* Try to expand a block move operation to a sequence of RTL move
1189 instructions. If not optimizing, or if the block size is not a
1190 constant, or if the block is too large, the expansion fails and GCC
1191 falls back to calling memcpy().
03984308
BW
1192
1193 operands[0] is the destination
1194 operands[1] is the source
1195 operands[2] is the length
1196 operands[3] is the alignment */
1197
1198int
ffbc8796 1199xtensa_expand_block_move (rtx *operands)
03984308 1200{
7eda7cda
RH
1201 static const enum machine_mode mode_from_align[] =
1202 {
1203 VOIDmode, QImode, HImode, VOIDmode, SImode,
1204 };
1205
1206 rtx dst_mem = operands[0];
1207 rtx src_mem = operands[1];
1208 HOST_WIDE_INT bytes, align;
03984308 1209 int num_pieces, move_ratio;
7eda7cda
RH
1210 rtx temp[2];
1211 enum machine_mode mode[2];
1212 int amount[2];
1213 bool active[2];
1214 int phase = 0;
1215 int next;
1216 int offset_ld = 0;
1217 int offset_st = 0;
1218 rtx x;
03984308 1219
3bbc2af6 1220 /* If this is not a fixed size move, just call memcpy. */
03984308
BW
1221 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1222 return 0;
1223
7eda7cda
RH
1224 bytes = INTVAL (operands[2]);
1225 align = INTVAL (operands[3]);
1226
3bbc2af6 1227 /* Anything to move? */
03984308 1228 if (bytes <= 0)
7eda7cda 1229 return 0;
03984308
BW
1230
1231 if (align > MOVE_MAX)
1232 align = MOVE_MAX;
1233
3bbc2af6 1234 /* Decide whether to expand inline based on the optimization level. */
03984308
BW
1235 move_ratio = 4;
1236 if (optimize > 2)
1237 move_ratio = LARGEST_MOVE_RATIO;
3bbc2af6 1238 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
7eda7cda 1239 if (num_pieces > move_ratio)
03984308
BW
1240 return 0;
1241
7eda7cda
RH
1242 x = XEXP (dst_mem, 0);
1243 if (!REG_P (x))
1244 {
1245 x = force_reg (Pmode, x);
1246 dst_mem = replace_equiv_address (dst_mem, x);
1247 }
03984308 1248
7eda7cda
RH
1249 x = XEXP (src_mem, 0);
1250 if (!REG_P (x))
1251 {
1252 x = force_reg (Pmode, x);
1253 src_mem = replace_equiv_address (src_mem, x);
1254 }
03984308 1255
7eda7cda 1256 active[0] = active[1] = false;
03984308 1257
7eda7cda 1258 do
03984308 1259 {
7eda7cda
RH
1260 next = phase;
1261 phase ^= 1;
03984308 1262
7eda7cda 1263 if (bytes > 0)
03984308 1264 {
7eda7cda 1265 int next_amount;
03984308 1266
7eda7cda
RH
1267 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1268 next_amount = MIN (next_amount, align);
03984308 1269
7eda7cda
RH
1270 amount[next] = next_amount;
1271 mode[next] = mode_from_align[next_amount];
1272 temp[next] = gen_reg_rtx (mode[next]);
03984308 1273
7eda7cda
RH
1274 x = adjust_address (src_mem, mode[next], offset_ld);
1275 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
03984308 1276
7eda7cda
RH
1277 offset_ld += next_amount;
1278 bytes -= next_amount;
1279 active[next] = true;
1280 }
03984308 1281
7eda7cda
RH
1282 if (active[phase])
1283 {
1284 active[phase] = false;
1285
1286 x = adjust_address (dst_mem, mode[phase], offset_st);
1287 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
03984308 1288
7eda7cda
RH
1289 offset_st += amount[phase];
1290 }
03984308 1291 }
7eda7cda 1292 while (active[next]);
03984308 1293
7eda7cda 1294 return 1;
03984308
BW
1295}
1296
1297
1298void
ffbc8796 1299xtensa_expand_nonlocal_goto (rtx *operands)
03984308
BW
1300{
1301 rtx goto_handler = operands[1];
1302 rtx containing_fp = operands[3];
1303
3bbc2af6
KH
1304 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1305 is too big to generate in-line. */
03984308
BW
1306
1307 if (GET_CODE (containing_fp) != REG)
1308 containing_fp = force_reg (Pmode, containing_fp);
1309
03984308 1310 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
046845de 1311 LCT_NORMAL, VOIDmode, 2,
03984308
BW
1312 containing_fp, Pmode,
1313 goto_handler, Pmode);
1314}
1315
1316
e2500fed 1317static struct machine_function *
ffbc8796 1318xtensa_init_machine_status (void)
03984308 1319{
a9429e29 1320 return ggc_alloc_cleared_machine_function ();
03984308
BW
1321}
1322
1323
2a48b790
BW
1324/* Shift VAL of mode MODE left by COUNT bits. */
1325
1326static inline rtx
1327xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
1328{
1329 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1330 NULL_RTX, 1, OPTAB_DIRECT);
1331 return expand_simple_binop (SImode, ASHIFT, val, count,
1332 NULL_RTX, 1, OPTAB_DIRECT);
1333}
1334
1335
1336/* Structure to hold the initial parameters for a compare_and_swap operation
1337 in HImode and QImode. */
1338
1339struct alignment_context
1340{
1341 rtx memsi; /* SI aligned memory location. */
1342 rtx shift; /* Bit offset with regard to lsb. */
1343 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
1344 rtx modemaski; /* ~modemask */
1345};
1346
1347
1348/* Initialize structure AC for word access to HI and QI mode memory. */
1349
1350static void
1351init_alignment_context (struct alignment_context *ac, rtx mem)
1352{
1353 enum machine_mode mode = GET_MODE (mem);
1354 rtx byteoffset = NULL_RTX;
1355 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1356
1357 if (aligned)
1358 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1359 else
1360 {
1361 /* Alignment is unknown. */
1362 rtx addr, align;
1363
1364 /* Force the address into a register. */
1365 addr = force_reg (Pmode, XEXP (mem, 0));
1366
1367 /* Align it to SImode. */
1368 align = expand_simple_binop (Pmode, AND, addr,
1369 GEN_INT (-GET_MODE_SIZE (SImode)),
1370 NULL_RTX, 1, OPTAB_DIRECT);
1371 /* Generate MEM. */
1372 ac->memsi = gen_rtx_MEM (SImode, align);
1373 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
1374 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
1375 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
1376
1377 byteoffset = expand_simple_binop (Pmode, AND, addr,
1378 GEN_INT (GET_MODE_SIZE (SImode) - 1),
1379 NULL_RTX, 1, OPTAB_DIRECT);
1380 }
1381
1382 /* Calculate shiftcount. */
1383 if (TARGET_BIG_ENDIAN)
1384 {
1385 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
1386 if (!aligned)
1387 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
1388 NULL_RTX, 1, OPTAB_DIRECT);
1389 }
1390 else
1391 {
1392 if (aligned)
1393 ac->shift = NULL_RTX;
1394 else
1395 ac->shift = byteoffset;
1396 }
1397
1398 if (ac->shift != NULL_RTX)
1399 {
1400 /* Shift is the byte count, but we need the bitcount. */
1401 ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
1402 GEN_INT (BITS_PER_UNIT),
1403 NULL_RTX, 1, OPTAB_DIRECT);
1404 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1405 GEN_INT (GET_MODE_MASK (mode)),
1406 ac->shift,
1407 NULL_RTX, 1, OPTAB_DIRECT);
1408 }
1409 else
1410 ac->modemask = GEN_INT (GET_MODE_MASK (mode));
1411
1412 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
1413}
1414
1415
1416/* Expand an atomic compare and swap operation for HImode and QImode.
1417 MEM is the memory location, CMP the old value to compare MEM with
0a2aaacc 1418 and NEW_RTX the value to set if CMP == MEM. */
2a48b790
BW
1419
1420void
0a2aaacc 1421xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
2a48b790
BW
1422{
1423 enum machine_mode mode = GET_MODE (mem);
1424 struct alignment_context ac;
1425 rtx tmp, cmpv, newv, val;
1426 rtx oldval = gen_reg_rtx (SImode);
1427 rtx res = gen_reg_rtx (SImode);
1428 rtx csloop = gen_label_rtx ();
1429 rtx csend = gen_label_rtx ();
1430
1431 init_alignment_context (&ac, mem);
1432
1433 if (ac.shift != NULL_RTX)
1434 {
1435 cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
0a2aaacc 1436 new_rtx = xtensa_expand_mask_and_shift (new_rtx, mode, ac.shift);
2a48b790
BW
1437 }
1438
1439 /* Load the surrounding word into VAL with the MEM value masked out. */
1440 val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
1441 ac.modemaski, NULL_RTX, 1,
1442 OPTAB_DIRECT));
1443 emit_label (csloop);
1444
0a2aaacc 1445 /* Patch CMP and NEW_RTX into VAL at correct position. */
2a48b790
BW
1446 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
1447 NULL_RTX, 1, OPTAB_DIRECT));
0a2aaacc 1448 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
2a48b790
BW
1449 NULL_RTX, 1, OPTAB_DIRECT));
1450
1451 /* Jump to end if we're done. */
1452 emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
1453 emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);
1454
1455 /* Check for changes outside mode. */
1456 emit_move_insn (oldval, val);
1457 tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
1458 val, 1, OPTAB_DIRECT);
1459 if (tmp != val)
1460 emit_move_insn (val, tmp);
1461
1462 /* Loop internal if so. */
1463 emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);
1464
1465 emit_label (csend);
1466
1467 /* Return the correct part of the bitfield. */
1468 convert_move (target,
1469 (ac.shift == NULL_RTX ? res
1470 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
1471 NULL_RTX, 1, OPTAB_DIRECT)),
1472 1);
1473}
1474
1475
1476/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
1477 the default expansion works fine for SImode). MEM is the memory location
1478 and VAL the value to play with. If AFTER is true then store the value
1479 MEM holds after the operation, if AFTER is false then store the value MEM
1480 holds before the operation. If TARGET is zero then discard that value, else
1481 store it to TARGET. */
1482
1483void
1484xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1485 bool after)
1486{
1487 enum machine_mode mode = GET_MODE (mem);
1488 struct alignment_context ac;
1489 rtx csloop = gen_label_rtx ();
1490 rtx cmp, tmp;
1491 rtx old = gen_reg_rtx (SImode);
0a2aaacc 1492 rtx new_rtx = gen_reg_rtx (SImode);
2a48b790
BW
1493 rtx orig = NULL_RTX;
1494
1495 init_alignment_context (&ac, mem);
1496
1497 /* Prepare values before the compare-and-swap loop. */
1498 if (ac.shift != NULL_RTX)
1499 val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
1500 switch (code)
1501 {
1502 case PLUS:
1503 case MINUS:
1504 orig = gen_reg_rtx (SImode);
1505 convert_move (orig, val, 1);
1506 break;
1507
1508 case SET:
1509 case IOR:
1510 case XOR:
1511 break;
1512
1513 case MULT: /* NAND */
1514 case AND:
1515 /* val = "11..1<val>11..1" */
1516 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
1517 NULL_RTX, 1, OPTAB_DIRECT);
1518 break;
1519
1520 default:
1521 gcc_unreachable ();
1522 }
1523
1524 /* Load full word. Subsequent loads are performed by S32C1I. */
1525 cmp = force_reg (SImode, ac.memsi);
1526
1527 emit_label (csloop);
1528 emit_move_insn (old, cmp);
1529
1530 switch (code)
1531 {
1532 case PLUS:
1533 case MINUS:
1534 val = expand_simple_binop (SImode, code, old, orig,
1535 NULL_RTX, 1, OPTAB_DIRECT);
1536 val = expand_simple_binop (SImode, AND, val, ac.modemask,
1537 NULL_RTX, 1, OPTAB_DIRECT);
1538 /* FALLTHRU */
1539 case SET:
1540 tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
1541 NULL_RTX, 1, OPTAB_DIRECT);
1542 tmp = expand_simple_binop (SImode, IOR, tmp, val,
0a2aaacc 1543 new_rtx, 1, OPTAB_DIRECT);
2a48b790
BW
1544 break;
1545
1546 case AND:
1547 case IOR:
1548 case XOR:
1549 tmp = expand_simple_binop (SImode, code, old, val,
0a2aaacc 1550 new_rtx, 1, OPTAB_DIRECT);
2a48b790
BW
1551 break;
1552
1553 case MULT: /* NAND */
1554 tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
1555 NULL_RTX, 1, OPTAB_DIRECT);
1556 tmp = expand_simple_binop (SImode, AND, tmp, val,
0a2aaacc 1557 new_rtx, 1, OPTAB_DIRECT);
2a48b790
BW
1558 break;
1559
1560 default:
1561 gcc_unreachable ();
1562 }
1563
0a2aaacc
KG
1564 if (tmp != new_rtx)
1565 emit_move_insn (new_rtx, tmp);
1566 emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new_rtx));
2a48b790
BW
1567 emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);
1568
1569 if (target)
1570 {
0a2aaacc 1571 tmp = (after ? new_rtx : cmp);
2a48b790
BW
1572 convert_move (target,
1573 (ac.shift == NULL_RTX ? tmp
1574 : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
1575 NULL_RTX, 1, OPTAB_DIRECT)),
1576 1);
1577 }
1578}
1579
1580
03984308 1581void
ffbc8796 1582xtensa_setup_frame_addresses (void)
03984308 1583{
b52b1749 1584 /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true. */
03984308
BW
1585 cfun->machine->accesses_prev_frame = 1;
1586
1587 emit_library_call
1588 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
046845de 1589 LCT_NORMAL, VOIDmode, 0);
03984308
BW
1590}
1591
1592
638db43e
BW
1593/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1594 a comment showing where the end of the loop is. However, if there is a
03984308 1595 label or a branch at the end of the loop then we need to place a nop
638db43e 1596 there. If the loop ends with a label we need the nop so that branches
839a4992
KH
1597 targeting that label will target the nop (and thus remain in the loop),
1598 instead of targeting the instruction after the loop (and thus exiting
638db43e 1599 the loop). If the loop ends with a branch, we need the nop in case the
839a4992 1600 branch is targeting a location inside the loop. When the branch
03984308
BW
1601 executes it will cause the loop count to be decremented even if it is
1602 taken (because it is the last instruction in the loop), so we need to
1603 nop after the branch to prevent the loop count from being decremented
638db43e 1604 when the branch is taken. */
03984308
BW
1605
1606void
ffbc8796 1607xtensa_emit_loop_end (rtx insn, rtx *operands)
03984308
BW
1608{
1609 char done = 0;
1610
1611 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1612 {
1613 switch (GET_CODE (insn))
1614 {
1615 case NOTE:
1616 case BARRIER:
1617 break;
1618
1619 case CODE_LABEL:
0bd0703d 1620 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
03984308
BW
1621 done = 1;
1622 break;
1623
1624 default:
1625 {
1626 rtx body = PATTERN (insn);
1627
1628 if (GET_CODE (body) == JUMP_INSN)
1629 {
0bd0703d 1630 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
03984308
BW
1631 done = 1;
1632 }
1633 else if ((GET_CODE (body) != USE)
1634 && (GET_CODE (body) != CLOBBER))
1635 done = 1;
1636 }
1637 break;
1638 }
1639 }
1640
1641 output_asm_insn ("# loop end for %0", operands);
1642}
1643
1644
036a2b7a
BW
1645char *
1646xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1647{
1648 static char result[64];
1649 enum rtx_code code;
1650 const char *op;
1651
1652 code = GET_CODE (operands[3]);
1653 switch (code)
1654 {
1655 case EQ: op = inverted ? "ne" : "eq"; break;
1656 case NE: op = inverted ? "eq" : "ne"; break;
1657 case LT: op = inverted ? "ge" : "lt"; break;
1658 case GE: op = inverted ? "lt" : "ge"; break;
1659 case LTU: op = inverted ? "geu" : "ltu"; break;
1660 case GEU: op = inverted ? "ltu" : "geu"; break;
1661 default: gcc_unreachable ();
1662 }
1663
1664 if (immed)
1665 {
1666 if (INTVAL (operands[1]) == 0)
1667 sprintf (result, "b%sz%s\t%%0, %%2", op,
1668 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1669 else
1670 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1671 }
1672 else
1673 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1674
1675 return result;
1676}
1677
1678
1679char *
1680xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1681{
1682 static char result[64];
1683 const char *op;
1684
1685 switch (GET_CODE (operands[3]))
1686 {
1687 case EQ: op = inverted ? "bs" : "bc"; break;
1688 case NE: op = inverted ? "bc" : "bs"; break;
1689 default: gcc_unreachable ();
1690 }
1691
1692 if (immed)
1693 {
1694 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1695 operands[1] = GEN_INT (bitnum);
1696 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1697 }
1698 else
1699 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1700
1701 return result;
1702}
1703
1704
1705char *
1706xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1707{
1708 static char result[64];
1709 enum rtx_code code;
1710 const char *op;
1711
1712 code = GET_CODE (operands[4]);
1713 if (isbool)
1714 {
1715 switch (code)
1716 {
1717 case EQ: op = inverted ? "t" : "f"; break;
1718 case NE: op = inverted ? "f" : "t"; break;
1719 default: gcc_unreachable ();
1720 }
1721 }
1722 else
1723 {
1724 switch (code)
1725 {
1726 case EQ: op = inverted ? "nez" : "eqz"; break;
1727 case NE: op = inverted ? "eqz" : "nez"; break;
1728 case LT: op = inverted ? "gez" : "ltz"; break;
1729 case GE: op = inverted ? "ltz" : "gez"; break;
1730 default: gcc_unreachable ();
1731 }
1732 }
1733
1734 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1735 op, isfp ? ".s" : "", inverted ? 3 : 2);
1736 return result;
1737}
1738
1739
03984308 1740char *
ffbc8796 1741xtensa_emit_call (int callop, rtx *operands)
03984308 1742{
b64a1b53 1743 static char result[64];
03984308
BW
1744 rtx tgt = operands[callop];
1745
1746 if (GET_CODE (tgt) == CONST_INT)
1d0ea52e 1747 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
03984308
BW
1748 else if (register_operand (tgt, VOIDmode))
1749 sprintf (result, "callx8\t%%%d", callop);
1750 else
1751 sprintf (result, "call8\t%%%d", callop);
1752
1753 return result;
1754}
1755
1756
da1f39e4
BW
1757bool
1758xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
1759{
1760 /* Allow constant pool addresses. */
1761 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
6a7a462c
BW
1762 && ! TARGET_CONST16 && constantpool_address_p (addr)
1763 && ! xtensa_tls_referenced_p (addr))
da1f39e4
BW
1764 return true;
1765
1766 while (GET_CODE (addr) == SUBREG)
1767 addr = SUBREG_REG (addr);
1768
1769 /* Allow base registers. */
1770 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1771 return true;
1772
1773 /* Check for "register + offset" addressing. */
1774 if (GET_CODE (addr) == PLUS)
1775 {
1776 rtx xplus0 = XEXP (addr, 0);
1777 rtx xplus1 = XEXP (addr, 1);
1778 enum rtx_code code0;
1779 enum rtx_code code1;
1780
1781 while (GET_CODE (xplus0) == SUBREG)
1782 xplus0 = SUBREG_REG (xplus0);
1783 code0 = GET_CODE (xplus0);
1784
1785 while (GET_CODE (xplus1) == SUBREG)
1786 xplus1 = SUBREG_REG (xplus1);
1787 code1 = GET_CODE (xplus1);
1788
1789 /* Swap operands if necessary so the register is first. */
1790 if (code0 != REG && code1 == REG)
1791 {
1792 xplus0 = XEXP (addr, 1);
1793 xplus1 = XEXP (addr, 0);
1794 code0 = GET_CODE (xplus0);
1795 code1 = GET_CODE (xplus1);
1796 }
1797
1798 if (code0 == REG && BASE_REG_P (xplus0, strict)
1799 && code1 == CONST_INT
1800 && xtensa_mem_offset (INTVAL (xplus1), mode))
1801 return true;
1802 }
1803
1804 return false;
1805}
1806
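/* Illustrative sketch (not part of this file): the "register + offset" case
   above ultimately relies on xtensa_mem_offset (), which tests whether a
   constant displacement fits the offset field of an Xtensa load/store.
   Assuming the usual encoding -- an unsigned 8-bit field scaled by the
   access size -- the test looks roughly like this.  */
#if 0
#include <stdbool.h>

static bool
sketch_mem_offset_ok (long offset, int access_size)
{
  /* The offset must be non-negative, a multiple of the access size, and
     representable in 8 bits after scaling.  */
  if (offset < 0 || offset % access_size != 0)
    return false;
  return offset / access_size <= 255;
}

/* sketch_mem_offset_ok (1020, 4) -> true; sketch_mem_offset_ok (1024, 4) -> false.  */
#endif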
1807
6a7a462c
BW
1808/* Construct the SYMBOL_REF for the _TLS_MODULE_BASE_ symbol. */
1809
1810static GTY(()) rtx xtensa_tls_module_base_symbol;
1811
1812static rtx
1813xtensa_tls_module_base (void)
1814{
1815 if (! xtensa_tls_module_base_symbol)
1816 {
1817 xtensa_tls_module_base_symbol =
1818 gen_rtx_SYMBOL_REF (Pmode, "_TLS_MODULE_BASE_");
1819 SYMBOL_REF_FLAGS (xtensa_tls_module_base_symbol)
1820 |= TLS_MODEL_GLOBAL_DYNAMIC << SYMBOL_FLAG_TLS_SHIFT;
1821 }
1822
1823 return xtensa_tls_module_base_symbol;
1824}
1825
1826
1827static rtx
1828xtensa_call_tls_desc (rtx sym, rtx *retp)
1829{
1830 rtx fn, arg, a10, call_insn, insns;
1831
1832 start_sequence ();
1833 fn = gen_reg_rtx (Pmode);
1834 arg = gen_reg_rtx (Pmode);
1835 a10 = gen_rtx_REG (Pmode, 10);
1836
1837 emit_insn (gen_tls_func (fn, sym));
1838 emit_insn (gen_tls_arg (arg, sym));
1839 emit_move_insn (a10, arg);
1840 call_insn = emit_call_insn (gen_tls_call (a10, fn, sym, const1_rtx));
1841 CALL_INSN_FUNCTION_USAGE (call_insn)
1842 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_USE (VOIDmode, a10),
1843 CALL_INSN_FUNCTION_USAGE (call_insn));
1844 insns = get_insns ();
1845 end_sequence ();
1846
1847 *retp = a10;
1848 return insns;
1849}
1850
1851
1852static rtx
1853xtensa_legitimize_tls_address (rtx x)
1854{
1855 unsigned int model = SYMBOL_REF_TLS_MODEL (x);
1856 rtx dest, tp, ret, modbase, base, addend, insns;
1857
1858 dest = gen_reg_rtx (Pmode);
1859 switch (model)
1860 {
1861 case TLS_MODEL_GLOBAL_DYNAMIC:
1862 insns = xtensa_call_tls_desc (x, &ret);
1863 emit_libcall_block (insns, dest, ret, x);
1864 break;
1865
1866 case TLS_MODEL_LOCAL_DYNAMIC:
1867 base = gen_reg_rtx (Pmode);
1868 modbase = xtensa_tls_module_base ();
1869 insns = xtensa_call_tls_desc (modbase, &ret);
1870 emit_libcall_block (insns, base, ret, modbase);
1871 addend = force_reg (SImode, gen_sym_DTPOFF (x));
1872 emit_insn (gen_addsi3 (dest, base, addend));
1873 break;
1874
1875 case TLS_MODEL_INITIAL_EXEC:
1876 case TLS_MODEL_LOCAL_EXEC:
1877 tp = gen_reg_rtx (SImode);
1878 emit_insn (gen_load_tp (tp));
1879 addend = force_reg (SImode, gen_sym_TPOFF (x));
1880 emit_insn (gen_addsi3 (dest, tp, addend));
1881 break;
1882
1883 default:
1884 gcc_unreachable ();
1885 }
1886
1887 return dest;
1888}
1889
1890
da1f39e4
BW
1891rtx
1892xtensa_legitimize_address (rtx x,
1893 rtx oldx ATTRIBUTE_UNUSED,
1894 enum machine_mode mode)
1895{
6a7a462c
BW
1896 if (xtensa_tls_symbol_p (x))
1897 return xtensa_legitimize_tls_address (x);
1898
da1f39e4
BW
1899 if (GET_CODE (x) == PLUS)
1900 {
1901 rtx plus0 = XEXP (x, 0);
1902 rtx plus1 = XEXP (x, 1);
1903
1904 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1905 {
1906 plus0 = XEXP (x, 1);
1907 plus1 = XEXP (x, 0);
1908 }
1909
1910 /* Try to split up the offset to use an ADDMI instruction. */
1911 if (GET_CODE (plus0) == REG
1912 && GET_CODE (plus1) == CONST_INT
1913 && !xtensa_mem_offset (INTVAL (plus1), mode)
1914 && !xtensa_simm8 (INTVAL (plus1))
1915 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1916 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1917 {
1918 rtx temp = gen_reg_rtx (Pmode);
1919 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1920 emit_insn (gen_rtx_SET (Pmode, temp,
1921 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1922 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1923 }
1924 }
1925
506d7b68 1926 return x;
da1f39e4
BW
1927}
1928
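/* Illustrative sketch (not part of this file): the ADDMI splitting above
   breaks an out-of-range displacement into a multiple of 256, which fits
   the signed 8-bit-scaled-by-256 immediate of an ADDMI instruction, plus a
   small residue that still fits the load/store offset field.  The register
   names in the printed assembly are arbitrary.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long offset = 0x1230;          /* too large for an l32i offset field */
  long high = offset & ~0xffL;   /* 0x1200: folded into an ADDMI       */
  long low  = offset & 0xffL;    /* 0x30:   left in the memory operand */

  printf ("addmi\ta9, a8, %ld\n", high);
  printf ("l32i\ta2, a9, %ld\n", low);
  return 0;
}
#endif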
1929
6a7a462c
BW
1930/* Helper for xtensa_tls_referenced_p. */
1931
1932static int
1933xtensa_tls_referenced_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1934{
1935 if (GET_CODE (*x) == SYMBOL_REF)
1936 return SYMBOL_REF_TLS_MODEL (*x) != 0;
1937
1938 /* Ignore TLS references that have already been legitimized. */
1939 if (GET_CODE (*x) == UNSPEC)
1940 {
1941 switch (XINT (*x, 1))
1942 {
1943 case UNSPEC_TPOFF:
1944 case UNSPEC_DTPOFF:
1945 case UNSPEC_TLS_FUNC:
1946 case UNSPEC_TLS_ARG:
1947 case UNSPEC_TLS_CALL:
1948 return -1;
1949 default:
1950 break;
1951 }
1952 }
1953
1954 return 0;
1955}
1956
1957
1958/* Return TRUE if X contains any TLS symbol references. */
1959
1960bool
1961xtensa_tls_referenced_p (rtx x)
1962{
1963 if (! TARGET_HAVE_TLS)
1964 return false;
1965
1966 return for_each_rtx (&x, xtensa_tls_referenced_p_1, NULL);
1967}
1968
1969
b0c6e48f 1970/* Return the debugger register number to use for 'regno'. */
03984308
BW
1971
1972int
ffbc8796 1973xtensa_dbx_register_number (int regno)
03984308
BW
1974{
1975 int first = -1;
633e4eb4
BW
1976
1977 if (GP_REG_P (regno))
1978 {
1979 regno -= GP_REG_FIRST;
1980 first = 0;
1981 }
1982 else if (BR_REG_P (regno))
1983 {
1984 regno -= BR_REG_FIRST;
1985 first = 16;
1986 }
1987 else if (FP_REG_P (regno))
1988 {
1989 regno -= FP_REG_FIRST;
b0c6e48f 1990 first = 48;
633e4eb4 1991 }
03984308
BW
1992 else if (ACC_REG_P (regno))
1993 {
b0c6e48f
BW
1994 first = 0x200; /* Start of Xtensa special registers. */
1995 regno = 16; /* ACCLO is special register 16. */
03984308
BW
1996 }
1997
1998 /* When optimizing, we sometimes get asked about pseudo-registers
638db43e 1999 that don't represent hard registers. Return 0 for these. */
03984308
BW
2000 if (first == -1)
2001 return 0;
2002
2003 return first + regno;
2004}
2005
2006
2007/* Argument support functions. */
2008
2009/* Initialize CUMULATIVE_ARGS for a function. */
2010
2011void
997b8b4d 2012init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
03984308
BW
2013{
2014 cum->arg_words = 0;
997b8b4d 2015 cum->incoming = incoming;
03984308
BW
2016}
2017
ffbc8796 2018
03984308
BW
2019/* Advance the argument to the next argument position. */
2020
626a4b31
NF
2021static void
2022xtensa_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2023 const_tree type, bool named ATTRIBUTE_UNUSED)
03984308
BW
2024{
2025 int words, max;
2026 int *arg_words;
2027
2028 arg_words = &cum->arg_words;
2029 max = MAX_ARGS_IN_REGISTERS;
2030
2031 words = (((mode != BLKmode)
2032 ? (int) GET_MODE_SIZE (mode)
2033 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2034
85d91d5b
BW
2035 if (*arg_words < max
2036 && (targetm.calls.must_pass_in_stack (mode, type)
2037 || *arg_words + words > max))
03984308
BW
2038 *arg_words = max;
2039
2040 *arg_words += words;
2041}
2042
2043
2044/* Return an RTL expression containing the register for the given mode,
368ebcd6 2045   or 0 if the argument is to be passed on the stack.  INCOMING_P is true
ffbc8796 2046 if this is an incoming argument to the current function. */
03984308 2047
626a4b31 2048static rtx
046845de 2049xtensa_function_arg_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
626a4b31 2050 const_tree type, bool incoming_p)
03984308
BW
2051{
2052 int regbase, words, max;
2053 int *arg_words;
2054 int regno;
03984308
BW
2055
2056 arg_words = &cum->arg_words;
2057 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
2058 max = MAX_ARGS_IN_REGISTERS;
2059
2060 words = (((mode != BLKmode)
2061 ? (int) GET_MODE_SIZE (mode)
2062 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2063
2064 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
822e895c 2065 {
d2348985 2066 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
822e895c
BW
2067 *arg_words = (*arg_words + align - 1) & -align;
2068 }
03984308
BW
2069
2070 if (*arg_words + words > max)
2071 return (rtx)0;
2072
2073 regno = regbase + *arg_words;
03984308 2074
997b8b4d
BW
2075 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
2076 cfun->machine->need_a7_copy = true;
03984308 2077
997b8b4d 2078 return gen_rtx_REG (mode, regno);
03984308
BW
2079}
2080
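/* Illustrative sketch (not part of this file): argument assignment above
   works in 32-bit words.  Assuming UNITS_PER_WORD == 4 and
   MAX_ARGS_IN_REGISTERS == 6, the bookkeeping is roughly as follows.  */
#if 0
static int
sketch_assign_arg (int *arg_words, int size_in_bytes, int align_in_bytes)
{
  int words = (size_in_bytes + 3) / 4;
  int first;

  /* Round the running word index up for over-aligned types.  */
  if (align_in_bytes > 4)
    {
      int align_words = align_in_bytes / 4;
      *arg_words = (*arg_words + align_words - 1) & -align_words;
    }

  /* An argument that does not fit entirely in the six register words is
     passed on the stack; it is never split between registers and stack.  */
  if (*arg_words + words > 6)
    return -1;

  first = *arg_words;
  *arg_words += words;
  return first;           /* word offset of the first argument register used */
}
#endif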
626a4b31
NF
2081/* Implement TARGET_FUNCTION_ARG. */
2082
2083static rtx
2084xtensa_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2085 const_tree type, bool named ATTRIBUTE_UNUSED)
2086{
2087 return xtensa_function_arg_1 (cum, mode, type, false);
2088}
2089
2090/* Implement TARGET_FUNCTION_INCOMING_ARG. */
2091
2092static rtx
2093xtensa_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2094 const_tree type, bool named ATTRIBUTE_UNUSED)
2095{
2096 return xtensa_function_arg_1 (cum, mode, type, true);
2097}
03984308 2098
c2ed6cf8 2099static unsigned int
84ec98d2 2100xtensa_function_arg_boundary (enum machine_mode mode, const_tree type)
d2348985
BW
2101{
2102 unsigned int alignment;
2103
2104 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2105 if (alignment < PARM_BOUNDARY)
2106 alignment = PARM_BOUNDARY;
2107 if (alignment > STACK_BOUNDARY)
2108 alignment = STACK_BOUNDARY;
2109 return alignment;
2110}
2111
2112
6e5ff6e7 2113static bool
586de218 2114xtensa_return_in_msb (const_tree valtype)
6e5ff6e7
BW
2115{
2116 return (TARGET_BIG_ENDIAN
2117 && AGGREGATE_TYPE_P (valtype)
2118 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
2119}
2120
2121
c5387660
JM
2122static void
2123xtensa_option_override (void)
03984308
BW
2124{
2125 int regno;
2126 enum machine_mode mode;
2127
2128 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2129 error ("boolean registers required for the floating-point option");
2130
638db43e 2131 /* Set up array giving whether a given register can hold a given mode. */
03984308
BW
2132 for (mode = VOIDmode;
2133 mode != MAX_MACHINE_MODE;
2134 mode = (enum machine_mode) ((int) mode + 1))
2135 {
2136 int size = GET_MODE_SIZE (mode);
0a2aaacc 2137 enum mode_class mclass = GET_MODE_CLASS (mode);
03984308
BW
2138
2139 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2140 {
2141 int temp;
2142
2143 if (ACC_REG_P (regno))
f42f5a1b 2144 temp = (TARGET_MAC16
0a2aaacc 2145 && (mclass == MODE_INT) && (size <= UNITS_PER_WORD));
03984308
BW
2146 else if (GP_REG_P (regno))
2147 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
2148 else if (FP_REG_P (regno))
2149 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
2150 else if (BR_REG_P (regno))
2151 temp = (TARGET_BOOLEANS && (mode == CCmode));
2152 else
2153 temp = FALSE;
2154
2155 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
2156 }
2157 }
2158
2159 init_machine_status = xtensa_init_machine_status;
03984308 2160
f42f5a1b
BW
2161 /* Check PIC settings. PIC is only supported when using L32R
2162 instructions, and some targets need to always use PIC. */
2163 if (flag_pic && TARGET_CONST16)
2164 error ("-f%s is not supported with CONST16 instructions",
2165 (flag_pic > 1 ? "PIC" : "pic"));
1b408ba1
SA
2166 else if (TARGET_FORCE_NO_PIC)
2167 flag_pic = 0;
f42f5a1b
BW
2168 else if (XTENSA_ALWAYS_PIC)
2169 {
2170 if (TARGET_CONST16)
2171 error ("PIC is required but not supported with CONST16 instructions");
2172 flag_pic = 1;
2173 }
2174 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
2175 if (flag_pic > 1)
03984308 2176 flag_pic = 1;
166b25dc
BW
2177 if (flag_pic && !flag_pie)
2178 flag_shlib = 1;
87c8b4be
CT
2179
2180 /* Hot/cold partitioning does not work on this architecture, because of
2181 constant pools (the load instruction cannot necessarily reach that far).
2182 Therefore disable it on this architecture. */
2183 if (flag_reorder_blocks_and_partition)
2184 {
2185 flag_reorder_blocks_and_partition = 0;
2186 flag_reorder_blocks = 1;
2187 }
03984308
BW
2188}
2189
03984308
BW
2190/* A C compound statement to output to stdio stream STREAM the
2191 assembler syntax for an instruction operand X. X is an RTL
2192 expression.
2193
2194 CODE is a value that can be used to specify one of several ways
2195 of printing the operand. It is used when identical operands
2196 must be printed differently depending on the context. CODE
2197 comes from the '%' specification that was used to request
2198 printing of the operand. If the specification was just '%DIGIT'
2199 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
2200 is the ASCII code for LTR.
2201
2202 If X is a register, this macro should print the register's name.
2203 The names can be found in an array 'reg_names' whose type is
2204 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
2205
2206 When the machine description has a specification '%PUNCT' (a '%'
2207 followed by a punctuation character), this macro is called with
2208 a null pointer for X and the punctuation character for CODE.
2209
2210 'a', 'c', 'l', and 'n' are reserved.
633e4eb4 2211
03984308
BW
2212 The Xtensa specific codes are:
2213
2214 'd' CONST_INT, print as signed decimal
2215 'x' CONST_INT, print as signed hexadecimal
2216 'K' CONST_INT, print number of bits in mask for EXTUI
2217 'R' CONST_INT, print (X & 0x1f)
2218 'L' CONST_INT, print ((32 - X) & 0x1f)
2219 'D' REG, print second register of double-word register operand
2220 'N' MEM, print address of next word following a memory operand
2221 'v' MEM, if memory reference is volatile, output a MEMW before it
f42f5a1b
BW
2222 't' any constant, add "@h" suffix for top 16 bits
2223 'b' any constant, add "@l" suffix for bottom 16 bits
03984308
BW
2224*/
2225
2226static void
ffbc8796 2227printx (FILE *file, signed int val)
03984308 2228{
ffbc8796 2229 /* Print a hexadecimal value in a nice way. */
03984308
BW
2230 if ((val > -0xa) && (val < 0xa))
2231 fprintf (file, "%d", val);
2232 else if (val < 0)
2233 fprintf (file, "-0x%x", -val);
2234 else
2235 fprintf (file, "0x%x", val);
2236}
2237
2238
2239void
ffbc8796 2240print_operand (FILE *file, rtx x, int letter)
03984308 2241{
f42f5a1b 2242 if (!x)
03984308
BW
2243 error ("PRINT_OPERAND null pointer");
2244
f42f5a1b 2245 switch (letter)
03984308 2246 {
f42f5a1b
BW
2247 case 'D':
2248 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2249 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
2250 else
2251 output_operand_lossage ("invalid %%D value");
2252 break;
03984308 2253
f42f5a1b
BW
2254 case 'v':
2255 if (GET_CODE (x) == MEM)
2256 {
2257 /* For a volatile memory reference, emit a MEMW before the
2258 load or store. */
66e58b33 2259 if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
f42f5a1b
BW
2260 fprintf (file, "memw\n\t");
2261 }
2262 else
2263 output_operand_lossage ("invalid %%v value");
2264 break;
03984308 2265
f42f5a1b
BW
2266 case 'N':
2267 if (GET_CODE (x) == MEM
2268 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2269 {
2270 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
2271 output_address (XEXP (x, 0));
2272 }
2273 else
2274 output_operand_lossage ("invalid %%N value");
2275 break;
03984308 2276
f42f5a1b
BW
2277 case 'K':
2278 if (GET_CODE (x) == CONST_INT)
03984308 2279 {
f42f5a1b
BW
2280 int num_bits = 0;
2281 unsigned val = INTVAL (x);
2282 while (val & 1)
2283 {
2284 num_bits += 1;
2285 val = val >> 1;
2286 }
2287 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2288 fatal_insn ("invalid mask", x);
03984308 2289
f42f5a1b
BW
2290 fprintf (file, "%d", num_bits);
2291 }
2292 else
2293 output_operand_lossage ("invalid %%K value");
2294 break;
03984308 2295
f42f5a1b
BW
2296 case 'L':
2297 if (GET_CODE (x) == CONST_INT)
2298 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2299 else
2300 output_operand_lossage ("invalid %%L value");
2301 break;
03984308 2302
f42f5a1b
BW
2303 case 'R':
2304 if (GET_CODE (x) == CONST_INT)
2305 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2306 else
2307 output_operand_lossage ("invalid %%R value");
2308 break;
03984308 2309
f42f5a1b
BW
2310 case 'x':
2311 if (GET_CODE (x) == CONST_INT)
2312 printx (file, INTVAL (x));
2313 else
2314 output_operand_lossage ("invalid %%x value");
2315 break;
03984308 2316
f42f5a1b
BW
2317 case 'd':
2318 if (GET_CODE (x) == CONST_INT)
2319 fprintf (file, "%ld", INTVAL (x));
2320 else
2321 output_operand_lossage ("invalid %%d value");
2322 break;
03984308 2323
f42f5a1b
BW
2324 case 't':
2325 case 'b':
2326 if (GET_CODE (x) == CONST_INT)
2327 {
2328 printx (file, INTVAL (x));
2329 fputs (letter == 't' ? "@h" : "@l", file);
2330 }
2331 else if (GET_CODE (x) == CONST_DOUBLE)
2332 {
2333 REAL_VALUE_TYPE r;
2334 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2335 if (GET_MODE (x) == SFmode)
2336 {
2337 long l;
2338 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2339 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2340 }
2341 else
2342 output_operand_lossage ("invalid %%t/%%b value");
2343 }
2344 else if (GET_CODE (x) == CONST)
2345 {
2346 /* X must be a symbolic constant on ELF. Write an expression
2347 suitable for 'const16' that sets the high or low 16 bits. */
2348 if (GET_CODE (XEXP (x, 0)) != PLUS
2349 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2350 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2351 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2352 output_operand_lossage ("invalid %%t/%%b value");
2353 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2354 fputs (letter == 't' ? "@h" : "@l", file);
2355 /* There must be a non-alphanumeric character between 'h' or 'l'
2356 and the number. The '-' is added by print_operand() already. */
2357 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2358 fputs ("+", file);
2359 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2360 }
2361 else
633e4eb4 2362 {
f42f5a1b
BW
2363 output_addr_const (file, x);
2364 fputs (letter == 't' ? "@h" : "@l", file);
03984308
BW
2365 }
2366 break;
2367
2368 default:
f42f5a1b
BW
2369 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2370 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2371 else if (GET_CODE (x) == MEM)
2372 output_address (XEXP (x, 0));
2373 else if (GET_CODE (x) == CONST_INT)
2374 fprintf (file, "%ld", INTVAL (x));
2375 else
2376 output_addr_const (file, x);
03984308
BW
2377 }
2378}
2379
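/* Illustrative sketch (not part of this file): the '%K' case above prints
   the width of a low-order bit mask for use with EXTUI.  The operand must
   have the form (1 << n) - 1 with 1 <= n <= 16; the loop counts the
   trailing one bits and rejects everything else.  */
#if 0
#include <stdbool.h>

static bool
sketch_extui_mask_width (unsigned val, int *width)
{
  int num_bits = 0;

  while (val & 1)
    {
      num_bits++;
      val >>= 1;
    }
  if (val != 0 || num_bits == 0 || num_bits > 16)
    return false;          /* not a contiguous low-order mask */
  *width = num_bits;       /* e.g. 0x00ff -> 8 */
  return true;
}
#endif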
2380
2381/* A C compound statement to output to stdio stream STREAM the
2382 assembler syntax for an instruction operand that is a memory
fb49053f 2383 reference whose address is ADDR. ADDR is an RTL expression. */
03984308
BW
2384
2385void
ffbc8796 2386print_operand_address (FILE *file, rtx addr)
03984308
BW
2387{
2388 if (!addr)
2389 error ("PRINT_OPERAND_ADDRESS, null pointer");
2390
2391 switch (GET_CODE (addr))
2392 {
2393 default:
2394 fatal_insn ("invalid address", addr);
2395 break;
2396
2397 case REG:
2398 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2399 break;
2400
2401 case PLUS:
2402 {
2403 rtx reg = (rtx)0;
2404 rtx offset = (rtx)0;
2405 rtx arg0 = XEXP (addr, 0);
2406 rtx arg1 = XEXP (addr, 1);
2407
2408 if (GET_CODE (arg0) == REG)
2409 {
2410 reg = arg0;
2411 offset = arg1;
2412 }
2413 else if (GET_CODE (arg1) == REG)
2414 {
2415 reg = arg1;
2416 offset = arg0;
2417 }
2418 else
2419 fatal_insn ("no register in address", addr);
2420
2421 if (CONSTANT_P (offset))
2422 {
2423 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2424 output_addr_const (file, offset);
2425 }
2426 else
2427 fatal_insn ("address offset not a constant", addr);
2428 }
2429 break;
2430
2431 case LABEL_REF:
2432 case SYMBOL_REF:
2433 case CONST_INT:
2434 case CONST:
2435 output_addr_const (file, addr);
2436 break;
2437 }
2438}
2439
2440
da1f39e4
BW
2441bool
2442xtensa_output_addr_const_extra (FILE *fp, rtx x)
2443{
2444 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2445 {
2446 switch (XINT (x, 1))
2447 {
6a7a462c
BW
2448 case UNSPEC_TPOFF:
2449 output_addr_const (fp, XVECEXP (x, 0, 0));
2450 fputs ("@TPOFF", fp);
2451 return true;
2452 case UNSPEC_DTPOFF:
2453 output_addr_const (fp, XVECEXP (x, 0, 0));
2454 fputs ("@DTPOFF", fp);
2455 return true;
da1f39e4
BW
2456 case UNSPEC_PLT:
2457 if (flag_pic)
2458 {
2459 output_addr_const (fp, XVECEXP (x, 0, 0));
2460 fputs ("@PLT", fp);
2461 return true;
2462 }
2463 break;
2464 default:
2465 break;
2466 }
2467 }
2468 return false;
2469}
2470
2471
03984308 2472void
ffbc8796 2473xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
03984308
BW
2474{
2475 long value_long[2];
b216cd4a 2476 REAL_VALUE_TYPE r;
03984308 2477 int size;
74ed13f5 2478 rtx first, second;
03984308
BW
2479
2480 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2481
2482 switch (GET_MODE_CLASS (mode))
2483 {
2484 case MODE_FLOAT:
177b6be0 2485 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
03984308 2486
b216cd4a 2487 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
03984308
BW
2488 switch (mode)
2489 {
2490 case SFmode:
b216cd4a 2491 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
4575a800
BW
2492 if (HOST_BITS_PER_LONG > 32)
2493 value_long[0] &= 0xffffffff;
b216cd4a 2494 fprintf (file, "0x%08lx\n", value_long[0]);
03984308
BW
2495 break;
2496
2497 case DFmode:
b216cd4a 2498 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
4575a800
BW
2499 if (HOST_BITS_PER_LONG > 32)
2500 {
2501 value_long[0] &= 0xffffffff;
2502 value_long[1] &= 0xffffffff;
2503 }
b216cd4a
ZW
2504 fprintf (file, "0x%08lx, 0x%08lx\n",
2505 value_long[0], value_long[1]);
03984308
BW
2506 break;
2507
2508 default:
177b6be0 2509 gcc_unreachable ();
03984308
BW
2510 }
2511
2512 break;
2513
2514 case MODE_INT:
2515 case MODE_PARTIAL_INT:
2516 size = GET_MODE_SIZE (mode);
177b6be0 2517 switch (size)
03984308 2518 {
177b6be0 2519 case 4:
03984308
BW
2520 output_addr_const (file, x);
2521 fputs ("\n", file);
177b6be0
NS
2522 break;
2523
2524 case 8:
74ed13f5
BW
2525 split_double (x, &first, &second);
2526 output_addr_const (file, first);
03984308 2527 fputs (", ", file);
74ed13f5 2528 output_addr_const (file, second);
03984308 2529 fputs ("\n", file);
177b6be0
NS
2530 break;
2531
2532 default:
2533 gcc_unreachable ();
03984308 2534 }
03984308
BW
2535 break;
2536
2537 default:
177b6be0 2538 gcc_unreachable ();
03984308
BW
2539 }
2540}
2541
2542
2543/* Return the bytes needed to compute the frame pointer from the current
638db43e 2544 stack pointer. */
03984308
BW
2545
2546#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2547#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2548
2549long
ffbc8796 2550compute_frame_size (int size)
03984308 2551{
ffbc8796 2552 /* Add space for the incoming static chain value. */
6de9cd9a 2553 if (cfun->static_chain_decl != NULL)
03984308
BW
2554 size += (1 * UNITS_PER_WORD);
2555
2556 xtensa_current_frame_size =
2557 XTENSA_STACK_ALIGN (size
38173d38 2558 + crtl->outgoing_args_size
03984308
BW
2559 + (WINDOW_SIZE * UNITS_PER_WORD));
2560 return xtensa_current_frame_size;
2561}
2562
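/* Illustrative sketch (not part of this file): XTENSA_STACK_ALIGN above
   rounds a byte count up to the stack boundary (16 bytes, assuming a
   128-bit STACK_BOUNDARY), so the frame size is the locals plus the
   outgoing argument area plus the WINDOW_SIZE save words, rounded up.  */
#if 0
int
main (void)
{
  long size = 37;
  long aligned = (size + 16 - 1) & ~(16 - 1);   /* XTENSA_STACK_ALIGN (37) */

  return aligned == 48 ? 0 : 1;                 /* 37 rounds up to 48 */
}
#endif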
2563
b52b1749 2564bool
ffbc8796 2565xtensa_frame_pointer_required (void)
03984308
BW
2566{
2567 /* The code to expand builtin_frame_addr and builtin_return_addr
2568 currently uses the hard_frame_pointer instead of frame_pointer.
2569 This seems wrong but maybe it's necessary for other architectures.
638db43e 2570 This function is derived from the i386 code. */
03984308
BW
2571
2572 if (cfun->machine->accesses_prev_frame)
b52b1749 2573 return true;
03984308 2574
b52b1749 2575 return false;
03984308
BW
2576}
2577
2578
7f0ee694
BW
 2579/* Minimum frame = reg save area (4 words) plus static chain (1 word),
 2580   with the total size rounded up to a multiple of 128 bits. */
2581#define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2582
f42f5a1b 2583void
ffbc8796 2584xtensa_expand_prologue (void)
f42f5a1b
BW
2585{
2586 HOST_WIDE_INT total_size;
2587 rtx size_rtx;
4e6c2193 2588 rtx insn, note_rtx;
18dbd950 2589
f42f5a1b
BW
2590 total_size = compute_frame_size (get_frame_size ());
2591 size_rtx = GEN_INT (total_size);
18dbd950 2592
f42f5a1b 2593 if (total_size < (1 << (12+3)))
35a3be48 2594 insn = emit_insn (gen_entry (size_rtx));
03984308
BW
2595 else
2596 {
f42f5a1b
BW
2597 /* Use a8 as a temporary since a0-a7 may be live. */
2598 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
35a3be48 2599 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
f42f5a1b
BW
2600 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2601 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
4e6c2193 2602 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
03984308
BW
2603 }
2604
f42f5a1b 2605 if (frame_pointer_needed)
03984308 2606 {
997b8b4d 2607 if (cfun->machine->set_frame_ptr_insn)
03984308 2608 {
4e6c2193 2609 rtx first;
03984308 2610
997b8b4d
BW
2611 push_topmost_sequence ();
2612 first = get_insns ();
2613 pop_topmost_sequence ();
03984308 2614
f42f5a1b
BW
2615 /* For all instructions prior to set_frame_ptr_insn, replace
2616 hard_frame_pointer references with stack_pointer. */
2617 for (insn = first;
997b8b4d 2618 insn != cfun->machine->set_frame_ptr_insn;
f42f5a1b
BW
2619 insn = NEXT_INSN (insn))
2620 {
2621 if (INSN_P (insn))
20dca97b
BW
2622 {
2623 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2624 hard_frame_pointer_rtx,
2625 stack_pointer_rtx);
2626 df_insn_rescan (insn);
2627 }
f42f5a1b
BW
2628 }
2629 }
2630 else
4e6c2193
BW
2631 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2632 stack_pointer_rtx));
03984308 2633 }
4e6c2193
BW
2634
2635 /* Create a note to describe the CFA. Because this is only used to set
2636 DW_AT_frame_base for debug info, don't bother tracking changes through
2637 each instruction in the prologue. It just takes up space. */
2638 note_rtx = gen_rtx_SET (VOIDmode, (frame_pointer_needed
2639 ? hard_frame_pointer_rtx
2640 : stack_pointer_rtx),
2641 plus_constant (stack_pointer_rtx, -total_size));
2642 RTX_FRAME_RELATED_P (insn) = 1;
046845de 2643 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
03984308
BW
2644}
2645
2646
f42f5a1b 2647/* Clear variables at function end. */
03984308
BW
2648
2649void
ffbc8796
BW
2650xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2651 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
03984308 2652{
03984308
BW
2653 xtensa_current_frame_size = 0;
2654}
2655
2656
0c14a54d 2657rtx
ffbc8796 2658xtensa_return_addr (int count, rtx frame)
0c14a54d 2659{
7f0ee694 2660 rtx result, retaddr, curaddr, label;
0c14a54d
BW
2661
2662 if (count == -1)
f42f5a1b 2663 retaddr = gen_rtx_REG (Pmode, A0_REG);
0c14a54d
BW
2664 else
2665 {
2666 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2667 addr = memory_address (Pmode, addr);
2668 retaddr = gen_reg_rtx (Pmode);
2669 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2670 }
2671
2672 /* The 2 most-significant bits of the return address on Xtensa hold
2673 the register window size. To get the real return address, these
7f0ee694
BW
2674 bits must be replaced with the high bits from some address in the
2675 code. */
2676
2677 /* Get the 2 high bits of a local label in the code. */
2678 curaddr = gen_reg_rtx (Pmode);
2679 label = gen_label_rtx ();
2680 emit_label (label);
2681 LABEL_PRESERVE_P (label) = 1;
2682 emit_move_insn (curaddr, gen_rtx_LABEL_REF (Pmode, label));
2683 emit_insn (gen_lshrsi3 (curaddr, curaddr, GEN_INT (30)));
2684 emit_insn (gen_ashlsi3 (curaddr, curaddr, GEN_INT (30)));
2685
2686 /* Clear the 2 high bits of the return address. */
0c14a54d 2687 result = gen_reg_rtx (Pmode);
7f0ee694
BW
2688 emit_insn (gen_ashlsi3 (result, retaddr, GEN_INT (2)));
2689 emit_insn (gen_lshrsi3 (result, result, GEN_INT (2)));
2690
2691 /* Combine them to get the result. */
2692 emit_insn (gen_iorsi3 (result, result, curaddr));
0c14a54d
BW
2693 return result;
2694}
2695
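/* Illustrative sketch (not part of this file): on Xtensa the top two bits
   of a windowed return address hold the caller's window increment rather
   than address bits.  The expansion above rebuilds a usable code address
   by clearing those two bits and substituting the top two bits of a
   nearby label, which this sketch mirrors in plain C.  */
#if 0
#include <stdint.h>

static uint32_t
sketch_fix_return_addr (uint32_t retaddr, uint32_t nearby_code_addr)
{
  uint32_t low30 = (retaddr << 2) >> 2;            /* clear bits 31:30 */
  uint32_t high2 = (nearby_code_addr >> 30) << 30; /* keep bits 31:30  */

  return high2 | low30;
}
#endif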
2696
03984308 2697/* Create the va_list data type.
822e895c
BW
2698
2699 This structure is set up by __builtin_saveregs. The __va_reg field
2700 points to a stack-allocated region holding the contents of the
2701 incoming argument registers. The __va_ndx field is an index
2702 initialized to the position of the first unnamed (variable)
2703 argument. This same index is also used to address the arguments
2704 passed in memory. Thus, the __va_stk field is initialized to point
2705 to the position of the first argument in memory offset to account
2706 for the arguments passed in registers and to account for the size
2707 of the argument registers not being 16-byte aligned. E.G., there
2708 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2709 for the first stack argument to have the maximal alignment of 16
2710 bytes, so we offset the __va_stk address by 32 bytes so that
2711 __va_stk[32] references the first argument on the stack. */
03984308 2712
c35d187f
RH
2713static tree
2714xtensa_build_builtin_va_list (void)
03984308 2715{
540eaea8 2716 tree f_stk, f_reg, f_ndx, record, type_decl;
03984308 2717
540eaea8 2718 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4c4bde29
AH
2719 type_decl = build_decl (BUILTINS_LOCATION,
2720 TYPE_DECL, get_identifier ("__va_list_tag"), record);
03984308 2721
4c4bde29
AH
2722 f_stk = build_decl (BUILTINS_LOCATION,
2723 FIELD_DECL, get_identifier ("__va_stk"),
03984308 2724 ptr_type_node);
4c4bde29
AH
2725 f_reg = build_decl (BUILTINS_LOCATION,
2726 FIELD_DECL, get_identifier ("__va_reg"),
03984308 2727 ptr_type_node);
4c4bde29
AH
2728 f_ndx = build_decl (BUILTINS_LOCATION,
2729 FIELD_DECL, get_identifier ("__va_ndx"),
03984308
BW
2730 integer_type_node);
2731
2732 DECL_FIELD_CONTEXT (f_stk) = record;
2733 DECL_FIELD_CONTEXT (f_reg) = record;
2734 DECL_FIELD_CONTEXT (f_ndx) = record;
2735
0fd2eac2 2736 TYPE_STUB_DECL (record) = type_decl;
540eaea8 2737 TYPE_NAME (record) = type_decl;
03984308 2738 TYPE_FIELDS (record) = f_stk;
910ad8de
NF
2739 DECL_CHAIN (f_stk) = f_reg;
2740 DECL_CHAIN (f_reg) = f_ndx;
03984308
BW
2741
2742 layout_type (record);
2743 return record;
2744}
2745
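/* Illustrative sketch (not part of this file): the record built above is
   equivalent to the following C declaration; the field roles match the
   comment before xtensa_build_builtin_va_list.  */
#if 0
typedef struct __va_list_tag
{
  void *__va_stk;   /* points 32 bytes before the first stack argument   */
  void *__va_reg;   /* save area holding the incoming argument registers */
  int   __va_ndx;   /* byte index of the next argument, used for both    */
} __va_list_tag;
#endif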
2746
2747/* Save the incoming argument registers on the stack. Returns the
638db43e 2748 address of the saved registers. */
03984308 2749
4c45af42 2750static rtx
ffbc8796 2751xtensa_builtin_saveregs (void)
03984308 2752{
e70312d4 2753 rtx gp_regs;
38173d38 2754 int arg_words = crtl->args.info.arg_words;
03984308 2755 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
03984308 2756
997b8b4d 2757 if (gp_left <= 0)
03984308
BW
2758 return const0_rtx;
2759
3bbc2af6 2760 /* Allocate the general-purpose register space. */
03984308
BW
2761 gp_regs = assign_stack_local
2762 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
540eaea8 2763 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
03984308
BW
2764
2765 /* Now store the incoming registers. */
997b8b4d
BW
2766 cfun->machine->need_a7_copy = true;
2767 cfun->machine->vararg_a7 = true;
e70312d4
BW
2768 move_block_from_reg (GP_ARG_FIRST + arg_words,
2769 adjust_address (gp_regs, BLKmode,
2770 arg_words * UNITS_PER_WORD),
2771 gp_left);
0d8442b8
BW
2772 gcc_assert (cfun->machine->vararg_a7_copy != 0);
2773 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
03984308
BW
2774
2775 return XEXP (gp_regs, 0);
2776}
2777
2778
2779/* Implement `va_start' for varargs and stdarg. We look at the
638db43e 2780 current function to fill in an initial va_list. */
03984308 2781
d7bd8aeb 2782static void
ffbc8796 2783xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
03984308
BW
2784{
2785 tree f_stk, stk;
2786 tree f_reg, reg;
2787 tree f_ndx, ndx;
2788 tree t, u;
2789 int arg_words;
2790
38173d38 2791 arg_words = crtl->args.info.arg_words;
03984308
BW
2792
2793 f_stk = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
2794 f_reg = DECL_CHAIN (f_stk);
2795 f_ndx = DECL_CHAIN (f_reg);
03984308 2796
47a25a46 2797 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
fa1615d7
BW
2798 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2799 f_reg, NULL_TREE);
2800 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2801 f_ndx, NULL_TREE);
03984308
BW
2802
2803 /* Call __builtin_saveregs; save the result in __va_reg */
e70312d4
BW
2804 u = make_tree (sizetype, expand_builtin_saveregs ());
2805 u = fold_convert (ptr_type_node, u);
726a989a 2806 t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
03984308
BW
2807 TREE_SIDE_EFFECTS (t) = 1;
2808 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2809
822e895c 2810 /* Set the __va_stk member to ($arg_ptr - 32). */
03984308 2811 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
e70312d4 2812 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
726a989a 2813 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
03984308
BW
2814 TREE_SIDE_EFFECTS (t) = 1;
2815 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2816
822e895c
BW
2817 /* Set the __va_ndx member. If the first variable argument is on
2818 the stack, adjust __va_ndx by 2 words to account for the extra
2819 alignment offset for __va_stk. */
2820 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2821 arg_words += 2;
726a989a 2822 t = build2 (MODIFY_EXPR, integer_type_node, ndx,
f4d3e7fd 2823 build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
03984308
BW
2824 TREE_SIDE_EFFECTS (t) = 1;
2825 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2826}
2827
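/* Illustrative sketch (not part of this file): in plain C the va_start
   expansion above amounts to the following, where "save_area" stands for
   the result of __builtin_saveregs () and "arg_ptr" for the incoming
   argument pointer (both names are hypothetical), and "arg_words" is the
   number of words used by the named arguments.  */
#if 0
struct sketch_va_list { void *stk; void *reg; int ndx; };

static void
sketch_va_start (struct sketch_va_list *ap, void *save_area,
                 char *arg_ptr, int arg_words)
{
  ap->reg = save_area;
  ap->stk = arg_ptr - 32;   /* so that index 32 is the first stack argument */
  if (arg_words >= 6)       /* MAX_ARGS_IN_REGISTERS */
    arg_words += 2;         /* skip the 32-byte bias for stack arguments    */
  ap->ndx = arg_words * 4;  /* UNITS_PER_WORD == 4 */
}
#endif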
2828
2829/* Implement `va_arg'. */
2830
85d53c1d 2831static tree
726a989a
RB
2832xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
2833 gimple_seq *post_p ATTRIBUTE_UNUSED)
03984308
BW
2834{
2835 tree f_stk, stk;
2836 tree f_reg, reg;
2837 tree f_ndx, ndx;
85d53c1d
RH
2838 tree type_size, array, orig_ndx, addr, size, va_size, t;
2839 tree lab_false, lab_over, lab_false2;
08b0dc1b
RH
2840 bool indirect;
2841
2842 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2843 if (indirect)
2844 type = build_pointer_type (type);
03984308 2845
3712281f
BW
2846 /* Handle complex values as separate real and imaginary parts. */
2847 if (TREE_CODE (type) == COMPLEX_TYPE)
2848 {
85d53c1d 2849 tree real_part, imag_part;
3712281f 2850
85d53c1d
RH
2851 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2852 pre_p, NULL);
2853 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
3712281f 2854
fa1615d7
BW
2855 imag_part = xtensa_gimplify_va_arg_expr (unshare_expr (valist),
2856 TREE_TYPE (type),
85d53c1d
RH
2857 pre_p, NULL);
2858 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
3712281f 2859
47a25a46 2860 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
3712281f
BW
2861 }
2862
03984308 2863 f_stk = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
2864 f_reg = DECL_CHAIN (f_stk);
2865 f_ndx = DECL_CHAIN (f_reg);
03984308 2866
fa1615d7
BW
2867 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist,
2868 f_stk, NULL_TREE);
2869 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2870 f_reg, NULL_TREE);
2871 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2872 f_ndx, NULL_TREE);
03984308 2873
85d53c1d
RH
2874 type_size = size_in_bytes (type);
2875 va_size = round_up (type_size, UNITS_PER_WORD);
2876 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
8be56275 2877
03984308 2878
822e895c 2879 /* First align __va_ndx if necessary for this arg:
03984308 2880
85d53c1d 2881 orig_ndx = (AP).__va_ndx;
822e895c 2882 if (__alignof__ (TYPE) > 4 )
85d53c1d 2883 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
822e895c 2884 & -__alignof__ (TYPE)); */
03984308 2885
85d53c1d
RH
2886 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2887
03984308
BW
2888 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2889 {
d2348985 2890 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
85d53c1d 2891
fa1615d7 2892 t = build2 (PLUS_EXPR, integer_type_node, unshare_expr (orig_ndx),
f4d3e7fd
BW
2893 build_int_cst (integer_type_node, align - 1));
2894 t = build2 (BIT_AND_EXPR, integer_type_node, t,
2895 build_int_cst (integer_type_node, -align));
fa1615d7 2896 gimplify_assign (unshare_expr (orig_ndx), t, pre_p);
03984308
BW
2897 }
2898
2899
2900 /* Increment __va_ndx to point past the argument:
2901
85d53c1d 2902 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
03984308 2903
85d53c1d 2904 t = fold_convert (integer_type_node, va_size);
47a25a46 2905 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
fa1615d7 2906 gimplify_assign (unshare_expr (ndx), t, pre_p);
03984308
BW
2907
2908
2909 /* Check if the argument is in registers:
2910
bcf88f9b 2911 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
fe984136 2912 && !must_pass_in_stack (type))
ffbc8796 2913 __array = (AP).__va_reg; */
03984308 2914
85d53c1d 2915 array = create_tmp_var (ptr_type_node, NULL);
03984308 2916
85d53c1d 2917 lab_over = NULL;
fe984136 2918 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
bcf88f9b 2919 {
4c4bde29
AH
2920 lab_false = create_artificial_label (UNKNOWN_LOCATION);
2921 lab_over = create_artificial_label (UNKNOWN_LOCATION);
85d53c1d 2922
fa1615d7 2923 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (ndx),
f4d3e7fd
BW
2924 build_int_cst (integer_type_node,
2925 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2926 t = build3 (COND_EXPR, void_type_node, t,
2927 build1 (GOTO_EXPR, void_type_node, lab_false),
2928 NULL_TREE);
85d53c1d
RH
2929 gimplify_and_add (t, pre_p);
2930
fa1615d7 2931 gimplify_assign (unshare_expr (array), reg, pre_p);
85d53c1d 2932
47a25a46 2933 t = build1 (GOTO_EXPR, void_type_node, lab_over);
85d53c1d
RH
2934 gimplify_and_add (t, pre_p);
2935
47a25a46 2936 t = build1 (LABEL_EXPR, void_type_node, lab_false);
85d53c1d 2937 gimplify_and_add (t, pre_p);
bcf88f9b 2938 }
03984308 2939
85d53c1d 2940
03984308
BW
2941 /* ...otherwise, the argument is on the stack (never split between
2942 registers and the stack -- change __va_ndx if necessary):
2943
2944 else
2945 {
822e895c
BW
2946 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2947 (AP).__va_ndx = 32 + __va_size (TYPE);
03984308 2948 __array = (AP).__va_stk;
ffbc8796 2949 } */
03984308 2950
4c4bde29 2951 lab_false2 = create_artificial_label (UNKNOWN_LOCATION);
03984308 2952
fa1615d7 2953 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (orig_ndx),
f4d3e7fd
BW
2954 build_int_cst (integer_type_node,
2955 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2956 t = build3 (COND_EXPR, void_type_node, t,
2957 build1 (GOTO_EXPR, void_type_node, lab_false2),
2958 NULL_TREE);
85d53c1d 2959 gimplify_and_add (t, pre_p);
03984308 2960
fa1615d7 2961 t = size_binop (PLUS_EXPR, unshare_expr (va_size), size_int (32));
85d53c1d 2962 t = fold_convert (integer_type_node, t);
fa1615d7 2963 gimplify_assign (unshare_expr (ndx), t, pre_p);
03984308 2964
47a25a46 2965 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
85d53c1d 2966 gimplify_and_add (t, pre_p);
03984308 2967
726a989a 2968 gimplify_assign (array, stk, pre_p);
85d53c1d
RH
2969
2970 if (lab_over)
2971 {
47a25a46 2972 t = build1 (LABEL_EXPR, void_type_node, lab_over);
85d53c1d
RH
2973 gimplify_and_add (t, pre_p);
2974 }
8be56275 2975
03984308
BW
2976
2977 /* Given the base array pointer (__array) and index to the subsequent
2978 argument (__va_ndx), find the address:
2979
8be56275
BW
2980 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2981 ? sizeof (TYPE)
2982 : __va_size (TYPE))
03984308
BW
2983
2984 The results are endian-dependent because values smaller than one word
ffbc8796 2985 are aligned differently. */
03984308 2986
633e4eb4 2987
85d91d5b 2988 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
8be56275 2989 {
fa1615d7 2990 t = fold_build2 (GE_EXPR, boolean_type_node, unshare_expr (type_size),
e70312d4 2991 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
fa1615d7
BW
2992 t = fold_build3 (COND_EXPR, sizetype, t, unshare_expr (va_size),
2993 unshare_expr (type_size));
85d53c1d 2994 size = t;
8be56275 2995 }
85d53c1d 2996 else
fa1615d7 2997 size = unshare_expr (va_size);
85d53c1d 2998
fa1615d7 2999 t = fold_convert (sizetype, unshare_expr (ndx));
f4d3e7fd 3000 t = build2 (MINUS_EXPR, sizetype, t, size);
fa1615d7 3001 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (array), t);
03984308 3002
85d53c1d 3003 addr = fold_convert (build_pointer_type (type), addr);
08b0dc1b 3004 if (indirect)
d6e9821f
RH
3005 addr = build_va_arg_indirect_ref (addr);
3006 return build_va_arg_indirect_ref (addr);
03984308
BW
3007}
3008
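/* Illustrative sketch (not part of this file): the gimplified va_arg
   sequence above corresponds roughly to this C, ignoring the big-endian
   adjustment for small types, the pass-by-reference case, and the
   complex-type splitting.  */
#if 0
struct sketch_va_list { char *stk; char *reg; int ndx; };

static void *
sketch_va_arg (struct sketch_va_list *ap, int size, int align)
{
  int va_size = (size + 3) & -4;     /* round the size up to whole words */
  int orig_ndx = ap->ndx;
  char *array;

  if (align > 4)                     /* align the index for this argument */
    orig_ndx = (orig_ndx + align - 1) & -align;
  ap->ndx = orig_ndx + va_size;

  if (ap->ndx <= 6 * 4)              /* fits in the register save area */
    array = ap->reg;
  else
    {
      if (orig_ndx <= 6 * 4)         /* never split between registers and stack */
        ap->ndx = 32 + va_size;
      array = ap->stk;
    }
  return array + ap->ndx - va_size;
}
#endif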
3009
09fa8841
BW
3010/* Builtins. */
3011
3012enum xtensa_builtin
3013{
3014 XTENSA_BUILTIN_UMULSIDI3,
6a7a462c
BW
3015 XTENSA_BUILTIN_THREAD_POINTER,
3016 XTENSA_BUILTIN_SET_THREAD_POINTER,
09fa8841
BW
3017 XTENSA_BUILTIN_max
3018};
3019
3020
3021static void
3022xtensa_init_builtins (void)
3023{
6a7a462c 3024 tree ftype, decl;
09fa8841
BW
3025
3026 ftype = build_function_type_list (unsigned_intDI_type_node,
3027 unsigned_intSI_type_node,
3028 unsigned_intSI_type_node, NULL_TREE);
3029
6a7a462c
BW
3030 decl = add_builtin_function ("__builtin_umulsidi3", ftype,
3031 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
3032 "__umulsidi3", NULL_TREE);
3033 TREE_NOTHROW (decl) = 1;
3034 TREE_READONLY (decl) = 1;
3035
3036 if (TARGET_THREADPTR)
3037 {
3038 ftype = build_function_type (ptr_type_node, void_list_node);
3039 decl = add_builtin_function ("__builtin_thread_pointer", ftype,
3040 XTENSA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
3041 NULL, NULL_TREE);
3042 TREE_READONLY (decl) = 1;
3043 TREE_NOTHROW (decl) = 1;
3044
3045 ftype = build_function_type_list (void_type_node, ptr_type_node,
3046 NULL_TREE);
3047 decl = add_builtin_function ("__builtin_set_thread_pointer", ftype,
3048 XTENSA_BUILTIN_SET_THREAD_POINTER,
3049 BUILT_IN_MD, NULL, NULL_TREE);
3050 TREE_NOTHROW (decl) = 1;
3051 }
09fa8841
BW
3052}
3053
3054
3055static tree
f311c3b4
NF
3056xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
3057 bool ignore ATTRIBUTE_UNUSED)
09fa8841
BW
3058{
3059 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3060 tree arg0, arg1;
3061
6a7a462c 3062 switch (fcode)
09fa8841 3063 {
6a7a462c 3064 case XTENSA_BUILTIN_UMULSIDI3:
f311c3b4
NF
3065 arg0 = args[0];
3066 arg1 = args[1];
09fa8841
BW
3067 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3068 || TARGET_MUL32_HIGH)
3069 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
3070 fold_convert (unsigned_intDI_type_node, arg0),
3071 fold_convert (unsigned_intDI_type_node, arg1));
6a7a462c
BW
3072 break;
3073
3074 case XTENSA_BUILTIN_THREAD_POINTER:
3075 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3076 break;
3077
3078 default:
3079 internal_error ("bad builtin code");
3080 break;
09fa8841
BW
3081 }
3082
09fa8841
BW
3083 return NULL;
3084}
3085
3086
3087static rtx
3088xtensa_expand_builtin (tree exp, rtx target,
3089 rtx subtarget ATTRIBUTE_UNUSED,
3090 enum machine_mode mode ATTRIBUTE_UNUSED,
3091 int ignore)
3092{
ec3643e8 3093 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
09fa8841 3094 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6a7a462c
BW
3095 rtx arg;
3096
3097 switch (fcode)
3098 {
3099 case XTENSA_BUILTIN_UMULSIDI3:
3100 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
3101 __umulsidi3 function when the Xtensa configuration can directly
3102 implement it. If not, just call the function. */
3103 return expand_call (exp, target, ignore);
09fa8841 3104
6a7a462c
BW
3105 case XTENSA_BUILTIN_THREAD_POINTER:
3106 if (!target || !register_operand (target, Pmode))
3107 target = gen_reg_rtx (Pmode);
3108 emit_insn (gen_load_tp (target));
3109 return target;
09fa8841 3110
6a7a462c
BW
3111 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3112 arg = expand_normal (CALL_EXPR_ARG (exp, 0));
3113 if (!register_operand (arg, Pmode))
3114 arg = copy_to_mode_reg (Pmode, arg);
3115 emit_insn (gen_set_tp (arg));
3116 return const0_rtx;
3117
3118 default:
3119 internal_error ("bad builtin code");
3120 }
09fa8841
BW
3121 return NULL_RTX;
3122}
3123
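/* Illustrative sketch (not part of this file): from user code the builtins
   declared above are used as ordinary functions; the thread-pointer
   builtins exist only when the configuration has the THREADPTR option.  */
#if 0
unsigned long long
umul_example (unsigned a, unsigned b)
{
  /* Folds to a plain widening multiply when both operands are constant or
     the core has MUL32_HIGH; otherwise it becomes a call to __umulsidi3.  */
  return __builtin_umulsidi3 (a, b);
}

void *
tp_example (void)
{
  return __builtin_thread_pointer ();
}
#endif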
3124
a8cacfd2 3125enum reg_class
0a2aaacc 3126xtensa_preferred_reload_class (rtx x, enum reg_class rclass, int isoutput)
a8cacfd2 3127{
89f6025d 3128 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
a8cacfd2
BW
3129 return NO_REGS;
3130
89f6025d
BW
3131 /* Don't use the stack pointer or hard frame pointer for reloads!
3132 The hard frame pointer would normally be OK except that it may
3133 briefly hold an incoming argument in the prologue, and reload
3134 won't know that it is live because the hard frame pointer is
3135 treated specially. */
3136
0a2aaacc 3137 if (rclass == AR_REGS || rclass == GR_REGS)
89f6025d 3138 return RL_REGS;
a8cacfd2 3139
0a2aaacc 3140 return rclass;
a8cacfd2
BW
3141}
3142
3143
a87cf97e
JR
3144reg_class_t
3145xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
37fbe8a3 3146 enum machine_mode mode, secondary_reload_info *sri)
03984308
BW
3147{
3148 int regno;
3149
37fbe8a3 3150 if (in_p && constantpool_mem_p (x))
03984308 3151 {
37fbe8a3 3152 if (rclass == FP_REGS)
89f6025d 3153 return RL_REGS;
37fbe8a3
BW
3154
3155 if (mode == QImode)
3156 sri->icode = CODE_FOR_reloadqi_literal;
3157 else if (mode == HImode)
3158 sri->icode = CODE_FOR_reloadhi_literal;
03984308
BW
3159 }
3160
37fbe8a3 3161 regno = xt_true_regnum (x);
03984308 3162 if (ACC_REG_P (regno))
0a2aaacc
KG
3163 return ((rclass == GR_REGS || rclass == RL_REGS) ? NO_REGS : RL_REGS);
3164 if (rclass == ACC_REG)
89f6025d 3165 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
03984308
BW
3166
3167 return NO_REGS;
3168}
3169
3170
3171void
ffbc8796 3172order_regs_for_local_alloc (void)
03984308
BW
3173{
3174 if (!leaf_function_p ())
3175 {
3176 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
3177 FIRST_PSEUDO_REGISTER * sizeof (int));
3178 }
3179 else
3180 {
3181 int i, num_arg_regs;
3182 int nxt = 0;
3183
3bbc2af6
KH
3184 /* Use the AR registers in increasing order (skipping a0 and a1)
 3185     but save the incoming argument registers as a last resort. */
38173d38 3186 num_arg_regs = crtl->args.info.arg_words;
03984308
BW
3187 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
3188 num_arg_regs = MAX_ARGS_IN_REGISTERS;
3189 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
3190 reg_alloc_order[nxt++] = i + num_arg_regs;
3191 for (i = 0; i < num_arg_regs; i++)
3192 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
3193
3bbc2af6 3194 /* List the coprocessor registers in order. */
985d0d50
BW
3195 for (i = 0; i < BR_REG_NUM; i++)
3196 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
3197
3bbc2af6 3198 /* List the FP registers in order for now. */
03984308
BW
3199 for (i = 0; i < 16; i++)
3200 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
3201
638db43e 3202 /* GCC requires that we list *all* the registers.... */
03984308
BW
3203 reg_alloc_order[nxt++] = 0; /* a0 = return address */
3204 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
3205 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
3206 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
3207
03984308
BW
3208 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
3209 }
3210}
3211
3212
01abf342
BW
3213/* Some Xtensa targets support multiple bss sections. If the section
3214 name ends with ".bss", add SECTION_BSS to the flags. */
3215
3216static unsigned int
ffbc8796 3217xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
01abf342
BW
3218{
3219 unsigned int flags = default_section_type_flags (decl, name, reloc);
3220 const char *suffix;
3221
3222 suffix = strrchr (name, '.');
3223 if (suffix && strcmp (suffix, ".bss") == 0)
3224 {
3225 if (!decl || (TREE_CODE (decl) == VAR_DECL
3226 && DECL_INITIAL (decl) == NULL_TREE))
3227 flags |= SECTION_BSS; /* @nobits */
3228 else
d4ee4d25 3229 warning (0, "only uninitialized variables can be placed in a "
01abf342
BW
3230 ".bss section");
3231 }
3232
3233 return flags;
3234}
3235
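/* Illustrative sketch (not part of this file): with the hook above, an
   uninitialized variable placed in a section whose name ends in ".bss"
   (the section name below is hypothetical) is emitted as @nobits.  */
#if 0
int scratch_buffer[256] __attribute__ ((section (".iram0.bss")));
#endif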
3236
b64a1b53
RH
3237/* The literal pool stays with the function. */
3238
d6b5193b 3239static section *
ffbc8796
BW
3240xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
3241 rtx x ATTRIBUTE_UNUSED,
3242 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53 3243{
d6b5193b 3244 return function_section (current_function_decl);
b64a1b53 3245}
fb49053f 3246
ffbc8796 3247
3c50106f
RH
3248/* Compute a (partial) cost for rtx X. Return true if the complete
3249 cost has been computed, and false if subexpressions should be
3250 scanned. In either case, *TOTAL contains the cost result. */
3251
3252static bool
f40751dd
JH
3253xtensa_rtx_costs (rtx x, int code, int outer_code, int *total,
3254 bool speed ATTRIBUTE_UNUSED)
3c50106f
RH
3255{
3256 switch (code)
3257 {
3258 case CONST_INT:
3259 switch (outer_code)
3260 {
3261 case SET:
3262 if (xtensa_simm12b (INTVAL (x)))
3263 {
3264 *total = 4;
3265 return true;
3266 }
3267 break;
3268 case PLUS:
3269 if (xtensa_simm8 (INTVAL (x))
3270 || xtensa_simm8x256 (INTVAL (x)))
3271 {
3272 *total = 0;
3273 return true;
3274 }
3275 break;
3276 case AND:
3277 if (xtensa_mask_immediate (INTVAL (x)))
3278 {
3279 *total = 0;
3280 return true;
3281 }
3282 break;
3283 case COMPARE:
3284 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
3285 {
3286 *total = 0;
3287 return true;
3288 }
3289 break;
3290 case ASHIFT:
3291 case ASHIFTRT:
3292 case LSHIFTRT:
3293 case ROTATE:
3294 case ROTATERT:
3bbc2af6 3295 /* No way to tell if X is the 2nd operand so be conservative. */
3c50106f
RH
3296 default: break;
3297 }
3298 if (xtensa_simm12b (INTVAL (x)))
3299 *total = 5;
f42f5a1b
BW
3300 else if (TARGET_CONST16)
3301 *total = COSTS_N_INSNS (2);
3c50106f
RH
3302 else
3303 *total = 6;
3304 return true;
3305
3306 case CONST:
3307 case LABEL_REF:
3308 case SYMBOL_REF:
f42f5a1b
BW
3309 if (TARGET_CONST16)
3310 *total = COSTS_N_INSNS (2);
3311 else
3312 *total = 5;
3c50106f
RH
3313 return true;
3314
3315 case CONST_DOUBLE:
f42f5a1b
BW
3316 if (TARGET_CONST16)
3317 *total = COSTS_N_INSNS (4);
3318 else
3319 *total = 7;
3c50106f
RH
3320 return true;
3321
3322 case MEM:
3323 {
3324 int num_words =
3325 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
3326
3327 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
3328 *total = COSTS_N_INSNS (num_words);
3329 else
3330 *total = COSTS_N_INSNS (2*num_words);
3331 return true;
3332 }
3333
3334 case FFS:
09fa8841 3335 case CTZ:
3c50106f
RH
3336 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
3337 return true;
3338
09fa8841
BW
3339 case CLZ:
3340 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
3341 return true;
3342
3c50106f
RH
3343 case NOT:
3344 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
3345 return true;
3346
3347 case AND:
3348 case IOR:
3349 case XOR:
3350 if (GET_MODE (x) == DImode)
3351 *total = COSTS_N_INSNS (2);
3352 else
3353 *total = COSTS_N_INSNS (1);
3354 return true;
3355
3356 case ASHIFT:
3357 case ASHIFTRT:
3358 case LSHIFTRT:
3359 if (GET_MODE (x) == DImode)
3360 *total = COSTS_N_INSNS (50);
3361 else
3362 *total = COSTS_N_INSNS (1);
3363 return true;
3364
3365 case ABS:
3366 {
3367 enum machine_mode xmode = GET_MODE (x);
3368 if (xmode == SFmode)
3369 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3370 else if (xmode == DFmode)
3371 *total = COSTS_N_INSNS (50);
3372 else
3373 *total = COSTS_N_INSNS (4);
3374 return true;
3375 }
3376
3377 case PLUS:
3378 case MINUS:
3379 {
3380 enum machine_mode xmode = GET_MODE (x);
3381 if (xmode == SFmode)
3382 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3383 else if (xmode == DFmode || xmode == DImode)
3384 *total = COSTS_N_INSNS (50);
3385 else
3386 *total = COSTS_N_INSNS (1);
3387 return true;
3388 }
3389
3390 case NEG:
3391 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3392 return true;
3393
3394 case MULT:
3395 {
3396 enum machine_mode xmode = GET_MODE (x);
3397 if (xmode == SFmode)
3398 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
09fa8841 3399 else if (xmode == DFmode)
3c50106f 3400 *total = COSTS_N_INSNS (50);
09fa8841
BW
3401 else if (xmode == DImode)
3402 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3c50106f
RH
3403 else if (TARGET_MUL32)
3404 *total = COSTS_N_INSNS (4);
3405 else if (TARGET_MAC16)
3406 *total = COSTS_N_INSNS (16);
3407 else if (TARGET_MUL16)
3408 *total = COSTS_N_INSNS (12);
3409 else
3410 *total = COSTS_N_INSNS (50);
3411 return true;
3412 }
3413
3414 case DIV:
3415 case MOD:
3416 {
3417 enum machine_mode xmode = GET_MODE (x);
3418 if (xmode == SFmode)
3419 {
3420 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3421 return true;
3422 }
3423 else if (xmode == DFmode)
3424 {
3425 *total = COSTS_N_INSNS (50);
3426 return true;
3427 }
3428 }
3bbc2af6 3429 /* Fall through. */
3c50106f
RH
3430
3431 case UDIV:
3432 case UMOD:
3433 {
3434 enum machine_mode xmode = GET_MODE (x);
3435 if (xmode == DImode)
3436 *total = COSTS_N_INSNS (50);
3437 else if (TARGET_DIV32)
3438 *total = COSTS_N_INSNS (32);
3439 else
3440 *total = COSTS_N_INSNS (50);
3441 return true;
3442 }
3443
3444 case SQRT:
3445 if (GET_MODE (x) == SFmode)
3446 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3447 else
3448 *total = COSTS_N_INSNS (50);
3449 return true;
3450
3451 case SMIN:
3452 case UMIN:
3453 case SMAX:
3454 case UMAX:
3455 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3456 return true;
3457
3458 case SIGN_EXTRACT:
3459 case SIGN_EXTEND:
3460 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3461 return true;
3462
3463 case ZERO_EXTRACT:
3464 case ZERO_EXTEND:
3465 *total = COSTS_N_INSNS (1);
3466 return true;
3467
3468 default:
3469 return false;
3470 }
3471}
3472
bd5bd7ac
KH
3473/* Worker function for TARGET_RETURN_IN_MEMORY. */
3474
4c45af42 3475static bool
586de218 3476xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4c45af42
KH
3477{
3478 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3479 > 4 * UNITS_PER_WORD);
3480}
3481
e2b2d01e
AS
3482/* Worker function for TARGET_FUNCTION_VALUE. */
3483
3484rtx
3485xtensa_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
3486 bool outgoing)
3487{
3488 return gen_rtx_REG ((INTEGRAL_TYPE_P (valtype)
3489 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
3490 ? SImode : TYPE_MODE (valtype),
3491 outgoing ? GP_OUTGOING_RETURN : GP_RETURN);
3492}
7f0ee694 3493
2b4fa409
RH
 3494/* The static chain is passed in memory.  Provide 'mem' rtx expressions
 3495   giving where it is stored, for both the incoming and outgoing cases. */
3496
3497static rtx
3498xtensa_static_chain (const_tree ARG_UNUSED (fndecl), bool incoming_p)
3499{
3500 rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx;
3501 return gen_frame_mem (Pmode, plus_constant (base, -5 * UNITS_PER_WORD));
3502}
3503
3504
7f0ee694
BW
3505/* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3506 instruction with a minimal stack frame in order to get some free
3507 registers. Once the actual call target is known, the proper stack frame
3508 size is extracted from the ENTRY instruction at the target and the
3509 current frame is adjusted to match. The trampoline then transfers
3510 control to the instruction following the ENTRY at the target. Note:
3511 this assumes that the target begins with an ENTRY instruction. */
3512
3c1229cb
RH
3513static void
3514xtensa_asm_trampoline_template (FILE *stream)
7f0ee694
BW
3515{
3516 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3517
3518 fprintf (stream, "\t.begin no-transform\n");
3519 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);
3520
3521 if (use_call0)
3522 {
3523 /* Save the return address. */
3524 fprintf (stream, "\tmov\ta10, a0\n");
3525
3526 /* Use a CALL0 instruction to skip past the constants and in the
3527 process get the PC into A0. This allows PC-relative access to
3528 the constants without relying on L32R. */
3529 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3530 }
3531 else
3532 fprintf (stream, "\tj\t.Lskipconsts\n");
3533
3534 fprintf (stream, "\t.align\t4\n");
3535 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3536 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3537 fprintf (stream, ".Lskipconsts:\n");
3538
3539 /* Load the static chain and function address from the trampoline. */
3540 if (use_call0)
3541 {
3542 fprintf (stream, "\taddi\ta0, a0, 3\n");
3543 fprintf (stream, "\tl32i\ta9, a0, 0\n");
3544 fprintf (stream, "\tl32i\ta8, a0, 4\n");
3545 }
3546 else
3547 {
3548 fprintf (stream, "\tl32r\ta9, .Lchainval\n");
3549 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
3550 }
3551
3552 /* Store the static chain. */
3553 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);
3554
3555 /* Set the proper stack pointer value. */
3556 fprintf (stream, "\tl32i\ta9, a8, 0\n");
3557 fprintf (stream, "\textui\ta9, a9, %d, 12\n",
3558 TARGET_BIG_ENDIAN ? 8 : 12);
3559 fprintf (stream, "\tslli\ta9, a9, 3\n");
3560 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
3561 fprintf (stream, "\tsub\ta9, sp, a9\n");
3562 fprintf (stream, "\tmovsp\tsp, a9\n");
3563
3564 if (use_call0)
3565 /* Restore the return address. */
3566 fprintf (stream, "\tmov\ta0, a10\n");
3567
3568 /* Jump to the instruction following the ENTRY. */
3569 fprintf (stream, "\taddi\ta8, a8, 3\n");
3570 fprintf (stream, "\tjx\ta8\n");
3571
3572 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3573 if (use_call0)
3574 fprintf (stream, "\t.byte\t0\n");
3575 else
3576 fprintf (stream, "\tnop\n");
3577
3578 fprintf (stream, "\t.end no-transform\n");
3579}
3580
3c1229cb
RH
3581static void
3582xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain)
7f0ee694 3583{
3c1229cb 3584 rtx func = XEXP (DECL_RTL (fndecl), 0);
7f0ee694
BW
3585 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3586 int chain_off = use_call0 ? 12 : 8;
3587 int func_off = use_call0 ? 16 : 12;
3c1229cb
RH
3588
3589 emit_block_move (m_tramp, assemble_trampoline_template (),
3590 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3591
3592 emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain);
3593 emit_move_insn (adjust_address (m_tramp, SImode, func_off), func);
7f0ee694 3594 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
046845de 3595 LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode);
7f0ee694
BW
3596}
3597
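/* Illustrative sketch (not part of this file): trampolines such as the one
   assembled above are only materialized when the address of a GNU C nested
   function escapes, for example:  */
#if 0
extern void register_callback (int (*fn) (int));

void
outer (int bias)
{
  int add_bias (int x) { return x + bias; }   /* nested function        */

  register_callback (add_bias);               /* forces a trampoline to
                                                 carry the static chain */
}
#endif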
3598
e2500fed 3599#include "gt-xtensa.h"