/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "toplev.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "gimple.h"
#include "df.h"

/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  */

enum internal_test
{
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX
};

/* Cached operands, and operator to compare for use in set/branch on
   condition codes.  */
rtx branch_cmp[2];

/* what type of branch to use */
enum cmp_type branch_type;

/* Array giving truth value on whether or not a given hard register
   can support a given mode.  */
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Current frame size calculated by compute_frame_size.  */
unsigned xtensa_current_frame_size;

/* Largest block move to handle in-line.  */
#define LARGEST_MOVE_RATIO 15

/* Define the structure for the machine field in struct function.  */
struct GTY(()) machine_function
{
  int accesses_prev_frame;
  bool need_a7_copy;
  bool vararg_a7;
  rtx vararg_a7_copy;
  rtx set_frame_ptr_insn;
};

/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};

/* Map hard register number to register class */
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
{
  RL_REGS,  SP_REG,   RL_REGS,  RL_REGS,
  RL_REGS,  RL_REGS,  RL_REGS,  GR_REGS,
  RL_REGS,  RL_REGS,  RL_REGS,  RL_REGS,
  RL_REGS,  RL_REGS,  RL_REGS,  RL_REGS,
  AR_REGS,  AR_REGS,  BR_REGS,
  FP_REGS,  FP_REGS,  FP_REGS,  FP_REGS,
  FP_REGS,  FP_REGS,  FP_REGS,  FP_REGS,
  FP_REGS,  FP_REGS,  FP_REGS,  FP_REGS,
  FP_REGS,  FP_REGS,  FP_REGS,  FP_REGS,
  ACC_REG,
};

static enum internal_test map_test_to_internal_test (enum rtx_code);
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
static rtx gen_conditional_move (rtx);
static rtx fixup_subreg_mem (rtx);
static struct machine_function * xtensa_init_machine_status (void);
static rtx xtensa_legitimize_tls_address (rtx);
static bool xtensa_return_in_msb (const_tree);
static void printx (FILE *, signed int);
static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
static rtx xtensa_builtin_saveregs (void);
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
                                                        int) ATTRIBUTE_UNUSED;
static section *xtensa_select_rtx_section (enum machine_mode, rtx,
                                           unsigned HOST_WIDE_INT);
static bool xtensa_rtx_costs (rtx, int, int, int *, bool);
static tree xtensa_build_builtin_va_list (void);
static bool xtensa_return_in_memory (const_tree, const_tree);
static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
                                         gimple_seq *);
static rtx xtensa_function_value (const_tree, const_tree, bool);
static void xtensa_init_builtins (void);
static tree xtensa_fold_builtin (tree, tree, bool);
static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void xtensa_va_start (tree, rtx);

static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
  REG_ALLOC_ORDER;

/* This macro generates the assembly code for function exit,
   on machines that need it.  If FUNCTION_EPILOGUE is not defined
   then individual return instructions are generated for each
   return statement.  Args are same as for FUNCTION_PROLOGUE.  */

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue

/* These hooks specify assembly directives for creating certain kinds
   of integer object.  */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xtensa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xtensa_function_value
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB xtensa_return_in_msb

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xtensa_init_builtins
#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD xtensa_secondary_reload

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS)

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_tls_referenced_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Functions to test Xtensa immediate operand validity.  */

bool
xtensa_simm8 (HOST_WIDE_INT v)
{
  return v >= -128 && v <= 127;
}


bool
xtensa_simm8x256 (HOST_WIDE_INT v)
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}


bool
xtensa_simm12b (HOST_WIDE_INT v)
{
  return v >= -2048 && v <= 2047;
}


static bool
xtensa_uimm8 (HOST_WIDE_INT v)
{
  return v >= 0 && v <= 255;
}


static bool
xtensa_uimm8x2 (HOST_WIDE_INT v)
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}


static bool
xtensa_uimm8x4 (HOST_WIDE_INT v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}

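/* For example, xtensa_simm8 accepts -128..127, xtensa_simm8x256 accepts
   multiples of 256 in -32768..32512, and xtensa_simm12b accepts
   -2048..2047; the unsigned variants accept 0..255, even values in
   0..510, and multiples of 4 in 0..1020, respectively.  These ranges
   appear to correspond to the ADDI/ADDMI/MOVI immediates and to the
   byte, halfword, and word load/store offset fields, scaled by the
   access size.  */
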
static bool
xtensa_b4const (HOST_WIDE_INT v)
{
  switch (v)
    {
    case -1:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return true;
    }
  return false;
}


bool
xtensa_b4const_or_zero (HOST_WIDE_INT v)
{
  if (v == 0)
    return true;
  return xtensa_b4const (v);
}


bool
xtensa_b4constu (HOST_WIDE_INT v)
{
  switch (v)
    {
    case 32768:
    case 65536:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return true;
    }
  return false;
}


bool
xtensa_mask_immediate (HOST_WIDE_INT v)
{
#define MAX_MASK_SIZE 16
  int mask_size;

  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
    {
      if ((v & 1) == 0)
	return false;
      v = v >> 1;
      if (v == 0)
	return true;
    }

  return false;
}

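/* xtensa_mask_immediate thus accepts only a contiguous run of 1 to 16
   low-order one bits: 0x1, 0x3, 0x7, ... 0xffff.  A value such as 0x5
   or 0xff00 is rejected because a zero bit is reached before the value
   is exhausted, and a 17-bit mask like 0x1ffff is rejected by the size
   limit.  */
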
/* This is just like the standard true_regnum() function except that it
   works even when reg_renumber is not initialized.  */

int
xt_true_regnum (rtx x)
{
  if (GET_CODE (x) == REG)
    {
      if (reg_renumber
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = xt_true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}

int
xtensa_valid_move (enum machine_mode mode, rtx *operands)
{
  /* Either the destination or source must be a register, and the
     MAC16 accumulator doesn't count.  */

  if (register_operand (operands[0], mode))
    {
      int dst_regnum = xt_true_regnum (operands[0]);

      /* The stack pointer can only be assigned with a MOVSP opcode.  */
      if (dst_regnum == STACK_POINTER_REGNUM)
	return (mode == SImode
		&& register_operand (operands[1], mode)
		&& !ACC_REG_P (xt_true_regnum (operands[1])));

      if (!ACC_REG_P (dst_regnum))
	return true;
    }
  if (register_operand (operands[1], mode))
    {
      int src_regnum = xt_true_regnum (operands[1]);
      if (!ACC_REG_P (src_regnum))
	return true;
    }
  return FALSE;
}

int
smalloffset_mem_p (rtx op)
{
  if (GET_CODE (op) == MEM)
    {
      rtx addr = XEXP (op, 0);
      if (GET_CODE (addr) == REG)
	return BASE_REG_P (addr, 0);
      if (GET_CODE (addr) == PLUS)
	{
	  rtx offset = XEXP (addr, 0);
	  HOST_WIDE_INT val;
	  if (GET_CODE (offset) != CONST_INT)
	    offset = XEXP (addr, 1);
	  if (GET_CODE (offset) != CONST_INT)
	    return FALSE;

	  val = INTVAL (offset);
	  return (val & 3) == 0 && (val >= 0 && val <= 60);
	}
    }
  return FALSE;
}

int
constantpool_address_p (rtx addr)
{
  rtx sym = addr;

  if (GET_CODE (addr) == CONST)
    {
      rtx offset;

      /* Only handle (PLUS (SYM, OFFSET)) form.  */
      addr = XEXP (addr, 0);
      if (GET_CODE (addr) != PLUS)
	return FALSE;

      /* Make sure the address is word aligned.  */
      offset = XEXP (addr, 1);
      if ((GET_CODE (offset) != CONST_INT)
	  || ((INTVAL (offset) & 3) != 0))
	return FALSE;

      sym = XEXP (addr, 0);
    }

  if ((GET_CODE (sym) == SYMBOL_REF)
      && CONSTANT_POOL_ADDRESS_P (sym))
    return TRUE;
  return FALSE;
}


int
constantpool_mem_p (rtx op)
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) == MEM)
    return constantpool_address_p (XEXP (op, 0));
  return FALSE;
}

/* Return TRUE if X is a thread-local symbol.  */

static bool
xtensa_tls_symbol_p (rtx x)
{
  if (! TARGET_HAVE_TLS)
    return false;

  return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
}


void
xtensa_extend_reg (rtx dst, rtx src)
{
  rtx temp = gen_reg_rtx (SImode);
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));

  /* Generate paradoxical subregs as needed so that the modes match.  */
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);

  emit_insn (gen_ashlsi3 (temp, src, shift));
  emit_insn (gen_ashrsi3 (dst, temp, shift));
}

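/* For instance, xtensa_extend_reg sign-extends an HImode SRC into DST
   with a shift count of 32 - 16 = 16, so the emitted sequence amounts
   to roughly "slli tmp, src, 16" followed by "srai dst, tmp, 16".  */
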
bool
xtensa_mem_offset (unsigned v, enum machine_mode mode)
{
  switch (mode)
    {
    case BLKmode:
      /* Handle the worst case for block moves.  See xtensa_expand_block_move
	 where we emit an optimized block move operation if the block can be
	 moved in < "move_ratio" pieces.  The worst case is when the block is
	 aligned but has a size of (3 mod 4) (does this happen?) so that the
	 last piece requires a byte load/store.  */
      return (xtensa_uimm8 (v)
	      && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));

    case QImode:
      return xtensa_uimm8 (v);

    case HImode:
      return xtensa_uimm8x2 (v);

    case DFmode:
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));

    default:
      break;
    }

  return xtensa_uimm8x4 (v);
}

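/* Example: xtensa_mem_offset accepts an SImode offset that is a
   multiple of 4 in 0..1020 (the default case); for DFmode both words
   must be reachable, which lowers the limit to 1016; BLKmode reserves
   extra headroom for the worst-case block move described above.  */
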
/* Make normal rtx_code into something we can index from an array.  */

static enum internal_test
map_test_to_internal_test (enum rtx_code test_code)
{
  enum internal_test test = ITEST_MAX;

  switch (test_code)
    {
    default:			break;
    case EQ:  test = ITEST_EQ;  break;
    case NE:  test = ITEST_NE;  break;
    case GT:  test = ITEST_GT;  break;
    case GE:  test = ITEST_GE;  break;
    case LT:  test = ITEST_LT;  break;
    case LE:  test = ITEST_LE;  break;
    case GTU: test = ITEST_GTU; break;
    case GEU: test = ITEST_GEU; break;
    case LTU: test = ITEST_LTU; break;
    case LEU: test = ITEST_LEU; break;
    }

  return test;
}

/* Generate the code to compare two integer values.  The return value is
   the comparison expression.  */

static rtx
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		    rtx cmp0, /* first operand to compare */
		    rtx cmp1, /* second operand to compare */
		    int *p_invert /* whether branch needs to reverse test */)
{
  struct cmp_info
  {
    enum rtx_code test_code;	/* test code to use in insn */
    bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* EQ  */
    { NE,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* NE  */

    { LT,  xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* GT  */
    { GE,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* GE  */
    { LT,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* LT  */
    { GE,  xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* LE  */

    { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* GTU */
    { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* GEU */
    { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* LTU */
    { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  gcc_assert (test != ITEST_MAX);

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
}

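/* Example: for (GT x 5) the table selects LT with const_add 1 and
   invert_const 1, so the constant becomes 6 and the caller branches on
   the inverse of (LT x 6), i.e. x >= 6, which is equivalent to x > 5
   for integers.  With two registers, (GT x y) is instead rewritten as
   (LT y x) with no inversion.  */
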
/* Generate the code to compare two float values.  The return value is
   the comparison expression.  */

static rtx
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		      rtx cmp0, /* first operand to compare */
		      rtx cmp1 /* second operand to compare */)
{
  rtx (*gen_fn) (rtx, rtx, rtx);
  rtx brtmp;
  int reverse_regs, invert;

  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    case UNEQ: reverse_regs = 0; invert = 0; gen_fn = gen_suneq_sf; break;
    case LTGT: reverse_regs = 0; invert = 1; gen_fn = gen_suneq_sf; break;
    case UNLE: reverse_regs = 0; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNGT: reverse_regs = 1; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNLT: reverse_regs = 0; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNGE: reverse_regs = 1; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNORDERED:
      reverse_regs = 0; invert = 0; gen_fn = gen_sunordered_sf; break;
    case ORDERED:
      reverse_regs = 0; invert = 1; gen_fn = gen_sunordered_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}

void
xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
{
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (type)
    {
    case CMP_DF:
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));

    case CMP_SI:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case CMP_SF:
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
						cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch.  */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  label2 = pc_rtx;

  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
						     label1,
						     label2)));
}

static rtx
gen_conditional_move (rtx cmp)
{
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];

  if (branch_type == CMP_SI)
    {
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* If not comparing against zero, emit a comparison (subtract).  */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: gcc_unreachable ();
		}
	    }

	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);

  return 0;
}

int
xtensa_expand_conditional_move (rtx *operands, int isflt)
{
  rtx cmp;
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);

  if (!(cmp = gen_conditional_move (operands[1])))
    return 0;

  if (isflt)
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsfcc_internal0
	      : gen_movsfcc_internal1);
  else
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsicc_internal0
	      : gen_movsicc_internal1);

  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
		     operands[2], operands[3], cmp));
  return 1;
}


int
xtensa_expand_scc (rtx *operands)
{
  rtx dest = operands[0];
  rtx cmp = operands[1];
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);

  if (!(cmp = gen_conditional_move (cmp)))
    return 0;

  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));

  gen_fn = (branch_type == CMP_SI
	    ? gen_movsicc_internal0
	    : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
  return 1;
}

/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE is
   for the output, i.e., the input operands are twice as big as MODE.  */

void
xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
{
  switch (GET_CODE (operands[1]))
    {
    case REG:
      operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
      break;

    case MEM:
      operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
      operands[2] = adjust_address (operands[1], mode, 0);
      break;

    case CONST_INT:
    case CONST_DOUBLE:
      split_double (operands[1], &operands[2], &operands[3]);
      break;

    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[0]))
    {
    case REG:
      operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
      break;

    case MEM:
      operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
      operands[0] = adjust_address (operands[0], mode, 0);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Emit insns to move operands[1] into operands[0].
   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx src = operands[1];

  if (CONSTANT_P (src)
      && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
    {
      rtx dst = operands[0];

      if (xtensa_tls_referenced_p (src))
	{
	  rtx addend = NULL;

	  if (GET_CODE (src) == CONST && GET_CODE (XEXP (src, 0)) == PLUS)
	    {
	      addend = XEXP (XEXP (src, 0), 1);
	      src = XEXP (XEXP (src, 0), 0);
	    }

	  src = xtensa_legitimize_tls_address (src);
	  if (addend)
	    {
	      src = gen_rtx_PLUS (mode, src, addend);
	      src = force_operand (src, dst);
	    }
	  emit_move_insn (dst, src);
	  return 1;
	}

      if (! TARGET_CONST16)
	{
	  src = force_const_mem (SImode, src);
	  operands[1] = src;
	}

      /* PC-relative loads are always SImode, and CONST16 is only
	 supported in the movsi pattern, so add a SUBREG for any other
	 (smaller) mode.  */

      if (mode != SImode)
	{
	  if (register_operand (dst, mode))
	    {
	      emit_move_insn (simplify_gen_subreg (SImode, dst, mode, 0), src);
	      return 1;
	    }
	  else
	    {
	      src = force_reg (SImode, src);
	      src = gen_lowpart_SUBREG (mode, src);
	      operands[1] = src;
	    }
	}
    }

  if (!(reload_in_progress | reload_completed)
      && !xtensa_valid_move (mode, operands))
    operands[1] = force_reg (mode, operands[1]);

  operands[1] = xtensa_copy_incoming_a7 (operands[1]);

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload, so we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}

static rtx
fixup_subreg_mem (rtx x)
{
  if (GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      rtx temp =
	gen_rtx_SUBREG (GET_MODE (x),
			reg_equiv_mem [REGNO (SUBREG_REG (x))],
			SUBREG_BYTE (x));
      x = alter_subreg (&temp);
    }
  return x;
}

/* Check if an incoming argument in a7 is expected to be used soon and
   if OPND is a register or register pair that includes a7.  If so,
   create a new pseudo and copy a7 into that pseudo at the very
   beginning of the function, followed by the special "set_frame_ptr"
   unspec_volatile insn.  The return value is either the original
   operand, if it is not a7, or the new pseudo containing a copy of
   the incoming argument.  This is necessary because the register
   allocator will ignore conflicts with a7 and may either assign some
   other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
   the incoming argument in a7.  By copying the argument out of a7 as
   the very first thing, and then immediately following that with an
   unspec_volatile to keep the scheduler away, we should avoid any
   problems.  Putting the set_frame_ptr insn at the beginning, with
   only the a7 copy before it, also makes it easier for the prologue
   expander to initialize the frame pointer after the a7 copy and to
   fix up the a7 copy to use the stack pointer instead of the frame
   pointer.  */

rtx
xtensa_copy_incoming_a7 (rtx opnd)
{
  rtx entry_insns = 0;
  rtx reg, tmp;
  enum machine_mode mode;

  if (!cfun->machine->need_a7_copy)
    return opnd;

  /* This function should never be called again once a7 has been copied.  */
  gcc_assert (!cfun->machine->set_frame_ptr_insn);

  mode = GET_MODE (opnd);

  /* The operand using a7 may come in a later instruction, so just return
     the original operand if it doesn't use a7.  */
  reg = opnd;
  if (GET_CODE (reg) == SUBREG)
    {
      gcc_assert (SUBREG_BYTE (reg) == 0);
      reg = SUBREG_REG (reg);
    }
  if (GET_CODE (reg) != REG
      || REGNO (reg) > A7_REG
      || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
    return opnd;

  /* 1-word args will always be in a7; 2-word args in a6/a7.  */
  gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);

  cfun->machine->need_a7_copy = false;

  /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
     create the REG for a7 so that hard_frame_pointer_rtx is not used.  */

  start_sequence ();
  tmp = gen_reg_rtx (mode);

  switch (mode)
    {
    case DFmode:
    case DImode:
      /* Copy the value out of A7 here but keep the first word in A6 until
	 after the set_frame_ptr insn.  Otherwise, the register allocator
	 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
	 value.  */
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
				     gen_raw_REG (SImode, A7_REG)));
      break;
    case SFmode:
      emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case SImode:
      emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case HImode:
      emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case QImode:
      emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    default:
      gcc_unreachable ();
    }

  cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());

  /* For DF and DI mode arguments, copy the incoming value in A6 now.  */
  if (mode == DFmode || mode == DImode)
    emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
				   gen_rtx_REG (SImode, A7_REG - 1)));
  entry_insns = get_insns ();
  end_sequence ();

  if (cfun->machine->vararg_a7)
    {
      /* This is called from within builtin_saveregs, which will insert the
	 saveregs code at the function entry, ahead of anything placed at
	 the function entry now.  Instead, save the sequence to be inserted
	 at the beginning of the saveregs code.  */
      cfun->machine->vararg_a7_copy = entry_insns;
    }
  else
    {
      /* Put entry_insns after the NOTE that starts the function.  If
	 this is inside a start_sequence, make the outer-level insn
	 chain current, so the code is placed at the start of the
	 function.  */
      push_topmost_sequence ();
      /* Do not use entry_of_function() here.  This is called from within
	 expand_function_start, when the CFG still holds GIMPLE.  */
      emit_insn_after (entry_insns, get_insns ());
      pop_topmost_sequence ();
    }

  return tmp;
}

/* Try to expand a block move operation to a sequence of RTL move
   instructions.  If not optimizing, or if the block size is not a
   constant, or if the block is too large, the expansion fails and GCC
   falls back to calling memcpy().

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

int
xtensa_expand_block_move (rtx *operands)
{
  static const enum machine_mode mode_from_align[] =
  {
    VOIDmode, QImode, HImode, VOIDmode, SImode,
  };

  rtx dst_mem = operands[0];
  rtx src_mem = operands[1];
  HOST_WIDE_INT bytes, align;
  int num_pieces, move_ratio;
  rtx temp[2];
  enum machine_mode mode[2];
  int amount[2];
  bool active[2];
  int phase = 0;
  int next;
  int offset_ld = 0;
  int offset_st = 0;
  rtx x;

  /* If this is not a fixed size move, just call memcpy.  */
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
    return 0;

  bytes = INTVAL (operands[2]);
  align = INTVAL (operands[3]);

  /* Anything to move?  */
  if (bytes <= 0)
    return 0;

  if (align > MOVE_MAX)
    align = MOVE_MAX;

  /* Decide whether to expand inline based on the optimization level.  */
  move_ratio = 4;
  if (optimize > 2)
    move_ratio = LARGEST_MOVE_RATIO;
  num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway.  */
  if (num_pieces > move_ratio)
    return 0;

  x = XEXP (dst_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      dst_mem = replace_equiv_address (dst_mem, x);
    }

  x = XEXP (src_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      src_mem = replace_equiv_address (src_mem, x);
    }

  active[0] = active[1] = false;

  do
    {
      next = phase;
      phase ^= 1;

      if (bytes > 0)
	{
	  int next_amount;

	  next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
	  next_amount = MIN (next_amount, align);

	  amount[next] = next_amount;
	  mode[next] = mode_from_align[next_amount];
	  temp[next] = gen_reg_rtx (mode[next]);

	  x = adjust_address (src_mem, mode[next], offset_ld);
	  emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));

	  offset_ld += next_amount;
	  bytes -= next_amount;
	  active[next] = true;
	}

      if (active[phase])
	{
	  active[phase] = false;

	  x = adjust_address (dst_mem, mode[phase], offset_st);
	  emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));

	  offset_st += amount[phase];
	}
    }
  while (active[next]);

  return 1;
}

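/* The do/while loop above software-pipelines the copy: each iteration
   loads the next piece into one of two temporaries while storing the
   piece loaded on the previous iteration.  For a 6-byte copy with
   2-byte alignment this yields roughly

       load  t0 <- [src+0]
       load  t1 <- [src+2];  store [dst+0] <- t0
       load  t0 <- [src+4];  store [dst+2] <- t1
                             store [dst+4] <- t0

   with HImode used for every piece.  */
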
void
xtensa_expand_nonlocal_goto (rtx *operands)
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line.  */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
		     0, VOIDmode, 2,
		     containing_fp, Pmode,
		     goto_handler, Pmode);
}


static struct machine_function *
xtensa_init_machine_status (void)
{
  return GGC_CNEW (struct machine_function);
}

/* Shift VAL of mode MODE left by COUNT bits.  */

static inline rtx
xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
{
  val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
			     NULL_RTX, 1, OPTAB_DIRECT);
  return expand_simple_binop (SImode, ASHIFT, val, count,
			      NULL_RTX, 1, OPTAB_DIRECT);
}


/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode.  */

struct alignment_context
{
  rtx memsi;	  /* SI aligned memory location.  */
  rtx shift;	  /* Bit offset with regard to lsb.  */
  rtx modemask;	  /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
};


/* Initialize structure AC for word access to HI and QI mode memory.  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx byteoffset = NULL_RTX;
  bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
				   GEN_INT (-GET_MODE_SIZE (SImode)),
				   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      byteoffset = expand_simple_binop (Pmode, AND, addr,
					GEN_INT (GET_MODE_SIZE (SImode) - 1),
					NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Calculate shiftcount.  */
  if (TARGET_BIG_ENDIAN)
    {
      ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
      if (!aligned)
	ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
					 NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    {
      if (aligned)
	ac->shift = NULL_RTX;
      else
	ac->shift = byteoffset;
    }

  if (ac->shift != NULL_RTX)
    {
      /* Shift is the byte count, but we need the bitcount.  */
      ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
				       GEN_INT (BITS_PER_UNIT),
				       NULL_RTX, 1, OPTAB_DIRECT);
      ac->modemask = expand_simple_binop (SImode, ASHIFT,
					  GEN_INT (GET_MODE_MASK (mode)),
					  ac->shift,
					  NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    ac->modemask = GEN_INT (GET_MODE_MASK (mode));

  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
}

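/* Example (little-endian, alignment unknown): for a HImode MEM whose
   address turns out to have byteoffset 2, MEMSI covers the containing
   aligned word, SHIFT becomes 2 * 8 = 16, MODEMASK is 0xffff << 16 and
   MODEMASKI its complement, so the halfword occupies the upper half of
   the word.  */
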
/* Expand an atomic compare and swap operation for HImode and QImode.
   MEM is the memory location, CMP the old value to compare MEM with
   and NEW_RTX the value to set if CMP == MEM.  */

void
xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
{
  enum machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx tmp, cmpv, newv, val;
  rtx oldval = gen_reg_rtx (SImode);
  rtx res = gen_reg_rtx (SImode);
  rtx csloop = gen_label_rtx ();
  rtx csend = gen_label_rtx ();

  init_alignment_context (&ac, mem);

  if (ac.shift != NULL_RTX)
    {
      cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
      new_rtx = xtensa_expand_mask_and_shift (new_rtx, mode, ac.shift);
    }

  /* Load the surrounding word into VAL with the MEM value masked out.  */
  val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
						ac.modemaski, NULL_RTX, 1,
						OPTAB_DIRECT));
  emit_label (csloop);

  /* Patch CMP and NEW_RTX into VAL at correct position.  */
  cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
						 NULL_RTX, 1, OPTAB_DIRECT));
  newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
						 NULL_RTX, 1, OPTAB_DIRECT));

  /* Jump to end if we're done.  */
  emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
  emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);

  /* Check for changes outside mode.  */
  emit_move_insn (oldval, val);
  tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
			     val, 1, OPTAB_DIRECT);
  if (tmp != val)
    emit_move_insn (val, tmp);

  /* Loop internal if so.  */
  emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);

  emit_label (csend);

  /* Return the correct part of the bitfield.  */
  convert_move (target,
		(ac.shift == NULL_RTX ? res
		 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
					NULL_RTX, 1, OPTAB_DIRECT)),
		1);
}

/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
   the default expansion works fine for SImode).  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value
   MEM holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  */

void
xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
		      bool after)
{
  enum machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx csloop = gen_label_rtx ();
  rtx cmp, tmp;
  rtx old = gen_reg_rtx (SImode);
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = NULL_RTX;

  init_alignment_context (&ac, mem);

  /* Prepare values before the compare-and-swap loop.  */
  if (ac.shift != NULL_RTX)
    val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
  switch (code)
    {
    case PLUS:
    case MINUS:
      orig = gen_reg_rtx (SImode);
      convert_move (orig, val, 1);
      break;

    case SET:
    case IOR:
    case XOR:
      break;

    case MULT: /* NAND */
    case AND:
      /* val = "11..1<val>11..1" */
      val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
				 NULL_RTX, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  /* Load full word.  Subsequent loads are performed by S32C1I.  */
  cmp = force_reg (SImode, ac.memsi);

  emit_label (csloop);
  emit_move_insn (old, cmp);

  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, old, orig,
				 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
				 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
				 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, IOR, tmp, val,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    case AND:
    case IOR:
    case XOR:
      tmp = expand_simple_binop (SImode, code, old, val,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    case MULT: /* NAND */
      tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
				 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, AND, tmp, val,
				 new_rtx, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  if (tmp != new_rtx)
    emit_move_insn (new_rtx, tmp);
  emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new_rtx));
  emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);

  if (target)
    {
      tmp = (after ? new_rtx : cmp);
      convert_move (target,
		    (ac.shift == NULL_RTX ? tmp
		     : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
					    NULL_RTX, 1, OPTAB_DIRECT)),
		    1);
    }
}

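/* Both expansions above rely on the S32C1I-based
   sync_compare_and_swapsi pattern: the containing word is loaded once,
   the new field value is spliced in under MODEMASK, and the
   compare-and-swap is retried until no interfering change to the word
   is observed.  */
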
void
xtensa_setup_frame_addresses (void)
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
  cfun->machine->accesses_prev_frame = 1;

  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}

/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
   a comment showing where the end of the loop is.  However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there.  If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop).  If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop.  When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need to
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken.  */

void
xtensa_emit_loop_end (rtx insn, rtx *operands)
{
  char done = 0;

  for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	case BARRIER:
	  break;

	case CODE_LABEL:
	  output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
	  done = 1;
	  break;

	default:
	  {
	    rtx body = PATTERN (insn);

	    if (GET_CODE (body) == JUMP_INSN)
	      {
		output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
		done = 1;
	      }
	    else if ((GET_CODE (body) != USE)
		     && (GET_CODE (body) != CLOBBER))
	      done = 1;
	  }
	  break;
	}
    }

  output_asm_insn ("# loop end for %0", operands);
}

char *
xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
{
  static char result[64];
  enum rtx_code code;
  const char *op;

  code = GET_CODE (operands[3]);
  switch (code)
    {
    case EQ:	op = inverted ? "ne" : "eq"; break;
    case NE:	op = inverted ? "eq" : "ne"; break;
    case LT:	op = inverted ? "ge" : "lt"; break;
    case GE:	op = inverted ? "lt" : "ge"; break;
    case LTU:	op = inverted ? "geu" : "ltu"; break;
    case GEU:	op = inverted ? "ltu" : "geu"; break;
    default:	gcc_unreachable ();
    }

  if (immed)
    {
      if (INTVAL (operands[1]) == 0)
	sprintf (result, "b%sz%s\t%%0, %%2", op,
		 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
      else
	sprintf (result, "b%si\t%%0, %%d1, %%2", op);
    }
  else
    sprintf (result, "b%s\t%%0, %%1, %%2", op);

  return result;
}


char *
xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
{
  static char result[64];
  const char *op;

  switch (GET_CODE (operands[3]))
    {
    case EQ:	op = inverted ? "bs" : "bc"; break;
    case NE:	op = inverted ? "bc" : "bs"; break;
    default:	gcc_unreachable ();
    }

  if (immed)
    {
      unsigned bitnum = INTVAL (operands[1]) & 0x1f;
      operands[1] = GEN_INT (bitnum);
      sprintf (result, "b%si\t%%0, %%d1, %%2", op);
    }
  else
    sprintf (result, "b%s\t%%0, %%1, %%2", op);

  return result;
}


char *
xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
{
  static char result[64];
  enum rtx_code code;
  const char *op;

  code = GET_CODE (operands[4]);
  if (isbool)
    {
      switch (code)
	{
	case EQ:	op = inverted ? "t" : "f"; break;
	case NE:	op = inverted ? "f" : "t"; break;
	default:	gcc_unreachable ();
	}
    }
  else
    {
      switch (code)
	{
	case EQ:	op = inverted ? "nez" : "eqz"; break;
	case NE:	op = inverted ? "eqz" : "nez"; break;
	case LT:	op = inverted ? "gez" : "ltz"; break;
	case GE:	op = inverted ? "ltz" : "gez"; break;
	default:	gcc_unreachable ();
	}
    }

  sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
	   op, isfp ? ".s" : "", inverted ? 3 : 2);
  return result;
}

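/* Sample output from the emitters above (register and label names are
   only illustrative): xtensa_emit_branch can produce "beqz.n a2, .L5"
   (comparison with zero under TARGET_DENSITY), "bgei a3, 8, .L6"
   (immediate form), or "bltu a2, a3, .L7"; xtensa_emit_bit_branch
   produces "bbci"/"bbsi" forms such as "bbci a2, 3, .L8"; and
   xtensa_emit_movcc produces e.g. "moveqz a2, a4, a3" or
   "movf.s f0, f2, b0".  */
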
char *
xtensa_emit_call (int callop, rtx *operands)
{
  static char result[64];
  rtx tgt = operands[callop];

  if (GET_CODE (tgt) == CONST_INT)
    sprintf (result, "call8\t0x%lx", INTVAL (tgt));
  else if (register_operand (tgt, VOIDmode))
    sprintf (result, "callx8\t%%%d", callop);
  else
    sprintf (result, "call8\t%%%d", callop);

  return result;
}

bool
xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
{
  /* Allow constant pool addresses.  */
  if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && ! TARGET_CONST16 && constantpool_address_p (addr)
      && ! xtensa_tls_referenced_p (addr))
    return true;

  while (GET_CODE (addr) == SUBREG)
    addr = SUBREG_REG (addr);

  /* Allow base registers.  */
  if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
    return true;

  /* Check for "register + offset" addressing.  */
  if (GET_CODE (addr) == PLUS)
    {
      rtx xplus0 = XEXP (addr, 0);
      rtx xplus1 = XEXP (addr, 1);
      enum rtx_code code0;
      enum rtx_code code1;

      while (GET_CODE (xplus0) == SUBREG)
	xplus0 = SUBREG_REG (xplus0);
      code0 = GET_CODE (xplus0);

      while (GET_CODE (xplus1) == SUBREG)
	xplus1 = SUBREG_REG (xplus1);
      code1 = GET_CODE (xplus1);

      /* Swap operands if necessary so the register is first.  */
      if (code0 != REG && code1 == REG)
	{
	  xplus0 = XEXP (addr, 1);
	  xplus1 = XEXP (addr, 0);
	  code0 = GET_CODE (xplus0);
	  code1 = GET_CODE (xplus1);
	}

      if (code0 == REG && BASE_REG_P (xplus0, strict)
	  && code1 == CONST_INT
	  && xtensa_mem_offset (INTVAL (xplus1), mode))
	return true;
    }

  return false;
}

/* Construct the SYMBOL_REF for the _TLS_MODULE_BASE_ symbol.  */

static GTY(()) rtx xtensa_tls_module_base_symbol;

static rtx
xtensa_tls_module_base (void)
{
  if (! xtensa_tls_module_base_symbol)
    {
      xtensa_tls_module_base_symbol =
	gen_rtx_SYMBOL_REF (Pmode, "_TLS_MODULE_BASE_");
      SYMBOL_REF_FLAGS (xtensa_tls_module_base_symbol)
	|= TLS_MODEL_GLOBAL_DYNAMIC << SYMBOL_FLAG_TLS_SHIFT;
    }

  return xtensa_tls_module_base_symbol;
}


static rtx
xtensa_call_tls_desc (rtx sym, rtx *retp)
{
  rtx fn, arg, a10, call_insn, insns;

  start_sequence ();
  fn = gen_reg_rtx (Pmode);
  arg = gen_reg_rtx (Pmode);
  a10 = gen_rtx_REG (Pmode, 10);

  emit_insn (gen_tls_func (fn, sym));
  emit_insn (gen_tls_arg (arg, sym));
  emit_move_insn (a10, arg);
  call_insn = emit_call_insn (gen_tls_call (a10, fn, sym, const1_rtx));
  CALL_INSN_FUNCTION_USAGE (call_insn)
    = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_USE (VOIDmode, a10),
			 CALL_INSN_FUNCTION_USAGE (call_insn));
  insns = get_insns ();
  end_sequence ();

  *retp = a10;
  return insns;
}


static rtx
xtensa_legitimize_tls_address (rtx x)
{
  unsigned int model = SYMBOL_REF_TLS_MODEL (x);
  rtx dest, tp, ret, modbase, base, addend, insns;

  dest = gen_reg_rtx (Pmode);
  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      insns = xtensa_call_tls_desc (x, &ret);
      emit_libcall_block (insns, dest, ret, x);
      break;

    case TLS_MODEL_LOCAL_DYNAMIC:
      base = gen_reg_rtx (Pmode);
      modbase = xtensa_tls_module_base ();
      insns = xtensa_call_tls_desc (modbase, &ret);
      emit_libcall_block (insns, base, ret, modbase);
      addend = force_reg (SImode, gen_sym_DTPOFF (x));
      emit_insn (gen_addsi3 (dest, base, addend));
      break;

    case TLS_MODEL_INITIAL_EXEC:
    case TLS_MODEL_LOCAL_EXEC:
      tp = gen_reg_rtx (SImode);
      emit_insn (gen_load_tp (tp));
      addend = force_reg (SImode, gen_sym_TPOFF (x));
      emit_insn (gen_addsi3 (dest, tp, addend));
      break;

    default:
      gcc_unreachable ();
    }

  return dest;
}

da1f39e4
BW
1841rtx
1842xtensa_legitimize_address (rtx x,
1843 rtx oldx ATTRIBUTE_UNUSED,
1844 enum machine_mode mode)
1845{
6a7a462c
BW
1846 if (xtensa_tls_symbol_p (x))
1847 return xtensa_legitimize_tls_address (x);
1848
da1f39e4
BW
1849 if (GET_CODE (x) == PLUS)
1850 {
1851 rtx plus0 = XEXP (x, 0);
1852 rtx plus1 = XEXP (x, 1);
1853
1854 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1855 {
1856 plus0 = XEXP (x, 1);
1857 plus1 = XEXP (x, 0);
1858 }
1859
1860 /* Try to split up the offset to use an ADDMI instruction. */
1861 if (GET_CODE (plus0) == REG
1862 && GET_CODE (plus1) == CONST_INT
1863 && !xtensa_mem_offset (INTVAL (plus1), mode)
1864 && !xtensa_simm8 (INTVAL (plus1))
1865 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1866 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1867 {
1868 rtx temp = gen_reg_rtx (Pmode);
1869 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1870 emit_insn (gen_rtx_SET (Pmode, temp,
1871 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1872 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1873 }
1874 }
1875
1876 return NULL_RTX;
1877}
1878
1879
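/* Worked example (illustrative): for an SImode load from BASE + 0x1234,
   the offset fits neither the L32I offset field nor a plain ADDI, but it
   splits as 0x1200 + 0x34, where 0x1200 is a multiple of 256 within ADDMI
   range and 0x34 is a valid L32I offset.  The code above therefore
   rewrites the address as

       temp = BASE + 0x1200           (ADDMI)
       ...  = *(temp + 0x34)          (L32I with a small offset)

   with `temp' standing in for the pseudo created by gen_reg_rtx.  */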
6a7a462c
BW
1880/* Helper for xtensa_tls_referenced_p. */
1881
1882static int
1883xtensa_tls_referenced_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1884{
1885 if (GET_CODE (*x) == SYMBOL_REF)
1886 return SYMBOL_REF_TLS_MODEL (*x) != 0;
1887
1888 /* Ignore TLS references that have already been legitimized. */
1889 if (GET_CODE (*x) == UNSPEC)
1890 {
1891 switch (XINT (*x, 1))
1892 {
1893 case UNSPEC_TPOFF:
1894 case UNSPEC_DTPOFF:
1895 case UNSPEC_TLS_FUNC:
1896 case UNSPEC_TLS_ARG:
1897 case UNSPEC_TLS_CALL:
1898 return -1;
1899 default:
1900 break;
1901 }
1902 }
1903
1904 return 0;
1905}
1906
1907
1908/* Return TRUE if X contains any TLS symbol references. */
1909
1910bool
1911xtensa_tls_referenced_p (rtx x)
1912{
1913 if (! TARGET_HAVE_TLS)
1914 return false;
1915
1916 return for_each_rtx (&x, xtensa_tls_referenced_p_1, NULL);
1917}
1918
1919
b0c6e48f 1920/* Return the debugger register number to use for 'regno'. */
03984308
BW
1921
1922int
ffbc8796 1923xtensa_dbx_register_number (int regno)
03984308
BW
1924{
1925 int first = -1;
633e4eb4
BW
1926
1927 if (GP_REG_P (regno))
1928 {
1929 regno -= GP_REG_FIRST;
1930 first = 0;
1931 }
1932 else if (BR_REG_P (regno))
1933 {
1934 regno -= BR_REG_FIRST;
1935 first = 16;
1936 }
1937 else if (FP_REG_P (regno))
1938 {
1939 regno -= FP_REG_FIRST;
b0c6e48f 1940 first = 48;
633e4eb4 1941 }
03984308
BW
1942 else if (ACC_REG_P (regno))
1943 {
b0c6e48f
BW
1944 first = 0x200; /* Start of Xtensa special registers. */
1945 regno = 16; /* ACCLO is special register 16. */
03984308
BW
1946 }
1947
1948 /* When optimizing, we sometimes get asked about pseudo-registers
638db43e 1949 that don't represent hard registers. Return 0 for these. */
03984308
BW
1950 if (first == -1)
1951 return 0;
1952
1953 return first + regno;
1954}
1955
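/* Examples of the resulting numbering (illustrative): a5 maps to 5,
   boolean register b2 maps to 16 + 2 = 18, floating-point register f3
   maps to 48 + 3 = 51, and the MAC16 ACCLO (special register 16) maps
   to 0x200 + 16 = 0x210.  */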
1956
1957/* Argument support functions. */
1958
1959/* Initialize CUMULATIVE_ARGS for a function. */
1960
1961void
997b8b4d 1962init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
03984308
BW
1963{
1964 cum->arg_words = 0;
997b8b4d 1965 cum->incoming = incoming;
03984308
BW
1966}
1967
ffbc8796 1968
03984308
BW
1969/* Advance the argument to the next argument position. */
1970
1971void
ffbc8796 1972function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
03984308
BW
1973{
1974 int words, max;
1975 int *arg_words;
1976
1977 arg_words = &cum->arg_words;
1978 max = MAX_ARGS_IN_REGISTERS;
1979
1980 words = (((mode != BLKmode)
1981 ? (int) GET_MODE_SIZE (mode)
1982 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1983
85d91d5b
BW
1984 if (*arg_words < max
1985 && (targetm.calls.must_pass_in_stack (mode, type)
1986 || *arg_words + words > max))
03984308
BW
1987 *arg_words = max;
1988
1989 *arg_words += words;
1990}
1991
1992
1993/* Return an RTL expression containing the register for the given mode,
368ebcd6 1994 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
ffbc8796 1995 if this is an incoming argument to the current function. */
03984308
BW
1996
1997rtx
ffbc8796
BW
1998function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1999 int incoming_p)
03984308
BW
2000{
2001 int regbase, words, max;
2002 int *arg_words;
2003 int regno;
03984308
BW
2004
2005 arg_words = &cum->arg_words;
2006 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
2007 max = MAX_ARGS_IN_REGISTERS;
2008
2009 words = (((mode != BLKmode)
2010 ? (int) GET_MODE_SIZE (mode)
2011 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2012
2013 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
822e895c 2014 {
d2348985 2015 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
822e895c
BW
2016 *arg_words = (*arg_words + align - 1) & -align;
2017 }
03984308
BW
2018
2019 if (*arg_words + words > max)
2020 return (rtx)0;
2021
2022 regno = regbase + *arg_words;
03984308 2023
997b8b4d
BW
2024 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
2025 cfun->machine->need_a7_copy = true;
03984308 2026
997b8b4d 2027 return gen_rtx_REG (mode, regno);
03984308
BW
2028}
2029
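/* Worked example (illustrative, assuming the windowed ABI where incoming
   arguments start at a2): with four argument words already assigned, a
   two-word `long long' gets regno = a2 + 4 and is passed in the a6/a7
   pair; with five words already assigned it no longer fits (5 + 2 > 6),
   so the function returns 0 and the value is passed on the stack.  A
   16-byte-aligned type additionally rounds *arg_words up to a multiple
   of four words before the register is chosen.  */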
2030
d2348985
BW
2031int
2032function_arg_boundary (enum machine_mode mode, tree type)
2033{
2034 unsigned int alignment;
2035
2036 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2037 if (alignment < PARM_BOUNDARY)
2038 alignment = PARM_BOUNDARY;
2039 if (alignment > STACK_BOUNDARY)
2040 alignment = STACK_BOUNDARY;
2041 return alignment;
2042}
2043
2044
6e5ff6e7 2045static bool
586de218 2046xtensa_return_in_msb (const_tree valtype)
6e5ff6e7
BW
2047{
2048 return (TARGET_BIG_ENDIAN
2049 && AGGREGATE_TYPE_P (valtype)
2050 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
2051}
2052
2053
03984308 2054void
ffbc8796 2055override_options (void)
03984308
BW
2056{
2057 int regno;
2058 enum machine_mode mode;
2059
2060 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2061 error ("boolean registers required for the floating-point option");
2062
638db43e 2063 /* Set up array giving whether a given register can hold a given mode. */
03984308
BW
2064 for (mode = VOIDmode;
2065 mode != MAX_MACHINE_MODE;
2066 mode = (enum machine_mode) ((int) mode + 1))
2067 {
2068 int size = GET_MODE_SIZE (mode);
0a2aaacc 2069 enum mode_class mclass = GET_MODE_CLASS (mode);
03984308
BW
2070
2071 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2072 {
2073 int temp;
2074
2075 if (ACC_REG_P (regno))
f42f5a1b 2076 temp = (TARGET_MAC16
0a2aaacc 2077 && (mclass == MODE_INT) && (size <= UNITS_PER_WORD));
03984308
BW
2078 else if (GP_REG_P (regno))
2079 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
2080 else if (FP_REG_P (regno))
2081 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
2082 else if (BR_REG_P (regno))
2083 temp = (TARGET_BOOLEANS && (mode == CCmode));
2084 else
2085 temp = FALSE;
2086
2087 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
2088 }
2089 }
2090
2091 init_machine_status = xtensa_init_machine_status;
03984308 2092
f42f5a1b
BW
2093 /* Check PIC settings. PIC is only supported when using L32R
2094 instructions, and some targets need to always use PIC. */
2095 if (flag_pic && TARGET_CONST16)
2096 error ("-f%s is not supported with CONST16 instructions",
2097 (flag_pic > 1 ? "PIC" : "pic"));
2098 else if (XTENSA_ALWAYS_PIC)
2099 {
2100 if (TARGET_CONST16)
2101 error ("PIC is required but not supported with CONST16 instructions");
2102 flag_pic = 1;
2103 }
2104 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
2105 if (flag_pic > 1)
03984308 2106 flag_pic = 1;
166b25dc
BW
2107 if (flag_pic && !flag_pie)
2108 flag_shlib = 1;
87c8b4be
CT
2109
2110 /* Hot/cold partitioning does not work on this architecture because of
2111 its constant pools (the L32R load instruction cannot necessarily reach
2112 that far), so disable it. */
2113 if (flag_reorder_blocks_and_partition)
2114 {
2115 flag_reorder_blocks_and_partition = 0;
2116 flag_reorder_blocks = 1;
2117 }
03984308
BW
2118}
2119
2120
2121/* A C compound statement to output to stdio stream STREAM the
2122 assembler syntax for an instruction operand X. X is an RTL
2123 expression.
2124
2125 CODE is a value that can be used to specify one of several ways
2126 of printing the operand. It is used when identical operands
2127 must be printed differently depending on the context. CODE
2128 comes from the '%' specification that was used to request
2129 printing of the operand. If the specification was just '%DIGIT'
2130 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
2131 is the ASCII code for LTR.
2132
2133 If X is a register, this macro should print the register's name.
2134 The names can be found in an array 'reg_names' whose type is
2135 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
2136
2137 When the machine description has a specification '%PUNCT' (a '%'
2138 followed by a punctuation character), this macro is called with
2139 a null pointer for X and the punctuation character for CODE.
2140
2141 'a', 'c', 'l', and 'n' are reserved.
633e4eb4 2142
03984308
BW
2143 The Xtensa specific codes are:
2144
2145 'd' CONST_INT, print as signed decimal
2146 'x' CONST_INT, print as signed hexadecimal
2147 'K' CONST_INT, print number of bits in mask for EXTUI
2148 'R' CONST_INT, print (X & 0x1f)
2149 'L' CONST_INT, print ((32 - X) & 0x1f)
2150 'D' REG, print second register of double-word register operand
2151 'N' MEM, print address of next word following a memory operand
2152 'v' MEM, if memory reference is volatile, output a MEMW before it
f42f5a1b
BW
2153 't' any constant, add "@h" suffix for top 16 bits
2154 'b' any constant, add "@l" suffix for bottom 16 bits
03984308
BW
2155*/
2156
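/* Examples (illustrative): with operands[2] == (const_int 0xff), "%K2"
   prints "8", the mask width for EXTUI; with operands[2] == (const_int 8),
   "%R2" prints "8" and "%L2" prints "24" (i.e. (32 - 8) & 0x1f), the pair
   used for matching left/right shift amounts.  */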
2157static void
ffbc8796 2158printx (FILE *file, signed int val)
03984308 2159{
ffbc8796 2160 /* Print a hexadecimal value in a nice way. */
03984308
BW
2161 if ((val > -0xa) && (val < 0xa))
2162 fprintf (file, "%d", val);
2163 else if (val < 0)
2164 fprintf (file, "-0x%x", -val);
2165 else
2166 fprintf (file, "0x%x", val);
2167}
2168
2169
2170void
ffbc8796 2171print_operand (FILE *file, rtx x, int letter)
03984308 2172{
f42f5a1b 2173 if (!x)
03984308
BW
2174 error ("PRINT_OPERAND null pointer");
2175
f42f5a1b 2176 switch (letter)
03984308 2177 {
f42f5a1b
BW
2178 case 'D':
2179 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2180 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
2181 else
2182 output_operand_lossage ("invalid %%D value");
2183 break;
03984308 2184
f42f5a1b
BW
2185 case 'v':
2186 if (GET_CODE (x) == MEM)
2187 {
2188 /* For a volatile memory reference, emit a MEMW before the
2189 load or store. */
66e58b33 2190 if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
f42f5a1b
BW
2191 fprintf (file, "memw\n\t");
2192 }
2193 else
2194 output_operand_lossage ("invalid %%v value");
2195 break;
03984308 2196
f42f5a1b
BW
2197 case 'N':
2198 if (GET_CODE (x) == MEM
2199 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2200 {
2201 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
2202 output_address (XEXP (x, 0));
2203 }
2204 else
2205 output_operand_lossage ("invalid %%N value");
2206 break;
03984308 2207
f42f5a1b
BW
2208 case 'K':
2209 if (GET_CODE (x) == CONST_INT)
03984308 2210 {
f42f5a1b
BW
2211 int num_bits = 0;
2212 unsigned val = INTVAL (x);
2213 while (val & 1)
2214 {
2215 num_bits += 1;
2216 val = val >> 1;
2217 }
2218 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2219 fatal_insn ("invalid mask", x);
03984308 2220
f42f5a1b
BW
2221 fprintf (file, "%d", num_bits);
2222 }
2223 else
2224 output_operand_lossage ("invalid %%K value");
2225 break;
03984308 2226
f42f5a1b
BW
2227 case 'L':
2228 if (GET_CODE (x) == CONST_INT)
2229 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2230 else
2231 output_operand_lossage ("invalid %%L value");
2232 break;
03984308 2233
f42f5a1b
BW
2234 case 'R':
2235 if (GET_CODE (x) == CONST_INT)
2236 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2237 else
2238 output_operand_lossage ("invalid %%R value");
2239 break;
03984308 2240
f42f5a1b
BW
2241 case 'x':
2242 if (GET_CODE (x) == CONST_INT)
2243 printx (file, INTVAL (x));
2244 else
2245 output_operand_lossage ("invalid %%x value");
2246 break;
03984308 2247
f42f5a1b
BW
2248 case 'd':
2249 if (GET_CODE (x) == CONST_INT)
2250 fprintf (file, "%ld", INTVAL (x));
2251 else
2252 output_operand_lossage ("invalid %%d value");
2253 break;
03984308 2254
f42f5a1b
BW
2255 case 't':
2256 case 'b':
2257 if (GET_CODE (x) == CONST_INT)
2258 {
2259 printx (file, INTVAL (x));
2260 fputs (letter == 't' ? "@h" : "@l", file);
2261 }
2262 else if (GET_CODE (x) == CONST_DOUBLE)
2263 {
2264 REAL_VALUE_TYPE r;
2265 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2266 if (GET_MODE (x) == SFmode)
2267 {
2268 long l;
2269 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2270 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2271 }
2272 else
2273 output_operand_lossage ("invalid %%t/%%b value");
2274 }
2275 else if (GET_CODE (x) == CONST)
2276 {
2277 /* X must be a symbolic constant on ELF. Write an expression
2278 suitable for 'const16' that sets the high or low 16 bits. */
2279 if (GET_CODE (XEXP (x, 0)) != PLUS
2280 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2281 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2282 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2283 output_operand_lossage ("invalid %%t/%%b value");
2284 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2285 fputs (letter == 't' ? "@h" : "@l", file);
2286 /* There must be a non-alphanumeric character between 'h' or 'l'
2287 and the number. The '-' is added by print_operand() already. */
2288 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2289 fputs ("+", file);
2290 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2291 }
2292 else
633e4eb4 2293 {
f42f5a1b
BW
2294 output_addr_const (file, x);
2295 fputs (letter == 't' ? "@h" : "@l", file);
03984308
BW
2296 }
2297 break;
2298
2299 default:
f42f5a1b
BW
2300 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2301 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2302 else if (GET_CODE (x) == MEM)
2303 output_address (XEXP (x, 0));
2304 else if (GET_CODE (x) == CONST_INT)
2305 fprintf (file, "%ld", INTVAL (x));
2306 else
2307 output_addr_const (file, x);
03984308
BW
2308 }
2309}
2310
2311
2312/* A C compound statement to output to stdio stream STREAM the
2313 assembler syntax for an instruction operand that is a memory
fb49053f 2314 reference whose address is ADDR. ADDR is an RTL expression. */
03984308
BW
2315
2316void
ffbc8796 2317print_operand_address (FILE *file, rtx addr)
03984308
BW
2318{
2319 if (!addr)
2320 error ("PRINT_OPERAND_ADDRESS, null pointer");
2321
2322 switch (GET_CODE (addr))
2323 {
2324 default:
2325 fatal_insn ("invalid address", addr);
2326 break;
2327
2328 case REG:
2329 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2330 break;
2331
2332 case PLUS:
2333 {
2334 rtx reg = (rtx)0;
2335 rtx offset = (rtx)0;
2336 rtx arg0 = XEXP (addr, 0);
2337 rtx arg1 = XEXP (addr, 1);
2338
2339 if (GET_CODE (arg0) == REG)
2340 {
2341 reg = arg0;
2342 offset = arg1;
2343 }
2344 else if (GET_CODE (arg1) == REG)
2345 {
2346 reg = arg1;
2347 offset = arg0;
2348 }
2349 else
2350 fatal_insn ("no register in address", addr);
2351
2352 if (CONSTANT_P (offset))
2353 {
2354 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2355 output_addr_const (file, offset);
2356 }
2357 else
2358 fatal_insn ("address offset not a constant", addr);
2359 }
2360 break;
2361
2362 case LABEL_REF:
2363 case SYMBOL_REF:
2364 case CONST_INT:
2365 case CONST:
2366 output_addr_const (file, addr);
2367 break;
2368 }
2369}
2370
2371
da1f39e4
BW
2372bool
2373xtensa_output_addr_const_extra (FILE *fp, rtx x)
2374{
2375 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2376 {
2377 switch (XINT (x, 1))
2378 {
6a7a462c
BW
2379 case UNSPEC_TPOFF:
2380 output_addr_const (fp, XVECEXP (x, 0, 0));
2381 fputs ("@TPOFF", fp);
2382 return true;
2383 case UNSPEC_DTPOFF:
2384 output_addr_const (fp, XVECEXP (x, 0, 0));
2385 fputs ("@DTPOFF", fp);
2386 return true;
da1f39e4
BW
2387 case UNSPEC_PLT:
2388 if (flag_pic)
2389 {
2390 output_addr_const (fp, XVECEXP (x, 0, 0));
2391 fputs ("@PLT", fp);
2392 return true;
2393 }
2394 break;
2395 default:
2396 break;
2397 }
2398 }
2399 return false;
2400}
2401
2402
03984308 2403void
ffbc8796 2404xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
03984308
BW
2405{
2406 long value_long[2];
b216cd4a 2407 REAL_VALUE_TYPE r;
03984308 2408 int size;
74ed13f5 2409 rtx first, second;
03984308
BW
2410
2411 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2412
2413 switch (GET_MODE_CLASS (mode))
2414 {
2415 case MODE_FLOAT:
177b6be0 2416 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
03984308 2417
b216cd4a 2418 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
03984308
BW
2419 switch (mode)
2420 {
2421 case SFmode:
b216cd4a 2422 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
4575a800
BW
2423 if (HOST_BITS_PER_LONG > 32)
2424 value_long[0] &= 0xffffffff;
b216cd4a 2425 fprintf (file, "0x%08lx\n", value_long[0]);
03984308
BW
2426 break;
2427
2428 case DFmode:
b216cd4a 2429 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
4575a800
BW
2430 if (HOST_BITS_PER_LONG > 32)
2431 {
2432 value_long[0] &= 0xffffffff;
2433 value_long[1] &= 0xffffffff;
2434 }
b216cd4a
ZW
2435 fprintf (file, "0x%08lx, 0x%08lx\n",
2436 value_long[0], value_long[1]);
03984308
BW
2437 break;
2438
2439 default:
177b6be0 2440 gcc_unreachable ();
03984308
BW
2441 }
2442
2443 break;
2444
2445 case MODE_INT:
2446 case MODE_PARTIAL_INT:
2447 size = GET_MODE_SIZE (mode);
177b6be0 2448 switch (size)
03984308 2449 {
177b6be0 2450 case 4:
03984308
BW
2451 output_addr_const (file, x);
2452 fputs ("\n", file);
177b6be0
NS
2453 break;
2454
2455 case 8:
74ed13f5
BW
2456 split_double (x, &first, &second);
2457 output_addr_const (file, first);
03984308 2458 fputs (", ", file);
74ed13f5 2459 output_addr_const (file, second);
03984308 2460 fputs ("\n", file);
177b6be0
NS
2461 break;
2462
2463 default:
2464 gcc_unreachable ();
03984308 2465 }
03984308
BW
2466 break;
2467
2468 default:
177b6be0 2469 gcc_unreachable ();
03984308
BW
2470 }
2471}
2472
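/* Example of the resulting assembly (illustrative): for the SFmode
   constant 1.0 with label number 3 this emits

       .literal .LC3, 0x3f800000

   while a DImode or DFmode constant emits two comma-separated words on
   the same directive.  */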
2473
2474/* Return the bytes needed to compute the frame pointer from the current
638db43e 2475 stack pointer. */
03984308
BW
2476
2477#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2478#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2479
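/* For example (illustrative, assuming the usual 128-bit STACK_BOUNDARY):
   STACK_BYTES is 16 and XTENSA_STACK_ALIGN (40) == (40 + 15) & ~15 == 48.  */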
2480long
ffbc8796 2481compute_frame_size (int size)
03984308 2482{
ffbc8796 2483 /* Add space for the incoming static chain value. */
6de9cd9a 2484 if (cfun->static_chain_decl != NULL)
03984308
BW
2485 size += (1 * UNITS_PER_WORD);
2486
2487 xtensa_current_frame_size =
2488 XTENSA_STACK_ALIGN (size
38173d38 2489 + crtl->outgoing_args_size
03984308
BW
2490 + (WINDOW_SIZE * UNITS_PER_WORD));
2491 return xtensa_current_frame_size;
2492}
2493
2494
2495int
ffbc8796 2496xtensa_frame_pointer_required (void)
03984308
BW
2497{
2498 /* The code to expand builtin_frame_addr and builtin_return_addr
2499 currently uses the hard_frame_pointer instead of frame_pointer.
2500 This seems wrong but maybe it's necessary for other architectures.
638db43e 2501 This function is derived from the i386 code. */
03984308
BW
2502
2503 if (cfun->machine->accesses_prev_frame)
2504 return 1;
2505
2506 return 0;
2507}
2508
2509
7f0ee694
BW
2510/* Minimum frame = reg save area (4 words) plus static chain (1 word),
2511 rounded up so that the total size is a multiple of 128 bits. */
2512#define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2513
f42f5a1b 2514void
ffbc8796 2515xtensa_expand_prologue (void)
f42f5a1b
BW
2516{
2517 HOST_WIDE_INT total_size;
2518 rtx size_rtx;
4e6c2193 2519 rtx insn, note_rtx;
18dbd950 2520
f42f5a1b
BW
2521 total_size = compute_frame_size (get_frame_size ());
2522 size_rtx = GEN_INT (total_size);
18dbd950 2523
f42f5a1b 2524 if (total_size < (1 << (12+3)))
35a3be48 2525 insn = emit_insn (gen_entry (size_rtx));
03984308
BW
2526 else
2527 {
f42f5a1b
BW
2528 /* Use a8 as a temporary since a0-a7 may be live. */
2529 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
35a3be48 2530 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
f42f5a1b
BW
2531 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2532 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
4e6c2193 2533 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
03984308
BW
2534 }
2535
f42f5a1b 2536 if (frame_pointer_needed)
03984308 2537 {
997b8b4d 2538 if (cfun->machine->set_frame_ptr_insn)
03984308 2539 {
4e6c2193 2540 rtx first;
03984308 2541
997b8b4d
BW
2542 push_topmost_sequence ();
2543 first = get_insns ();
2544 pop_topmost_sequence ();
03984308 2545
f42f5a1b
BW
2546 /* For all instructions prior to set_frame_ptr_insn, replace
2547 hard_frame_pointer references with stack_pointer. */
2548 for (insn = first;
997b8b4d 2549 insn != cfun->machine->set_frame_ptr_insn;
f42f5a1b
BW
2550 insn = NEXT_INSN (insn))
2551 {
2552 if (INSN_P (insn))
20dca97b
BW
2553 {
2554 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2555 hard_frame_pointer_rtx,
2556 stack_pointer_rtx);
2557 df_insn_rescan (insn);
2558 }
f42f5a1b
BW
2559 }
2560 }
2561 else
4e6c2193
BW
2562 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2563 stack_pointer_rtx));
03984308 2564 }
4e6c2193
BW
2565
2566 /* Create a note to describe the CFA. Because this is only used to set
2567 DW_AT_frame_base for debug info, don't bother tracking changes through
2568 each instruction in the prologue. It just takes up space. */
2569 note_rtx = gen_rtx_SET (VOIDmode, (frame_pointer_needed
2570 ? hard_frame_pointer_rtx
2571 : stack_pointer_rtx),
2572 plus_constant (stack_pointer_rtx, -total_size));
2573 RTX_FRAME_RELATED_P (insn) = 1;
2574 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2575 note_rtx, REG_NOTES (insn));
03984308
BW
2576}
2577
2578
f42f5a1b 2579/* Clear variables at function end. */
03984308
BW
2580
2581void
ffbc8796
BW
2582xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2583 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
03984308 2584{
03984308
BW
2585 xtensa_current_frame_size = 0;
2586}
2587
2588
0c14a54d 2589rtx
ffbc8796 2590xtensa_return_addr (int count, rtx frame)
0c14a54d 2591{
7f0ee694 2592 rtx result, retaddr, curaddr, label;
0c14a54d
BW
2593
2594 if (count == -1)
f42f5a1b 2595 retaddr = gen_rtx_REG (Pmode, A0_REG);
0c14a54d
BW
2596 else
2597 {
2598 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2599 addr = memory_address (Pmode, addr);
2600 retaddr = gen_reg_rtx (Pmode);
2601 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2602 }
2603
2604 /* The 2 most-significant bits of the return address on Xtensa hold
2605 the register window size. To get the real return address, these
7f0ee694
BW
2606 bits must be replaced with the high bits from some address in the
2607 code. */
2608
2609 /* Get the 2 high bits of a local label in the code. */
2610 curaddr = gen_reg_rtx (Pmode);
2611 label = gen_label_rtx ();
2612 emit_label (label);
2613 LABEL_PRESERVE_P (label) = 1;
2614 emit_move_insn (curaddr, gen_rtx_LABEL_REF (Pmode, label));
2615 emit_insn (gen_lshrsi3 (curaddr, curaddr, GEN_INT (30)));
2616 emit_insn (gen_ashlsi3 (curaddr, curaddr, GEN_INT (30)));
2617
2618 /* Clear the 2 high bits of the return address. */
0c14a54d 2619 result = gen_reg_rtx (Pmode);
7f0ee694
BW
2620 emit_insn (gen_ashlsi3 (result, retaddr, GEN_INT (2)));
2621 emit_insn (gen_lshrsi3 (result, result, GEN_INT (2)));
2622
2623 /* Combine them to get the result. */
2624 emit_insn (gen_iorsi3 (result, result, curaddr));
0c14a54d
BW
2625 return result;
2626}
2627
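/* Worked example (illustrative addresses): if the saved return address is
   0x80001234 (the top two bits encode the caller's window increment) and
   the local label above lands at 0x40001000, then

       curaddr = (0x40001000 >> 30) << 30           = 0x40000000
       result  = ((0x80001234 << 2) >> 2) | curaddr = 0x40001234

   which is the real return address within the code region.  */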
2628
03984308 2629/* Create the va_list data type.
822e895c
BW
2630
2631 This structure is set up by __builtin_saveregs. The __va_reg field
2632 points to a stack-allocated region holding the contents of the
2633 incoming argument registers. The __va_ndx field is an index
2634 initialized to the position of the first unnamed (variable)
2635 argument. This same index is also used to address the arguments
2637 passed in memory. Thus, the __va_stk field is initialized to point
2638 to the position of the first argument in memory, offset to account
2639 both for the arguments passed in registers and for the size of the
2640 argument registers not being 16-byte aligned. E.g., there
2640 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2641 for the first stack argument to have the maximal alignment of 16
2642 bytes, so we offset the __va_stk address by 32 bytes so that
2643 __va_stk[32] references the first argument on the stack. */
03984308 2644
c35d187f
RH
2645static tree
2646xtensa_build_builtin_va_list (void)
03984308 2647{
540eaea8 2648 tree f_stk, f_reg, f_ndx, record, type_decl;
03984308 2649
540eaea8
BW
2650 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2651 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
03984308
BW
2652
2653 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2654 ptr_type_node);
2655 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2656 ptr_type_node);
2657 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2658 integer_type_node);
2659
2660 DECL_FIELD_CONTEXT (f_stk) = record;
2661 DECL_FIELD_CONTEXT (f_reg) = record;
2662 DECL_FIELD_CONTEXT (f_ndx) = record;
2663
540eaea8
BW
2664 TREE_CHAIN (record) = type_decl;
2665 TYPE_NAME (record) = type_decl;
03984308
BW
2666 TYPE_FIELDS (record) = f_stk;
2667 TREE_CHAIN (f_stk) = f_reg;
2668 TREE_CHAIN (f_reg) = f_ndx;
2669
2670 layout_type (record);
2671 return record;
2672}
2673
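/* The record built above corresponds to this C declaration (shown for
   illustration only); __va_stk is the memory argument area biased by -32
   bytes, __va_reg the spilled register argument area, and __va_ndx the
   byte index of the next argument:

       typedef struct __va_list_tag
       {
         void *__va_stk;
         void *__va_reg;
         int __va_ndx;
       } __builtin_va_list;

   (The -32 bias matches the adjustment applied in xtensa_va_start.)  */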
2674
2675/* Save the incoming argument registers on the stack. Returns the
638db43e 2676 address of the saved registers. */
03984308 2677
4c45af42 2678static rtx
ffbc8796 2679xtensa_builtin_saveregs (void)
03984308 2680{
e70312d4 2681 rtx gp_regs;
38173d38 2682 int arg_words = crtl->args.info.arg_words;
03984308 2683 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
03984308 2684
997b8b4d 2685 if (gp_left <= 0)
03984308
BW
2686 return const0_rtx;
2687
3bbc2af6 2688 /* Allocate the general-purpose register space. */
03984308
BW
2689 gp_regs = assign_stack_local
2690 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
540eaea8 2691 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
03984308
BW
2692
2693 /* Now store the incoming registers. */
997b8b4d
BW
2694 cfun->machine->need_a7_copy = true;
2695 cfun->machine->vararg_a7 = true;
e70312d4
BW
2696 move_block_from_reg (GP_ARG_FIRST + arg_words,
2697 adjust_address (gp_regs, BLKmode,
2698 arg_words * UNITS_PER_WORD),
2699 gp_left);
0d8442b8
BW
2700 gcc_assert (cfun->machine->vararg_a7_copy != 0);
2701 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
03984308
BW
2702
2703 return XEXP (gp_regs, 0);
2704}
2705
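/* Worked example (illustrative): in a varargs function declared as
   `f (int a, int b, ...)', arg_words is 2, so gp_left is 4 and the code
   above spills the four remaining incoming argument registers into words
   2..5 of the six-word stack block; the returned address is the start of
   that block, which xtensa_va_start then stores in __va_reg.  */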
2706
2707/* Implement `va_start' for varargs and stdarg. We look at the
638db43e 2708 current function to fill in an initial va_list. */
03984308 2709
d7bd8aeb 2710static void
ffbc8796 2711xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
03984308
BW
2712{
2713 tree f_stk, stk;
2714 tree f_reg, reg;
2715 tree f_ndx, ndx;
2716 tree t, u;
2717 int arg_words;
2718
38173d38 2719 arg_words = crtl->args.info.arg_words;
03984308
BW
2720
2721 f_stk = TYPE_FIELDS (va_list_type_node);
2722 f_reg = TREE_CHAIN (f_stk);
2723 f_ndx = TREE_CHAIN (f_reg);
2724
47a25a46 2725 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
fa1615d7
BW
2726 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2727 f_reg, NULL_TREE);
2728 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2729 f_ndx, NULL_TREE);
03984308
BW
2730
2731 /* Call __builtin_saveregs; save the result in __va_reg */
e70312d4
BW
2732 u = make_tree (sizetype, expand_builtin_saveregs ());
2733 u = fold_convert (ptr_type_node, u);
726a989a 2734 t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
03984308
BW
2735 TREE_SIDE_EFFECTS (t) = 1;
2736 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2737
822e895c 2738 /* Set the __va_stk member to ($arg_ptr - 32). */
03984308 2739 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
e70312d4 2740 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
726a989a 2741 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
03984308
BW
2742 TREE_SIDE_EFFECTS (t) = 1;
2743 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2744
822e895c
BW
2745 /* Set the __va_ndx member. If the first variable argument is on
2746 the stack, adjust __va_ndx by 2 words to account for the extra
2747 alignment offset for __va_stk. */
2748 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2749 arg_words += 2;
726a989a 2750 t = build2 (MODIFY_EXPR, integer_type_node, ndx,
f4d3e7fd 2751 build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
03984308
BW
2752 TREE_SIDE_EFFECTS (t) = 1;
2753 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2754}
2755
2756
2757/* Implement `va_arg'. */
2758
85d53c1d 2759static tree
726a989a
RB
2760xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
2761 gimple_seq *post_p ATTRIBUTE_UNUSED)
03984308
BW
2762{
2763 tree f_stk, stk;
2764 tree f_reg, reg;
2765 tree f_ndx, ndx;
85d53c1d
RH
2766 tree type_size, array, orig_ndx, addr, size, va_size, t;
2767 tree lab_false, lab_over, lab_false2;
08b0dc1b
RH
2768 bool indirect;
2769
2770 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2771 if (indirect)
2772 type = build_pointer_type (type);
03984308 2773
3712281f
BW
2774 /* Handle complex values as separate real and imaginary parts. */
2775 if (TREE_CODE (type) == COMPLEX_TYPE)
2776 {
85d53c1d 2777 tree real_part, imag_part;
3712281f 2778
85d53c1d
RH
2779 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2780 pre_p, NULL);
2781 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
3712281f 2782
fa1615d7
BW
2783 imag_part = xtensa_gimplify_va_arg_expr (unshare_expr (valist),
2784 TREE_TYPE (type),
85d53c1d
RH
2785 pre_p, NULL);
2786 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
3712281f 2787
47a25a46 2788 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
3712281f
BW
2789 }
2790
03984308
BW
2791 f_stk = TYPE_FIELDS (va_list_type_node);
2792 f_reg = TREE_CHAIN (f_stk);
2793 f_ndx = TREE_CHAIN (f_reg);
2794
fa1615d7
BW
2795 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist,
2796 f_stk, NULL_TREE);
2797 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2798 f_reg, NULL_TREE);
2799 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2800 f_ndx, NULL_TREE);
03984308 2801
85d53c1d
RH
2802 type_size = size_in_bytes (type);
2803 va_size = round_up (type_size, UNITS_PER_WORD);
2804 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
8be56275 2805
03984308 2806
822e895c 2807 /* First align __va_ndx if necessary for this arg:
03984308 2808
85d53c1d 2809 orig_ndx = (AP).__va_ndx;
822e895c 2810 if (__alignof__ (TYPE) > 4 )
85d53c1d 2811 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
822e895c 2812 & -__alignof__ (TYPE)); */
03984308 2813
85d53c1d
RH
2814 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2815
03984308
BW
2816 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2817 {
d2348985 2818 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
85d53c1d 2819
fa1615d7 2820 t = build2 (PLUS_EXPR, integer_type_node, unshare_expr (orig_ndx),
f4d3e7fd
BW
2821 build_int_cst (integer_type_node, align - 1));
2822 t = build2 (BIT_AND_EXPR, integer_type_node, t,
2823 build_int_cst (integer_type_node, -align));
fa1615d7 2824 gimplify_assign (unshare_expr (orig_ndx), t, pre_p);
03984308
BW
2825 }
2826
2827
2828 /* Increment __va_ndx to point past the argument:
2829
85d53c1d 2830 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
03984308 2831
85d53c1d 2832 t = fold_convert (integer_type_node, va_size);
47a25a46 2833 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
fa1615d7 2834 gimplify_assign (unshare_expr (ndx), t, pre_p);
03984308
BW
2835
2836
2837 /* Check if the argument is in registers:
2838
bcf88f9b 2839 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
fe984136 2840 && !must_pass_in_stack (type))
ffbc8796 2841 __array = (AP).__va_reg; */
03984308 2842
85d53c1d 2843 array = create_tmp_var (ptr_type_node, NULL);
03984308 2844
85d53c1d 2845 lab_over = NULL;
fe984136 2846 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
bcf88f9b 2847 {
85d53c1d
RH
2848 lab_false = create_artificial_label ();
2849 lab_over = create_artificial_label ();
2850
fa1615d7 2851 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (ndx),
f4d3e7fd
BW
2852 build_int_cst (integer_type_node,
2853 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2854 t = build3 (COND_EXPR, void_type_node, t,
2855 build1 (GOTO_EXPR, void_type_node, lab_false),
2856 NULL_TREE);
85d53c1d
RH
2857 gimplify_and_add (t, pre_p);
2858
fa1615d7 2859 gimplify_assign (unshare_expr (array), reg, pre_p);
85d53c1d 2860
47a25a46 2861 t = build1 (GOTO_EXPR, void_type_node, lab_over);
85d53c1d
RH
2862 gimplify_and_add (t, pre_p);
2863
47a25a46 2864 t = build1 (LABEL_EXPR, void_type_node, lab_false);
85d53c1d 2865 gimplify_and_add (t, pre_p);
bcf88f9b 2866 }
03984308 2867
85d53c1d 2868
03984308
BW
2869 /* ...otherwise, the argument is on the stack (never split between
2870 registers and the stack -- change __va_ndx if necessary):
2871
2872 else
2873 {
822e895c
BW
2874 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2875 (AP).__va_ndx = 32 + __va_size (TYPE);
03984308 2876 __array = (AP).__va_stk;
ffbc8796 2877 } */
03984308 2878
85d53c1d 2879 lab_false2 = create_artificial_label ();
03984308 2880
fa1615d7 2881 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (orig_ndx),
f4d3e7fd
BW
2882 build_int_cst (integer_type_node,
2883 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2884 t = build3 (COND_EXPR, void_type_node, t,
2885 build1 (GOTO_EXPR, void_type_node, lab_false2),
2886 NULL_TREE);
85d53c1d 2887 gimplify_and_add (t, pre_p);
03984308 2888
fa1615d7 2889 t = size_binop (PLUS_EXPR, unshare_expr (va_size), size_int (32));
85d53c1d 2890 t = fold_convert (integer_type_node, t);
fa1615d7 2891 gimplify_assign (unshare_expr (ndx), t, pre_p);
03984308 2892
47a25a46 2893 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
85d53c1d 2894 gimplify_and_add (t, pre_p);
03984308 2895
726a989a 2896 gimplify_assign (array, stk, pre_p);
85d53c1d
RH
2897
2898 if (lab_over)
2899 {
47a25a46 2900 t = build1 (LABEL_EXPR, void_type_node, lab_over);
85d53c1d
RH
2901 gimplify_and_add (t, pre_p);
2902 }
8be56275 2903
03984308
BW
2904
2905 /* Given the base array pointer (__array) and index to the subsequent
2906 argument (__va_ndx), find the address:
2907
8be56275
BW
2908 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2909 ? sizeof (TYPE)
2910 : __va_size (TYPE))
03984308
BW
2911
2912 The results are endian-dependent because values smaller than one word
ffbc8796 2913 are aligned differently. */
03984308 2914
633e4eb4 2915
85d91d5b 2916 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
8be56275 2917 {
fa1615d7 2918 t = fold_build2 (GE_EXPR, boolean_type_node, unshare_expr (type_size),
e70312d4 2919 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
fa1615d7
BW
2920 t = fold_build3 (COND_EXPR, sizetype, t, unshare_expr (va_size),
2921 unshare_expr (type_size));
85d53c1d 2922 size = t;
8be56275 2923 }
85d53c1d 2924 else
fa1615d7 2925 size = unshare_expr (va_size);
85d53c1d 2926
fa1615d7 2927 t = fold_convert (sizetype, unshare_expr (ndx));
f4d3e7fd 2928 t = build2 (MINUS_EXPR, sizetype, t, size);
fa1615d7 2929 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (array), t);
03984308 2930
85d53c1d 2931 addr = fold_convert (build_pointer_type (type), addr);
08b0dc1b 2932 if (indirect)
d6e9821f
RH
2933 addr = build_va_arg_indirect_ref (addr);
2934 return build_va_arg_indirect_ref (addr);
03984308
BW
2935}
2936
2937
09fa8841
BW
2938/* Builtins. */
2939
2940enum xtensa_builtin
2941{
2942 XTENSA_BUILTIN_UMULSIDI3,
6a7a462c
BW
2943 XTENSA_BUILTIN_THREAD_POINTER,
2944 XTENSA_BUILTIN_SET_THREAD_POINTER,
09fa8841
BW
2945 XTENSA_BUILTIN_max
2946};
2947
2948
2949static void
2950xtensa_init_builtins (void)
2951{
6a7a462c 2952 tree ftype, decl;
09fa8841
BW
2953
2954 ftype = build_function_type_list (unsigned_intDI_type_node,
2955 unsigned_intSI_type_node,
2956 unsigned_intSI_type_node, NULL_TREE);
2957
6a7a462c
BW
2958 decl = add_builtin_function ("__builtin_umulsidi3", ftype,
2959 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
2960 "__umulsidi3", NULL_TREE);
2961 TREE_NOTHROW (decl) = 1;
2962 TREE_READONLY (decl) = 1;
2963
2964 if (TARGET_THREADPTR)
2965 {
2966 ftype = build_function_type (ptr_type_node, void_list_node);
2967 decl = add_builtin_function ("__builtin_thread_pointer", ftype,
2968 XTENSA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
2969 NULL, NULL_TREE);
2970 TREE_READONLY (decl) = 1;
2971 TREE_NOTHROW (decl) = 1;
2972
2973 ftype = build_function_type_list (void_type_node, ptr_type_node,
2974 NULL_TREE);
2975 decl = add_builtin_function ("__builtin_set_thread_pointer", ftype,
2976 XTENSA_BUILTIN_SET_THREAD_POINTER,
2977 BUILT_IN_MD, NULL, NULL_TREE);
2978 TREE_NOTHROW (decl) = 1;
2979 }
09fa8841
BW
2980}
2981
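/* User-level usage sketch for the builtins registered above (for
   illustration only, not part of this file).  __builtin_umulsidi3 is a
   full 32x32->64 unsigned multiply that either folds to a plain multiply
   or ends up calling the real __umulsidi3 (see xtensa_fold_builtin and
   xtensa_expand_builtin below); __builtin_thread_pointer is only
   available when TARGET_THREADPTR is set:

       unsigned long long
       mulu64 (unsigned a, unsigned b)
       {
         return __builtin_umulsidi3 (a, b);
       }

       void *
       current_thread_pointer (void)
       {
         return __builtin_thread_pointer ();
       }

   (Both functions here are hypothetical user code.)  */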
2982
2983static tree
2984xtensa_fold_builtin (tree fndecl, tree arglist, bool ignore ATTRIBUTE_UNUSED)
2985{
2986 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2987 tree arg0, arg1;
2988
6a7a462c 2989 switch (fcode)
09fa8841 2990 {
6a7a462c 2991 case XTENSA_BUILTIN_UMULSIDI3:
09fa8841
BW
2992 arg0 = TREE_VALUE (arglist);
2993 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2994 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2995 || TARGET_MUL32_HIGH)
2996 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
2997 fold_convert (unsigned_intDI_type_node, arg0),
2998 fold_convert (unsigned_intDI_type_node, arg1));
6a7a462c
BW
2999 break;
3000
3001 case XTENSA_BUILTIN_THREAD_POINTER:
3002 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3003 break;
3004
3005 default:
3006 internal_error ("bad builtin code");
3007 break;
09fa8841
BW
3008 }
3009
09fa8841
BW
3010 return NULL;
3011}
3012
3013
3014static rtx
3015xtensa_expand_builtin (tree exp, rtx target,
3016 rtx subtarget ATTRIBUTE_UNUSED,
3017 enum machine_mode mode ATTRIBUTE_UNUSED,
3018 int ignore)
3019{
ec3643e8 3020 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
09fa8841 3021 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6a7a462c
BW
3022 rtx arg;
3023
3024 switch (fcode)
3025 {
3026 case XTENSA_BUILTIN_UMULSIDI3:
3027 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
3028 __umulsidi3 function when the Xtensa configuration can directly
3029 implement it. If not, just call the function. */
3030 return expand_call (exp, target, ignore);
09fa8841 3031
6a7a462c
BW
3032 case XTENSA_BUILTIN_THREAD_POINTER:
3033 if (!target || !register_operand (target, Pmode))
3034 target = gen_reg_rtx (Pmode);
3035 emit_insn (gen_load_tp (target));
3036 return target;
09fa8841 3037
6a7a462c
BW
3038 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3039 arg = expand_normal (CALL_EXPR_ARG (exp, 0));
3040 if (!register_operand (arg, Pmode))
3041 arg = copy_to_mode_reg (Pmode, arg);
3042 emit_insn (gen_set_tp (arg));
3043 return const0_rtx;
3044
3045 default:
3046 internal_error ("bad builtin code");
3047 }
09fa8841
BW
3048 return NULL_RTX;
3049}
3050
3051
a8cacfd2 3052enum reg_class
0a2aaacc 3053xtensa_preferred_reload_class (rtx x, enum reg_class rclass, int isoutput)
a8cacfd2 3054{
89f6025d 3055 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
a8cacfd2
BW
3056 return NO_REGS;
3057
89f6025d
BW
3058 /* Don't use the stack pointer or hard frame pointer for reloads!
3059 The hard frame pointer would normally be OK except that it may
3060 briefly hold an incoming argument in the prologue, and reload
3061 won't know that it is live because the hard frame pointer is
3062 treated specially. */
3063
0a2aaacc 3064 if (rclass == AR_REGS || rclass == GR_REGS)
89f6025d 3065 return RL_REGS;
a8cacfd2 3066
0a2aaacc 3067 return rclass;
a8cacfd2
BW
3068}
3069
3070
03984308 3071enum reg_class
37fbe8a3
BW
3072xtensa_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
3073 enum machine_mode mode, secondary_reload_info *sri)
03984308
BW
3074{
3075 int regno;
3076
37fbe8a3 3077 if (in_p && constantpool_mem_p (x))
03984308 3078 {
37fbe8a3 3079 if (rclass == FP_REGS)
89f6025d 3080 return RL_REGS;
37fbe8a3
BW
3081
3082 if (mode == QImode)
3083 sri->icode = CODE_FOR_reloadqi_literal;
3084 else if (mode == HImode)
3085 sri->icode = CODE_FOR_reloadhi_literal;
03984308
BW
3086 }
3087
37fbe8a3 3088 regno = xt_true_regnum (x);
03984308 3089 if (ACC_REG_P (regno))
0a2aaacc
KG
3090 return ((rclass == GR_REGS || rclass == RL_REGS) ? NO_REGS : RL_REGS);
3091 if (rclass == ACC_REG)
89f6025d 3092 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
03984308
BW
3093
3094 return NO_REGS;
3095}
3096
3097
3098void
ffbc8796 3099order_regs_for_local_alloc (void)
03984308
BW
3100{
3101 if (!leaf_function_p ())
3102 {
3103 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
3104 FIRST_PSEUDO_REGISTER * sizeof (int));
3105 }
3106 else
3107 {
3108 int i, num_arg_regs;
3109 int nxt = 0;
3110
3bbc2af6
KH
3111 /* Use the AR registers in increasing order (skipping a0 and a1)
3112 but save the incoming argument registers as a last resort. */
38173d38 3113 num_arg_regs = crtl->args.info.arg_words;
03984308
BW
3114 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
3115 num_arg_regs = MAX_ARGS_IN_REGISTERS;
3116 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
3117 reg_alloc_order[nxt++] = i + num_arg_regs;
3118 for (i = 0; i < num_arg_regs; i++)
3119 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
3120
3bbc2af6 3121 /* List the coprocessor registers in order. */
985d0d50
BW
3122 for (i = 0; i < BR_REG_NUM; i++)
3123 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
3124
3bbc2af6 3125 /* List the FP registers in order for now. */
03984308
BW
3126 for (i = 0; i < 16; i++)
3127 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
3128
638db43e 3129 /* GCC requires that we list *all* the registers.... */
03984308
BW
3130 reg_alloc_order[nxt++] = 0; /* a0 = return address */
3131 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
3132 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
3133 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
3134
03984308
BW
3135 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
3136 }
3137}
3138
3139
01abf342
BW
3140/* Some Xtensa targets support multiple bss sections. If the section
3141 name ends with ".bss", add SECTION_BSS to the flags. */
3142
3143static unsigned int
ffbc8796 3144xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
01abf342
BW
3145{
3146 unsigned int flags = default_section_type_flags (decl, name, reloc);
3147 const char *suffix;
3148
3149 suffix = strrchr (name, '.');
3150 if (suffix && strcmp (suffix, ".bss") == 0)
3151 {
3152 if (!decl || (TREE_CODE (decl) == VAR_DECL
3153 && DECL_INITIAL (decl) == NULL_TREE))
3154 flags |= SECTION_BSS; /* @nobits */
3155 else
d4ee4d25 3156 warning (0, "only uninitialized variables can be placed in a "
01abf342
BW
3157 ".bss section");
3158 }
3159
3160 return flags;
3161}
3162
3163
b64a1b53
RH
3164/* The literal pool stays with the function. */
3165
d6b5193b 3166static section *
ffbc8796
BW
3167xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
3168 rtx x ATTRIBUTE_UNUSED,
3169 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53 3170{
d6b5193b 3171 return function_section (current_function_decl);
b64a1b53 3172}
fb49053f 3173
ffbc8796 3174
3c50106f
RH
3175/* Compute a (partial) cost for rtx X. Return true if the complete
3176 cost has been computed, and false if subexpressions should be
3177 scanned. In either case, *TOTAL contains the cost result. */
3178
3179static bool
f40751dd
JH
3180xtensa_rtx_costs (rtx x, int code, int outer_code, int *total,
3181 bool speed ATTRIBUTE_UNUSED)
3c50106f
RH
3182{
3183 switch (code)
3184 {
3185 case CONST_INT:
3186 switch (outer_code)
3187 {
3188 case SET:
3189 if (xtensa_simm12b (INTVAL (x)))
3190 {
3191 *total = 4;
3192 return true;
3193 }
3194 break;
3195 case PLUS:
3196 if (xtensa_simm8 (INTVAL (x))
3197 || xtensa_simm8x256 (INTVAL (x)))
3198 {
3199 *total = 0;
3200 return true;
3201 }
3202 break;
3203 case AND:
3204 if (xtensa_mask_immediate (INTVAL (x)))
3205 {
3206 *total = 0;
3207 return true;
3208 }
3209 break;
3210 case COMPARE:
3211 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
3212 {
3213 *total = 0;
3214 return true;
3215 }
3216 break;
3217 case ASHIFT:
3218 case ASHIFTRT:
3219 case LSHIFTRT:
3220 case ROTATE:
3221 case ROTATERT:
3bbc2af6 3222 /* No way to tell if X is the 2nd operand so be conservative. */
3c50106f
RH
3223 default: break;
3224 }
3225 if (xtensa_simm12b (INTVAL (x)))
3226 *total = 5;
f42f5a1b
BW
3227 else if (TARGET_CONST16)
3228 *total = COSTS_N_INSNS (2);
3c50106f
RH
3229 else
3230 *total = 6;
3231 return true;
3232
3233 case CONST:
3234 case LABEL_REF:
3235 case SYMBOL_REF:
f42f5a1b
BW
3236 if (TARGET_CONST16)
3237 *total = COSTS_N_INSNS (2);
3238 else
3239 *total = 5;
3c50106f
RH
3240 return true;
3241
3242 case CONST_DOUBLE:
f42f5a1b
BW
3243 if (TARGET_CONST16)
3244 *total = COSTS_N_INSNS (4);
3245 else
3246 *total = 7;
3c50106f
RH
3247 return true;
3248
3249 case MEM:
3250 {
3251 int num_words =
3252 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
3253
3254 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
3255 *total = COSTS_N_INSNS (num_words);
3256 else
3257 *total = COSTS_N_INSNS (2*num_words);
3258 return true;
3259 }
3260
3261 case FFS:
09fa8841 3262 case CTZ:
3c50106f
RH
3263 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
3264 return true;
3265
09fa8841
BW
3266 case CLZ:
3267 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
3268 return true;
3269
3c50106f
RH
3270 case NOT:
3271 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
3272 return true;
3273
3274 case AND:
3275 case IOR:
3276 case XOR:
3277 if (GET_MODE (x) == DImode)
3278 *total = COSTS_N_INSNS (2);
3279 else
3280 *total = COSTS_N_INSNS (1);
3281 return true;
3282
3283 case ASHIFT:
3284 case ASHIFTRT:
3285 case LSHIFTRT:
3286 if (GET_MODE (x) == DImode)
3287 *total = COSTS_N_INSNS (50);
3288 else
3289 *total = COSTS_N_INSNS (1);
3290 return true;
3291
3292 case ABS:
3293 {
3294 enum machine_mode xmode = GET_MODE (x);
3295 if (xmode == SFmode)
3296 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3297 else if (xmode == DFmode)
3298 *total = COSTS_N_INSNS (50);
3299 else
3300 *total = COSTS_N_INSNS (4);
3301 return true;
3302 }
3303
3304 case PLUS:
3305 case MINUS:
3306 {
3307 enum machine_mode xmode = GET_MODE (x);
3308 if (xmode == SFmode)
3309 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3310 else if (xmode == DFmode || xmode == DImode)
3311 *total = COSTS_N_INSNS (50);
3312 else
3313 *total = COSTS_N_INSNS (1);
3314 return true;
3315 }
3316
3317 case NEG:
3318 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3319 return true;
3320
3321 case MULT:
3322 {
3323 enum machine_mode xmode = GET_MODE (x);
3324 if (xmode == SFmode)
3325 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
09fa8841 3326 else if (xmode == DFmode)
3c50106f 3327 *total = COSTS_N_INSNS (50);
09fa8841
BW
3328 else if (xmode == DImode)
3329 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3c50106f
RH
3330 else if (TARGET_MUL32)
3331 *total = COSTS_N_INSNS (4);
3332 else if (TARGET_MAC16)
3333 *total = COSTS_N_INSNS (16);
3334 else if (TARGET_MUL16)
3335 *total = COSTS_N_INSNS (12);
3336 else
3337 *total = COSTS_N_INSNS (50);
3338 return true;
3339 }
3340
3341 case DIV:
3342 case MOD:
3343 {
3344 enum machine_mode xmode = GET_MODE (x);
3345 if (xmode == SFmode)
3346 {
3347 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3348 return true;
3349 }
3350 else if (xmode == DFmode)
3351 {
3352 *total = COSTS_N_INSNS (50);
3353 return true;
3354 }
3355 }
3bbc2af6 3356 /* Fall through. */
3c50106f
RH
3357
3358 case UDIV:
3359 case UMOD:
3360 {
3361 enum machine_mode xmode = GET_MODE (x);
3362 if (xmode == DImode)
3363 *total = COSTS_N_INSNS (50);
3364 else if (TARGET_DIV32)
3365 *total = COSTS_N_INSNS (32);
3366 else
3367 *total = COSTS_N_INSNS (50);
3368 return true;
3369 }
3370
3371 case SQRT:
3372 if (GET_MODE (x) == SFmode)
3373 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3374 else
3375 *total = COSTS_N_INSNS (50);
3376 return true;
3377
3378 case SMIN:
3379 case UMIN:
3380 case SMAX:
3381 case UMAX:
3382 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3383 return true;
3384
3385 case SIGN_EXTRACT:
3386 case SIGN_EXTEND:
3387 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3388 return true;
3389
3390 case ZERO_EXTRACT:
3391 case ZERO_EXTEND:
3392 *total = COSTS_N_INSNS (1);
3393 return true;
3394
3395 default:
3396 return false;
3397 }
3398}
3399
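/* Worked example (illustrative): a (const_int 100) appearing as the
   second operand of a PLUS costs 0 because it fits the signed 8-bit ADDI
   immediate, whereas (const_int 100000) fits neither simm8, simm8x256 nor
   simm12b and falls through to the generic constant cost
   (COSTS_N_INSNS (2) with CONST16, otherwise 6).  */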
bd5bd7ac
KH
3400/* Worker function for TARGET_RETURN_IN_MEMORY. */
3401
4c45af42 3402static bool
586de218 3403xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4c45af42
KH
3404{
3405 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3406 > 4 * UNITS_PER_WORD);
3407}
3408
e2b2d01e
AS
3409/* Worker function for TARGET_FUNCTION_VALUE. */
3410
3411rtx
3412xtensa_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
3413 bool outgoing)
3414{
3415 return gen_rtx_REG ((INTEGRAL_TYPE_P (valtype)
3416 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
3417 ? SImode : TYPE_MODE (valtype),
3418 outgoing ? GP_OUTGOING_RETURN : GP_RETURN);
3419}
7f0ee694
BW
3420
3421/* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3422 instruction with a minimal stack frame in order to get some free
3423 registers. Once the actual call target is known, the proper stack frame
3424 size is extracted from the ENTRY instruction at the target and the
3425 current frame is adjusted to match. The trampoline then transfers
3426 control to the instruction following the ENTRY at the target. Note:
3427 this assumes that the target begins with an ENTRY instruction. */
3428
3429void
3430xtensa_trampoline_template (FILE *stream)
3431{
3432 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3433
3434 fprintf (stream, "\t.begin no-transform\n");
3435 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);
3436
3437 if (use_call0)
3438 {
3439 /* Save the return address. */
3440 fprintf (stream, "\tmov\ta10, a0\n");
3441
3442 /* Use a CALL0 instruction to skip past the constants and in the
3443 process get the PC into A0. This allows PC-relative access to
3444 the constants without relying on L32R. */
3445 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3446 }
3447 else
3448 fprintf (stream, "\tj\t.Lskipconsts\n");
3449
3450 fprintf (stream, "\t.align\t4\n");
3451 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3452 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3453 fprintf (stream, ".Lskipconsts:\n");
3454
3455 /* Load the static chain and function address from the trampoline. */
3456 if (use_call0)
3457 {
3458 fprintf (stream, "\taddi\ta0, a0, 3\n");
3459 fprintf (stream, "\tl32i\ta9, a0, 0\n");
3460 fprintf (stream, "\tl32i\ta8, a0, 4\n");
3461 }
3462 else
3463 {
3464 fprintf (stream, "\tl32r\ta9, .Lchainval\n");
3465 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
3466 }
3467
3468 /* Store the static chain. */
3469 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);
3470
3471 /* Set the proper stack pointer value. */
3472 fprintf (stream, "\tl32i\ta9, a8, 0\n");
3473 fprintf (stream, "\textui\ta9, a9, %d, 12\n",
3474 TARGET_BIG_ENDIAN ? 8 : 12);
3475 fprintf (stream, "\tslli\ta9, a9, 3\n");
3476 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
3477 fprintf (stream, "\tsub\ta9, sp, a9\n");
3478 fprintf (stream, "\tmovsp\tsp, a9\n");
3479
3480 if (use_call0)
3481 /* Restore the return address. */
3482 fprintf (stream, "\tmov\ta0, a10\n");
3483
3484 /* Jump to the instruction following the ENTRY. */
3485 fprintf (stream, "\taddi\ta8, a8, 3\n");
3486 fprintf (stream, "\tjx\ta8\n");
3487
3488 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3489 if (use_call0)
3490 fprintf (stream, "\t.byte\t0\n");
3491 else
3492 fprintf (stream, "\tnop\n");
3493
3494 fprintf (stream, "\t.end no-transform\n");
3495}
3496
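/* Usage sketch (illustrative only, not part of this file): trampolines
   are materialized for nested functions whose address escapes and which
   need the static chain, e.g. in GNU C

       extern void apply (int (*fn) (int));

       void
       outer (int x)
       {
         int inner (int y) { return x + y; }
         apply (inner);
       }

   Taking the address of `inner' copies the template above onto the stack
   and calls xtensa_initialize_trampoline (below) to patch in the static
   chain and the real function address; `apply' is just a hypothetical
   callback consumer.  */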
3497
3498void
3499xtensa_initialize_trampoline (rtx addr, rtx func, rtx chain)
3500{
3501 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3502 int chain_off = use_call0 ? 12 : 8;
3503 int func_off = use_call0 ? 16 : 12;
3504 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, chain_off)), chain);
3505 emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, func_off)), func);
3506 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
3507 0, VOIDmode, 1, addr, Pmode);
3508}
3509
3510
e2500fed 3511#include "gt-xtensa.h"