03984308 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
2f83c7d6 10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "basic-block.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "insn-attr.h"
35#include "insn-codes.h"
36#include "recog.h"
37#include "output.h"
38#include "tree.h"
39#include "expr.h"
40#include "flags.h"
41#include "reload.h"
42#include "tm_p.h"
43#include "function.h"
44#include "toplev.h"
45#include "optabs.h"
46#include "libfuncs.h"
07232638 47#include "ggc.h"
48#include "target.h"
49#include "target-def.h"
540eaea8 50#include "langhooks.h"
85d53c1d 51#include "tree-gimple.h"
e70312d4 52#include "df.h"
85d53c1d 53
54
55/* Enumeration for all of the relational tests, so that we can build
56 arrays indexed by the test type, and not worry about the order
638db43e 57 of EQ, NE, etc. */
03984308 58
59enum internal_test
60{
61 ITEST_EQ,
62 ITEST_NE,
63 ITEST_GT,
64 ITEST_GE,
65 ITEST_LT,
66 ITEST_LE,
67 ITEST_GTU,
68 ITEST_GEU,
69 ITEST_LTU,
70 ITEST_LEU,
71 ITEST_MAX
72};
73
74/* Cached operands, and operator to compare for use in set/branch on
75 condition codes. */
76rtx branch_cmp[2];
77
78/* What type of branch to use. */
79enum cmp_type branch_type;
80
81/* Array giving truth value on whether or not a given hard register
82 can support a given mode. */
83char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
84
85/* Current frame size calculated by compute_frame_size. */
86unsigned xtensa_current_frame_size;
87
a46bbb5a 88/* Largest block move to handle in-line. */
89#define LARGEST_MOVE_RATIO 15
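/* Note: this ratio only takes effect at -O3 and above; see
   xtensa_expand_block_move, where it bounds the number of load/store
   pieces emitted inline before falling back to a memcpy() call.  */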
90
91/* Define the structure for the machine field in struct function. */
e2500fed 92struct machine_function GTY(())
93{
94 int accesses_prev_frame;
95 bool need_a7_copy;
96 bool vararg_a7;
97 rtx set_frame_ptr_insn;
98};
99
100/* Vector, indexed by hard register number, which contains 1 for a
101 register that is allowable in a candidate for leaf function
638db43e 102 treatment. */
103
104const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105{
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 1, 1, 1,
108 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 1
110};
111
112/* Map hard register number to register class */
113const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
114{
115 RL_REGS, SP_REG, RL_REGS, RL_REGS,
116 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
117 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
118 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
119 AR_REGS, AR_REGS, BR_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 ACC_REG,
125};
126
127static enum internal_test map_test_to_internal_test (enum rtx_code);
128static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
129static rtx gen_float_relational (enum rtx_code, rtx, rtx);
130static rtx gen_conditional_move (rtx);
131static rtx fixup_subreg_mem (rtx);
ffbc8796 132static struct machine_function * xtensa_init_machine_status (void);
586de218 133static bool xtensa_return_in_msb (const_tree);
134static void printx (FILE *, signed int);
135static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
4c45af42 136static rtx xtensa_builtin_saveregs (void);
137static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
138 int) ATTRIBUTE_UNUSED;
139static section *xtensa_select_rtx_section (enum machine_mode, rtx,
140 unsigned HOST_WIDE_INT);
ffbc8796 141static bool xtensa_rtx_costs (rtx, int, int, int *);
c35d187f 142static tree xtensa_build_builtin_va_list (void);
586de218 143static bool xtensa_return_in_memory (const_tree, const_tree);
85d53c1d 144static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
145static void xtensa_init_builtins (void);
146static tree xtensa_fold_builtin (tree, tree, bool);
147static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
b64a1b53 148
149static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
150 REG_ALLOC_ORDER;
151\f
152
153/* This macro generates the assembly code for function exit,
154 on machines that need it. If FUNCTION_EPILOGUE is not defined
155 then individual return instructions are generated for each
156 return statement. Args are the same as for FUNCTION_PROLOGUE. */
157
158#undef TARGET_ASM_FUNCTION_EPILOGUE
159#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
160
161/* These hooks specify assembly directives for creating certain kinds
162 of integer object. */
163
164#undef TARGET_ASM_ALIGNED_SI_OP
165#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
166
167#undef TARGET_ASM_SELECT_RTX_SECTION
168#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
03984308 169
170#undef TARGET_DEFAULT_TARGET_FLAGS
171#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
172
173#undef TARGET_RTX_COSTS
174#define TARGET_RTX_COSTS xtensa_rtx_costs
175#undef TARGET_ADDRESS_COST
176#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 177
178#undef TARGET_BUILD_BUILTIN_VA_LIST
179#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
180
4c45af42 181#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 182#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
4c45af42 183#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 184#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
4c45af42 185#undef TARGET_PROMOTE_PROTOTYPES
586de218 186#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
4c45af42 187
188#undef TARGET_RETURN_IN_MEMORY
189#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
42ba5130 190#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 191#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
192#undef TARGET_MUST_PASS_IN_STACK
193#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
194
195#undef TARGET_EXPAND_BUILTIN_SAVEREGS
196#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
197#undef TARGET_GIMPLIFY_VA_ARG_EXPR
198#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
4c45af42 199
200#undef TARGET_RETURN_IN_MSB
201#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
202
203#undef TARGET_INIT_BUILTINS
204#define TARGET_INIT_BUILTINS xtensa_init_builtins
205#undef TARGET_FOLD_BUILTIN
206#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
207#undef TARGET_EXPAND_BUILTIN
208#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
209
b64a1b53 210struct gcc_target targetm = TARGET_INITIALIZER;
03984308 211
212\f
213/* Functions to test Xtensa immediate operand validity. */
03984308 214
215bool
216xtensa_simm8 (HOST_WIDE_INT v)
217{
218 return v >= -128 && v <= 127;
219}
220
221
222bool
223xtensa_simm8x256 (HOST_WIDE_INT v)
224{
225 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
226}
227
228
229bool
230xtensa_simm12b (HOST_WIDE_INT v)
231{
232 return v >= -2048 && v <= 2047;
233}
234
235
236static bool
237xtensa_uimm8 (HOST_WIDE_INT v)
238{
239 return v >= 0 && v <= 255;
240}
241
242
243static bool
244xtensa_uimm8x2 (HOST_WIDE_INT v)
245{
246 return (v & 1) == 0 && (v >= 0 && v <= 510);
247}
248
249
250static bool
251xtensa_uimm8x4 (HOST_WIDE_INT v)
252{
253 return (v & 3) == 0 && (v >= 0 && v <= 1020);
254}
255
256
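/* The values accepted by xtensa_b4const below are those encodable in the
   b4const immediate field of the conditional branch instructions (BEQI,
   BNEI, BLTI, BGEI); xtensa_b4const_or_zero additionally accepts zero,
   which is handled by the branch-on-zero forms (see xtensa_emit_branch),
   and xtensa_b4constu covers the unsigned b4constu encoding.  */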
257static bool
258xtensa_b4const (HOST_WIDE_INT v)
259{
260 switch (v)
261 {
262 case -1:
263 case 1:
264 case 2:
265 case 3:
266 case 4:
267 case 5:
268 case 6:
269 case 7:
270 case 8:
271 case 10:
272 case 12:
273 case 16:
274 case 32:
275 case 64:
276 case 128:
277 case 256:
8eb1bc5c 278 return true;
03984308 279 }
8eb1bc5c 280 return false;
281}
282
03984308 283
284bool
285xtensa_b4const_or_zero (HOST_WIDE_INT v)
03984308 286{
287 if (v == 0)
288 return true;
289 return xtensa_b4const (v);
290}
291
03984308 292
293bool
294xtensa_b4constu (HOST_WIDE_INT v)
295{
296 switch (v)
297 {
298 case 32768:
299 case 65536:
300 case 2:
301 case 3:
302 case 4:
303 case 5:
304 case 6:
305 case 7:
306 case 8:
307 case 10:
308 case 12:
309 case 16:
310 case 32:
311 case 64:
312 case 128:
313 case 256:
8eb1bc5c 314 return true;
03984308 315 }
8eb1bc5c 316 return false;
317}
318
03984308 319
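/* Return true if V is a contiguous mask of low-order one bits (0x1, 0x3,
   0x7, ... up to 0xffff), i.e. an AND mask that can be implemented with a
   single EXTUI instruction.  */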
320bool
321xtensa_mask_immediate (HOST_WIDE_INT v)
03984308 322{
323#define MAX_MASK_SIZE 16
324 int mask_size;
03984308 325
326 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
327 {
328 if ((v & 1) == 0)
329 return false;
330 v = v >> 1;
331 if (v == 0)
332 return true;
333 }
03984308 334
8eb1bc5c 335 return false;
336}
337
03984308 338
03984308 339/* This is just like the standard true_regnum() function except that it
638db43e 340 works even when reg_renumber is not initialized. */
341
342int
ffbc8796 343xt_true_regnum (rtx x)
344{
345 if (GET_CODE (x) == REG)
346 {
347 if (reg_renumber
348 && REGNO (x) >= FIRST_PSEUDO_REGISTER
349 && reg_renumber[REGNO (x)] >= 0)
350 return reg_renumber[REGNO (x)];
351 return REGNO (x);
352 }
353 if (GET_CODE (x) == SUBREG)
354 {
355 int base = xt_true_regnum (SUBREG_REG (x));
356 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
357 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
358 GET_MODE (SUBREG_REG (x)),
359 SUBREG_BYTE (x), GET_MODE (x));
360 }
361 return -1;
362}
363
364
03984308 365int
ffbc8796 366xtensa_valid_move (enum machine_mode mode, rtx *operands)
03984308 367{
368 /* Either the destination or source must be a register, and the
369 MAC16 accumulator doesn't count. */
370
371 if (register_operand (operands[0], mode))
372 {
373 int dst_regnum = xt_true_regnum (operands[0]);
374
638db43e 375 /* The stack pointer can only be assigned with a MOVSP opcode. */
376 if (dst_regnum == STACK_POINTER_REGNUM)
377 return (mode == SImode
378 && register_operand (operands[1], mode)
379 && !ACC_REG_P (xt_true_regnum (operands[1])));
380
381 if (!ACC_REG_P (dst_regnum))
382 return true;
383 }
3437320b 384 if (register_operand (operands[1], mode))
385 {
386 int src_regnum = xt_true_regnum (operands[1]);
387 if (!ACC_REG_P (src_regnum))
388 return true;
389 }
390 return FALSE;
391}
392
393
03984308 394int
ffbc8796 395smalloffset_mem_p (rtx op)
396{
397 if (GET_CODE (op) == MEM)
398 {
399 rtx addr = XEXP (op, 0);
400 if (GET_CODE (addr) == REG)
da1f39e4 401 return BASE_REG_P (addr, 0);
402 if (GET_CODE (addr) == PLUS)
403 {
404 rtx offset = XEXP (addr, 0);
8eb1bc5c 405 HOST_WIDE_INT val;
406 if (GET_CODE (offset) != CONST_INT)
407 offset = XEXP (addr, 1);
408 if (GET_CODE (offset) != CONST_INT)
409 return FALSE;
410
411 val = INTVAL (offset);
412 return (val & 3) == 0 && (val >= 0 && val <= 60);
413 }
414 }
415 return FALSE;
416}
417
418
03984308 419int
ffbc8796 420constantpool_address_p (rtx addr)
421{
422 rtx sym = addr;
423
424 if (GET_CODE (addr) == CONST)
425 {
426 rtx offset;
427
3bbc2af6 428 /* Only handle (PLUS (SYM, OFFSET)) form. */
429 addr = XEXP (addr, 0);
430 if (GET_CODE (addr) != PLUS)
431 return FALSE;
432
3bbc2af6 433 /* Make sure the address is word aligned. */
434 offset = XEXP (addr, 1);
435 if ((GET_CODE (offset) != CONST_INT)
436 || ((INTVAL (offset) & 3) != 0))
437 return FALSE;
438
439 sym = XEXP (addr, 0);
440 }
441
442 if ((GET_CODE (sym) == SYMBOL_REF)
443 && CONSTANT_POOL_ADDRESS_P (sym))
444 return TRUE;
445 return FALSE;
446}
447
448
449int
ffbc8796 450constantpool_mem_p (rtx op)
03984308 451{
452 if (GET_CODE (op) == SUBREG)
453 op = SUBREG_REG (op);
454 if (GET_CODE (op) == MEM)
455 return constantpool_address_p (XEXP (op, 0));
456 return FALSE;
457}
458
459
03984308 460void
ffbc8796 461xtensa_extend_reg (rtx dst, rtx src)
462{
463 rtx temp = gen_reg_rtx (SImode);
464 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
465
3bbc2af6 466 /* Generate paradoxical subregs as needed so that the modes match. */
467 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
468 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
469
470 emit_insn (gen_ashlsi3 (temp, src, shift));
471 emit_insn (gen_ashrsi3 (dst, temp, shift));
472}
473
474
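/* The offset ranges accepted below correspond to the 8-bit offset fields
   of the Xtensa load/store instructions, scaled by the access size:
   0..255 for byte accesses, 0..510 (even) for halfwords, and 0..1020
   (multiples of 4) for word accesses; DFmode and BLKmode allow extra
   room for two-word accesses and inline block moves.  */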
8eb1bc5c 475bool
ffbc8796 476xtensa_mem_offset (unsigned v, enum machine_mode mode)
477{
478 switch (mode)
479 {
480 case BLKmode:
481 /* Handle the worst case for block moves. See xtensa_expand_block_move
482 where we emit an optimized block move operation if the block can be
483 moved in < "move_ratio" pieces. The worst case is when the block is
484 aligned but has a size of (3 mod 4) (does this happen?) so that the
638db43e 485 last piece requires a byte load/store. */
486 return (xtensa_uimm8 (v)
487 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
488
489 case QImode:
490 return xtensa_uimm8 (v);
491
492 case HImode:
493 return xtensa_uimm8x2 (v);
494
495 case DFmode:
496 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
497
498 default:
499 break;
500 }
501
502 return xtensa_uimm8x4 (v);
503}
504
505
ffbc8796 506/* Make normal rtx_code into something we can index from an array. */
507
508static enum internal_test
ffbc8796 509map_test_to_internal_test (enum rtx_code test_code)
510{
511 enum internal_test test = ITEST_MAX;
512
513 switch (test_code)
514 {
515 default: break;
516 case EQ: test = ITEST_EQ; break;
517 case NE: test = ITEST_NE; break;
518 case GT: test = ITEST_GT; break;
519 case GE: test = ITEST_GE; break;
520 case LT: test = ITEST_LT; break;
521 case LE: test = ITEST_LE; break;
522 case GTU: test = ITEST_GTU; break;
523 case GEU: test = ITEST_GEU; break;
524 case LTU: test = ITEST_LTU; break;
525 case LEU: test = ITEST_LEU; break;
526 }
527
528 return test;
529}
530
531
532/* Generate the code to compare two integer values. The return value is
638db43e 533 the comparison expression. */
534
535static rtx
536gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
537 rtx cmp0, /* first operand to compare */
538 rtx cmp1, /* second operand to compare */
539 int *p_invert /* whether branch needs to reverse test */)
03984308 540{
541 struct cmp_info
542 {
03984308 543 enum rtx_code test_code; /* test code to use in insn */
8eb1bc5c 544 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
545 int const_add; /* constant to add (convert LE -> LT) */
546 int reverse_regs; /* reverse registers in test */
547 int invert_const; /* != 0 if invert value if cmp1 is constant */
548 int invert_reg; /* != 0 if invert value if cmp1 is register */
549 int unsignedp; /* != 0 for unsigned comparisons. */
550 };
551
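  /* Each entry describes how to synthesize one test from the branch
     conditions the hardware provides (EQ/NE/LT/GE and unsigned variants).
     For example, GT is implemented as LT with the operands reversed when
     comparing two registers, or as the inverse of "LT x, (c + 1)" when the
     second operand is a constant (const_add = 1, invert_const = 1).  */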
552 static struct cmp_info info[ (int)ITEST_MAX ] = {
553
554 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
555 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
03984308 556
557 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
558 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
559 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
560 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
561
562 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
563 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
564 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
565 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
566 };
567
568 enum internal_test test;
569 enum machine_mode mode;
570 struct cmp_info *p_info;
571
572 test = map_test_to_internal_test (test_code);
177b6be0 573 gcc_assert (test != ITEST_MAX);
574
575 p_info = &info[ (int)test ];
576
577 mode = GET_MODE (cmp0);
578 if (mode == VOIDmode)
579 mode = GET_MODE (cmp1);
580
581 /* Make sure we can handle any constants given to us. */
582 if (GET_CODE (cmp1) == CONST_INT)
583 {
584 HOST_WIDE_INT value = INTVAL (cmp1);
585 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
586
587 /* If the immediate overflows or does not fit in the immediate field,
588 spill it to a register. */
589
590 if ((p_info->unsignedp ?
591 (uvalue + p_info->const_add > uvalue) :
592 (value + p_info->const_add > value)) != (p_info->const_add > 0))
593 {
594 cmp1 = force_reg (mode, cmp1);
595 }
596 else if (!(p_info->const_range_p) (value + p_info->const_add))
597 {
598 cmp1 = force_reg (mode, cmp1);
599 }
600 }
601 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
602 {
603 cmp1 = force_reg (mode, cmp1);
604 }
605
606 /* See if we need to invert the result. */
607 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
608 ? p_info->invert_const
609 : p_info->invert_reg);
610
611 /* Comparison to a constant may involve adding 1 to change LT into LE.
612 Comparison between two registers may involve switching operands. */
613 if (GET_CODE (cmp1) == CONST_INT)
614 {
615 if (p_info->const_add != 0)
616 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
617
618 }
619 else if (p_info->reverse_regs)
620 {
621 rtx temp = cmp0;
622 cmp0 = cmp1;
623 cmp1 = temp;
624 }
625
1c563bed 626 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
627}
628
629
630/* Generate the code to compare two float values. The return value is
638db43e 631 the comparison expression. */
632
633static rtx
634gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
635 rtx cmp0, /* first operand to compare */
636 rtx cmp1 /* second operand to compare */)
03984308 637{
ffbc8796 638 rtx (*gen_fn) (rtx, rtx, rtx);
639 rtx brtmp;
640 int reverse_regs, invert;
641
642 switch (test_code)
643 {
644 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
645 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
646 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
647 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
648 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
649 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
633e4eb4 650 default:
1c563bed 651 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
652 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
653 }
654
655 if (reverse_regs)
656 {
657 rtx temp = cmp0;
658 cmp0 = cmp1;
659 cmp1 = temp;
660 }
661
662 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
663 emit_insn (gen_fn (brtmp, cmp0, cmp1));
664
1c563bed 665 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
666}
667
668
669void
ffbc8796 670xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
671{
672 enum cmp_type type = branch_type;
673 rtx cmp0 = branch_cmp[0];
674 rtx cmp1 = branch_cmp[1];
675 rtx cmp;
676 int invert;
677 rtx label1, label2;
678
679 switch (type)
680 {
681 case CMP_DF:
682 default:
1c563bed 683 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
684
685 case CMP_SI:
686 invert = FALSE;
687 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
688 break;
689
690 case CMP_SF:
691 if (!TARGET_HARD_FLOAT)
692 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
693 cmp0, cmp1));
694 invert = FALSE;
695 cmp = gen_float_relational (test_code, cmp0, cmp1);
696 break;
697 }
698
699 /* Generate the branch. */
700
701 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
702 label2 = pc_rtx;
703
704 if (invert)
705 {
706 label2 = label1;
707 label1 = pc_rtx;
708 }
709
710 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
711 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
712 label1,
713 label2)));
714}
715
716
717static rtx
ffbc8796 718gen_conditional_move (rtx cmp)
719{
720 enum rtx_code code = GET_CODE (cmp);
721 rtx op0 = branch_cmp[0];
722 rtx op1 = branch_cmp[1];
723
724 if (branch_type == CMP_SI)
725 {
726 /* Jump optimization calls get_condition() which canonicalizes
727 comparisons like (GE x <const>) to (GT x <const-1>).
728 Transform those comparisons back to GE, since that is the
729 comparison supported in Xtensa. We shouldn't have to
730 transform <LE x const> comparisons, because neither
731 xtensa_expand_conditional_branch() nor get_condition() will
638db43e 732 produce them. */
733
734 if ((code == GT) && (op1 == constm1_rtx))
735 {
736 code = GE;
737 op1 = const0_rtx;
738 }
1c563bed 739 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
740
741 if (boolean_operator (cmp, VOIDmode))
742 {
3bbc2af6 743 /* Swap the operands to make const0 second. */
744 if (op0 == const0_rtx)
745 {
746 op0 = op1;
747 op1 = const0_rtx;
748 }
749
3bbc2af6 750 /* If not comparing against zero, emit a comparison (subtract). */
751 if (op1 != const0_rtx)
752 {
753 op0 = expand_binop (SImode, sub_optab, op0, op1,
754 0, 0, OPTAB_LIB_WIDEN);
755 op1 = const0_rtx;
756 }
757 }
758 else if (branch_operator (cmp, VOIDmode))
759 {
3bbc2af6 760 /* Swap the operands to make const0 second. */
761 if (op0 == const0_rtx)
762 {
763 op0 = op1;
764 op1 = const0_rtx;
765
766 switch (code)
767 {
768 case LT: code = GE; break;
769 case GE: code = LT; break;
177b6be0 770 default: gcc_unreachable ();
771 }
772 }
773
774 if (op1 != const0_rtx)
775 return 0;
776 }
777 else
778 return 0;
779
1c563bed 780 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
781 }
782
783 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
784 return gen_float_relational (code, op0, op1);
785
786 return 0;
787}
788
789
790int
ffbc8796 791xtensa_expand_conditional_move (rtx *operands, int isflt)
792{
793 rtx cmp;
ffbc8796 794 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
795
796 if (!(cmp = gen_conditional_move (operands[1])))
797 return 0;
798
799 if (isflt)
800 gen_fn = (branch_type == CMP_SI
801 ? gen_movsfcc_internal0
802 : gen_movsfcc_internal1);
803 else
804 gen_fn = (branch_type == CMP_SI
805 ? gen_movsicc_internal0
806 : gen_movsicc_internal1);
807
808 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
809 operands[2], operands[3], cmp));
810 return 1;
811}
812
813
814int
ffbc8796 815xtensa_expand_scc (rtx *operands)
816{
817 rtx dest = operands[0];
818 rtx cmp = operands[1];
819 rtx one_tmp, zero_tmp;
ffbc8796 820 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
821
822 if (!(cmp = gen_conditional_move (cmp)))
823 return 0;
824
825 one_tmp = gen_reg_rtx (SImode);
826 zero_tmp = gen_reg_rtx (SImode);
827 emit_insn (gen_movsi (one_tmp, const_true_rtx));
828 emit_insn (gen_movsi (zero_tmp, const0_rtx));
829
830 gen_fn = (branch_type == CMP_SI
831 ? gen_movsicc_internal0
832 : gen_movsicc_internal1);
833 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
834 return 1;
835}
836
837
838/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
839 for the output, i.e., the input operands are twice as big as MODE. */
840
841void
ffbc8796 842xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
843{
844 switch (GET_CODE (operands[1]))
845 {
846 case REG:
847 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
848 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
849 break;
850
851 case MEM:
852 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
853 operands[2] = adjust_address (operands[1], mode, 0);
854 break;
855
856 case CONST_INT:
857 case CONST_DOUBLE:
858 split_double (operands[1], &operands[2], &operands[3]);
859 break;
860
861 default:
177b6be0 862 gcc_unreachable ();
863 }
864
865 switch (GET_CODE (operands[0]))
866 {
867 case REG:
868 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
869 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
870 break;
871
872 case MEM:
873 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
874 operands[0] = adjust_address (operands[0], mode, 0);
875 break;
876
877 default:
177b6be0 878 gcc_unreachable ();
879 }
880}
881
882
03984308 883/* Emit insns to move operands[1] into operands[0].
884 Return 1 if we have written out everything that needs to be done to
885 do the move. Otherwise, return 0 and the caller will emit the move
886 normally. */
887
888int
ffbc8796 889xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
890{
891 if (CONSTANT_P (operands[1])
892 && (GET_CODE (operands[1]) != CONST_INT
893 || !xtensa_simm12b (INTVAL (operands[1]))))
894 {
895 if (!TARGET_CONST16)
896 operands[1] = force_const_mem (SImode, operands[1]);
897
898 /* PC-relative loads are always SImode, and CONST16 is only
899 supported in the movsi pattern, so add a SUBREG for any other
900 (smaller) mode. */
901
902 if (mode != SImode)
903 {
904 if (register_operand (operands[0], mode))
905 {
906 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
907 emit_move_insn (operands[0], operands[1]);
908 return 1;
909 }
910 else
911 {
912 operands[1] = force_reg (SImode, operands[1]);
913 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
914 }
915 }
916 }
917
918 if (!(reload_in_progress | reload_completed)
919 && !xtensa_valid_move (mode, operands))
920 operands[1] = force_reg (mode, operands[1]);
03984308 921
997b8b4d 922 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
923
924 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
925 instruction won't be recognized after reload, so we remove the
926 subreg and adjust mem accordingly. */
927 if (reload_in_progress)
928 {
929 operands[0] = fixup_subreg_mem (operands[0]);
930 operands[1] = fixup_subreg_mem (operands[1]);
931 }
932 return 0;
933}
934
f42f5a1b 935
03984308 936static rtx
ffbc8796 937fixup_subreg_mem (rtx x)
938{
939 if (GET_CODE (x) == SUBREG
940 && GET_CODE (SUBREG_REG (x)) == REG
941 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
942 {
943 rtx temp =
944 gen_rtx_SUBREG (GET_MODE (x),
945 reg_equiv_mem [REGNO (SUBREG_REG (x))],
946 SUBREG_BYTE (x));
947 x = alter_subreg (&temp);
948 }
949 return x;
950}
951
952
953/* Check if an incoming argument in a7 is expected to be used soon and
954 if OPND is a register or register pair that includes a7. If so,
955 create a new pseudo and copy a7 into that pseudo at the very
956 beginning of the function, followed by the special "set_frame_ptr"
957 unspec_volatile insn. The return value is either the original
958 operand, if it is not a7, or the new pseudo containing a copy of
959 the incoming argument. This is necessary because the register
960 allocator will ignore conflicts with a7 and may either assign some
961 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
962 the incoming argument in a7. By copying the argument out of a7 as
963 the very first thing, and then immediately following that with an
964 unspec_volatile to keep the scheduler away, we should avoid any
965 problems. Putting the set_frame_ptr insn at the beginning, with
966 only the a7 copy before it, also makes it easier for the prologue
967 expander to initialize the frame pointer after the a7 copy and to
968 fix up the a7 copy to use the stack pointer instead of the frame
969 pointer. */
58db834b 970
971rtx
972xtensa_copy_incoming_a7 (rtx opnd)
58db834b 973{
974 rtx entry_insns = 0;
975 rtx reg, tmp;
976 enum machine_mode mode;
977
978 if (!cfun->machine->need_a7_copy)
979 return opnd;
980
981 /* This function should never be called again once a7 has been copied. */
177b6be0 982 gcc_assert (!cfun->machine->set_frame_ptr_insn);
983
984 mode = GET_MODE (opnd);
985
986 /* The operand using a7 may come in a later instruction, so just return
987 the original operand if it doesn't use a7. */
988 reg = opnd;
989 if (GET_CODE (reg) == SUBREG)
58db834b 990 {
177b6be0 991 gcc_assert (SUBREG_BYTE (reg) == 0);
992 reg = SUBREG_REG (reg);
993 }
994 if (GET_CODE (reg) != REG
995 || REGNO (reg) > A7_REG
996 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
997 return opnd;
e6aecf8e 998
997b8b4d 999 /* 1-word args will always be in a7; 2-word args in a6/a7. */
177b6be0 1000 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
58db834b 1001
997b8b4d 1002 cfun->machine->need_a7_copy = false;
58db834b 1003
1004 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1005 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
58db834b 1006
1007 push_to_sequence (entry_insns);
1008 tmp = gen_reg_rtx (mode);
58db834b 1009
1010 switch (mode)
1011 {
1012 case DFmode:
1013 case DImode:
1014 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1015 gen_rtx_REG (SImode, A7_REG - 1)));
1016 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1017 gen_raw_REG (SImode, A7_REG)));
1018 break;
1019 case SFmode:
1020 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1021 break;
1022 case SImode:
1023 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1024 break;
1025 case HImode:
1026 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1027 break;
1028 case QImode:
1029 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1030 break;
1031 default:
177b6be0 1032 gcc_unreachable ();
1033 }
1034
1035 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1036 entry_insns = get_insns ();
1037 end_sequence ();
1038
1039 if (cfun->machine->vararg_a7)
1040 {
1041 /* This is called from within builtin_savereg, so we're already
1042 inside a start_sequence that will be placed at the start of
1043 the function. */
1044 emit_insn (entry_insns);
1045 }
1046 else
1047 {
1048 /* Put entry_insns after the NOTE that starts the function. If
1049 this is inside a start_sequence, make the outer-level insn
1050 chain current, so the code is placed at the start of the
1051 function. */
1052 push_topmost_sequence ();
1053 emit_insn_after (entry_insns, get_insns ());
1054 pop_topmost_sequence ();
1055 }
1056
1057 return tmp;
1058}
1059
1060
1061/* Try to expand a block move operation to a sequence of RTL move
1062 instructions. If not optimizing, or if the block size is not a
1063 constant, or if the block is too large, the expansion fails and GCC
1064 falls back to calling memcpy().
1065
1066 operands[0] is the destination
1067 operands[1] is the source
1068 operands[2] is the length
1069 operands[3] is the alignment */
1070
1071int
ffbc8796 1072xtensa_expand_block_move (rtx *operands)
03984308 1073{
1074 static const enum machine_mode mode_from_align[] =
1075 {
1076 VOIDmode, QImode, HImode, VOIDmode, SImode,
1077 };
1078
1079 rtx dst_mem = operands[0];
1080 rtx src_mem = operands[1];
1081 HOST_WIDE_INT bytes, align;
03984308 1082 int num_pieces, move_ratio;
1083 rtx temp[2];
1084 enum machine_mode mode[2];
1085 int amount[2];
1086 bool active[2];
1087 int phase = 0;
1088 int next;
1089 int offset_ld = 0;
1090 int offset_st = 0;
1091 rtx x;
03984308 1092
3bbc2af6 1093 /* If this is not a fixed size move, just call memcpy. */
1094 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1095 return 0;
1096
1097 bytes = INTVAL (operands[2]);
1098 align = INTVAL (operands[3]);
1099
3bbc2af6 1100 /* Anything to move? */
03984308 1101 if (bytes <= 0)
7eda7cda 1102 return 0;
1103
1104 if (align > MOVE_MAX)
1105 align = MOVE_MAX;
1106
3bbc2af6 1107 /* Decide whether to expand inline based on the optimization level. */
1108 move_ratio = 4;
1109 if (optimize > 2)
1110 move_ratio = LARGEST_MOVE_RATIO;
3bbc2af6 1111 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
7eda7cda 1112 if (num_pieces > move_ratio)
1113 return 0;
1114
1115 x = XEXP (dst_mem, 0);
1116 if (!REG_P (x))
1117 {
1118 x = force_reg (Pmode, x);
1119 dst_mem = replace_equiv_address (dst_mem, x);
1120 }
03984308 1121
1122 x = XEXP (src_mem, 0);
1123 if (!REG_P (x))
1124 {
1125 x = force_reg (Pmode, x);
1126 src_mem = replace_equiv_address (src_mem, x);
1127 }
03984308 1128
7eda7cda 1129 active[0] = active[1] = false;
03984308 1130
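  /* Main copy loop.  Loads and stores are interleaved: each iteration
     loads the next piece into temp[next] and stores the piece loaded on
     the previous iteration (temp[phase]), continuing until the last
     loaded piece has been stored.  */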
7eda7cda 1131 do
03984308 1132 {
1133 next = phase;
1134 phase ^= 1;
03984308 1135
7eda7cda 1136 if (bytes > 0)
03984308 1137 {
7eda7cda 1138 int next_amount;
03984308 1139
1140 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1141 next_amount = MIN (next_amount, align);
03984308 1142
7eda7cda
RH
1143 amount[next] = next_amount;
1144 mode[next] = mode_from_align[next_amount];
1145 temp[next] = gen_reg_rtx (mode[next]);
03984308 1146
1147 x = adjust_address (src_mem, mode[next], offset_ld);
1148 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
03984308 1149
1150 offset_ld += next_amount;
1151 bytes -= next_amount;
1152 active[next] = true;
1153 }
03984308 1154
1155 if (active[phase])
1156 {
1157 active[phase] = false;
1158
1159 x = adjust_address (dst_mem, mode[phase], offset_st);
1160 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
03984308 1161
1162 offset_st += amount[phase];
1163 }
03984308 1164 }
7eda7cda 1165 while (active[next]);
03984308 1166
7eda7cda 1167 return 1;
1168}
1169
1170
1171void
ffbc8796 1172xtensa_expand_nonlocal_goto (rtx *operands)
1173{
1174 rtx goto_handler = operands[1];
1175 rtx containing_fp = operands[3];
1176
1177 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1178 is too big to generate in-line. */
1179
1180 if (GET_CODE (containing_fp) != REG)
1181 containing_fp = force_reg (Pmode, containing_fp);
1182
1183 goto_handler = copy_rtx (goto_handler);
1184 validate_replace_rtx (virtual_stack_vars_rtx, containing_fp, goto_handler);
1185
1186 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1187 0, VOIDmode, 2,
1188 containing_fp, Pmode,
1189 goto_handler, Pmode);
1190}
1191
1192
e2500fed 1193static struct machine_function *
ffbc8796 1194xtensa_init_machine_status (void)
03984308 1195{
e2500fed 1196 return ggc_alloc_cleared (sizeof (struct machine_function));
1197}
1198
1199
1200/* Shift VAL of mode MODE left by COUNT bits. */
1201
1202static inline rtx
1203xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
1204{
1205 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1206 NULL_RTX, 1, OPTAB_DIRECT);
1207 return expand_simple_binop (SImode, ASHIFT, val, count,
1208 NULL_RTX, 1, OPTAB_DIRECT);
1209}
1210
1211
1212/* Structure to hold the initial parameters for a compare_and_swap operation
1213 in HImode and QImode. */
1214
1215struct alignment_context
1216{
1217 rtx memsi; /* SI aligned memory location. */
1218 rtx shift; /* Bit offset with regard to lsb. */
1219 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
1220 rtx modemaski; /* ~modemask */
1221};
1222
1223
1224/* Initialize structure AC for word access to HI and QI mode memory. */
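/* For example (assuming a little-endian target and an unaligned HImode
   reference that falls two bytes into its containing word), at run time
   this yields a shift of 16 bits, a modemask of 0xffff << 16, and memsi
   addressing the aligned SImode word that contains the operand.  */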
1225
1226static void
1227init_alignment_context (struct alignment_context *ac, rtx mem)
1228{
1229 enum machine_mode mode = GET_MODE (mem);
1230 rtx byteoffset = NULL_RTX;
1231 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1232
1233 if (aligned)
1234 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1235 else
1236 {
1237 /* Alignment is unknown. */
1238 rtx addr, align;
1239
1240 /* Force the address into a register. */
1241 addr = force_reg (Pmode, XEXP (mem, 0));
1242
1243 /* Align it to SImode. */
1244 align = expand_simple_binop (Pmode, AND, addr,
1245 GEN_INT (-GET_MODE_SIZE (SImode)),
1246 NULL_RTX, 1, OPTAB_DIRECT);
1247 /* Generate MEM. */
1248 ac->memsi = gen_rtx_MEM (SImode, align);
1249 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
1250 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
1251 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
1252
1253 byteoffset = expand_simple_binop (Pmode, AND, addr,
1254 GEN_INT (GET_MODE_SIZE (SImode) - 1),
1255 NULL_RTX, 1, OPTAB_DIRECT);
1256 }
1257
1258 /* Calculate shiftcount. */
1259 if (TARGET_BIG_ENDIAN)
1260 {
1261 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
1262 if (!aligned)
1263 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
1264 NULL_RTX, 1, OPTAB_DIRECT);
1265 }
1266 else
1267 {
1268 if (aligned)
1269 ac->shift = NULL_RTX;
1270 else
1271 ac->shift = byteoffset;
1272 }
1273
1274 if (ac->shift != NULL_RTX)
1275 {
1276 /* Shift is the byte count, but we need the bitcount. */
1277 ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
1278 GEN_INT (BITS_PER_UNIT),
1279 NULL_RTX, 1, OPTAB_DIRECT);
1280 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1281 GEN_INT (GET_MODE_MASK (mode)),
1282 ac->shift,
1283 NULL_RTX, 1, OPTAB_DIRECT);
1284 }
1285 else
1286 ac->modemask = GEN_INT (GET_MODE_MASK (mode));
1287
1288 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
1289}
1290
1291
1292/* Expand an atomic compare and swap operation for HImode and QImode.
1293 MEM is the memory location, CMP the old value to compare MEM with
1294 and NEW the value to set if CMP == MEM. */
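/* The expansion widens the access to the containing SImode word: CMP and
   NEW are shifted into position, merged with the unchanged surrounding
   bytes, and a word-sized compare-and-swap (S32C1I via
   sync_compare_and_swapsi) is retried until the bytes outside MODE stop
   changing underneath us.  */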
1295
1296void
1297xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new)
1298{
1299 enum machine_mode mode = GET_MODE (mem);
1300 struct alignment_context ac;
1301 rtx tmp, cmpv, newv, val;
1302 rtx oldval = gen_reg_rtx (SImode);
1303 rtx res = gen_reg_rtx (SImode);
1304 rtx csloop = gen_label_rtx ();
1305 rtx csend = gen_label_rtx ();
1306
1307 init_alignment_context (&ac, mem);
1308
1309 if (ac.shift != NULL_RTX)
1310 {
1311 cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
1312 new = xtensa_expand_mask_and_shift (new, mode, ac.shift);
1313 }
1314
1315 /* Load the surrounding word into VAL with the MEM value masked out. */
1316 val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
1317 ac.modemaski, NULL_RTX, 1,
1318 OPTAB_DIRECT));
1319 emit_label (csloop);
1320
1321 /* Patch CMP and NEW into VAL at correct position. */
1322 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
1323 NULL_RTX, 1, OPTAB_DIRECT));
1324 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
1325 NULL_RTX, 1, OPTAB_DIRECT));
1326
1327 /* Jump to end if we're done. */
1328 emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
1329 emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);
1330
1331 /* Check for changes outside mode. */
1332 emit_move_insn (oldval, val);
1333 tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
1334 val, 1, OPTAB_DIRECT);
1335 if (tmp != val)
1336 emit_move_insn (val, tmp);
1337
1338 /* If anything outside the mode changed, loop and retry. */
1339 emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);
1340
1341 emit_label (csend);
1342
1343 /* Return the correct part of the bitfield. */
1344 convert_move (target,
1345 (ac.shift == NULL_RTX ? res
1346 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
1347 NULL_RTX, 1, OPTAB_DIRECT)),
1348 1);
1349}
1350
1351
1352/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
1353 the default expansion works fine for SImode). MEM is the memory location
1354 and VAL the value to play with. If AFTER is true then store the value
1355 MEM holds after the operation, if AFTER is false then store the value MEM
1356 holds before the operation. If TARGET is zero then discard that value, else
1357 store it to TARGET. */
1358
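/* Note: the MULT rtx code is used here as a stand-in for NAND (RTL has no
   NAND code).  Each operation is implemented as a read-modify-write loop
   around the word-sized compare-and-swap, using the same subword masking
   scheme as xtensa_expand_compare_and_swap.  */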
1359void
1360xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1361 bool after)
1362{
1363 enum machine_mode mode = GET_MODE (mem);
1364 struct alignment_context ac;
1365 rtx csloop = gen_label_rtx ();
1366 rtx cmp, tmp;
1367 rtx old = gen_reg_rtx (SImode);
1368 rtx new = gen_reg_rtx (SImode);
1369 rtx orig = NULL_RTX;
1370
1371 init_alignment_context (&ac, mem);
1372
1373 /* Prepare values before the compare-and-swap loop. */
1374 if (ac.shift != NULL_RTX)
1375 val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
1376 switch (code)
1377 {
1378 case PLUS:
1379 case MINUS:
1380 orig = gen_reg_rtx (SImode);
1381 convert_move (orig, val, 1);
1382 break;
1383
1384 case SET:
1385 case IOR:
1386 case XOR:
1387 break;
1388
1389 case MULT: /* NAND */
1390 case AND:
1391 /* val = "11..1<val>11..1" */
1392 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
1393 NULL_RTX, 1, OPTAB_DIRECT);
1394 break;
1395
1396 default:
1397 gcc_unreachable ();
1398 }
1399
1400 /* Load full word. Subsequent loads are performed by S32C1I. */
1401 cmp = force_reg (SImode, ac.memsi);
1402
1403 emit_label (csloop);
1404 emit_move_insn (old, cmp);
1405
1406 switch (code)
1407 {
1408 case PLUS:
1409 case MINUS:
1410 val = expand_simple_binop (SImode, code, old, orig,
1411 NULL_RTX, 1, OPTAB_DIRECT);
1412 val = expand_simple_binop (SImode, AND, val, ac.modemask,
1413 NULL_RTX, 1, OPTAB_DIRECT);
1414 /* FALLTHRU */
1415 case SET:
1416 tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
1417 NULL_RTX, 1, OPTAB_DIRECT);
1418 tmp = expand_simple_binop (SImode, IOR, tmp, val,
1419 new, 1, OPTAB_DIRECT);
1420 break;
1421
1422 case AND:
1423 case IOR:
1424 case XOR:
1425 tmp = expand_simple_binop (SImode, code, old, val,
1426 new, 1, OPTAB_DIRECT);
1427 break;
1428
1429 case MULT: /* NAND */
1430 tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
1431 NULL_RTX, 1, OPTAB_DIRECT);
1432 tmp = expand_simple_binop (SImode, AND, tmp, val,
1433 new, 1, OPTAB_DIRECT);
1434 break;
1435
1436 default:
1437 gcc_unreachable ();
1438 }
1439
1440 if (tmp != new)
1441 emit_move_insn (new, tmp);
1442 emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new));
1443 emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);
1444
1445 if (target)
1446 {
1447 tmp = (after ? new : cmp);
1448 convert_move (target,
1449 (ac.shift == NULL_RTX ? tmp
1450 : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
1451 NULL_RTX, 1, OPTAB_DIRECT)),
1452 1);
1453 }
1454}
1455
1456
03984308 1457void
ffbc8796 1458xtensa_setup_frame_addresses (void)
03984308 1459{
638db43e 1460 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
03984308
BW
1461 cfun->machine->accesses_prev_frame = 1;
1462
1463 emit_library_call
1464 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1465 0, VOIDmode, 0);
1466}
1467
1468
1469/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1470 a comment showing where the end of the loop is. However, if there is a
03984308 1471 label or a branch at the end of the loop then we need to place a nop
638db43e 1472 there. If the loop ends with a label we need the nop so that branches
1473 targeting that label will target the nop (and thus remain in the loop),
1474 instead of targeting the instruction after the loop (and thus exiting
638db43e 1475 the loop). If the loop ends with a branch, we need the nop in case the
839a4992 1476 branch is targeting a location inside the loop. When the branch
1477 executes it will cause the loop count to be decremented even if it is
1478 taken (because it is the last instruction in the loop), so we need to
1479 nop after the branch to prevent the loop count from being decremented
638db43e 1480 when the branch is taken. */
1481
1482void
ffbc8796 1483xtensa_emit_loop_end (rtx insn, rtx *operands)
1484{
1485 char done = 0;
1486
1487 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1488 {
1489 switch (GET_CODE (insn))
1490 {
1491 case NOTE:
1492 case BARRIER:
1493 break;
1494
1495 case CODE_LABEL:
0bd0703d 1496 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1497 done = 1;
1498 break;
1499
1500 default:
1501 {
1502 rtx body = PATTERN (insn);
1503
1504 if (GET_CODE (body) == JUMP_INSN)
1505 {
0bd0703d 1506 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1507 done = 1;
1508 }
1509 else if ((GET_CODE (body) != USE)
1510 && (GET_CODE (body) != CLOBBER))
1511 done = 1;
1512 }
1513 break;
1514 }
1515 }
1516
1517 output_asm_insn ("# loop end for %0", operands);
1518}
1519
1520
1521char *
1522xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1523{
1524 static char result[64];
1525 enum rtx_code code;
1526 const char *op;
1527
1528 code = GET_CODE (operands[3]);
1529 switch (code)
1530 {
1531 case EQ: op = inverted ? "ne" : "eq"; break;
1532 case NE: op = inverted ? "eq" : "ne"; break;
1533 case LT: op = inverted ? "ge" : "lt"; break;
1534 case GE: op = inverted ? "lt" : "ge"; break;
1535 case LTU: op = inverted ? "geu" : "ltu"; break;
1536 case GEU: op = inverted ? "ltu" : "geu"; break;
1537 default: gcc_unreachable ();
1538 }
1539
1540 if (immed)
1541 {
1542 if (INTVAL (operands[1]) == 0)
1543 sprintf (result, "b%sz%s\t%%0, %%2", op,
1544 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1545 else
1546 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1547 }
1548 else
1549 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1550
1551 return result;
1552}
1553
1554
1555char *
1556xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1557{
1558 static char result[64];
1559 const char *op;
1560
1561 switch (GET_CODE (operands[3]))
1562 {
1563 case EQ: op = inverted ? "bs" : "bc"; break;
1564 case NE: op = inverted ? "bc" : "bs"; break;
1565 default: gcc_unreachable ();
1566 }
1567
1568 if (immed)
1569 {
1570 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1571 operands[1] = GEN_INT (bitnum);
1572 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1573 }
1574 else
1575 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1576
1577 return result;
1578}
1579
1580
1581char *
1582xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1583{
1584 static char result[64];
1585 enum rtx_code code;
1586 const char *op;
1587
1588 code = GET_CODE (operands[4]);
1589 if (isbool)
1590 {
1591 switch (code)
1592 {
1593 case EQ: op = inverted ? "t" : "f"; break;
1594 case NE: op = inverted ? "f" : "t"; break;
1595 default: gcc_unreachable ();
1596 }
1597 }
1598 else
1599 {
1600 switch (code)
1601 {
1602 case EQ: op = inverted ? "nez" : "eqz"; break;
1603 case NE: op = inverted ? "eqz" : "nez"; break;
1604 case LT: op = inverted ? "gez" : "ltz"; break;
1605 case GE: op = inverted ? "ltz" : "gez"; break;
1606 default: gcc_unreachable ();
1607 }
1608 }
1609
1610 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1611 op, isfp ? ".s" : "", inverted ? 3 : 2);
1612 return result;
1613}
1614
1615
03984308 1616char *
ffbc8796 1617xtensa_emit_call (int callop, rtx *operands)
03984308 1618{
b64a1b53 1619 static char result[64];
1620 rtx tgt = operands[callop];
1621
1622 if (GET_CODE (tgt) == CONST_INT)
1d0ea52e 1623 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1624 else if (register_operand (tgt, VOIDmode))
1625 sprintf (result, "callx8\t%%%d", callop);
1626 else
1627 sprintf (result, "call8\t%%%d", callop);
1628
1629 return result;
1630}
1631
1632
1633bool
1634xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
1635{
1636 /* Allow constant pool addresses. */
1637 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1638 && ! TARGET_CONST16 && constantpool_address_p (addr))
1639 return true;
1640
1641 while (GET_CODE (addr) == SUBREG)
1642 addr = SUBREG_REG (addr);
1643
1644 /* Allow base registers. */
1645 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1646 return true;
1647
1648 /* Check for "register + offset" addressing. */
1649 if (GET_CODE (addr) == PLUS)
1650 {
1651 rtx xplus0 = XEXP (addr, 0);
1652 rtx xplus1 = XEXP (addr, 1);
1653 enum rtx_code code0;
1654 enum rtx_code code1;
1655
1656 while (GET_CODE (xplus0) == SUBREG)
1657 xplus0 = SUBREG_REG (xplus0);
1658 code0 = GET_CODE (xplus0);
1659
1660 while (GET_CODE (xplus1) == SUBREG)
1661 xplus1 = SUBREG_REG (xplus1);
1662 code1 = GET_CODE (xplus1);
1663
1664 /* Swap operands if necessary so the register is first. */
1665 if (code0 != REG && code1 == REG)
1666 {
1667 xplus0 = XEXP (addr, 1);
1668 xplus1 = XEXP (addr, 0);
1669 code0 = GET_CODE (xplus0);
1670 code1 = GET_CODE (xplus1);
1671 }
1672
1673 if (code0 == REG && BASE_REG_P (xplus0, strict)
1674 && code1 == CONST_INT
1675 && xtensa_mem_offset (INTVAL (xplus1), mode))
1676 return true;
1677 }
1678
1679 return false;
1680}
1681
1682
1683rtx
1684xtensa_legitimize_address (rtx x,
1685 rtx oldx ATTRIBUTE_UNUSED,
1686 enum machine_mode mode)
1687{
1688 if (GET_CODE (x) == PLUS)
1689 {
1690 rtx plus0 = XEXP (x, 0);
1691 rtx plus1 = XEXP (x, 1);
1692
1693 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1694 {
1695 plus0 = XEXP (x, 1);
1696 plus1 = XEXP (x, 0);
1697 }
1698
1699 /* Try to split up the offset to use an ADDMI instruction. */
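      /* For example, an offset of 0x1234 (too large for any load/store
	 offset field) becomes "addmi tmp, base, 0x1200" followed by a
	 memory access with offset 0x34, provided both pieces are in
	 range (checked below).  */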
1700 if (GET_CODE (plus0) == REG
1701 && GET_CODE (plus1) == CONST_INT
1702 && !xtensa_mem_offset (INTVAL (plus1), mode)
1703 && !xtensa_simm8 (INTVAL (plus1))
1704 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1705 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1706 {
1707 rtx temp = gen_reg_rtx (Pmode);
1708 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1709 emit_insn (gen_rtx_SET (Pmode, temp,
1710 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1711 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1712 }
1713 }
1714
1715 return NULL_RTX;
1716}
1717
1718
b0c6e48f 1719/* Return the debugger register number to use for 'regno'. */
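/* The mapping implemented below is: a0-a15 -> 0-15, b0-b15 -> 16-31,
   f0-f15 -> 48-63, and the MAC16 accumulator -> 0x200 + 16 (ACCLO in the
   special-register number space).  */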
1720
1721int
ffbc8796 1722xtensa_dbx_register_number (int regno)
1723{
1724 int first = -1;
1725
1726 if (GP_REG_P (regno))
1727 {
1728 regno -= GP_REG_FIRST;
1729 first = 0;
1730 }
1731 else if (BR_REG_P (regno))
1732 {
1733 regno -= BR_REG_FIRST;
1734 first = 16;
1735 }
1736 else if (FP_REG_P (regno))
1737 {
1738 regno -= FP_REG_FIRST;
b0c6e48f 1739 first = 48;
633e4eb4 1740 }
1741 else if (ACC_REG_P (regno))
1742 {
1743 first = 0x200; /* Start of Xtensa special registers. */
1744 regno = 16; /* ACCLO is special register 16. */
1745 }
1746
1747 /* When optimizing, we sometimes get asked about pseudo-registers
638db43e 1748 that don't represent hard registers. Return 0 for these. */
1749 if (first == -1)
1750 return 0;
1751
1752 return first + regno;
1753}
1754
1755
1756/* Argument support functions. */
1757
1758/* Initialize CUMULATIVE_ARGS for a function. */
1759
1760void
997b8b4d 1761init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1762{
1763 cum->arg_words = 0;
997b8b4d 1764 cum->incoming = incoming;
1765}
1766
ffbc8796 1767
1768/* Advance the argument to the next argument position. */
1769
1770void
ffbc8796 1771function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1772{
1773 int words, max;
1774 int *arg_words;
1775
1776 arg_words = &cum->arg_words;
1777 max = MAX_ARGS_IN_REGISTERS;
1778
1779 words = (((mode != BLKmode)
1780 ? (int) GET_MODE_SIZE (mode)
1781 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1782
1783 if (*arg_words < max
1784 && (targetm.calls.must_pass_in_stack (mode, type)
1785 || *arg_words + words > max))
1786 *arg_words = max;
1787
1788 *arg_words += words;
1789}
1790
1791
1792/* Return an RTL expression containing the register for the given mode,
368ebcd6 1793 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
ffbc8796 1794 if this is an incoming argument to the current function. */
1795
1796rtx
1797function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1798 int incoming_p)
1799{
1800 int regbase, words, max;
1801 int *arg_words;
1802 int regno;
1803
1804 arg_words = &cum->arg_words;
1805 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1806 max = MAX_ARGS_IN_REGISTERS;
1807
1808 words = (((mode != BLKmode)
1809 ? (int) GET_MODE_SIZE (mode)
1810 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1811
1812 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
822e895c 1813 {
d2348985 1814 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
1815 *arg_words = (*arg_words + align - 1) & -align;
1816 }
1817
1818 if (*arg_words + words > max)
1819 return (rtx)0;
1820
1821 regno = regbase + *arg_words;
03984308 1822
1823 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1824 cfun->machine->need_a7_copy = true;
03984308 1825
997b8b4d 1826 return gen_rtx_REG (mode, regno);
1827}
1828
1829
1830int
1831function_arg_boundary (enum machine_mode mode, tree type)
1832{
1833 unsigned int alignment;
1834
1835 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1836 if (alignment < PARM_BOUNDARY)
1837 alignment = PARM_BOUNDARY;
1838 if (alignment > STACK_BOUNDARY)
1839 alignment = STACK_BOUNDARY;
1840 return alignment;
1841}
1842
1843
6e5ff6e7 1844static bool
586de218 1845xtensa_return_in_msb (const_tree valtype)
1846{
1847 return (TARGET_BIG_ENDIAN
1848 && AGGREGATE_TYPE_P (valtype)
1849 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1850}
1851
1852
03984308 1853void
ffbc8796 1854override_options (void)
1855{
1856 int regno;
1857 enum machine_mode mode;
1858
1859 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1860 error ("boolean registers required for the floating-point option");
1861
638db43e 1862 /* Set up array giving whether a given register can hold a given mode. */
1863 for (mode = VOIDmode;
1864 mode != MAX_MACHINE_MODE;
1865 mode = (enum machine_mode) ((int) mode + 1))
1866 {
1867 int size = GET_MODE_SIZE (mode);
1868 enum mode_class class = GET_MODE_CLASS (mode);
1869
1870 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1871 {
1872 int temp;
1873
1874 if (ACC_REG_P (regno))
1875 temp = (TARGET_MAC16
1876 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1877 else if (GP_REG_P (regno))
1878 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1879 else if (FP_REG_P (regno))
1880 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1881 else if (BR_REG_P (regno))
1882 temp = (TARGET_BOOLEANS && (mode == CCmode));
1883 else
1884 temp = FALSE;
1885
1886 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1887 }
1888 }
1889
1890 init_machine_status = xtensa_init_machine_status;
03984308 1891
1892 /* Check PIC settings. PIC is only supported when using L32R
1893 instructions, and some targets need to always use PIC. */
1894 if (flag_pic && TARGET_CONST16)
1895 error ("-f%s is not supported with CONST16 instructions",
1896 (flag_pic > 1 ? "PIC" : "pic"));
1897 else if (XTENSA_ALWAYS_PIC)
1898 {
1899 if (TARGET_CONST16)
1900 error ("PIC is required but not supported with CONST16 instructions");
1901 flag_pic = 1;
1902 }
1903 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1904 if (flag_pic > 1)
03984308 1905 flag_pic = 1;
87c8b4be
CT
1906
1907 /* Hot/cold partitioning does not work on this architecture, because of
1908 constant pools (the load instruction cannot necessarily reach that far).
 1909	 Therefore disable it. */
1910 if (flag_reorder_blocks_and_partition)
1911 {
1912 flag_reorder_blocks_and_partition = 0;
1913 flag_reorder_blocks = 1;
1914 }
03984308
BW
1915}
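
/* Example of the resulting xtensa_hard_regno_mode_ok table (added for
   illustration): with the loop above, every a register accepts modes
   of at most one word, multiword modes such as DImode require an
   even-numbered a register, f0-f15 accept SFmode only when
   TARGET_HARD_FLOAT, b0-b15 accept CCmode only when TARGET_BOOLEANS,
   and the MAC16 accumulator accepts single-word integer modes only
   when TARGET_MAC16.  */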
1916
1917
1918/* A C compound statement to output to stdio stream STREAM the
1919 assembler syntax for an instruction operand X. X is an RTL
1920 expression.
1921
1922 CODE is a value that can be used to specify one of several ways
1923 of printing the operand. It is used when identical operands
1924 must be printed differently depending on the context. CODE
1925 comes from the '%' specification that was used to request
1926 printing of the operand. If the specification was just '%DIGIT'
1927 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1928 is the ASCII code for LTR.
1929
1930 If X is a register, this macro should print the register's name.
1931 The names can be found in an array 'reg_names' whose type is
1932 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1933
1934 When the machine description has a specification '%PUNCT' (a '%'
1935 followed by a punctuation character), this macro is called with
1936 a null pointer for X and the punctuation character for CODE.
1937
1938 'a', 'c', 'l', and 'n' are reserved.
633e4eb4 1939
03984308
BW
1940 The Xtensa specific codes are:
1941
1942 'd' CONST_INT, print as signed decimal
1943 'x' CONST_INT, print as signed hexadecimal
1944 'K' CONST_INT, print number of bits in mask for EXTUI
1945 'R' CONST_INT, print (X & 0x1f)
1946 'L' CONST_INT, print ((32 - X) & 0x1f)
1947 'D' REG, print second register of double-word register operand
1948 'N' MEM, print address of next word following a memory operand
1949 'v' MEM, if memory reference is volatile, output a MEMW before it
f42f5a1b
BW
1950 't' any constant, add "@h" suffix for top 16 bits
1951 'b' any constant, add "@l" suffix for bottom 16 bits
03984308
BW
1952*/
1953
1954static void
ffbc8796 1955printx (FILE *file, signed int val)
03984308 1956{
ffbc8796 1957 /* Print a hexadecimal value in a nice way. */
03984308
BW
1958 if ((val > -0xa) && (val < 0xa))
1959 fprintf (file, "%d", val);
1960 else if (val < 0)
1961 fprintf (file, "-0x%x", -val);
1962 else
1963 fprintf (file, "0x%x", val);
1964}
1965
1966
1967void
ffbc8796 1968print_operand (FILE *file, rtx x, int letter)
03984308 1969{
f42f5a1b 1970 if (!x)
03984308
BW
1971 error ("PRINT_OPERAND null pointer");
1972
f42f5a1b 1973 switch (letter)
03984308 1974 {
f42f5a1b
BW
1975 case 'D':
1976 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1977 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1978 else
1979 output_operand_lossage ("invalid %%D value");
1980 break;
03984308 1981
f42f5a1b
BW
1982 case 'v':
1983 if (GET_CODE (x) == MEM)
1984 {
1985 /* For a volatile memory reference, emit a MEMW before the
1986 load or store. */
7a29f483 1987 if (MEM_VOLATILE_P (x))
f42f5a1b
BW
1988 fprintf (file, "memw\n\t");
1989 }
1990 else
1991 output_operand_lossage ("invalid %%v value");
1992 break;
03984308 1993
f42f5a1b
BW
1994 case 'N':
1995 if (GET_CODE (x) == MEM
1996 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1997 {
1998 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1999 output_address (XEXP (x, 0));
2000 }
2001 else
2002 output_operand_lossage ("invalid %%N value");
2003 break;
03984308 2004
f42f5a1b
BW
2005 case 'K':
2006 if (GET_CODE (x) == CONST_INT)
03984308 2007 {
f42f5a1b
BW
2008 int num_bits = 0;
2009 unsigned val = INTVAL (x);
2010 while (val & 1)
2011 {
2012 num_bits += 1;
2013 val = val >> 1;
2014 }
2015 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2016 fatal_insn ("invalid mask", x);
03984308 2017
f42f5a1b
BW
2018 fprintf (file, "%d", num_bits);
2019 }
2020 else
2021 output_operand_lossage ("invalid %%K value");
2022 break;
03984308 2023
f42f5a1b
BW
2024 case 'L':
2025 if (GET_CODE (x) == CONST_INT)
2026 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2027 else
2028 output_operand_lossage ("invalid %%L value");
2029 break;
03984308 2030
f42f5a1b
BW
2031 case 'R':
2032 if (GET_CODE (x) == CONST_INT)
2033 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2034 else
2035 output_operand_lossage ("invalid %%R value");
2036 break;
03984308 2037
f42f5a1b
BW
2038 case 'x':
2039 if (GET_CODE (x) == CONST_INT)
2040 printx (file, INTVAL (x));
2041 else
2042 output_operand_lossage ("invalid %%x value");
2043 break;
03984308 2044
f42f5a1b
BW
2045 case 'd':
2046 if (GET_CODE (x) == CONST_INT)
2047 fprintf (file, "%ld", INTVAL (x));
2048 else
2049 output_operand_lossage ("invalid %%d value");
2050 break;
03984308 2051
f42f5a1b
BW
2052 case 't':
2053 case 'b':
2054 if (GET_CODE (x) == CONST_INT)
2055 {
2056 printx (file, INTVAL (x));
2057 fputs (letter == 't' ? "@h" : "@l", file);
2058 }
2059 else if (GET_CODE (x) == CONST_DOUBLE)
2060 {
2061 REAL_VALUE_TYPE r;
2062 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2063 if (GET_MODE (x) == SFmode)
2064 {
2065 long l;
2066 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2067 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2068 }
2069 else
2070 output_operand_lossage ("invalid %%t/%%b value");
2071 }
2072 else if (GET_CODE (x) == CONST)
2073 {
2074 /* X must be a symbolic constant on ELF. Write an expression
2075 suitable for 'const16' that sets the high or low 16 bits. */
2076 if (GET_CODE (XEXP (x, 0)) != PLUS
2077 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2078 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2079 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2080 output_operand_lossage ("invalid %%t/%%b value");
2081 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2082 fputs (letter == 't' ? "@h" : "@l", file);
2083 /* There must be a non-alphanumeric character between 'h' or 'l'
2084 and the number. The '-' is added by print_operand() already. */
2085 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2086 fputs ("+", file);
2087 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2088 }
2089 else
633e4eb4 2090 {
f42f5a1b
BW
2091 output_addr_const (file, x);
2092 fputs (letter == 't' ? "@h" : "@l", file);
03984308
BW
2093 }
2094 break;
2095
2096 default:
f42f5a1b
BW
2097 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2098 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2099 else if (GET_CODE (x) == MEM)
2100 output_address (XEXP (x, 0));
2101 else if (GET_CODE (x) == CONST_INT)
2102 fprintf (file, "%ld", INTVAL (x));
2103 else
2104 output_addr_const (file, x);
03984308
BW
2105 }
2106}
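
/* Worked example (added for illustration, not in the original source):
   for the EXTUI mask 0xff the '%K' loop above counts 8 low-order one
   bits; for a shift amount of 5, '%R' prints 5 and '%L' prints
   (32 - 5) & 0x1f == 27.  The stand-alone helper below mirrors the
   '%K' computation; its name is hypothetical.  */
#if 0
static int
example_extui_mask_bits (unsigned int val)
{
  int num_bits = 0;
  while (val & 1)
    {
      num_bits++;
      val >>= 1;
    }
  /* A valid EXTUI mask is a block of 1..16 low-order ones and nothing
     else, so VAL must now be zero.  */
  return (val == 0 && num_bits > 0 && num_bits <= 16) ? num_bits : -1;
}
#endif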
2107
2108
2109/* A C compound statement to output to stdio stream STREAM the
2110 assembler syntax for an instruction operand that is a memory
fb49053f 2111 reference whose address is ADDR. ADDR is an RTL expression. */
03984308
BW
2112
2113void
ffbc8796 2114print_operand_address (FILE *file, rtx addr)
03984308
BW
2115{
2116 if (!addr)
2117 error ("PRINT_OPERAND_ADDRESS, null pointer");
2118
2119 switch (GET_CODE (addr))
2120 {
2121 default:
2122 fatal_insn ("invalid address", addr);
2123 break;
2124
2125 case REG:
2126 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2127 break;
2128
2129 case PLUS:
2130 {
2131 rtx reg = (rtx)0;
2132 rtx offset = (rtx)0;
2133 rtx arg0 = XEXP (addr, 0);
2134 rtx arg1 = XEXP (addr, 1);
2135
2136 if (GET_CODE (arg0) == REG)
2137 {
2138 reg = arg0;
2139 offset = arg1;
2140 }
2141 else if (GET_CODE (arg1) == REG)
2142 {
2143 reg = arg1;
2144 offset = arg0;
2145 }
2146 else
2147 fatal_insn ("no register in address", addr);
2148
2149 if (CONSTANT_P (offset))
2150 {
2151 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2152 output_addr_const (file, offset);
2153 }
2154 else
2155 fatal_insn ("address offset not a constant", addr);
2156 }
2157 break;
2158
2159 case LABEL_REF:
2160 case SYMBOL_REF:
2161 case CONST_INT:
2162 case CONST:
2163 output_addr_const (file, addr);
2164 break;
2165 }
2166}
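
/* Example output (illustration only): an address that is just the
   register a3 prints as "a3, 0", and a (plus (reg a3) (const_int 8))
   address prints as "a3, 8", matching the "register, offset" operand
   syntax of the Xtensa load/store instructions.  */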
2167
2168
da1f39e4
BW
2169bool
2170xtensa_output_addr_const_extra (FILE *fp, rtx x)
2171{
2172 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2173 {
2174 switch (XINT (x, 1))
2175 {
2176 case UNSPEC_PLT:
2177 if (flag_pic)
2178 {
2179 output_addr_const (fp, XVECEXP (x, 0, 0));
2180 fputs ("@PLT", fp);
2181 return true;
2182 }
2183 break;
2184 default:
2185 break;
2186 }
2187 }
2188 return false;
2189}
2190
2191
03984308 2192void
ffbc8796 2193xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
03984308
BW
2194{
2195 long value_long[2];
b216cd4a 2196 REAL_VALUE_TYPE r;
03984308 2197 int size;
74ed13f5 2198 rtx first, second;
03984308
BW
2199
2200 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2201
2202 switch (GET_MODE_CLASS (mode))
2203 {
2204 case MODE_FLOAT:
177b6be0 2205 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
03984308 2206
b216cd4a 2207 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
03984308
BW
2208 switch (mode)
2209 {
2210 case SFmode:
b216cd4a 2211 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
4575a800
BW
2212 if (HOST_BITS_PER_LONG > 32)
2213 value_long[0] &= 0xffffffff;
b216cd4a 2214 fprintf (file, "0x%08lx\n", value_long[0]);
03984308
BW
2215 break;
2216
2217 case DFmode:
b216cd4a 2218 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
4575a800
BW
2219 if (HOST_BITS_PER_LONG > 32)
2220 {
2221 value_long[0] &= 0xffffffff;
2222 value_long[1] &= 0xffffffff;
2223 }
b216cd4a
ZW
2224 fprintf (file, "0x%08lx, 0x%08lx\n",
2225 value_long[0], value_long[1]);
03984308
BW
2226 break;
2227
2228 default:
177b6be0 2229 gcc_unreachable ();
03984308
BW
2230 }
2231
2232 break;
2233
2234 case MODE_INT:
2235 case MODE_PARTIAL_INT:
2236 size = GET_MODE_SIZE (mode);
177b6be0 2237 switch (size)
03984308 2238 {
177b6be0 2239 case 4:
03984308
BW
2240 output_addr_const (file, x);
2241 fputs ("\n", file);
177b6be0
NS
2242 break;
2243
2244 case 8:
74ed13f5
BW
2245 split_double (x, &first, &second);
2246 output_addr_const (file, first);
03984308 2247 fputs (", ", file);
74ed13f5 2248 output_addr_const (file, second);
03984308 2249 fputs ("\n", file);
177b6be0
NS
2250 break;
2251
2252 default:
2253 gcc_unreachable ();
03984308 2254 }
03984308
BW
2255 break;
2256
2257 default:
177b6be0 2258 gcc_unreachable ();
03984308
BW
2259 }
2260}
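
/* Example of the emitted directive (illustration only): for literal
   number 3 holding the SFmode constant 1.0, the code above prints
   "\t.literal .LC3, 0x3f800000".  A DFmode constant is printed as two
   comma-separated 32-bit words, and an 8-byte integer is split with
   split_double and printed as two comma-separated halves.  */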
2261
2262
2263/* Return the bytes needed to compute the frame pointer from the current
638db43e 2264 stack pointer. */
03984308
BW
2265
2266#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2267#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2268
2269long
ffbc8796 2270compute_frame_size (int size)
03984308 2271{
ffbc8796 2272 /* Add space for the incoming static chain value. */
6de9cd9a 2273 if (cfun->static_chain_decl != NULL)
03984308
BW
2274 size += (1 * UNITS_PER_WORD);
2275
2276 xtensa_current_frame_size =
2277 XTENSA_STACK_ALIGN (size
2278 + current_function_outgoing_args_size
2279 + (WINDOW_SIZE * UNITS_PER_WORD));
2280 return xtensa_current_frame_size;
2281}
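
/* Worked example (added for illustration; assumes a 128-bit
   STACK_BOUNDARY, so STACK_BYTES == 16, and the usual WINDOW_SIZE of
   8 words): for 40 bytes of locals with no static chain and no
   outgoing arguments, compute_frame_size yields
   XTENSA_STACK_ALIGN (40 + 0 + 8 * 4) == (72 + 15) & ~15 == 80.  */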
2282
2283
2284int
ffbc8796 2285xtensa_frame_pointer_required (void)
03984308
BW
2286{
2287 /* The code to expand builtin_frame_addr and builtin_return_addr
2288 currently uses the hard_frame_pointer instead of frame_pointer.
2289 This seems wrong but maybe it's necessary for other architectures.
638db43e 2290 This function is derived from the i386 code. */
03984308
BW
2291
2292 if (cfun->machine->accesses_prev_frame)
2293 return 1;
2294
2295 return 0;
2296}
2297
2298
f42f5a1b 2299void
ffbc8796 2300xtensa_expand_prologue (void)
f42f5a1b
BW
2301{
2302 HOST_WIDE_INT total_size;
2303 rtx size_rtx;
18dbd950 2304
f42f5a1b
BW
2305 total_size = compute_frame_size (get_frame_size ());
2306 size_rtx = GEN_INT (total_size);
18dbd950 2307
f42f5a1b
BW
2308 if (total_size < (1 << (12+3)))
2309 emit_insn (gen_entry (size_rtx, size_rtx));
03984308
BW
2310 else
2311 {
f42f5a1b
BW
2312 /* Use a8 as a temporary since a0-a7 may be live. */
2313 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2314 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2315 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2316 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2317 emit_move_insn (stack_pointer_rtx, tmp_reg);
03984308
BW
2318 }
2319
f42f5a1b 2320 if (frame_pointer_needed)
03984308 2321 {
997b8b4d 2322 if (cfun->machine->set_frame_ptr_insn)
03984308 2323 {
997b8b4d 2324 rtx first, insn;
03984308 2325
997b8b4d
BW
2326 push_topmost_sequence ();
2327 first = get_insns ();
2328 pop_topmost_sequence ();
03984308 2329
f42f5a1b
BW
2330 /* For all instructions prior to set_frame_ptr_insn, replace
2331 hard_frame_pointer references with stack_pointer. */
2332 for (insn = first;
997b8b4d 2333 insn != cfun->machine->set_frame_ptr_insn;
f42f5a1b
BW
2334 insn = NEXT_INSN (insn))
2335 {
2336 if (INSN_P (insn))
20dca97b
BW
2337 {
2338 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2339 hard_frame_pointer_rtx,
2340 stack_pointer_rtx);
2341 df_insn_rescan (insn);
2342 }
f42f5a1b
BW
2343 }
2344 }
2345 else
2346 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
03984308
BW
2347 }
2348}
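
/* Worked example (illustration only): the (1 << (12+3)) bound above
   reflects an ENTRY immediate of 12 bits scaled by 8 bytes, i.e. a
   32 KB limit.  A 40000-byte frame therefore emits ENTRY with
   MIN_FRAME_SIZE and then subtracts the remaining bytes from the
   stack pointer through the a8 temporary, as in the else branch.  */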
2349
2350
f42f5a1b 2351/* Reset xtensa_current_frame_size at the end of each function. */
03984308
BW
2352
2353void
ffbc8796
BW
2354xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2355 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
03984308 2356{
03984308
BW
2357 xtensa_current_frame_size = 0;
2358}
2359
2360
0c14a54d 2361rtx
ffbc8796 2362xtensa_return_addr (int count, rtx frame)
0c14a54d
BW
2363{
2364 rtx result, retaddr;
2365
2366 if (count == -1)
f42f5a1b 2367 retaddr = gen_rtx_REG (Pmode, A0_REG);
0c14a54d
BW
2368 else
2369 {
2370 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2371 addr = memory_address (Pmode, addr);
2372 retaddr = gen_reg_rtx (Pmode);
2373 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2374 }
2375
2376 /* The 2 most-significant bits of the return address on Xtensa hold
2377 the register window size. To get the real return address, these
2378 bits must be replaced with the high bits from the current PC. */
2379
2380 result = gen_reg_rtx (Pmode);
2381 emit_insn (gen_fix_return_addr (result, retaddr));
2382 return result;
2383}
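
/* Illustrative sketch (not part of the original source): the fixup
   performed by gen_fix_return_addr is conceptually the bit splice
   below -- keep the low 30 bits of the saved return address and take
   the top 2 bits (the window-size field) from the current PC.  The
   helper name is hypothetical.  */
#if 0
static unsigned int
example_fix_return_addr (unsigned int retaddr, unsigned int pc)
{
  return (pc & 0xc0000000u) | (retaddr & 0x3fffffffu);
}
#endif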
2384
2385
03984308 2386/* Create the va_list data type.
822e895c
BW
2387
2388 This structure is set up by __builtin_saveregs. The __va_reg field
2389 points to a stack-allocated region holding the contents of the
2390 incoming argument registers. The __va_ndx field is an index
2391 initialized to the position of the first unnamed (variable)
2392 argument. This same index is also used to address the arguments
2393 passed in memory. Thus, the __va_stk field is initialized to point
 2394	 to the position of the first argument in memory, offset to account
 2395	 for the arguments passed in registers and for the total size of the
 2396	 argument registers not being a multiple of 16 bytes. E.g., there
2397 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2398 for the first stack argument to have the maximal alignment of 16
2399 bytes, so we offset the __va_stk address by 32 bytes so that
2400 __va_stk[32] references the first argument on the stack. */
03984308 2401
c35d187f
RH
2402static tree
2403xtensa_build_builtin_va_list (void)
03984308 2404{
540eaea8 2405 tree f_stk, f_reg, f_ndx, record, type_decl;
03984308 2406
540eaea8
BW
2407 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2408 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
03984308
BW
2409
2410 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2411 ptr_type_node);
2412 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2413 ptr_type_node);
2414 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2415 integer_type_node);
2416
2417 DECL_FIELD_CONTEXT (f_stk) = record;
2418 DECL_FIELD_CONTEXT (f_reg) = record;
2419 DECL_FIELD_CONTEXT (f_ndx) = record;
2420
540eaea8
BW
2421 TREE_CHAIN (record) = type_decl;
2422 TYPE_NAME (record) = type_decl;
03984308
BW
2423 TYPE_FIELDS (record) = f_stk;
2424 TREE_CHAIN (f_stk) = f_reg;
2425 TREE_CHAIN (f_reg) = f_ndx;
2426
2427 layout_type (record);
2428 return record;
2429}
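
/* For illustration (not in the original source): the record built
   above is equivalent to the C declaration below.  The typedef name is
   hypothetical; the field comments summarize how xtensa_va_start and
   the va_arg expansion use each member.  */
#if 0
typedef struct example_va_list_tag
{
  void *__va_stk;   /* stack arguments, biased by -32 bytes */
  void *__va_reg;   /* save area holding the incoming argument registers */
  int   __va_ndx;   /* byte index of the next unnamed argument */
} example_va_list;
#endif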
2430
2431
2432/* Save the incoming argument registers on the stack. Returns the
638db43e 2433 address of the saved registers. */
03984308 2434
4c45af42 2435static rtx
ffbc8796 2436xtensa_builtin_saveregs (void)
03984308 2437{
e70312d4 2438 rtx gp_regs;
79e9ebdc 2439 int arg_words = current_function_args_info.arg_words;
03984308 2440 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
03984308 2441
997b8b4d 2442 if (gp_left <= 0)
03984308
BW
2443 return const0_rtx;
2444
3bbc2af6 2445 /* Allocate the general-purpose register space. */
03984308
BW
2446 gp_regs = assign_stack_local
2447 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
540eaea8 2448 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
03984308
BW
2449
2450 /* Now store the incoming registers. */
997b8b4d
BW
2451 cfun->machine->need_a7_copy = true;
2452 cfun->machine->vararg_a7 = true;
e70312d4
BW
2453 move_block_from_reg (GP_ARG_FIRST + arg_words,
2454 adjust_address (gp_regs, BLKmode,
2455 arg_words * UNITS_PER_WORD),
2456 gp_left);
03984308
BW
2457
2458 return XEXP (gp_regs, 0);
2459}
2460
2461
2462/* Implement `va_start' for varargs and stdarg. We look at the
638db43e 2463 current function to fill in an initial va_list. */
03984308
BW
2464
2465void
ffbc8796 2466xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
03984308
BW
2467{
2468 tree f_stk, stk;
2469 tree f_reg, reg;
2470 tree f_ndx, ndx;
2471 tree t, u;
2472 int arg_words;
2473
2474 arg_words = current_function_args_info.arg_words;
2475
2476 f_stk = TYPE_FIELDS (va_list_type_node);
2477 f_reg = TREE_CHAIN (f_stk);
2478 f_ndx = TREE_CHAIN (f_reg);
2479
47a25a46
RG
2480 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2481 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2482 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
03984308
BW
2483
2484 /* Call __builtin_saveregs; save the result in __va_reg */
e70312d4
BW
2485 u = make_tree (sizetype, expand_builtin_saveregs ());
2486 u = fold_convert (ptr_type_node, u);
07beea0d 2487 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, reg, u);
03984308
BW
2488 TREE_SIDE_EFFECTS (t) = 1;
2489 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2490
822e895c 2491 /* Set the __va_stk member to ($arg_ptr - 32). */
03984308 2492 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
e70312d4 2493 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
07beea0d 2494 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, stk, u);
03984308
BW
2495 TREE_SIDE_EFFECTS (t) = 1;
2496 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2497
822e895c
BW
2498 /* Set the __va_ndx member. If the first variable argument is on
2499 the stack, adjust __va_ndx by 2 words to account for the extra
2500 alignment offset for __va_stk. */
2501 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2502 arg_words += 2;
e70312d4
BW
2503 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx,
2504 size_int (arg_words * UNITS_PER_WORD));
03984308
BW
2505 TREE_SIDE_EFFECTS (t) = 1;
2506 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2507}
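
/* Worked example (illustration only): for "int f (int a, ...)" the one
   named argument occupies a single word, so arg_words is 1 and the
   code above sets __va_ndx to 4, __va_reg to the register save area
   returned by __builtin_saveregs, and __va_stk to the incoming
   argument pointer minus 32 bytes.  */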
2508
2509
2510/* Implement `va_arg'. */
2511
85d53c1d
RH
2512static tree
2513xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2514 tree *post_p ATTRIBUTE_UNUSED)
03984308
BW
2515{
2516 tree f_stk, stk;
2517 tree f_reg, reg;
2518 tree f_ndx, ndx;
85d53c1d
RH
2519 tree type_size, array, orig_ndx, addr, size, va_size, t;
2520 tree lab_false, lab_over, lab_false2;
08b0dc1b
RH
2521 bool indirect;
2522
2523 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2524 if (indirect)
2525 type = build_pointer_type (type);
03984308 2526
3712281f
BW
2527 /* Handle complex values as separate real and imaginary parts. */
2528 if (TREE_CODE (type) == COMPLEX_TYPE)
2529 {
85d53c1d 2530 tree real_part, imag_part;
3712281f 2531
85d53c1d
RH
2532 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2533 pre_p, NULL);
2534 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
3712281f 2535
85d53c1d
RH
2536 imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2537 pre_p, NULL);
2538 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
3712281f 2539
47a25a46 2540 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
3712281f
BW
2541 }
2542
03984308
BW
2543 f_stk = TYPE_FIELDS (va_list_type_node);
2544 f_reg = TREE_CHAIN (f_stk);
2545 f_ndx = TREE_CHAIN (f_reg);
2546
47a25a46
RG
2547 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2548 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2549 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
03984308 2550
85d53c1d
RH
2551 type_size = size_in_bytes (type);
2552 va_size = round_up (type_size, UNITS_PER_WORD);
2553 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
8be56275 2554
03984308 2555
822e895c 2556 /* First align __va_ndx if necessary for this arg:
03984308 2557
85d53c1d 2558 orig_ndx = (AP).__va_ndx;
822e895c 2559	 if (__alignof__ (TYPE) > 4)
85d53c1d 2560 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
822e895c 2561 & -__alignof__ (TYPE)); */
03984308 2562
85d53c1d
RH
2563 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2564
03984308
BW
2565 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2566 {
d2348985 2567 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
85d53c1d 2568
e70312d4
BW
2569 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, size_int (align - 1));
2570 t = build2 (BIT_AND_EXPR, integer_type_node, t, size_int (-align));
07beea0d 2571 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, orig_ndx, t);
85d53c1d 2572 gimplify_and_add (t, pre_p);
03984308
BW
2573 }
2574
2575
2576 /* Increment __va_ndx to point past the argument:
2577
85d53c1d 2578 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
03984308 2579
85d53c1d 2580 t = fold_convert (integer_type_node, va_size);
47a25a46 2581 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
07beea0d 2582 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
85d53c1d 2583 gimplify_and_add (t, pre_p);
03984308
BW
2584
2585
2586 /* Check if the argument is in registers:
2587
bcf88f9b 2588 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
fe984136 2589 && !must_pass_in_stack (type))
ffbc8796 2590 __array = (AP).__va_reg; */
03984308 2591
85d53c1d 2592 array = create_tmp_var (ptr_type_node, NULL);
03984308 2593
85d53c1d 2594 lab_over = NULL;
fe984136 2595 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
bcf88f9b 2596 {
85d53c1d
RH
2597 lab_false = create_artificial_label ();
2598 lab_over = create_artificial_label ();
2599
e70312d4
BW
2600 t = build2 (GT_EXPR, boolean_type_node, ndx,
2601 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2602 t = build3 (COND_EXPR, void_type_node, t,
2603 build1 (GOTO_EXPR, void_type_node, lab_false),
2604 NULL_TREE);
85d53c1d
RH
2605 gimplify_and_add (t, pre_p);
2606
07beea0d 2607 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, reg);
85d53c1d
RH
2608 gimplify_and_add (t, pre_p);
2609
47a25a46 2610 t = build1 (GOTO_EXPR, void_type_node, lab_over);
85d53c1d
RH
2611 gimplify_and_add (t, pre_p);
2612
47a25a46 2613 t = build1 (LABEL_EXPR, void_type_node, lab_false);
85d53c1d 2614 gimplify_and_add (t, pre_p);
bcf88f9b 2615 }
03984308 2616
85d53c1d 2617
03984308
BW
2618 /* ...otherwise, the argument is on the stack (never split between
2619 registers and the stack -- change __va_ndx if necessary):
2620
2621 else
2622 {
822e895c
BW
2623 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2624 (AP).__va_ndx = 32 + __va_size (TYPE);
03984308 2625 __array = (AP).__va_stk;
ffbc8796 2626 } */
03984308 2627
85d53c1d 2628 lab_false2 = create_artificial_label ();
03984308 2629
e70312d4
BW
2630 t = build2 (GT_EXPR, boolean_type_node, orig_ndx,
2631 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2632 t = build3 (COND_EXPR, void_type_node, t,
2633 build1 (GOTO_EXPR, void_type_node, lab_false2),
2634 NULL_TREE);
85d53c1d 2635 gimplify_and_add (t, pre_p);
03984308 2636
85d53c1d
RH
2637 t = size_binop (PLUS_EXPR, va_size, size_int (32));
2638 t = fold_convert (integer_type_node, t);
07beea0d 2639 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
85d53c1d 2640 gimplify_and_add (t, pre_p);
03984308 2641
47a25a46 2642 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
85d53c1d 2643 gimplify_and_add (t, pre_p);
03984308 2644
07beea0d 2645 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, stk);
85d53c1d
RH
2646 gimplify_and_add (t, pre_p);
2647
2648 if (lab_over)
2649 {
47a25a46 2650 t = build1 (LABEL_EXPR, void_type_node, lab_over);
85d53c1d
RH
2651 gimplify_and_add (t, pre_p);
2652 }
8be56275 2653
03984308
BW
2654
2655 /* Given the base array pointer (__array) and index to the subsequent
2656 argument (__va_ndx), find the address:
2657
8be56275
BW
2658 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2659 ? sizeof (TYPE)
2660 : __va_size (TYPE))
03984308
BW
2661
2662 The results are endian-dependent because values smaller than one word
ffbc8796 2663 are aligned differently. */
03984308 2664
633e4eb4 2665
85d91d5b 2666 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
8be56275 2667 {
e70312d4
BW
2668 t = fold_build2 (GE_EXPR, boolean_type_node, type_size,
2669 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
47a25a46 2670 t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
85d53c1d 2671 size = t;
8be56275 2672 }
85d53c1d
RH
2673 else
2674 size = va_size;
2675
e70312d4
BW
2676 t = build2 (MINUS_EXPR, sizetype, ndx, size);
2677 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, array, t);
03984308 2678
85d53c1d 2679 addr = fold_convert (build_pointer_type (type), addr);
08b0dc1b 2680 if (indirect)
d6e9821f
RH
2681 addr = build_va_arg_indirect_ref (addr);
2682 return build_va_arg_indirect_ref (addr);
03984308
BW
2683}
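
/* Consolidated sketch of the expansion above (illustration only): for
   a 4-byte, word-aligned TYPE that is not passed by reference, on a
   little-endian target, the emitted code behaves like

     orig_ndx = ap.__va_ndx;
     ap.__va_ndx = orig_ndx + 4;
     if (ap.__va_ndx <= 6 * 4)
       array = ap.__va_reg;
     else
       {
         if (orig_ndx <= 6 * 4)
           ap.__va_ndx = 32 + 4;
         array = ap.__va_stk;
       }
     addr = array + ap.__va_ndx - 4;
     result = *(TYPE *) addr;

   where 6 is MAX_ARGS_IN_REGISTERS and 32 is the __va_stk bias set up
   in xtensa_va_start.  */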
2684
2685
09fa8841
BW
2686/* Builtins. */
2687
2688enum xtensa_builtin
2689{
2690 XTENSA_BUILTIN_UMULSIDI3,
2691 XTENSA_BUILTIN_max
2692};
2693
2694
2695static void
2696xtensa_init_builtins (void)
2697{
2698 tree ftype;
2699
2700 ftype = build_function_type_list (unsigned_intDI_type_node,
2701 unsigned_intSI_type_node,
2702 unsigned_intSI_type_node, NULL_TREE);
2703
2704 add_builtin_function ("__builtin_umulsidi3", ftype,
2705 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
2706 "__umulsidi3", NULL_TREE);
2707}
2708
2709
2710static tree
2711xtensa_fold_builtin (tree fndecl, tree arglist, bool ignore ATTRIBUTE_UNUSED)
2712{
2713 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2714 tree arg0, arg1;
2715
2716 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2717 {
2718 arg0 = TREE_VALUE (arglist);
2719 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2720 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2721 || TARGET_MUL32_HIGH)
2722 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
2723 fold_convert (unsigned_intDI_type_node, arg0),
2724 fold_convert (unsigned_intDI_type_node, arg1));
2725 else
2726 return NULL;
2727 }
2728
2729 internal_error ("bad builtin code");
2730 return NULL;
2731}
2732
2733
2734static rtx
2735xtensa_expand_builtin (tree exp, rtx target,
2736 rtx subtarget ATTRIBUTE_UNUSED,
2737 enum machine_mode mode ATTRIBUTE_UNUSED,
2738 int ignore)
2739{
ec3643e8 2740 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
09fa8841
BW
2741 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2742
2743 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
2744 __umulsidi3 function when the Xtensa configuration can directly
2745 implement it. If not, just call the function. */
2746 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2747 return expand_call (exp, target, ignore);
2748
2749 internal_error ("bad builtin code");
2750 return NULL_RTX;
2751}
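
/* Usage example (illustration only; the wrapper name is hypothetical):
   the builtin registered by xtensa_init_builtins can be called
   directly from C.  When the configuration provides MUL32_HIGH (or
   both operands are constants) the folder above turns it into an
   ordinary widening multiply; otherwise the expander emits a call to
   the real __umulsidi3 library function.  */
#if 0
unsigned long long
example_widening_multiply (unsigned int a, unsigned int b)
{
  return __builtin_umulsidi3 (a, b);
}
#endif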
2752
2753
a8cacfd2 2754enum reg_class
ffbc8796 2755xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
a8cacfd2 2756{
89f6025d 2757 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
a8cacfd2
BW
2758 return NO_REGS;
2759
89f6025d
BW
2760 /* Don't use the stack pointer or hard frame pointer for reloads!
2761 The hard frame pointer would normally be OK except that it may
2762 briefly hold an incoming argument in the prologue, and reload
2763 won't know that it is live because the hard frame pointer is
2764 treated specially. */
2765
2766 if (class == AR_REGS || class == GR_REGS)
2767 return RL_REGS;
a8cacfd2
BW
2768
2769 return class;
2770}
2771
2772
03984308 2773enum reg_class
ffbc8796
BW
2774xtensa_secondary_reload_class (enum reg_class class,
2775 enum machine_mode mode ATTRIBUTE_UNUSED,
2776 rtx x, int isoutput)
03984308
BW
2777{
2778 int regno;
2779
2780 if (GET_CODE (x) == SIGN_EXTEND)
2781 x = XEXP (x, 0);
2782 regno = xt_true_regnum (x);
2783
2784 if (!isoutput)
2785 {
2786 if (class == FP_REGS && constantpool_mem_p (x))
89f6025d 2787 return RL_REGS;
03984308
BW
2788 }
2789
2790 if (ACC_REG_P (regno))
89f6025d 2791 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
03984308 2792 if (class == ACC_REG)
89f6025d 2793 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
03984308
BW
2794
2795 return NO_REGS;
2796}
2797
2798
2799void
ffbc8796 2800order_regs_for_local_alloc (void)
03984308
BW
2801{
2802 if (!leaf_function_p ())
2803 {
2804 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2805 FIRST_PSEUDO_REGISTER * sizeof (int));
2806 }
2807 else
2808 {
2809 int i, num_arg_regs;
2810 int nxt = 0;
2811
3bbc2af6
KH
2812 /* Use the AR registers in increasing order (skipping a0 and a1)
 2813	 but save the incoming argument registers as a last resort. */
03984308
BW
2814 num_arg_regs = current_function_args_info.arg_words;
2815 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2816 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2817 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2818 reg_alloc_order[nxt++] = i + num_arg_regs;
2819 for (i = 0; i < num_arg_regs; i++)
2820 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2821
3bbc2af6 2822 /* List the coprocessor registers in order. */
985d0d50
BW
2823 for (i = 0; i < BR_REG_NUM; i++)
2824 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2825
3bbc2af6 2826 /* List the FP registers in order for now. */
03984308
BW
2827 for (i = 0; i < 16; i++)
2828 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2829
638db43e 2830	 /* GCC requires that we list *all* the registers... */
03984308
BW
2831 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2832 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2833 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2834 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2835
03984308
BW
2836 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2837 }
2838}
2839
2840
01abf342
BW
2841/* Some Xtensa targets support multiple bss sections. If the section
2842 name ends with ".bss", add SECTION_BSS to the flags. */
2843
2844static unsigned int
ffbc8796 2845xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
01abf342
BW
2846{
2847 unsigned int flags = default_section_type_flags (decl, name, reloc);
2848 const char *suffix;
2849
2850 suffix = strrchr (name, '.');
2851 if (suffix && strcmp (suffix, ".bss") == 0)
2852 {
2853 if (!decl || (TREE_CODE (decl) == VAR_DECL
2854 && DECL_INITIAL (decl) == NULL_TREE))
2855 flags |= SECTION_BSS; /* @nobits */
2856 else
d4ee4d25 2857 warning (0, "only uninitialized variables can be placed in a "
01abf342
BW
2858 ".bss section");
2859 }
2860
2861 return flags;
2862}
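
/* Usage example (illustration only; the section name is hypothetical):
   an uninitialized variable placed in a section whose name ends in
   ".bss" receives the SECTION_BSS (@nobits) flag from the hook
   above.  */
#if 0
int example_buffer[256] __attribute__ ((section (".mydata.bss")));
#endif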
2863
2864
b64a1b53
RH
2865/* The literal pool stays with the function. */
2866
d6b5193b 2867static section *
ffbc8796
BW
2868xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2869 rtx x ATTRIBUTE_UNUSED,
2870 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53 2871{
d6b5193b 2872 return function_section (current_function_decl);
b64a1b53 2873}
fb49053f 2874
ffbc8796 2875
3c50106f
RH
2876/* Compute a (partial) cost for rtx X. Return true if the complete
2877 cost has been computed, and false if subexpressions should be
2878 scanned. In either case, *TOTAL contains the cost result. */
2879
2880static bool
ffbc8796 2881xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f
RH
2882{
2883 switch (code)
2884 {
2885 case CONST_INT:
2886 switch (outer_code)
2887 {
2888 case SET:
2889 if (xtensa_simm12b (INTVAL (x)))
2890 {
2891 *total = 4;
2892 return true;
2893 }
2894 break;
2895 case PLUS:
2896 if (xtensa_simm8 (INTVAL (x))
2897 || xtensa_simm8x256 (INTVAL (x)))
2898 {
2899 *total = 0;
2900 return true;
2901 }
2902 break;
2903 case AND:
2904 if (xtensa_mask_immediate (INTVAL (x)))
2905 {
2906 *total = 0;
2907 return true;
2908 }
2909 break;
2910 case COMPARE:
2911 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2912 {
2913 *total = 0;
2914 return true;
2915 }
2916 break;
2917 case ASHIFT:
2918 case ASHIFTRT:
2919 case LSHIFTRT:
2920 case ROTATE:
2921 case ROTATERT:
3bbc2af6 2922 /* No way to tell if X is the 2nd operand so be conservative. */
3c50106f
RH
2923 default: break;
2924 }
2925 if (xtensa_simm12b (INTVAL (x)))
2926 *total = 5;
f42f5a1b
BW
2927 else if (TARGET_CONST16)
2928 *total = COSTS_N_INSNS (2);
3c50106f
RH
2929 else
2930 *total = 6;
2931 return true;
2932
2933 case CONST:
2934 case LABEL_REF:
2935 case SYMBOL_REF:
f42f5a1b
BW
2936 if (TARGET_CONST16)
2937 *total = COSTS_N_INSNS (2);
2938 else
2939 *total = 5;
3c50106f
RH
2940 return true;
2941
2942 case CONST_DOUBLE:
f42f5a1b
BW
2943 if (TARGET_CONST16)
2944 *total = COSTS_N_INSNS (4);
2945 else
2946 *total = 7;
3c50106f
RH
2947 return true;
2948
2949 case MEM:
2950 {
2951 int num_words =
2952 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2953
2954 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2955 *total = COSTS_N_INSNS (num_words);
2956 else
2957 *total = COSTS_N_INSNS (2*num_words);
2958 return true;
2959 }
2960
2961 case FFS:
09fa8841 2962 case CTZ:
3c50106f
RH
2963 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2964 return true;
2965
09fa8841
BW
2966 case CLZ:
2967 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
2968 return true;
2969
3c50106f
RH
2970 case NOT:
2971 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2972 return true;
2973
2974 case AND:
2975 case IOR:
2976 case XOR:
2977 if (GET_MODE (x) == DImode)
2978 *total = COSTS_N_INSNS (2);
2979 else
2980 *total = COSTS_N_INSNS (1);
2981 return true;
2982
2983 case ASHIFT:
2984 case ASHIFTRT:
2985 case LSHIFTRT:
2986 if (GET_MODE (x) == DImode)
2987 *total = COSTS_N_INSNS (50);
2988 else
2989 *total = COSTS_N_INSNS (1);
2990 return true;
2991
2992 case ABS:
2993 {
2994 enum machine_mode xmode = GET_MODE (x);
2995 if (xmode == SFmode)
2996 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2997 else if (xmode == DFmode)
2998 *total = COSTS_N_INSNS (50);
2999 else
3000 *total = COSTS_N_INSNS (4);
3001 return true;
3002 }
3003
3004 case PLUS:
3005 case MINUS:
3006 {
3007 enum machine_mode xmode = GET_MODE (x);
3008 if (xmode == SFmode)
3009 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3010 else if (xmode == DFmode || xmode == DImode)
3011 *total = COSTS_N_INSNS (50);
3012 else
3013 *total = COSTS_N_INSNS (1);
3014 return true;
3015 }
3016
3017 case NEG:
3018 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3019 return true;
3020
3021 case MULT:
3022 {
3023 enum machine_mode xmode = GET_MODE (x);
3024 if (xmode == SFmode)
3025 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
09fa8841 3026 else if (xmode == DFmode)
3c50106f 3027 *total = COSTS_N_INSNS (50);
09fa8841
BW
3028 else if (xmode == DImode)
3029 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3c50106f
RH
3030 else if (TARGET_MUL32)
3031 *total = COSTS_N_INSNS (4);
3032 else if (TARGET_MAC16)
3033 *total = COSTS_N_INSNS (16);
3034 else if (TARGET_MUL16)
3035 *total = COSTS_N_INSNS (12);
3036 else
3037 *total = COSTS_N_INSNS (50);
3038 return true;
3039 }
3040
3041 case DIV:
3042 case MOD:
3043 {
3044 enum machine_mode xmode = GET_MODE (x);
3045 if (xmode == SFmode)
3046 {
3047 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3048 return true;
3049 }
3050 else if (xmode == DFmode)
3051 {
3052 *total = COSTS_N_INSNS (50);
3053 return true;
3054 }
3055 }
3bbc2af6 3056 /* Fall through. */
3c50106f
RH
3057
3058 case UDIV:
3059 case UMOD:
3060 {
3061 enum machine_mode xmode = GET_MODE (x);
3062 if (xmode == DImode)
3063 *total = COSTS_N_INSNS (50);
3064 else if (TARGET_DIV32)
3065 *total = COSTS_N_INSNS (32);
3066 else
3067 *total = COSTS_N_INSNS (50);
3068 return true;
3069 }
3070
3071 case SQRT:
3072 if (GET_MODE (x) == SFmode)
3073 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3074 else
3075 *total = COSTS_N_INSNS (50);
3076 return true;
3077
3078 case SMIN:
3079 case UMIN:
3080 case SMAX:
3081 case UMAX:
3082 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3083 return true;
3084
3085 case SIGN_EXTRACT:
3086 case SIGN_EXTEND:
3087 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3088 return true;
3089
3090 case ZERO_EXTRACT:
3091 case ZERO_EXTEND:
3092 *total = COSTS_N_INSNS (1);
3093 return true;
3094
3095 default:
3096 return false;
3097 }
3098}
3099
bd5bd7ac
KH
3100/* Worker function for TARGET_RETURN_IN_MEMORY. */
3101
4c45af42 3102static bool
586de218 3103xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4c45af42
KH
3104{
3105 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3106 > 4 * UNITS_PER_WORD);
3107}
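
/* Example (illustration only, assuming 4-byte ints and a
   UNITS_PER_WORD of 4): a 20-byte struct such as "struct { int v[5]; }"
   exceeds 4 * UNITS_PER_WORD and is therefore returned in memory,
   while a 16-byte struct is not forced into memory by this hook.  */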
3108
e2500fed 3109#include "gt-xtensa.h"