f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
771b6086 2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
f6b7ba2b 4 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
038d1e19 10Software Foundation; either version 3, or (at your option) any later
f6b7ba2b 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
038d1e19 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
f6b7ba2b 21
22#include "config.h"
23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
f6b7ba2b 26#include "rtl.h"
27#include "regs.h"
f6b7ba2b 28#include "hard-reg-set.h"
29#include "basic-block.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "insn-attr.h"
35#include "insn-codes.h"
36#include "recog.h"
37#include "output.h"
38#include "tree.h"
39#include "expr.h"
40#include "flags.h"
41#include "reload.h"
42#include "tm_p.h"
43#include "function.h"
44#include "toplev.h"
45#include "optabs.h"
46#include "libfuncs.h"
160b2123 47#include "ggc.h"
f6b7ba2b 48#include "target.h"
49#include "target-def.h"
049d6666 50#include "langhooks.h"
ae79166b 51#include "tree-gimple.h"
d8002fbc 52#include "df.h"
ae79166b 53
f6b7ba2b 54
55/* Enumeration for all of the relational tests, so that we can build
56 arrays indexed by the test type, and not worry about the order
c821cf9c 57 of EQ, NE, etc. */
f6b7ba2b 58
fd63fcf8 59enum internal_test
60{
61 ITEST_EQ,
62 ITEST_NE,
63 ITEST_GT,
64 ITEST_GE,
65 ITEST_LT,
66 ITEST_LE,
67 ITEST_GTU,
68 ITEST_GEU,
69 ITEST_LTU,
70 ITEST_LEU,
71 ITEST_MAX
72};
f6b7ba2b 73
74/* Cached operands, and operator to compare for use in set/branch on
75 condition codes. */
76rtx branch_cmp[2];
77
78/* What type of branch to use.  */
79enum cmp_type branch_type;
80
81/* Array giving truth value on whether or not a given hard register
82 can support a given mode. */
83char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
84
85/* Current frame size calculated by compute_frame_size. */
86unsigned xtensa_current_frame_size;
87
a80259b6 88/* Largest block move to handle in-line. */
f6b7ba2b 89#define LARGEST_MOVE_RATIO 15
90
91/* Define the structure for the machine field in struct function. */
1f3233d1 92struct machine_function GTY(())
f6b7ba2b 93{
94 int accesses_prev_frame;
e060c9df 95 bool need_a7_copy;
96 bool vararg_a7;
97 rtx set_frame_ptr_insn;
f6b7ba2b 98};
99
100/* Vector, indexed by hard register number, which contains 1 for a
101 register that is allowable in a candidate for leaf function
c821cf9c 102 treatment. */
f6b7ba2b 103
104const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105{
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 1, 1, 1,
108 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 1
110};
111
112/* Map hard register number to register class */
113const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
114{
a8332086 115 RL_REGS, SP_REG, RL_REGS, RL_REGS,
116 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
117 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
118 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
f6b7ba2b 119 AR_REGS, AR_REGS, BR_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 ACC_REG,
125};
126
fd63fcf8 127static enum internal_test map_test_to_internal_test (enum rtx_code);
128static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
129static rtx gen_float_relational (enum rtx_code, rtx, rtx);
130static rtx gen_conditional_move (rtx);
131static rtx fixup_subreg_mem (rtx);
fd63fcf8 132static struct machine_function * xtensa_init_machine_status (void);
110f993e 133static bool xtensa_return_in_msb (tree);
fd63fcf8 134static void printx (FILE *, signed int);
135static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
4fe4af61 136static rtx xtensa_builtin_saveregs (void);
fd63fcf8 137static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
138 int) ATTRIBUTE_UNUSED;
2f14b1f9 139static section *xtensa_select_rtx_section (enum machine_mode, rtx,
140 unsigned HOST_WIDE_INT);
fd63fcf8 141static bool xtensa_rtx_costs (rtx, int, int, int *);
2e15d750 142static tree xtensa_build_builtin_va_list (void);
4fe4af61 143static bool xtensa_return_in_memory (tree, tree);
ae79166b 144static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
8e8c0c04 145static void xtensa_init_builtins (void);
146static tree xtensa_fold_builtin (tree, tree, bool);
147static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
bbfbe351 148
bbfbe351 149static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
150 REG_ALLOC_ORDER;
151\f
f6b7ba2b 152
153/* This macro generates the assembly code for function exit,
154 on machines that need it. If FUNCTION_EPILOGUE is not defined
155 then individual return instructions are generated for each
156 return statement. Args are same as for FUNCTION_PROLOGUE. */
157
158#undef TARGET_ASM_FUNCTION_EPILOGUE
159#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
160
161/* These hooks specify assembly directives for creating certain kinds
162 of integer object. */
163
164#undef TARGET_ASM_ALIGNED_SI_OP
165#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
166
bbfbe351 167#undef TARGET_ASM_SELECT_RTX_SECTION
168#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
f6b7ba2b 169
03ea9d8a 170#undef TARGET_DEFAULT_TARGET_FLAGS
171#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
172
fab7adbf 173#undef TARGET_RTX_COSTS
174#define TARGET_RTX_COSTS xtensa_rtx_costs
ec0457a8 175#undef TARGET_ADDRESS_COST
176#define TARGET_ADDRESS_COST hook_int_rtx_0
fab7adbf 177
2e15d750 178#undef TARGET_BUILD_BUILTIN_VA_LIST
179#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
180
4fe4af61 181#undef TARGET_PROMOTE_FUNCTION_ARGS
182#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
183#undef TARGET_PROMOTE_FUNCTION_RETURN
184#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
185#undef TARGET_PROMOTE_PROTOTYPES
186#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
187
4fe4af61 188#undef TARGET_RETURN_IN_MEMORY
189#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
92d40bc4 190#undef TARGET_SPLIT_COMPLEX_ARG
191#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
0336f0f0 192#undef TARGET_MUST_PASS_IN_STACK
193#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
4fe4af61 194
195#undef TARGET_EXPAND_BUILTIN_SAVEREGS
196#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
ae79166b 197#undef TARGET_GIMPLIFY_VA_ARG_EXPR
198#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
4fe4af61 199
110f993e 200#undef TARGET_RETURN_IN_MSB
201#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
202
8e8c0c04 203#undef TARGET_INIT_BUILTINS
204#define TARGET_INIT_BUILTINS xtensa_init_builtins
205#undef TARGET_FOLD_BUILTIN
206#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
207#undef TARGET_EXPAND_BUILTIN
208#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
209
bbfbe351 210struct gcc_target targetm = TARGET_INITIALIZER;
f6b7ba2b 211
eb472ecb 212\f
213/* Functions to test Xtensa immediate operand validity. */
f6b7ba2b 214
7d0f7bf8 215bool
216xtensa_simm8 (HOST_WIDE_INT v)
217{
218 return v >= -128 && v <= 127;
219}
220
221
222bool
223xtensa_simm8x256 (HOST_WIDE_INT v)
224{
225 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
226}
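/* Worked example (illustrative only): xtensa_simm8x256 accepts exactly the
   multiples of 256 whose quotient fits in a signed 8-bit field, e.g.
   -32768 (-128 * 256), 0, and 32512 (127 * 256), but rejects 100 (not a
   multiple of 256) and 32768 (the quotient 128 does not fit).  */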
227
228
229bool
230xtensa_simm12b (HOST_WIDE_INT v)
231{
232 return v >= -2048 && v <= 2047;
233}
234
235
236static bool
237xtensa_uimm8 (HOST_WIDE_INT v)
238{
239 return v >= 0 && v <= 255;
240}
241
242
243static bool
244xtensa_uimm8x2 (HOST_WIDE_INT v)
245{
246 return (v & 1) == 0 && (v >= 0 && v <= 510);
247}
248
249
250static bool
251xtensa_uimm8x4 (HOST_WIDE_INT v)
252{
253 return (v & 3) == 0 && (v >= 0 && v <= 1020);
254}
255
256
257static bool
258xtensa_b4const (HOST_WIDE_INT v)
f6b7ba2b 259{
260 switch (v)
261 {
7d0f7bf8 262 case -1:
263 case 1:
f6b7ba2b 264 case 2:
265 case 3:
266 case 4:
267 case 5:
268 case 6:
269 case 7:
270 case 8:
271 case 10:
272 case 12:
273 case 16:
274 case 32:
275 case 64:
276 case 128:
277 case 256:
7d0f7bf8 278 return true;
f6b7ba2b 279 }
7d0f7bf8 280 return false;
f6b7ba2b 281}
282
f6b7ba2b 283
7d0f7bf8 284bool
285xtensa_b4const_or_zero (HOST_WIDE_INT v)
f6b7ba2b 286{
7d0f7bf8 287 if (v == 0)
288 return true;
289 return xtensa_b4const (v);
f6b7ba2b 290}
291
f6b7ba2b 292
7d0f7bf8 293bool
294xtensa_b4constu (HOST_WIDE_INT v)
f6b7ba2b 295{
296 switch (v)
297 {
7d0f7bf8 298 case 32768:
299 case 65536:
f6b7ba2b 300 case 2:
301 case 3:
302 case 4:
303 case 5:
304 case 6:
305 case 7:
306 case 8:
307 case 10:
308 case 12:
309 case 16:
310 case 32:
311 case 64:
312 case 128:
313 case 256:
7d0f7bf8 314 return true;
f6b7ba2b 315 }
7d0f7bf8 316 return false;
f6b7ba2b 317}
318
f6b7ba2b 319
7d0f7bf8 320bool
321xtensa_mask_immediate (HOST_WIDE_INT v)
f6b7ba2b 322{
7d0f7bf8 323#define MAX_MASK_SIZE 16
324 int mask_size;
f6b7ba2b 325
7d0f7bf8 326 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
327 {
328 if ((v & 1) == 0)
329 return false;
330 v = v >> 1;
331 if (v == 0)
332 return true;
333 }
f6b7ba2b 334
7d0f7bf8 335 return false;
f6b7ba2b 336}
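/* Worked example (illustrative only): xtensa_mask_immediate accepts
   contiguous low-order masks of 1 to 16 bits, e.g. 0x1, 0x3, 0xff and
   0xffff, but rejects 0 (no bits set), 0x5 (not contiguous from bit 0)
   and 0x1ffff (17 bits wide).  */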
337
f6b7ba2b 338
f6b7ba2b 339/* This is just like the standard true_regnum() function except that it
c821cf9c 340 works even when reg_renumber is not initialized. */
f6b7ba2b 341
342int
fd63fcf8 343xt_true_regnum (rtx x)
f6b7ba2b 344{
345 if (GET_CODE (x) == REG)
346 {
347 if (reg_renumber
348 && REGNO (x) >= FIRST_PSEUDO_REGISTER
349 && reg_renumber[REGNO (x)] >= 0)
350 return reg_renumber[REGNO (x)];
351 return REGNO (x);
352 }
353 if (GET_CODE (x) == SUBREG)
354 {
355 int base = xt_true_regnum (SUBREG_REG (x));
356 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
357 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
358 GET_MODE (SUBREG_REG (x)),
359 SUBREG_BYTE (x), GET_MODE (x));
360 }
361 return -1;
362}
363
364
f6b7ba2b 365int
fd63fcf8 366xtensa_valid_move (enum machine_mode mode, rtx *operands)
f6b7ba2b 367{
fc12fa10 368 /* Either the destination or source must be a register, and the
369 MAC16 accumulator doesn't count. */
370
371 if (register_operand (operands[0], mode))
372 {
373 int dst_regnum = xt_true_regnum (operands[0]);
374
c821cf9c 375 /* The stack pointer can only be assigned with a MOVSP opcode. */
fc12fa10 376 if (dst_regnum == STACK_POINTER_REGNUM)
377 return (mode == SImode
378 && register_operand (operands[1], mode)
379 && !ACC_REG_P (xt_true_regnum (operands[1])));
380
381 if (!ACC_REG_P (dst_regnum))
382 return true;
383 }
141e2ef6 384 if (register_operand (operands[1], mode))
fc12fa10 385 {
386 int src_regnum = xt_true_regnum (operands[1]);
387 if (!ACC_REG_P (src_regnum))
388 return true;
389 }
f6b7ba2b 390 return FALSE;
391}
392
393
f6b7ba2b 394int
fd63fcf8 395smalloffset_mem_p (rtx op)
f6b7ba2b 396{
397 if (GET_CODE (op) == MEM)
398 {
399 rtx addr = XEXP (op, 0);
400 if (GET_CODE (addr) == REG)
771b6086 401 return BASE_REG_P (addr, 0);
f6b7ba2b 402 if (GET_CODE (addr) == PLUS)
403 {
404 rtx offset = XEXP (addr, 0);
7d0f7bf8 405 HOST_WIDE_INT val;
f6b7ba2b 406 if (GET_CODE (offset) != CONST_INT)
407 offset = XEXP (addr, 1);
408 if (GET_CODE (offset) != CONST_INT)
409 return FALSE;
7d0f7bf8 410
411 val = INTVAL (offset);
412 return (val & 3) == 0 && (val >= 0 && val <= 60);
f6b7ba2b 413 }
414 }
415 return FALSE;
416}
417
418
f6b7ba2b 419int
fd63fcf8 420constantpool_address_p (rtx addr)
f6b7ba2b 421{
422 rtx sym = addr;
423
424 if (GET_CODE (addr) == CONST)
425 {
426 rtx offset;
427
dafa59bd 428 /* Only handle (PLUS (SYM, OFFSET)) form. */
f6b7ba2b 429 addr = XEXP (addr, 0);
430 if (GET_CODE (addr) != PLUS)
431 return FALSE;
432
dafa59bd 433 /* Make sure the address is word aligned. */
f6b7ba2b 434 offset = XEXP (addr, 1);
435 if ((GET_CODE (offset) != CONST_INT)
436 || ((INTVAL (offset) & 3) != 0))
437 return FALSE;
438
439 sym = XEXP (addr, 0);
440 }
441
442 if ((GET_CODE (sym) == SYMBOL_REF)
443 && CONSTANT_POOL_ADDRESS_P (sym))
444 return TRUE;
445 return FALSE;
446}
447
448
449int
fd63fcf8 450constantpool_mem_p (rtx op)
f6b7ba2b 451{
b0e603fe 452 if (GET_CODE (op) == SUBREG)
453 op = SUBREG_REG (op);
f6b7ba2b 454 if (GET_CODE (op) == MEM)
455 return constantpool_address_p (XEXP (op, 0));
456 return FALSE;
457}
458
459
f6b7ba2b 460void
fd63fcf8 461xtensa_extend_reg (rtx dst, rtx src)
f6b7ba2b 462{
463 rtx temp = gen_reg_rtx (SImode);
464 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
465
dafa59bd 466 /* Generate paradoxical subregs as needed so that the modes match. */
f6b7ba2b 467 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
468 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
469
470 emit_insn (gen_ashlsi3 (temp, src, shift));
471 emit_insn (gen_ashrsi3 (dst, temp, shift));
472}
473
474
7d0f7bf8 475bool
fd63fcf8 476xtensa_mem_offset (unsigned v, enum machine_mode mode)
f6b7ba2b 477{
478 switch (mode)
479 {
480 case BLKmode:
481 /* Handle the worst case for block moves. See xtensa_expand_block_move
482 where we emit an optimized block move operation if the block can be
483 moved in < "move_ratio" pieces. The worst case is when the block is
484 aligned but has a size of (3 mod 4) (does this happen?) so that the
c821cf9c 485 last piece requires a byte load/store. */
afb26b4b 486 return (xtensa_uimm8 (v)
487 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
f6b7ba2b 488
489 case QImode:
490 return xtensa_uimm8 (v);
491
492 case HImode:
493 return xtensa_uimm8x2 (v);
494
495 case DFmode:
496 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
497
498 default:
499 break;
500 }
501
502 return xtensa_uimm8x4 (v);
503}
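/* Worked example (illustrative only): with the ranges above, an offset of
   252 is a valid memory offset for QImode (0..255), HImode (even, 0..510)
   and SImode (multiple of 4, 0..1020); 253 is valid only for QImode, and
   1024 for none of them.  DFmode also requires offset + 4 to be valid, so
   1020 is accepted for SImode but rejected for DFmode.  */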
504
505
fd63fcf8 506/* Make normal rtx_code into something we can index from an array. */
f6b7ba2b 507
508static enum internal_test
fd63fcf8 509map_test_to_internal_test (enum rtx_code test_code)
f6b7ba2b 510{
511 enum internal_test test = ITEST_MAX;
512
513 switch (test_code)
514 {
515 default: break;
516 case EQ: test = ITEST_EQ; break;
517 case NE: test = ITEST_NE; break;
518 case GT: test = ITEST_GT; break;
519 case GE: test = ITEST_GE; break;
520 case LT: test = ITEST_LT; break;
521 case LE: test = ITEST_LE; break;
522 case GTU: test = ITEST_GTU; break;
523 case GEU: test = ITEST_GEU; break;
524 case LTU: test = ITEST_LTU; break;
525 case LEU: test = ITEST_LEU; break;
526 }
527
528 return test;
529}
530
531
532/* Generate the code to compare two integer values. The return value is
c821cf9c 533 the comparison expression. */
f6b7ba2b 534
535static rtx
fd63fcf8 536gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
537 rtx cmp0, /* first operand to compare */
538 rtx cmp1, /* second operand to compare */
539 int *p_invert /* whether branch needs to reverse test */)
f6b7ba2b 540{
fd63fcf8 541 struct cmp_info
542 {
f6b7ba2b 543 enum rtx_code test_code; /* test code to use in insn */
7d0f7bf8 544 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
f6b7ba2b 545 int const_add; /* constant to add (convert LE -> LT) */
546 int reverse_regs; /* reverse registers in test */
547 int invert_const; /* != 0 if invert value if cmp1 is constant */
548 int invert_reg; /* != 0 if invert value if cmp1 is register */
549 int unsignedp; /* != 0 for unsigned comparisons. */
550 };
551
552 static struct cmp_info info[ (int)ITEST_MAX ] = {
553
7d0f7bf8 554 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
555 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
f6b7ba2b 556
7d0f7bf8 557 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
558 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
559 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
560 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
f6b7ba2b 561
562 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
563 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
564 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
565 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
566 };
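  /* Worked example (illustrative only): for a signed "x > 5", the GT row
     above selects LT with const_add 1 and invert_const 1, so the constant
     becomes 6 and the emitted test is "x < 6", which the caller inverts to
     get "x >= 6", i.e. "x > 5".  When cmp1 is a register, reverse_regs
     swaps the operands instead, giving "cmp1 < cmp0" with no inversion.  */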
567
568 enum internal_test test;
569 enum machine_mode mode;
570 struct cmp_info *p_info;
571
572 test = map_test_to_internal_test (test_code);
cd3d4fe0 573 gcc_assert (test != ITEST_MAX);
f6b7ba2b 574
575 p_info = &info[ (int)test ];
576
577 mode = GET_MODE (cmp0);
578 if (mode == VOIDmode)
579 mode = GET_MODE (cmp1);
580
581 /* Make sure we can handle any constants given to us. */
582 if (GET_CODE (cmp1) == CONST_INT)
583 {
584 HOST_WIDE_INT value = INTVAL (cmp1);
585 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
586
587	 /* If the immediate overflows or does not fit in the immediate field,
588	    spill it to a register.  */
589
590 if ((p_info->unsignedp ?
591 (uvalue + p_info->const_add > uvalue) :
592 (value + p_info->const_add > value)) != (p_info->const_add > 0))
593 {
594 cmp1 = force_reg (mode, cmp1);
595 }
596 else if (!(p_info->const_range_p) (value + p_info->const_add))
597 {
598 cmp1 = force_reg (mode, cmp1);
599 }
600 }
601 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
602 {
603 cmp1 = force_reg (mode, cmp1);
604 }
605
606 /* See if we need to invert the result. */
607 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
608 ? p_info->invert_const
609 : p_info->invert_reg);
610
611 /* Comparison to constants, may involve adding 1 to change a LT into LE.
612 Comparison between two registers, may involve switching operands. */
613 if (GET_CODE (cmp1) == CONST_INT)
614 {
615 if (p_info->const_add != 0)
616 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
617
618 }
619 else if (p_info->reverse_regs)
620 {
621 rtx temp = cmp0;
622 cmp0 = cmp1;
623 cmp1 = temp;
624 }
625
29bb088d 626 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
f6b7ba2b 627}
628
629
630/* Generate the code to compare two float values. The return value is
c821cf9c 631 the comparison expression. */
f6b7ba2b 632
633static rtx
fd63fcf8 634gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
635 rtx cmp0, /* first operand to compare */
636 rtx cmp1 /* second operand to compare */)
f6b7ba2b 637{
fd63fcf8 638 rtx (*gen_fn) (rtx, rtx, rtx);
f6b7ba2b 639 rtx brtmp;
640 int reverse_regs, invert;
641
642 switch (test_code)
643 {
644 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
645 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
646 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
647 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
648 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
649 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
de071186 650 default:
29bb088d 651 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
f6b7ba2b 652 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
653 }
654
655 if (reverse_regs)
656 {
657 rtx temp = cmp0;
658 cmp0 = cmp1;
659 cmp1 = temp;
660 }
661
662 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
663 emit_insn (gen_fn (brtmp, cmp0, cmp1));
664
29bb088d 665 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
f6b7ba2b 666}
667
668
669void
fd63fcf8 670xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
f6b7ba2b 671{
672 enum cmp_type type = branch_type;
673 rtx cmp0 = branch_cmp[0];
674 rtx cmp1 = branch_cmp[1];
675 rtx cmp;
676 int invert;
677 rtx label1, label2;
678
679 switch (type)
680 {
681 case CMP_DF:
682 default:
29bb088d 683 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
f6b7ba2b 684
685 case CMP_SI:
686 invert = FALSE;
687 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
688 break;
689
690 case CMP_SF:
691 if (!TARGET_HARD_FLOAT)
771b6086 692 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
693 cmp0, cmp1));
f6b7ba2b 694 invert = FALSE;
695 cmp = gen_float_relational (test_code, cmp0, cmp1);
696 break;
697 }
698
699 /* Generate the branch. */
700
701 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
702 label2 = pc_rtx;
703
704 if (invert)
705 {
706 label2 = label1;
707 label1 = pc_rtx;
708 }
709
710 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
711 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
712 label1,
713 label2)));
714}
715
716
717static rtx
fd63fcf8 718gen_conditional_move (rtx cmp)
f6b7ba2b 719{
720 enum rtx_code code = GET_CODE (cmp);
721 rtx op0 = branch_cmp[0];
722 rtx op1 = branch_cmp[1];
723
724 if (branch_type == CMP_SI)
725 {
726 /* Jump optimization calls get_condition() which canonicalizes
727 comparisons like (GE x <const>) to (GT x <const-1>).
728 Transform those comparisons back to GE, since that is the
729 comparison supported in Xtensa. We shouldn't have to
730 transform <LE x const> comparisons, because neither
731 xtensa_expand_conditional_branch() nor get_condition() will
c821cf9c 732 produce them. */
f6b7ba2b 733
734 if ((code == GT) && (op1 == constm1_rtx))
735 {
736 code = GE;
737 op1 = const0_rtx;
738 }
29bb088d 739 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
f6b7ba2b 740
741 if (boolean_operator (cmp, VOIDmode))
742 {
dafa59bd 743 /* Swap the operands to make const0 second. */
f6b7ba2b 744 if (op0 == const0_rtx)
745 {
746 op0 = op1;
747 op1 = const0_rtx;
748 }
749
dafa59bd 750 /* If not comparing against zero, emit a comparison (subtract). */
f6b7ba2b 751 if (op1 != const0_rtx)
752 {
753 op0 = expand_binop (SImode, sub_optab, op0, op1,
754 0, 0, OPTAB_LIB_WIDEN);
755 op1 = const0_rtx;
756 }
757 }
758 else if (branch_operator (cmp, VOIDmode))
759 {
dafa59bd 760 /* Swap the operands to make const0 second. */
f6b7ba2b 761 if (op0 == const0_rtx)
762 {
763 op0 = op1;
764 op1 = const0_rtx;
765
766 switch (code)
767 {
768 case LT: code = GE; break;
769 case GE: code = LT; break;
cd3d4fe0 770 default: gcc_unreachable ();
f6b7ba2b 771 }
772 }
773
774 if (op1 != const0_rtx)
775 return 0;
776 }
777 else
778 return 0;
779
29bb088d 780 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
f6b7ba2b 781 }
782
783 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
784 return gen_float_relational (code, op0, op1);
785
786 return 0;
787}
788
789
790int
fd63fcf8 791xtensa_expand_conditional_move (rtx *operands, int isflt)
f6b7ba2b 792{
793 rtx cmp;
fd63fcf8 794 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
f6b7ba2b 795
796 if (!(cmp = gen_conditional_move (operands[1])))
797 return 0;
798
799 if (isflt)
800 gen_fn = (branch_type == CMP_SI
801 ? gen_movsfcc_internal0
802 : gen_movsfcc_internal1);
803 else
804 gen_fn = (branch_type == CMP_SI
805 ? gen_movsicc_internal0
806 : gen_movsicc_internal1);
807
808 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
809 operands[2], operands[3], cmp));
810 return 1;
811}
812
813
814int
fd63fcf8 815xtensa_expand_scc (rtx *operands)
f6b7ba2b 816{
817 rtx dest = operands[0];
818 rtx cmp = operands[1];
819 rtx one_tmp, zero_tmp;
fd63fcf8 820 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
f6b7ba2b 821
822 if (!(cmp = gen_conditional_move (cmp)))
823 return 0;
824
825 one_tmp = gen_reg_rtx (SImode);
826 zero_tmp = gen_reg_rtx (SImode);
827 emit_insn (gen_movsi (one_tmp, const_true_rtx));
828 emit_insn (gen_movsi (zero_tmp, const0_rtx));
829
830 gen_fn = (branch_type == CMP_SI
831 ? gen_movsicc_internal0
832 : gen_movsicc_internal1);
833 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
834 return 1;
835}
836
837
de071186 838/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
839 for the output, i.e., the input operands are twice as big as MODE. */
840
841void
fd63fcf8 842xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
de071186 843{
844 switch (GET_CODE (operands[1]))
845 {
846 case REG:
847 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
848 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
849 break;
850
851 case MEM:
852 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
853 operands[2] = adjust_address (operands[1], mode, 0);
854 break;
855
856 case CONST_INT:
857 case CONST_DOUBLE:
858 split_double (operands[1], &operands[2], &operands[3]);
859 break;
860
861 default:
cd3d4fe0 862 gcc_unreachable ();
de071186 863 }
864
865 switch (GET_CODE (operands[0]))
866 {
867 case REG:
868 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
869 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
870 break;
871
872 case MEM:
873 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
874 operands[0] = adjust_address (operands[0], mode, 0);
875 break;
876
877 default:
cd3d4fe0 878 gcc_unreachable ();
de071186 879 }
880}
881
882
f6b7ba2b 883/* Emit insns to move operands[1] into operands[0].
f6b7ba2b 884 Return 1 if we have written out everything that needs to be done to
885 do the move. Otherwise, return 0 and the caller will emit the move
886 normally. */
887
888int
fd63fcf8 889xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
f6b7ba2b 890{
891 if (CONSTANT_P (operands[1])
f6b7ba2b 892 && (GET_CODE (operands[1]) != CONST_INT
893 || !xtensa_simm12b (INTVAL (operands[1]))))
894 {
afb26b4b 895 if (!TARGET_CONST16)
896 operands[1] = force_const_mem (SImode, operands[1]);
897
898 /* PC-relative loads are always SImode, and CONST16 is only
899 supported in the movsi pattern, so add a SUBREG for any other
900 (smaller) mode. */
901
902 if (mode != SImode)
903 {
904 if (register_operand (operands[0], mode))
905 {
906 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
907 emit_move_insn (operands[0], operands[1]);
908 return 1;
909 }
910 else
911 {
912 operands[1] = force_reg (SImode, operands[1]);
913 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
914 }
915 }
f6b7ba2b 916 }
917
e060c9df 918 if (!(reload_in_progress | reload_completed)
919 && !xtensa_valid_move (mode, operands))
920 operands[1] = force_reg (mode, operands[1]);
f6b7ba2b 921
e060c9df 922 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
f6b7ba2b 923
924 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
c821cf9c 925 instruction won't be recognized after reload, so we remove the
926 subreg and adjust mem accordingly. */
f6b7ba2b 927 if (reload_in_progress)
928 {
929 operands[0] = fixup_subreg_mem (operands[0]);
930 operands[1] = fixup_subreg_mem (operands[1]);
931 }
932 return 0;
933}
934
afb26b4b 935
f6b7ba2b 936static rtx
fd63fcf8 937fixup_subreg_mem (rtx x)
f6b7ba2b 938{
939 if (GET_CODE (x) == SUBREG
940 && GET_CODE (SUBREG_REG (x)) == REG
941 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
942 {
943 rtx temp =
944 gen_rtx_SUBREG (GET_MODE (x),
945 reg_equiv_mem [REGNO (SUBREG_REG (x))],
946 SUBREG_BYTE (x));
947 x = alter_subreg (&temp);
948 }
949 return x;
950}
951
952
e060c9df 953/* Check if an incoming argument in a7 is expected to be used soon and
954 if OPND is a register or register pair that includes a7. If so,
955 create a new pseudo and copy a7 into that pseudo at the very
956 beginning of the function, followed by the special "set_frame_ptr"
957 unspec_volatile insn. The return value is either the original
958 operand, if it is not a7, or the new pseudo containing a copy of
959 the incoming argument. This is necessary because the register
960 allocator will ignore conflicts with a7 and may either assign some
961 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
962 the incoming argument in a7. By copying the argument out of a7 as
963 the very first thing, and then immediately following that with an
964 unspec_volatile to keep the scheduler away, we should avoid any
965 problems. Putting the set_frame_ptr insn at the beginning, with
966 only the a7 copy before it, also makes it easier for the prologue
967 expander to initialize the frame pointer after the a7 copy and to
968 fix up the a7 copy to use the stack pointer instead of the frame
969 pointer. */
78d6a4ed 970
e060c9df 971rtx
972xtensa_copy_incoming_a7 (rtx opnd)
78d6a4ed 973{
e060c9df 974 rtx entry_insns = 0;
975 rtx reg, tmp;
976 enum machine_mode mode;
977
978 if (!cfun->machine->need_a7_copy)
979 return opnd;
980
981 /* This function should never be called again once a7 has been copied. */
cd3d4fe0 982 gcc_assert (!cfun->machine->set_frame_ptr_insn);
e060c9df 983
984 mode = GET_MODE (opnd);
985
986 /* The operand using a7 may come in a later instruction, so just return
987 the original operand if it doesn't use a7. */
988 reg = opnd;
989 if (GET_CODE (reg) == SUBREG)
78d6a4ed 990 {
cd3d4fe0 991 gcc_assert (SUBREG_BYTE (reg) == 0);
e060c9df 992 reg = SUBREG_REG (reg);
993 }
994 if (GET_CODE (reg) != REG
995 || REGNO (reg) > A7_REG
996 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
997 return opnd;
2aac53ce 998
e060c9df 999 /* 1-word args will always be in a7; 2-word args in a6/a7. */
cd3d4fe0 1000 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
78d6a4ed 1001
e060c9df 1002 cfun->machine->need_a7_copy = false;
78d6a4ed 1003
e060c9df 1004 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1005 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
78d6a4ed 1006
e060c9df 1007 push_to_sequence (entry_insns);
1008 tmp = gen_reg_rtx (mode);
78d6a4ed 1009
e060c9df 1010 switch (mode)
1011 {
1012 case DFmode:
1013 case DImode:
1014 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1015 gen_rtx_REG (SImode, A7_REG - 1)));
1016 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1017 gen_raw_REG (SImode, A7_REG)));
1018 break;
1019 case SFmode:
1020 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1021 break;
1022 case SImode:
1023 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1024 break;
1025 case HImode:
1026 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1027 break;
1028 case QImode:
1029 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1030 break;
1031 default:
cd3d4fe0 1032 gcc_unreachable ();
78d6a4ed 1033 }
1034
e060c9df 1035 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1036 entry_insns = get_insns ();
1037 end_sequence ();
1038
1039 if (cfun->machine->vararg_a7)
1040 {
1041 /* This is called from within builtin_savereg, so we're already
1042 inside a start_sequence that will be placed at the start of
1043 the function. */
1044 emit_insn (entry_insns);
1045 }
1046 else
1047 {
1048 /* Put entry_insns after the NOTE that starts the function. If
1049 this is inside a start_sequence, make the outer-level insn
1050 chain current, so the code is placed at the start of the
1051 function. */
1052 push_topmost_sequence ();
1053 emit_insn_after (entry_insns, get_insns ());
1054 pop_topmost_sequence ();
1055 }
1056
1057 return tmp;
78d6a4ed 1058}
1059
1060
a80259b6 1061/* Try to expand a block move operation to a sequence of RTL move
1062 instructions. If not optimizing, or if the block size is not a
1063 constant, or if the block is too large, the expansion fails and GCC
1064 falls back to calling memcpy().
f6b7ba2b 1065
1066 operands[0] is the destination
1067 operands[1] is the source
1068 operands[2] is the length
1069 operands[3] is the alignment */
1070
1071int
fd63fcf8 1072xtensa_expand_block_move (rtx *operands)
f6b7ba2b 1073{
986ef67a 1074 static const enum machine_mode mode_from_align[] =
1075 {
1076 VOIDmode, QImode, HImode, VOIDmode, SImode,
1077 };
1078
1079 rtx dst_mem = operands[0];
1080 rtx src_mem = operands[1];
1081 HOST_WIDE_INT bytes, align;
f6b7ba2b 1082 int num_pieces, move_ratio;
986ef67a 1083 rtx temp[2];
1084 enum machine_mode mode[2];
1085 int amount[2];
1086 bool active[2];
1087 int phase = 0;
1088 int next;
1089 int offset_ld = 0;
1090 int offset_st = 0;
1091 rtx x;
f6b7ba2b 1092
dafa59bd 1093 /* If this is not a fixed size move, just call memcpy. */
f6b7ba2b 1094 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1095 return 0;
1096
986ef67a 1097 bytes = INTVAL (operands[2]);
1098 align = INTVAL (operands[3]);
1099
dafa59bd 1100 /* Anything to move? */
f6b7ba2b 1101 if (bytes <= 0)
986ef67a 1102 return 0;
f6b7ba2b 1103
1104 if (align > MOVE_MAX)
1105 align = MOVE_MAX;
1106
dafa59bd 1107 /* Decide whether to expand inline based on the optimization level. */
f6b7ba2b 1108 move_ratio = 4;
1109 if (optimize > 2)
1110 move_ratio = LARGEST_MOVE_RATIO;
dafa59bd 1111 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
986ef67a 1112 if (num_pieces > move_ratio)
f6b7ba2b 1113 return 0;
1114
986ef67a 1115 x = XEXP (dst_mem, 0);
1116 if (!REG_P (x))
1117 {
1118 x = force_reg (Pmode, x);
1119 dst_mem = replace_equiv_address (dst_mem, x);
1120 }
f6b7ba2b 1121
986ef67a 1122 x = XEXP (src_mem, 0);
1123 if (!REG_P (x))
1124 {
1125 x = force_reg (Pmode, x);
1126 src_mem = replace_equiv_address (src_mem, x);
1127 }
f6b7ba2b 1128
986ef67a 1129 active[0] = active[1] = false;
f6b7ba2b 1130
986ef67a 1131 do
f6b7ba2b 1132 {
986ef67a 1133 next = phase;
1134 phase ^= 1;
f6b7ba2b 1135
986ef67a 1136 if (bytes > 0)
f6b7ba2b 1137 {
986ef67a 1138 int next_amount;
f6b7ba2b 1139
986ef67a 1140 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1141 next_amount = MIN (next_amount, align);
f6b7ba2b 1142
986ef67a 1143 amount[next] = next_amount;
1144 mode[next] = mode_from_align[next_amount];
1145 temp[next] = gen_reg_rtx (mode[next]);
f6b7ba2b 1146
986ef67a 1147 x = adjust_address (src_mem, mode[next], offset_ld);
1148 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
f6b7ba2b 1149
986ef67a 1150 offset_ld += next_amount;
1151 bytes -= next_amount;
1152 active[next] = true;
1153 }
f6b7ba2b 1154
986ef67a 1155 if (active[phase])
1156 {
1157 active[phase] = false;
1158
1159 x = adjust_address (dst_mem, mode[phase], offset_st);
1160 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
f6b7ba2b 1161
986ef67a 1162 offset_st += amount[phase];
1163 }
f6b7ba2b 1164 }
986ef67a 1165 while (active[next]);
f6b7ba2b 1166
986ef67a 1167 return 1;
f6b7ba2b 1168}
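/* Worked example (illustrative only): copying 11 bytes with 4-byte
   alignment emits two SImode load/store pairs, one HImode pair and one
   QImode pair.  The num_pieces estimate is 11/4 + 11%4 = 5, which exceeds
   the default move_ratio of 4, so this inline expansion is used only when
   optimizing above -O2, where move_ratio is LARGEST_MOVE_RATIO.  */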
1169
1170
1171void
fd63fcf8 1172xtensa_expand_nonlocal_goto (rtx *operands)
f6b7ba2b 1173{
1174 rtx goto_handler = operands[1];
1175 rtx containing_fp = operands[3];
1176
dafa59bd 1177 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1178 is too big to generate in-line. */
f6b7ba2b 1179
1180 if (GET_CODE (containing_fp) != REG)
1181 containing_fp = force_reg (Pmode, containing_fp);
1182
c3e2d63e 1183 goto_handler = copy_rtx (goto_handler);
1184 validate_replace_rtx (virtual_stack_vars_rtx, containing_fp, goto_handler);
f6b7ba2b 1185
1186 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1187 0, VOIDmode, 2,
1188 containing_fp, Pmode,
1189 goto_handler, Pmode);
1190}
1191
1192
1f3233d1 1193static struct machine_function *
fd63fcf8 1194xtensa_init_machine_status (void)
f6b7ba2b 1195{
1f3233d1 1196 return ggc_alloc_cleared (sizeof (struct machine_function));
f6b7ba2b 1197}
1198
1199
4690907f 1200/* Shift VAL of mode MODE left by COUNT bits. */
1201
1202static inline rtx
1203xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
1204{
1205 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1206 NULL_RTX, 1, OPTAB_DIRECT);
1207 return expand_simple_binop (SImode, ASHIFT, val, count,
1208 NULL_RTX, 1, OPTAB_DIRECT);
1209}
1210
1211
1212/* Structure to hold the initial parameters for a compare_and_swap operation
1213 in HImode and QImode. */
1214
1215struct alignment_context
1216{
1217 rtx memsi; /* SI aligned memory location. */
1218 rtx shift; /* Bit offset with regard to lsb. */
1219 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
1220 rtx modemaski; /* ~modemask */
1221};
1222
1223
1224/* Initialize structure AC for word access to HI and QI mode memory. */
1225
1226static void
1227init_alignment_context (struct alignment_context *ac, rtx mem)
1228{
1229 enum machine_mode mode = GET_MODE (mem);
1230 rtx byteoffset = NULL_RTX;
1231 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1232
1233 if (aligned)
1234 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1235 else
1236 {
1237 /* Alignment is unknown. */
1238 rtx addr, align;
1239
1240 /* Force the address into a register. */
1241 addr = force_reg (Pmode, XEXP (mem, 0));
1242
1243 /* Align it to SImode. */
1244 align = expand_simple_binop (Pmode, AND, addr,
1245 GEN_INT (-GET_MODE_SIZE (SImode)),
1246 NULL_RTX, 1, OPTAB_DIRECT);
1247 /* Generate MEM. */
1248 ac->memsi = gen_rtx_MEM (SImode, align);
1249 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
1250 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
1251 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
1252
1253 byteoffset = expand_simple_binop (Pmode, AND, addr,
1254 GEN_INT (GET_MODE_SIZE (SImode) - 1),
1255 NULL_RTX, 1, OPTAB_DIRECT);
1256 }
1257
1258 /* Calculate shiftcount. */
1259 if (TARGET_BIG_ENDIAN)
1260 {
1261 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
1262 if (!aligned)
1263 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
1264 NULL_RTX, 1, OPTAB_DIRECT);
1265 }
1266 else
1267 {
1268 if (aligned)
1269 ac->shift = NULL_RTX;
1270 else
1271 ac->shift = byteoffset;
1272 }
1273
1274 if (ac->shift != NULL_RTX)
1275 {
1276 /* Shift is the byte count, but we need the bitcount. */
1277 ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
1278 GEN_INT (BITS_PER_UNIT),
1279 NULL_RTX, 1, OPTAB_DIRECT);
1280 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1281 GEN_INT (GET_MODE_MASK (mode)),
1282 ac->shift,
1283 NULL_RTX, 1, OPTAB_DIRECT);
1284 }
1285 else
1286 ac->modemask = GEN_INT (GET_MODE_MASK (mode));
1287
1288 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
1289}
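/* Worked example (illustrative only): for a HImode access whose address
   turns out to be 2 bytes past an SImode boundary on a little-endian
   target, byteoffset is 2, ac->shift works out to 16 bits, and
   ac->modemask to 0xffff0000, so the HImode value occupies the upper half
   of the containing 32-bit word that S32C1I operates on.  */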
1290
1291
1292/* Expand an atomic compare and swap operation for HImode and QImode.
1293 MEM is the memory location, CMP the old value to compare MEM with
1294 and NEW the value to set if CMP == MEM. */
1295
1296void
1297xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new)
1298{
1299 enum machine_mode mode = GET_MODE (mem);
1300 struct alignment_context ac;
1301 rtx tmp, cmpv, newv, val;
1302 rtx oldval = gen_reg_rtx (SImode);
1303 rtx res = gen_reg_rtx (SImode);
1304 rtx csloop = gen_label_rtx ();
1305 rtx csend = gen_label_rtx ();
1306
1307 init_alignment_context (&ac, mem);
1308
1309 if (ac.shift != NULL_RTX)
1310 {
1311 cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
1312 new = xtensa_expand_mask_and_shift (new, mode, ac.shift);
1313 }
1314
1315 /* Load the surrounding word into VAL with the MEM value masked out. */
1316 val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
1317 ac.modemaski, NULL_RTX, 1,
1318 OPTAB_DIRECT));
1319 emit_label (csloop);
1320
1321 /* Patch CMP and NEW into VAL at correct position. */
1322 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
1323 NULL_RTX, 1, OPTAB_DIRECT));
1324 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
1325 NULL_RTX, 1, OPTAB_DIRECT));
1326
1327 /* Jump to end if we're done. */
1328 emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
1329 emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);
1330
1331 /* Check for changes outside mode. */
1332 emit_move_insn (oldval, val);
1333 tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
1334 val, 1, OPTAB_DIRECT);
1335 if (tmp != val)
1336 emit_move_insn (val, tmp);
1337
1338	  /* Loop back and retry if so.  */
1339 emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);
1340
1341 emit_label (csend);
1342
1343 /* Return the correct part of the bitfield. */
1344 convert_move (target,
1345 (ac.shift == NULL_RTX ? res
1346 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
1347 NULL_RTX, 1, OPTAB_DIRECT)),
1348 1);
1349}
1350
1351
1352/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
1353 the default expansion works fine for SImode). MEM is the memory location
1354 and VAL the value to play with. If AFTER is true then store the value
1355 MEM holds after the operation, if AFTER is false then store the value MEM
1356 holds before the operation. If TARGET is zero then discard that value, else
1357 store it to TARGET. */
1358
1359void
1360xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1361 bool after)
1362{
1363 enum machine_mode mode = GET_MODE (mem);
1364 struct alignment_context ac;
1365 rtx csloop = gen_label_rtx ();
1366 rtx cmp, tmp;
1367 rtx old = gen_reg_rtx (SImode);
1368 rtx new = gen_reg_rtx (SImode);
1369 rtx orig = NULL_RTX;
1370
1371 init_alignment_context (&ac, mem);
1372
1373 /* Prepare values before the compare-and-swap loop. */
1374 if (ac.shift != NULL_RTX)
1375 val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
1376 switch (code)
1377 {
1378 case PLUS:
1379 case MINUS:
1380 orig = gen_reg_rtx (SImode);
1381 convert_move (orig, val, 1);
1382 break;
1383
1384 case SET:
1385 case IOR:
1386 case XOR:
1387 break;
1388
1389 case MULT: /* NAND */
1390 case AND:
1391 /* val = "11..1<val>11..1" */
1392 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
1393 NULL_RTX, 1, OPTAB_DIRECT);
1394 break;
1395
1396 default:
1397 gcc_unreachable ();
1398 }
1399
1400 /* Load full word. Subsequent loads are performed by S32C1I. */
1401 cmp = force_reg (SImode, ac.memsi);
1402
1403 emit_label (csloop);
1404 emit_move_insn (old, cmp);
1405
1406 switch (code)
1407 {
1408 case PLUS:
1409 case MINUS:
1410 val = expand_simple_binop (SImode, code, old, orig,
1411 NULL_RTX, 1, OPTAB_DIRECT);
1412 val = expand_simple_binop (SImode, AND, val, ac.modemask,
1413 NULL_RTX, 1, OPTAB_DIRECT);
1414 /* FALLTHRU */
1415 case SET:
1416 tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
1417 NULL_RTX, 1, OPTAB_DIRECT);
1418 tmp = expand_simple_binop (SImode, IOR, tmp, val,
1419 new, 1, OPTAB_DIRECT);
1420 break;
1421
1422 case AND:
1423 case IOR:
1424 case XOR:
1425 tmp = expand_simple_binop (SImode, code, old, val,
1426 new, 1, OPTAB_DIRECT);
1427 break;
1428
1429 case MULT: /* NAND */
1430 tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
1431 NULL_RTX, 1, OPTAB_DIRECT);
1432 tmp = expand_simple_binop (SImode, AND, tmp, val,
1433 new, 1, OPTAB_DIRECT);
1434 break;
1435
1436 default:
1437 gcc_unreachable ();
1438 }
1439
1440 if (tmp != new)
1441 emit_move_insn (new, tmp);
1442 emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new));
1443 emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);
1444
1445 if (target)
1446 {
1447 tmp = (after ? new : cmp);
1448 convert_move (target,
1449 (ac.shift == NULL_RTX ? tmp
1450 : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
1451 NULL_RTX, 1, OPTAB_DIRECT)),
1452 1);
1453 }
1454}
1455
1456
f6b7ba2b 1457void
fd63fcf8 1458xtensa_setup_frame_addresses (void)
f6b7ba2b 1459{
c821cf9c 1460 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
f6b7ba2b 1461 cfun->machine->accesses_prev_frame = 1;
1462
1463 emit_library_call
1464 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1465 0, VOIDmode, 0);
1466}
1467
1468
c821cf9c 1469/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1470 a comment showing where the end of the loop is. However, if there is a
f6b7ba2b 1471 label or a branch at the end of the loop then we need to place a nop
c821cf9c 1472 there. If the loop ends with a label we need the nop so that branches
efee20da 1473 targeting that label will target the nop (and thus remain in the loop),
1474 instead of targeting the instruction after the loop (and thus exiting
c821cf9c 1475 the loop). If the loop ends with a branch, we need the nop in case the
efee20da 1476 branch is targeting a location inside the loop. When the branch
f6b7ba2b 1477 executes it will cause the loop count to be decremented even if it is
1478 taken (because it is the last instruction in the loop), so we need to
1479 nop after the branch to prevent the loop count from being decremented
c821cf9c 1480 when the branch is taken. */
f6b7ba2b 1481
1482void
fd63fcf8 1483xtensa_emit_loop_end (rtx insn, rtx *operands)
f6b7ba2b 1484{
1485 char done = 0;
1486
1487 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1488 {
1489 switch (GET_CODE (insn))
1490 {
1491 case NOTE:
1492 case BARRIER:
1493 break;
1494
1495 case CODE_LABEL:
2af1591e 1496 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
f6b7ba2b 1497 done = 1;
1498 break;
1499
1500 default:
1501 {
1502 rtx body = PATTERN (insn);
1503
1504 if (GET_CODE (body) == JUMP_INSN)
1505 {
2af1591e 1506 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
f6b7ba2b 1507 done = 1;
1508 }
1509 else if ((GET_CODE (body) != USE)
1510 && (GET_CODE (body) != CLOBBER))
1511 done = 1;
1512 }
1513 break;
1514 }
1515 }
1516
1517 output_asm_insn ("# loop end for %0", operands);
1518}
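/* Worked example (illustrative only): if the insn just before the loop end
   is a conditional branch back into the loop body, the code above outputs a
   "nop" (or "nop.n" with the density option) before the "# loop end for %0"
   comment, so that the taken branch is no longer the last instruction of
   the zero-cost loop and does not cause an extra loop-count decrement.  */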
1519
1520
3c0ca649 1521char *
1522xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1523{
1524 static char result[64];
1525 enum rtx_code code;
1526 const char *op;
1527
1528 code = GET_CODE (operands[3]);
1529 switch (code)
1530 {
1531 case EQ: op = inverted ? "ne" : "eq"; break;
1532 case NE: op = inverted ? "eq" : "ne"; break;
1533 case LT: op = inverted ? "ge" : "lt"; break;
1534 case GE: op = inverted ? "lt" : "ge"; break;
1535 case LTU: op = inverted ? "geu" : "ltu"; break;
1536 case GEU: op = inverted ? "ltu" : "geu"; break;
1537 default: gcc_unreachable ();
1538 }
1539
1540 if (immed)
1541 {
1542 if (INTVAL (operands[1]) == 0)
1543 sprintf (result, "b%sz%s\t%%0, %%2", op,
1544 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1545 else
1546 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1547 }
1548 else
1549 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1550
1551 return result;
1552}
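/* Worked example (illustrative only): for a non-inverted EQ test against
   the immediate 0, xtensa_emit_branch returns "beqz.n\t%0, %2" when the
   density option applies (plain "beqz" otherwise); against a nonzero
   immediate it returns "beqi\t%0, %d1, %2", and for a register operand
   "beq\t%0, %1, %2".  */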
1553
1554
1555char *
1556xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1557{
1558 static char result[64];
1559 const char *op;
1560
1561 switch (GET_CODE (operands[3]))
1562 {
1563 case EQ: op = inverted ? "bs" : "bc"; break;
1564 case NE: op = inverted ? "bc" : "bs"; break;
1565 default: gcc_unreachable ();
1566 }
1567
1568 if (immed)
1569 {
1570 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1571 operands[1] = GEN_INT (bitnum);
1572 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1573 }
1574 else
1575 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1576
1577 return result;
1578}
1579
1580
1581char *
1582xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1583{
1584 static char result[64];
1585 enum rtx_code code;
1586 const char *op;
1587
1588 code = GET_CODE (operands[4]);
1589 if (isbool)
1590 {
1591 switch (code)
1592 {
1593 case EQ: op = inverted ? "t" : "f"; break;
1594 case NE: op = inverted ? "f" : "t"; break;
1595 default: gcc_unreachable ();
1596 }
1597 }
1598 else
1599 {
1600 switch (code)
1601 {
1602 case EQ: op = inverted ? "nez" : "eqz"; break;
1603 case NE: op = inverted ? "eqz" : "nez"; break;
1604 case LT: op = inverted ? "gez" : "ltz"; break;
1605 case GE: op = inverted ? "ltz" : "gez"; break;
1606 default: gcc_unreachable ();
1607 }
1608 }
1609
1610 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1611 op, isfp ? ".s" : "", inverted ? 3 : 2);
1612 return result;
1613}
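/* Worked example (illustrative only): for a non-inverted integer EQ
   condition this returns "moveqz\t%0, %2, %1", i.e. move operand 2 into
   operand 0 when operand 1 is zero; the inverted form uses operand 3 and
   "movnez", and the floating-point variants append ".s" to the opcode.  */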
1614
1615
f6b7ba2b 1616char *
fd63fcf8 1617xtensa_emit_call (int callop, rtx *operands)
f6b7ba2b 1618{
bbfbe351 1619 static char result[64];
f6b7ba2b 1620 rtx tgt = operands[callop];
1621
1622 if (GET_CODE (tgt) == CONST_INT)
a6169f06 1623 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
f6b7ba2b 1624 else if (register_operand (tgt, VOIDmode))
1625 sprintf (result, "callx8\t%%%d", callop);
1626 else
1627 sprintf (result, "call8\t%%%d", callop);
1628
1629 return result;
1630}
1631
1632
771b6086 1633bool
1634xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
1635{
1636 /* Allow constant pool addresses. */
1637 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1638 && ! TARGET_CONST16 && constantpool_address_p (addr))
1639 return true;
1640
1641 while (GET_CODE (addr) == SUBREG)
1642 addr = SUBREG_REG (addr);
1643
1644 /* Allow base registers. */
1645 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1646 return true;
1647
1648 /* Check for "register + offset" addressing. */
1649 if (GET_CODE (addr) == PLUS)
1650 {
1651 rtx xplus0 = XEXP (addr, 0);
1652 rtx xplus1 = XEXP (addr, 1);
1653 enum rtx_code code0;
1654 enum rtx_code code1;
1655
1656 while (GET_CODE (xplus0) == SUBREG)
1657 xplus0 = SUBREG_REG (xplus0);
1658 code0 = GET_CODE (xplus0);
1659
1660 while (GET_CODE (xplus1) == SUBREG)
1661 xplus1 = SUBREG_REG (xplus1);
1662 code1 = GET_CODE (xplus1);
1663
1664 /* Swap operands if necessary so the register is first. */
1665 if (code0 != REG && code1 == REG)
1666 {
1667 xplus0 = XEXP (addr, 1);
1668 xplus1 = XEXP (addr, 0);
1669 code0 = GET_CODE (xplus0);
1670 code1 = GET_CODE (xplus1);
1671 }
1672
1673 if (code0 == REG && BASE_REG_P (xplus0, strict)
1674 && code1 == CONST_INT
1675 && xtensa_mem_offset (INTVAL (xplus1), mode))
1676 return true;
1677 }
1678
1679 return false;
1680}
1681
1682
1683rtx
1684xtensa_legitimize_address (rtx x,
1685 rtx oldx ATTRIBUTE_UNUSED,
1686 enum machine_mode mode)
1687{
1688 if (GET_CODE (x) == PLUS)
1689 {
1690 rtx plus0 = XEXP (x, 0);
1691 rtx plus1 = XEXP (x, 1);
1692
1693 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1694 {
1695 plus0 = XEXP (x, 1);
1696 plus1 = XEXP (x, 0);
1697 }
1698
1699 /* Try to split up the offset to use an ADDMI instruction. */
1700 if (GET_CODE (plus0) == REG
1701 && GET_CODE (plus1) == CONST_INT
1702 && !xtensa_mem_offset (INTVAL (plus1), mode)
1703 && !xtensa_simm8 (INTVAL (plus1))
1704 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1705 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1706 {
1707 rtx temp = gen_reg_rtx (Pmode);
1708 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1709 emit_insn (gen_rtx_SET (Pmode, temp,
1710 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1711 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1712 }
1713 }
1714
1715 return NULL_RTX;
1716}
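/* Worked example (illustrative only): for an SImode access at register +
   0x1234, the constant is neither a valid memory offset nor a simm8, so
   the code above splits it into 0x1200 (a valid ADDMI-range addend that is
   added into a temporary register) plus a remaining offset of 0x34, which
   does fit the load/store offset field.  */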
1717
1718
b68eeca9 1719/* Return the debugger register number to use for 'regno'. */
f6b7ba2b 1720
1721int
fd63fcf8 1722xtensa_dbx_register_number (int regno)
f6b7ba2b 1723{
1724 int first = -1;
de071186 1725
1726 if (GP_REG_P (regno))
1727 {
1728 regno -= GP_REG_FIRST;
1729 first = 0;
1730 }
1731 else if (BR_REG_P (regno))
1732 {
1733 regno -= BR_REG_FIRST;
1734 first = 16;
1735 }
1736 else if (FP_REG_P (regno))
1737 {
1738 regno -= FP_REG_FIRST;
b68eeca9 1739 first = 48;
de071186 1740 }
f6b7ba2b 1741 else if (ACC_REG_P (regno))
1742 {
b68eeca9 1743 first = 0x200; /* Start of Xtensa special registers. */
1744 regno = 16; /* ACCLO is special register 16. */
f6b7ba2b 1745 }
1746
1747 /* When optimizing, we sometimes get asked about pseudo-registers
c821cf9c 1748 that don't represent hard registers. Return 0 for these. */
f6b7ba2b 1749 if (first == -1)
1750 return 0;
1751
1752 return first + regno;
1753}
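/* Worked example (illustrative only): with the mapping above, address
   register a3 becomes debugger register 3, boolean register b2 becomes 18,
   floating-point register f5 becomes 53, and the MAC16 accumulator (ACCLO,
   special register 16) becomes 0x210.  */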
1754
1755
1756/* Argument support functions. */
1757
1758/* Initialize CUMULATIVE_ARGS for a function. */
1759
1760void
e060c9df 1761init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
f6b7ba2b 1762{
1763 cum->arg_words = 0;
e060c9df 1764 cum->incoming = incoming;
f6b7ba2b 1765}
1766
fd63fcf8 1767
f6b7ba2b 1768/* Advance the argument to the next argument position. */
1769
1770void
fd63fcf8 1771function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
f6b7ba2b 1772{
1773 int words, max;
1774 int *arg_words;
1775
1776 arg_words = &cum->arg_words;
1777 max = MAX_ARGS_IN_REGISTERS;
1778
1779 words = (((mode != BLKmode)
1780 ? (int) GET_MODE_SIZE (mode)
1781 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1782
ea2981b9 1783 if (*arg_words < max
1784 && (targetm.calls.must_pass_in_stack (mode, type)
1785 || *arg_words + words > max))
f6b7ba2b 1786 *arg_words = max;
1787
1788 *arg_words += words;
1789}
1790
1791
1792/* Return an RTL expression containing the register for the given mode,
751e10d1 1793 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
fd63fcf8 1794 if this is an incoming argument to the current function. */
f6b7ba2b 1795
1796rtx
fd63fcf8 1797function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1798 int incoming_p)
f6b7ba2b 1799{
1800 int regbase, words, max;
1801 int *arg_words;
1802 int regno;
f6b7ba2b 1803
1804 arg_words = &cum->arg_words;
1805 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1806 max = MAX_ARGS_IN_REGISTERS;
1807
1808 words = (((mode != BLKmode)
1809 ? (int) GET_MODE_SIZE (mode)
1810 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1811
1812 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
9276fdff 1813 {
81c44390 1814 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
9276fdff 1815 *arg_words = (*arg_words + align - 1) & -align;
1816 }
f6b7ba2b 1817
1818 if (*arg_words + words > max)
1819 return (rtx)0;
1820
1821 regno = regbase + *arg_words;
f6b7ba2b 1822
e060c9df 1823 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1824 cfun->machine->need_a7_copy = true;
f6b7ba2b 1825
e060c9df 1826 return gen_rtx_REG (mode, regno);
f6b7ba2b 1827}
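/* Worked example (illustrative only, assuming the windowed ABI where
   incoming arguments start at a2 and MAX_ARGS_IN_REGISTERS is 6): a
   two-word (DImode) incoming argument whose first free word lands on a6
   is assigned the a6/a7 pair and sets cfun->machine->need_a7_copy, while
   the same argument arriving with only one register word left is passed
   entirely on the stack (function_arg returns 0).  */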
1828
1829
81c44390 1830int
1831function_arg_boundary (enum machine_mode mode, tree type)
1832{
1833 unsigned int alignment;
1834
1835 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1836 if (alignment < PARM_BOUNDARY)
1837 alignment = PARM_BOUNDARY;
1838 if (alignment > STACK_BOUNDARY)
1839 alignment = STACK_BOUNDARY;
1840 return alignment;
1841}
1842
1843
110f993e 1844static bool
1845xtensa_return_in_msb (tree valtype)
1846{
1847 return (TARGET_BIG_ENDIAN
1848 && AGGREGATE_TYPE_P (valtype)
1849 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1850}
1851
1852
f6b7ba2b 1853void
fd63fcf8 1854override_options (void)
f6b7ba2b 1855{
1856 int regno;
1857 enum machine_mode mode;
1858
1859 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1860 error ("boolean registers required for the floating-point option");
1861
c821cf9c 1862 /* Set up array giving whether a given register can hold a given mode. */
f6b7ba2b 1863 for (mode = VOIDmode;
1864 mode != MAX_MACHINE_MODE;
1865 mode = (enum machine_mode) ((int) mode + 1))
1866 {
1867 int size = GET_MODE_SIZE (mode);
1868 enum mode_class class = GET_MODE_CLASS (mode);
1869
1870 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1871 {
1872 int temp;
1873
1874 if (ACC_REG_P (regno))
afb26b4b 1875 temp = (TARGET_MAC16
1876 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
f6b7ba2b 1877 else if (GP_REG_P (regno))
1878 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1879 else if (FP_REG_P (regno))
1880 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1881 else if (BR_REG_P (regno))
1882 temp = (TARGET_BOOLEANS && (mode == CCmode));
1883 else
1884 temp = FALSE;
1885
1886 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1887 }
1888 }
1889
1890 init_machine_status = xtensa_init_machine_status;
f6b7ba2b 1891
afb26b4b 1892 /* Check PIC settings. PIC is only supported when using L32R
1893 instructions, and some targets need to always use PIC. */
1894 if (flag_pic && TARGET_CONST16)
1895 error ("-f%s is not supported with CONST16 instructions",
1896 (flag_pic > 1 ? "PIC" : "pic"));
1897 else if (XTENSA_ALWAYS_PIC)
1898 {
1899 if (TARGET_CONST16)
1900 error ("PIC is required but not supported with CONST16 instructions");
1901 flag_pic = 1;
1902 }
1903 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1904 if (flag_pic > 1)
f6b7ba2b 1905 flag_pic = 1;
1897b881 1906
1907 /* Hot/cold partitioning does not work on this architecture, because of
1908 constant pools (the load instruction cannot necessarily reach that far).
1909 Therefore disable it on this architecture. */
1910 if (flag_reorder_blocks_and_partition)
1911 {
1912 flag_reorder_blocks_and_partition = 0;
1913 flag_reorder_blocks = 1;
1914 }
f6b7ba2b 1915}
1916
1917
1918/* A C compound statement to output to stdio stream STREAM the
1919 assembler syntax for an instruction operand X. X is an RTL
1920 expression.
1921
1922 CODE is a value that can be used to specify one of several ways
1923 of printing the operand. It is used when identical operands
1924 must be printed differently depending on the context. CODE
1925 comes from the '%' specification that was used to request
1926 printing of the operand. If the specification was just '%DIGIT'
1927 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1928 is the ASCII code for LTR.
1929
1930 If X is a register, this macro should print the register's name.
1931 The names can be found in an array 'reg_names' whose type is
1932 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1933
1934 When the machine description has a specification '%PUNCT' (a '%'
1935 followed by a punctuation character), this macro is called with
1936 a null pointer for X and the punctuation character for CODE.
1937
1938 'a', 'c', 'l', and 'n' are reserved.
de071186 1939
f6b7ba2b 1940 The Xtensa specific codes are:
1941
1942 'd' CONST_INT, print as signed decimal
1943 'x' CONST_INT, print as signed hexadecimal
1944 'K' CONST_INT, print number of bits in mask for EXTUI
1945 'R' CONST_INT, print (X & 0x1f)
1946 'L' CONST_INT, print ((32 - X) & 0x1f)
1947 'D' REG, print second register of double-word register operand
1948 'N' MEM, print address of next word following a memory operand
1949 'v' MEM, if memory reference is volatile, output a MEMW before it
afb26b4b 1950 't' any constant, add "@h" suffix for top 16 bits
1951 'b' any constant, add "@l" suffix for bottom 16 bits
f6b7ba2b 1952*/
1953
1954static void
fd63fcf8 1955printx (FILE *file, signed int val)
f6b7ba2b 1956{
fd63fcf8 1957 /* Print a hexadecimal value in a nice way. */
f6b7ba2b 1958 if ((val > -0xa) && (val < 0xa))
1959 fprintf (file, "%d", val);
1960 else if (val < 0)
1961 fprintf (file, "-0x%x", -val);
1962 else
1963 fprintf (file, "0x%x", val);
1964}
1965
1966
1967void
fd63fcf8 1968print_operand (FILE *file, rtx x, int letter)
f6b7ba2b 1969{
afb26b4b 1970 if (!x)
f6b7ba2b 1971 error ("PRINT_OPERAND null pointer");
1972
afb26b4b 1973 switch (letter)
f6b7ba2b 1974 {
afb26b4b 1975 case 'D':
1976 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1977 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1978 else
1979 output_operand_lossage ("invalid %%D value");
1980 break;
f6b7ba2b 1981
afb26b4b 1982 case 'v':
1983 if (GET_CODE (x) == MEM)
1984 {
1985 /* For a volatile memory reference, emit a MEMW before the
1986 load or store. */
d8ad0757 1987 if (MEM_VOLATILE_P (x))
afb26b4b 1988 fprintf (file, "memw\n\t");
1989 }
1990 else
1991 output_operand_lossage ("invalid %%v value");
1992 break;
f6b7ba2b 1993
afb26b4b 1994 case 'N':
1995 if (GET_CODE (x) == MEM
1996 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1997 {
1998 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1999 output_address (XEXP (x, 0));
2000 }
2001 else
2002 output_operand_lossage ("invalid %%N value");
2003 break;
f6b7ba2b 2004
afb26b4b 2005 case 'K':
2006 if (GET_CODE (x) == CONST_INT)
f6b7ba2b 2007 {
afb26b4b 2008 int num_bits = 0;
2009 unsigned val = INTVAL (x);
2010 while (val & 1)
2011 {
2012 num_bits += 1;
2013 val = val >> 1;
2014 }
2015 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2016 fatal_insn ("invalid mask", x);
f6b7ba2b 2017
afb26b4b 2018 fprintf (file, "%d", num_bits);
2019 }
2020 else
2021 output_operand_lossage ("invalid %%K value");
2022 break;
f6b7ba2b 2023
afb26b4b 2024 case 'L':
2025 if (GET_CODE (x) == CONST_INT)
2026 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2027 else
2028 output_operand_lossage ("invalid %%L value");
2029 break;
f6b7ba2b 2030
afb26b4b 2031 case 'R':
2032 if (GET_CODE (x) == CONST_INT)
2033 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2034 else
2035 output_operand_lossage ("invalid %%R value");
2036 break;
f6b7ba2b 2037
afb26b4b 2038 case 'x':
2039 if (GET_CODE (x) == CONST_INT)
2040 printx (file, INTVAL (x));
2041 else
2042 output_operand_lossage ("invalid %%x value");
2043 break;
f6b7ba2b 2044
afb26b4b 2045 case 'd':
2046 if (GET_CODE (x) == CONST_INT)
2047 fprintf (file, "%ld", INTVAL (x));
2048 else
2049 output_operand_lossage ("invalid %%d value");
2050 break;
f6b7ba2b 2051
afb26b4b 2052 case 't':
2053 case 'b':
2054 if (GET_CODE (x) == CONST_INT)
2055 {
2056 printx (file, INTVAL (x));
2057 fputs (letter == 't' ? "@h" : "@l", file);
2058 }
2059 else if (GET_CODE (x) == CONST_DOUBLE)
2060 {
2061 REAL_VALUE_TYPE r;
2062 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2063 if (GET_MODE (x) == SFmode)
2064 {
2065 long l;
2066 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2067 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2068 }
2069 else
2070 output_operand_lossage ("invalid %%t/%%b value");
2071 }
2072 else if (GET_CODE (x) == CONST)
2073 {
2074 /* X must be a symbolic constant on ELF. Write an expression
2075 suitable for 'const16' that sets the high or low 16 bits. */
2076 if (GET_CODE (XEXP (x, 0)) != PLUS
2077 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2078 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2079 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2080 output_operand_lossage ("invalid %%t/%%b value");
2081 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2082 fputs (letter == 't' ? "@h" : "@l", file);
2083 /* There must be a non-alphanumeric character between 'h' or 'l'
2084 and the number; print_operand() already adds the '-' for negative values. */
2085 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2086 fputs ("+", file);
2087 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2088 }
2089 else
de071186 2090 {
afb26b4b 2091 output_addr_const (file, x);
2092 fputs (letter == 't' ? "@h" : "@l", file);
f6b7ba2b 2093 }
2094 break;
2095
2096 default:
afb26b4b 2097 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2098 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2099 else if (GET_CODE (x) == MEM)
2100 output_address (XEXP (x, 0));
2101 else if (GET_CODE (x) == CONST_INT)
2102 fprintf (file, "%ld", INTVAL (x));
2103 else
2104 output_addr_const (file, x);
f6b7ba2b 2105 }
2106}
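
/* Worked examples for the CONST_INT codes above (illustrative only):
       %d of (const_int 10)    ->  "10"
       %x of (const_int 10)    ->  "0xa"    (printx switches to hex)
       %K of (const_int 0xff)  ->  "8"      (eight low-order one bits)
       %R of (const_int 37)    ->  "5"      (37 & 0x1f)
       %L of (const_int 8)     ->  "24"     ((32 - 8) & 0x1f)
   A 't'/'b' pair such as "%t0"/"%b0" on a symbolic constant prints the
   operand followed by "@h" or "@l" for use with CONST16.  */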
2107
2108
2109/* A C compound statement to output to stdio stream STREAM the
2110 assembler syntax for an instruction operand that is a memory
7811991d 2111 reference whose address is ADDR. ADDR is an RTL expression. */
f6b7ba2b 2112
2113void
fd63fcf8 2114print_operand_address (FILE *file, rtx addr)
f6b7ba2b 2115{
2116 if (!addr)
2117 error ("PRINT_OPERAND_ADDRESS, null pointer");
2118
2119 switch (GET_CODE (addr))
2120 {
2121 default:
2122 fatal_insn ("invalid address", addr);
2123 break;
2124
2125 case REG:
2126 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2127 break;
2128
2129 case PLUS:
2130 {
2131 rtx reg = (rtx)0;
2132 rtx offset = (rtx)0;
2133 rtx arg0 = XEXP (addr, 0);
2134 rtx arg1 = XEXP (addr, 1);
2135
2136 if (GET_CODE (arg0) == REG)
2137 {
2138 reg = arg0;
2139 offset = arg1;
2140 }
2141 else if (GET_CODE (arg1) == REG)
2142 {
2143 reg = arg1;
2144 offset = arg0;
2145 }
2146 else
2147 fatal_insn ("no register in address", addr);
2148
2149 if (CONSTANT_P (offset))
2150 {
2151 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2152 output_addr_const (file, offset);
2153 }
2154 else
2155 fatal_insn ("address offset not a constant", addr);
2156 }
2157 break;
2158
2159 case LABEL_REF:
2160 case SYMBOL_REF:
2161 case CONST_INT:
2162 case CONST:
2163 output_addr_const (file, addr);
2164 break;
2165 }
2166}
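
/* Example output (illustrative): for the address (plus (reg a3)
   (const_int 16)) this prints "a3, 16", and for a bare (reg a3) it
   prints "a3, 0", matching the "base, offset" operand syntax of the
   Xtensa load/store instructions.  */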
2167
2168
771b6086 2169bool
2170xtensa_output_addr_const_extra (FILE *fp, rtx x)
2171{
2172 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2173 {
2174 switch (XINT (x, 1))
2175 {
2176 case UNSPEC_PLT:
2177 if (flag_pic)
2178 {
2179 output_addr_const (fp, XVECEXP (x, 0, 0));
2180 fputs ("@PLT", fp);
2181 return true;
2182 }
2183 break;
2184 default:
2185 break;
2186 }
2187 }
2188 return false;
2189}
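
/* Example (illustrative): when compiling with -fpic, a call through the
   PLT is wrapped in an UNSPEC_PLT, and the routine above prints the
   symbol followed by "@PLT", e.g. "memcpy@PLT".  */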
2190
2191
f6b7ba2b 2192void
fd63fcf8 2193xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
f6b7ba2b 2194{
2195 long value_long[2];
badfe841 2196 REAL_VALUE_TYPE r;
f6b7ba2b 2197 int size;
c9876a47 2198 rtx first, second;
f6b7ba2b 2199
2200 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2201
2202 switch (GET_MODE_CLASS (mode))
2203 {
2204 case MODE_FLOAT:
cd3d4fe0 2205 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
f6b7ba2b 2206
badfe841 2207 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
f6b7ba2b 2208 switch (mode)
2209 {
2210 case SFmode:
badfe841 2211 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
205710bf 2212 if (HOST_BITS_PER_LONG > 32)
2213 value_long[0] &= 0xffffffff;
badfe841 2214 fprintf (file, "0x%08lx\n", value_long[0]);
f6b7ba2b 2215 break;
2216
2217 case DFmode:
badfe841 2218 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
205710bf 2219 if (HOST_BITS_PER_LONG > 32)
2220 {
2221 value_long[0] &= 0xffffffff;
2222 value_long[1] &= 0xffffffff;
2223 }
badfe841 2224 fprintf (file, "0x%08lx, 0x%08lx\n",
2225 value_long[0], value_long[1]);
f6b7ba2b 2226 break;
2227
2228 default:
cd3d4fe0 2229 gcc_unreachable ();
f6b7ba2b 2230 }
2231
2232 break;
2233
2234 case MODE_INT:
2235 case MODE_PARTIAL_INT:
2236 size = GET_MODE_SIZE (mode);
cd3d4fe0 2237 switch (size)
f6b7ba2b 2238 {
cd3d4fe0 2239 case 4:
f6b7ba2b 2240 output_addr_const (file, x);
2241 fputs ("\n", file);
cd3d4fe0 2242 break;
2243
2244 case 8:
c9876a47 2245 split_double (x, &first, &second);
2246 output_addr_const (file, first);
f6b7ba2b 2247 fputs (", ", file);
c9876a47 2248 output_addr_const (file, second);
f6b7ba2b 2249 fputs ("\n", file);
cd3d4fe0 2250 break;
2251
2252 default:
2253 gcc_unreachable ();
f6b7ba2b 2254 }
f6b7ba2b 2255 break;
2256
2257 default:
cd3d4fe0 2258 gcc_unreachable ();
f6b7ba2b 2259 }
2260}
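
/* Example of the emitted directives (illustrative): an SImode constant
   100 with label number 0 produces
       .literal .LC0, 100
   while a DImode constant is split into two words, e.g.
       .literal .LC1, <low word>, <high word>
   with the word order chosen by split_double for the target
   endianness.  */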
2261
2262
2263/* Return the bytes needed to compute the frame pointer from the current
c821cf9c 2264 stack pointer. */
f6b7ba2b 2265
2266#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2267#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2268
2269long
fd63fcf8 2270compute_frame_size (int size)
f6b7ba2b 2271{
fd63fcf8 2272 /* Add space for the incoming static chain value. */
4ee9c684 2273 if (cfun->static_chain_decl != NULL)
f6b7ba2b 2274 size += (1 * UNITS_PER_WORD);
2275
2276 xtensa_current_frame_size =
2277 XTENSA_STACK_ALIGN (size
2278 + current_function_outgoing_args_size
2279 + (WINDOW_SIZE * UNITS_PER_WORD));
2280 return xtensa_current_frame_size;
2281}
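
/* Worked example (illustrative; assumes STACK_BOUNDARY == 128, so
   STACK_BYTES == 16): XTENSA_STACK_ALIGN (20) == 32 and
   XTENSA_STACK_ALIGN (33) == 48.  The frame size is therefore the local
   variable size plus outgoing argument space plus the register window
   save area, rounded up to a 16-byte multiple.  */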
2282
2283
2284int
fd63fcf8 2285xtensa_frame_pointer_required (void)
f6b7ba2b 2286{
2287 /* The code to expand builtin_frame_addr and builtin_return_addr
2288 currently uses the hard_frame_pointer instead of frame_pointer.
2289 This seems wrong, but it may be necessary for other architectures.
c821cf9c 2290 This function is derived from the i386 code. */
f6b7ba2b 2291
2292 if (cfun->machine->accesses_prev_frame)
2293 return 1;
2294
2295 return 0;
2296}
2297
2298
afb26b4b 2299void
fd63fcf8 2300xtensa_expand_prologue (void)
afb26b4b 2301{
2302 HOST_WIDE_INT total_size;
2303 rtx size_rtx;
2efea8c0 2304
afb26b4b 2305 total_size = compute_frame_size (get_frame_size ());
2306 size_rtx = GEN_INT (total_size);
2efea8c0 2307
afb26b4b 2308 if (total_size < (1 << (12+3)))
2309 emit_insn (gen_entry (size_rtx, size_rtx));
f6b7ba2b 2310 else
2311 {
afb26b4b 2312 /* Use a8 as a temporary since a0-a7 may be live. */
2313 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2314 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2315 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2316 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2317 emit_move_insn (stack_pointer_rtx, tmp_reg);
f6b7ba2b 2318 }
2319
afb26b4b 2320 if (frame_pointer_needed)
f6b7ba2b 2321 {
e060c9df 2322 if (cfun->machine->set_frame_ptr_insn)
f6b7ba2b 2323 {
e060c9df 2324 rtx first, insn;
f6b7ba2b 2325
e060c9df 2326 push_topmost_sequence ();
2327 first = get_insns ();
2328 pop_topmost_sequence ();
f6b7ba2b 2329
afb26b4b 2330 /* For all instructions prior to set_frame_ptr_insn, replace
2331 hard_frame_pointer references with stack_pointer. */
2332 for (insn = first;
e060c9df 2333 insn != cfun->machine->set_frame_ptr_insn;
afb26b4b 2334 insn = NEXT_INSN (insn))
2335 {
2336 if (INSN_P (insn))
c3e2d63e 2337 {
2338 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2339 hard_frame_pointer_rtx,
2340 stack_pointer_rtx);
2341 df_insn_rescan (insn);
2342 }
afb26b4b 2343 }
2344 }
2345 else
2346 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
f6b7ba2b 2347 }
2348}
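
/* Note on the size check above (illustrative): the ENTRY instruction's
   frame-size immediate is a 12-bit field scaled by 8 bytes, hence the
   1 << (12+3) == 32768 byte limit.  Larger frames allocate only
   MIN_FRAME_SIZE with ENTRY and then lower the stack pointer by the
   remainder through the a8 temporary, as in the else branch above.  */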
2349
2350
afb26b4b 2351/* Clear variables at function end. */
f6b7ba2b 2352
2353void
fd63fcf8 2354xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2355 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
f6b7ba2b 2356{
f6b7ba2b 2357 xtensa_current_frame_size = 0;
2358}
2359
2360
43326cf7 2361rtx
fd63fcf8 2362xtensa_return_addr (int count, rtx frame)
43326cf7 2363{
2364 rtx result, retaddr;
2365
2366 if (count == -1)
afb26b4b 2367 retaddr = gen_rtx_REG (Pmode, A0_REG);
43326cf7 2368 else
2369 {
2370 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2371 addr = memory_address (Pmode, addr);
2372 retaddr = gen_reg_rtx (Pmode);
2373 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2374 }
2375
2376 /* The 2 most-significant bits of the return address on Xtensa hold
2377 the register window size. To get the real return address, these
2378 bits must be replaced with the high bits from the current PC. */
2379
2380 result = gen_reg_rtx (Pmode);
2381 emit_insn (gen_fix_return_addr (result, retaddr));
2382 return result;
2383}
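
/* Illustrative: with the windowed ABI, the top two bits of the saved
   return address (e.g. 0x4xxxxxxx after a call4) encode the caller's
   window increment rather than address bits.  The fix_return_addr
   pattern used above is expected to rebuild the true address roughly as
       (retaddr & 0x3fffffff) | (pc & 0xc0000000)
   i.e. the high two bits are taken from the current PC, as described in
   the comment above.  */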
2384
2385
f6b7ba2b 2386/* Create the va_list data type.
9276fdff 2387
2388 This structure is set up by __builtin_saveregs. The __va_reg field
2389 points to a stack-allocated region holding the contents of the
2390 incoming argument registers. The __va_ndx field is an index
2391 initialized to the position of the first unnamed (variable)
2392 argument. This same index is also used to address the arguments
2393 passed in memory. Thus, the __va_stk field is initialized to point
2394 to the position of the first argument in memory, offset to account
2395 for the arguments passed in registers and for the size of the
2396 argument register area not being 16-byte aligned. E.g., there
2397 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2398 for the first stack argument to have the maximal alignment of 16
2399 bytes, so we offset the __va_stk address by 32 bytes so that
2400 __va_stk[32] references the first argument on the stack. */
f6b7ba2b 2401
2e15d750 2402static tree
2403xtensa_build_builtin_va_list (void)
f6b7ba2b 2404{
049d6666 2405 tree f_stk, f_reg, f_ndx, record, type_decl;
f6b7ba2b 2406
049d6666 2407 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2408 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
f6b7ba2b 2409
2410 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2411 ptr_type_node);
2412 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2413 ptr_type_node);
2414 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2415 integer_type_node);
2416
2417 DECL_FIELD_CONTEXT (f_stk) = record;
2418 DECL_FIELD_CONTEXT (f_reg) = record;
2419 DECL_FIELD_CONTEXT (f_ndx) = record;
2420
049d6666 2421 TREE_CHAIN (record) = type_decl;
2422 TYPE_NAME (record) = type_decl;
f6b7ba2b 2423 TYPE_FIELDS (record) = f_stk;
2424 TREE_CHAIN (f_stk) = f_reg;
2425 TREE_CHAIN (f_reg) = f_ndx;
2426
2427 layout_type (record);
2428 return record;
2429}
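
/* For reference (illustrative only), the record built above behaves
   like this C structure:

       struct __va_list_tag
       {
         void *__va_stk;      -- stack argument area (biased by -32)
         void *__va_reg;      -- saved register argument area
         int __va_ndx;        -- byte index of the next argument
       };

   __va_stk and __va_reg are plain pointers and __va_ndx is an int, as
   set up by the three FIELD_DECLs above.  */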
2430
2431
2432/* Save the incoming argument registers on the stack. Returns the
c821cf9c 2433 address of the saved registers. */
f6b7ba2b 2434
4fe4af61 2435static rtx
fd63fcf8 2436xtensa_builtin_saveregs (void)
f6b7ba2b 2437{
d8002fbc 2438 rtx gp_regs;
bef77716 2439 int arg_words = current_function_args_info.arg_words;
f6b7ba2b 2440 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
f6b7ba2b 2441
e060c9df 2442 if (gp_left <= 0)
f6b7ba2b 2443 return const0_rtx;
2444
dafa59bd 2445 /* Allocate the general-purpose register space. */
f6b7ba2b 2446 gp_regs = assign_stack_local
2447 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
049d6666 2448 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
f6b7ba2b 2449
2450 /* Now store the incoming registers. */
e060c9df 2451 cfun->machine->need_a7_copy = true;
2452 cfun->machine->vararg_a7 = true;
d8002fbc 2453 move_block_from_reg (GP_ARG_FIRST + arg_words,
2454 adjust_address (gp_regs, BLKmode,
2455 arg_words * UNITS_PER_WORD),
2456 gp_left);
f6b7ba2b 2457
2458 return XEXP (gp_regs, 0);
2459}
2460
2461
2462/* Implement `va_start' for varargs and stdarg. We look at the
c821cf9c 2463 current function to fill in an initial va_list. */
f6b7ba2b 2464
2465void
fd63fcf8 2466xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
f6b7ba2b 2467{
2468 tree f_stk, stk;
2469 tree f_reg, reg;
2470 tree f_ndx, ndx;
2471 tree t, u;
2472 int arg_words;
2473
2474 arg_words = current_function_args_info.arg_words;
2475
2476 f_stk = TYPE_FIELDS (va_list_type_node);
2477 f_reg = TREE_CHAIN (f_stk);
2478 f_ndx = TREE_CHAIN (f_reg);
2479
ed03eadb 2480 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2481 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2482 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
f6b7ba2b 2483
2484 /* Call __builtin_saveregs; save the result in __va_reg */
d8002fbc 2485 u = make_tree (sizetype, expand_builtin_saveregs ());
2486 u = fold_convert (ptr_type_node, u);
35cc02b5 2487 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, reg, u);
f6b7ba2b 2488 TREE_SIDE_EFFECTS (t) = 1;
2489 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2490
9276fdff 2491 /* Set the __va_stk member to ($arg_ptr - 32). */
f6b7ba2b 2492 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
d8002fbc 2493 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
35cc02b5 2494 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, stk, u);
f6b7ba2b 2495 TREE_SIDE_EFFECTS (t) = 1;
2496 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2497
9276fdff 2498 /* Set the __va_ndx member. If the first variable argument is on
2499 the stack, adjust __va_ndx by 2 words to account for the extra
2500 alignment offset for __va_stk. */
2501 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2502 arg_words += 2;
d8002fbc 2503 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx,
2504 size_int (arg_words * UNITS_PER_WORD));
f6b7ba2b 2505 TREE_SIDE_EFFECTS (t) = 1;
2506 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2507}
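
/* In effect (an illustrative C-level view of the trees built above),
   va_start (ap, last) expands to:

       ap.__va_reg = __builtin_saveregs ();
       ap.__va_stk = (char *) __arg_ptr - 32;
       ap.__va_ndx = arg_words * UNITS_PER_WORD;   -- plus 8 when all the
                                                   -- argument registers are
                                                   -- used by named args

   where __arg_ptr stands for virtual_incoming_args_rtx and arg_words is
   the number of argument words consumed by the named parameters.  */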
2508
2509
2510/* Implement `va_arg'. */
2511
ae79166b 2512static tree
2513xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2514 tree *post_p ATTRIBUTE_UNUSED)
f6b7ba2b 2515{
2516 tree f_stk, stk;
2517 tree f_reg, reg;
2518 tree f_ndx, ndx;
ae79166b 2519 tree type_size, array, orig_ndx, addr, size, va_size, t;
2520 tree lab_false, lab_over, lab_false2;
2cd7bb84 2521 bool indirect;
2522
2523 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2524 if (indirect)
2525 type = build_pointer_type (type);
f6b7ba2b 2526
abeadffe 2527 /* Handle complex values as separate real and imaginary parts. */
2528 if (TREE_CODE (type) == COMPLEX_TYPE)
2529 {
ae79166b 2530 tree real_part, imag_part;
abeadffe 2531
ae79166b 2532 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2533 pre_p, NULL);
2534 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
abeadffe 2535
ae79166b 2536 imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2537 pre_p, NULL);
2538 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
abeadffe 2539
ed03eadb 2540 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
abeadffe 2541 }
2542
f6b7ba2b 2543 f_stk = TYPE_FIELDS (va_list_type_node);
2544 f_reg = TREE_CHAIN (f_stk);
2545 f_ndx = TREE_CHAIN (f_reg);
2546
ed03eadb 2547 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2548 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2549 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
f6b7ba2b 2550
ae79166b 2551 type_size = size_in_bytes (type);
2552 va_size = round_up (type_size, UNITS_PER_WORD);
2553 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
dd52a190 2554
f6b7ba2b 2555
9276fdff 2556 /* First align __va_ndx if necessary for this arg:
f6b7ba2b 2557
ae79166b 2558 orig_ndx = (AP).__va_ndx;
9276fdff 2559 if (__alignof__ (TYPE) > 4 )
ae79166b 2560 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
9276fdff 2561 & -__alignof__ (TYPE)); */
f6b7ba2b 2562
ae79166b 2563 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2564
f6b7ba2b 2565 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2566 {
81c44390 2567 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
ae79166b 2568
d8002fbc 2569 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, size_int (align - 1));
2570 t = build2 (BIT_AND_EXPR, integer_type_node, t, size_int (-align));
35cc02b5 2571 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, orig_ndx, t);
ae79166b 2572 gimplify_and_add (t, pre_p);
f6b7ba2b 2573 }
2574
2575
2576 /* Increment __va_ndx to point past the argument:
2577
ae79166b 2578 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
f6b7ba2b 2579
ae79166b 2580 t = fold_convert (integer_type_node, va_size);
ed03eadb 2581 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
35cc02b5 2582 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
ae79166b 2583 gimplify_and_add (t, pre_p);
f6b7ba2b 2584
2585
2586 /* Check if the argument is in registers:
2587
89d4bc5e 2588 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
0336f0f0 2589 && !must_pass_in_stack (type))
fd63fcf8 2590 __array = (AP).__va_reg; */
f6b7ba2b 2591
ae79166b 2592 array = create_tmp_var (ptr_type_node, NULL);
f6b7ba2b 2593
ae79166b 2594 lab_over = NULL;
0336f0f0 2595 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
89d4bc5e 2596 {
ae79166b 2597 lab_false = create_artificial_label ();
2598 lab_over = create_artificial_label ();
2599
d8002fbc 2600 t = build2 (GT_EXPR, boolean_type_node, ndx,
2601 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
ed03eadb 2602 t = build3 (COND_EXPR, void_type_node, t,
2603 build1 (GOTO_EXPR, void_type_node, lab_false),
2604 NULL_TREE);
ae79166b 2605 gimplify_and_add (t, pre_p);
2606
35cc02b5 2607 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, reg);
ae79166b 2608 gimplify_and_add (t, pre_p);
2609
ed03eadb 2610 t = build1 (GOTO_EXPR, void_type_node, lab_over);
ae79166b 2611 gimplify_and_add (t, pre_p);
2612
ed03eadb 2613 t = build1 (LABEL_EXPR, void_type_node, lab_false);
ae79166b 2614 gimplify_and_add (t, pre_p);
89d4bc5e 2615 }
f6b7ba2b 2616
ae79166b 2617
f6b7ba2b 2618 /* ...otherwise, the argument is on the stack (never split between
2619 registers and the stack -- change __va_ndx if necessary):
2620
2621 else
2622 {
9276fdff 2623 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2624 (AP).__va_ndx = 32 + __va_size (TYPE);
f6b7ba2b 2625 __array = (AP).__va_stk;
fd63fcf8 2626 } */
f6b7ba2b 2627
ae79166b 2628 lab_false2 = create_artificial_label ();
f6b7ba2b 2629
d8002fbc 2630 t = build2 (GT_EXPR, boolean_type_node, orig_ndx,
2631 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
ed03eadb 2632 t = build3 (COND_EXPR, void_type_node, t,
2633 build1 (GOTO_EXPR, void_type_node, lab_false2),
2634 NULL_TREE);
ae79166b 2635 gimplify_and_add (t, pre_p);
f6b7ba2b 2636
ae79166b 2637 t = size_binop (PLUS_EXPR, va_size, size_int (32));
2638 t = fold_convert (integer_type_node, t);
35cc02b5 2639 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
ae79166b 2640 gimplify_and_add (t, pre_p);
f6b7ba2b 2641
ed03eadb 2642 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
ae79166b 2643 gimplify_and_add (t, pre_p);
f6b7ba2b 2644
35cc02b5 2645 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, stk);
ae79166b 2646 gimplify_and_add (t, pre_p);
2647
2648 if (lab_over)
2649 {
ed03eadb 2650 t = build1 (LABEL_EXPR, void_type_node, lab_over);
ae79166b 2651 gimplify_and_add (t, pre_p);
2652 }
dd52a190 2653
f6b7ba2b 2654
2655 /* Given the base array pointer (__array) and index to the subsequent
2656 argument (__va_ndx), find the address:
2657
dd52a190 2658 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2659 ? sizeof (TYPE)
2660 : __va_size (TYPE))
f6b7ba2b 2661
2662 The results are endian-dependent because values smaller than one word
fd63fcf8 2663 are aligned differently. */
f6b7ba2b 2664
de071186 2665
ea2981b9 2666 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
dd52a190 2667 {
d8002fbc 2668 t = fold_build2 (GE_EXPR, boolean_type_node, type_size,
2669 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
ed03eadb 2670 t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
ae79166b 2671 size = t;
dd52a190 2672 }
ae79166b 2673 else
2674 size = va_size;
2675
d8002fbc 2676 t = build2 (MINUS_EXPR, sizetype, ndx, size);
2677 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, array, t);
f6b7ba2b 2678
ae79166b 2679 addr = fold_convert (build_pointer_type (type), addr);
2cd7bb84 2680 if (indirect)
063f5fdd 2681 addr = build_va_arg_indirect_ref (addr);
2682 return build_va_arg_indirect_ref (addr);
f6b7ba2b 2683}
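
/* Putting the pieces above together, the generated code behaves roughly
   like this C sketch (illustrative only):

       orig_ndx = ap.__va_ndx;
       if (__alignof__ (TYPE) > 4)
         orig_ndx = (orig_ndx + align - 1) & -align;
       ap.__va_ndx = orig_ndx + __va_size (TYPE);
       if (ap.__va_ndx <= 6 * 4 && !must_pass_in_stack (TYPE))
         array = ap.__va_reg;
       else
         {
           if (orig_ndx <= 6 * 4)
             ap.__va_ndx = 32 + __va_size (TYPE);
           array = ap.__va_stk;
         }
       addr = array + ap.__va_ndx - size;

   where 'size' is sizeof (TYPE) for small values on big-endian targets
   and the word-rounded __va_size (TYPE) otherwise.  */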
2684
2685
8e8c0c04 2686/* Builtins. */
2687
2688enum xtensa_builtin
2689{
2690 XTENSA_BUILTIN_UMULSIDI3,
2691 XTENSA_BUILTIN_max
2692};
2693
2694
2695static void
2696xtensa_init_builtins (void)
2697{
2698 tree ftype;
2699
2700 ftype = build_function_type_list (unsigned_intDI_type_node,
2701 unsigned_intSI_type_node,
2702 unsigned_intSI_type_node, NULL_TREE);
2703
2704 add_builtin_function ("__builtin_umulsidi3", ftype,
2705 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
2706 "__umulsidi3", NULL_TREE);
2707}
2708
2709
2710static tree
2711xtensa_fold_builtin (tree fndecl, tree arglist, bool ignore ATTRIBUTE_UNUSED)
2712{
2713 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2714 tree arg0, arg1;
2715
2716 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2717 {
2718 arg0 = TREE_VALUE (arglist);
2719 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2720 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2721 || TARGET_MUL32_HIGH)
2722 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
2723 fold_convert (unsigned_intDI_type_node, arg0),
2724 fold_convert (unsigned_intDI_type_node, arg1));
2725 else
2726 return NULL;
2727 }
2728
2729 internal_error ("bad builtin code");
2730 return NULL;
2731}
2732
2733
2734static rtx
2735xtensa_expand_builtin (tree exp, rtx target,
2736 rtx subtarget ATTRIBUTE_UNUSED,
2737 enum machine_mode mode ATTRIBUTE_UNUSED,
2738 int ignore)
2739{
d4c45216 2740 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8e8c0c04 2741 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2742
2743 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
2744 __umulsidi3 function when the Xtensa configuration can directly
2745 implement it. If not, just call the function. */
2746 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2747 return expand_call (exp, target, ignore);
2748
2749 internal_error ("bad builtin code");
2750 return NULL_RTX;
2751}
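
/* Usage sketch (illustrative): a user can write

       unsigned long long
       mul64 (unsigned int a, unsigned int b)
       {
         return __builtin_umulsidi3 (a, b);
       }

   With constant operands, or when TARGET_MUL32_HIGH is available, the
   fold hook above turns this into a plain DImode multiply; otherwise
   the expand hook falls back to an ordinary call to __umulsidi3.  */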
2752
2753
fc12fa10 2754enum reg_class
fd63fcf8 2755xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
fc12fa10 2756{
a8332086 2757 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
fc12fa10 2758 return NO_REGS;
2759
a8332086 2760 /* Don't use the stack pointer or hard frame pointer for reloads!
2761 The hard frame pointer would normally be OK except that it may
2762 briefly hold an incoming argument in the prologue, and reload
2763 won't know that it is live because the hard frame pointer is
2764 treated specially. */
2765
2766 if (class == AR_REGS || class == GR_REGS)
2767 return RL_REGS;
fc12fa10 2768
2769 return class;
2770}
2771
2772
f6b7ba2b 2773enum reg_class
fd63fcf8 2774xtensa_secondary_reload_class (enum reg_class class,
2775 enum machine_mode mode ATTRIBUTE_UNUSED,
2776 rtx x, int isoutput)
f6b7ba2b 2777{
2778 int regno;
2779
2780 if (GET_CODE (x) == SIGN_EXTEND)
2781 x = XEXP (x, 0);
2782 regno = xt_true_regnum (x);
2783
2784 if (!isoutput)
2785 {
2786 if (class == FP_REGS && constantpool_mem_p (x))
a8332086 2787 return RL_REGS;
f6b7ba2b 2788 }
2789
2790 if (ACC_REG_P (regno))
a8332086 2791 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
f6b7ba2b 2792 if (class == ACC_REG)
a8332086 2793 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
f6b7ba2b 2794
2795 return NO_REGS;
2796}
2797
2798
2799void
fd63fcf8 2800order_regs_for_local_alloc (void)
f6b7ba2b 2801{
2802 if (!leaf_function_p ())
2803 {
2804 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2805 FIRST_PSEUDO_REGISTER * sizeof (int));
2806 }
2807 else
2808 {
2809 int i, num_arg_regs;
2810 int nxt = 0;
2811
dafa59bd 2812 /* Use the AR registers in increasing order (skipping a0 and a1)
2813 but save the incoming argument registers as a last resort. */
f6b7ba2b 2814 num_arg_regs = current_function_args_info.arg_words;
2815 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2816 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2817 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2818 reg_alloc_order[nxt++] = i + num_arg_regs;
2819 for (i = 0; i < num_arg_regs; i++)
2820 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2821
dafa59bd 2822 /* List the coprocessor registers in order. */
bef09eef 2823 for (i = 0; i < BR_REG_NUM; i++)
2824 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2825
dafa59bd 2826 /* List the FP registers in order for now. */
f6b7ba2b 2827 for (i = 0; i < 16; i++)
2828 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2829
c821cf9c 2830 /* GCC requires that we list *all* the registers.... */
f6b7ba2b 2831 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2832 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2833 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2834 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2835
f6b7ba2b 2836 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2837 }
2838}
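
/* Illustrative result (assuming GP_ARG_FIRST == 2 and two argument
   words used by the current function): the leaf-function order built
   above is a4 ... a15, then a2, a3, then the boolean registers, the FP
   registers f0 ... f15, and finally a0, a1, the pseudo frame and arg
   pointers (16 and 17) and the MAC16 accumulator, i.e. free AR
   registers first, incoming argument registers last among the ARs,
   with the special registers listed only to satisfy GCC.  */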
2839
2840
5f4442bc 2841/* Some Xtensa targets support multiple bss sections. If the section
2842 name ends with ".bss", add SECTION_BSS to the flags. */
2843
2844static unsigned int
fd63fcf8 2845xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
5f4442bc 2846{
2847 unsigned int flags = default_section_type_flags (decl, name, reloc);
2848 const char *suffix;
2849
2850 suffix = strrchr (name, '.');
2851 if (suffix && strcmp (suffix, ".bss") == 0)
2852 {
2853 if (!decl || (TREE_CODE (decl) == VAR_DECL
2854 && DECL_INITIAL (decl) == NULL_TREE))
2855 flags |= SECTION_BSS; /* @nobits */
2856 else
c3ceba8e 2857 warning (0, "only uninitialized variables can be placed in a "
5f4442bc 2858 ".bss section");
2859 }
2860
2861 return flags;
2862}
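
/* Example (illustrative): an uninitialized variable placed with

       int scratch __attribute__ ((section (".sram.bss")));

   gets the SECTION_BSS (@nobits) flag because its section name ends in
   ".bss"; giving the same attribute to an initialized variable instead
   triggers the warning above.  */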
2863
2864
bbfbe351 2865/* The literal pool stays with the function. */
2866
2f14b1f9 2867static section *
fd63fcf8 2868xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2869 rtx x ATTRIBUTE_UNUSED,
2870 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
bbfbe351 2871{
2f14b1f9 2872 return function_section (current_function_decl);
bbfbe351 2873}
7811991d 2874
fd63fcf8 2875
fab7adbf 2876/* Compute a (partial) cost for rtx X. Return true if the complete
2877 cost has been computed, and false if subexpressions should be
2878 scanned. In either case, *TOTAL contains the cost result. */
2879
2880static bool
fd63fcf8 2881xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
fab7adbf 2882{
2883 switch (code)
2884 {
2885 case CONST_INT:
2886 switch (outer_code)
2887 {
2888 case SET:
2889 if (xtensa_simm12b (INTVAL (x)))
2890 {
2891 *total = 4;
2892 return true;
2893 }
2894 break;
2895 case PLUS:
2896 if (xtensa_simm8 (INTVAL (x))
2897 || xtensa_simm8x256 (INTVAL (x)))
2898 {
2899 *total = 0;
2900 return true;
2901 }
2902 break;
2903 case AND:
2904 if (xtensa_mask_immediate (INTVAL (x)))
2905 {
2906 *total = 0;
2907 return true;
2908 }
2909 break;
2910 case COMPARE:
2911 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2912 {
2913 *total = 0;
2914 return true;
2915 }
2916 break;
2917 case ASHIFT:
2918 case ASHIFTRT:
2919 case LSHIFTRT:
2920 case ROTATE:
2921 case ROTATERT:
dafa59bd 2922 /* No way to tell if X is the 2nd operand so be conservative. */
fab7adbf 2923 default: break;
2924 }
2925 if (xtensa_simm12b (INTVAL (x)))
2926 *total = 5;
afb26b4b 2927 else if (TARGET_CONST16)
2928 *total = COSTS_N_INSNS (2);
fab7adbf 2929 else
2930 *total = 6;
2931 return true;
2932
2933 case CONST:
2934 case LABEL_REF:
2935 case SYMBOL_REF:
afb26b4b 2936 if (TARGET_CONST16)
2937 *total = COSTS_N_INSNS (2);
2938 else
2939 *total = 5;
fab7adbf 2940 return true;
2941
2942 case CONST_DOUBLE:
afb26b4b 2943 if (TARGET_CONST16)
2944 *total = COSTS_N_INSNS (4);
2945 else
2946 *total = 7;
fab7adbf 2947 return true;
2948
2949 case MEM:
2950 {
2951 int num_words =
2952 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2953
2954 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2955 *total = COSTS_N_INSNS (num_words);
2956 else
2957 *total = COSTS_N_INSNS (2*num_words);
2958 return true;
2959 }
2960
2961 case FFS:
8e8c0c04 2962 case CTZ:
fab7adbf 2963 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2964 return true;
2965
8e8c0c04 2966 case CLZ:
2967 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
2968 return true;
2969
fab7adbf 2970 case NOT:
2971 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2972 return true;
2973
2974 case AND:
2975 case IOR:
2976 case XOR:
2977 if (GET_MODE (x) == DImode)
2978 *total = COSTS_N_INSNS (2);
2979 else
2980 *total = COSTS_N_INSNS (1);
2981 return true;
2982
2983 case ASHIFT:
2984 case ASHIFTRT:
2985 case LSHIFTRT:
2986 if (GET_MODE (x) == DImode)
2987 *total = COSTS_N_INSNS (50);
2988 else
2989 *total = COSTS_N_INSNS (1);
2990 return true;
2991
2992 case ABS:
2993 {
2994 enum machine_mode xmode = GET_MODE (x);
2995 if (xmode == SFmode)
2996 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2997 else if (xmode == DFmode)
2998 *total = COSTS_N_INSNS (50);
2999 else
3000 *total = COSTS_N_INSNS (4);
3001 return true;
3002 }
3003
3004 case PLUS:
3005 case MINUS:
3006 {
3007 enum machine_mode xmode = GET_MODE (x);
3008 if (xmode == SFmode)
3009 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3010 else if (xmode == DFmode || xmode == DImode)
3011 *total = COSTS_N_INSNS (50);
3012 else
3013 *total = COSTS_N_INSNS (1);
3014 return true;
3015 }
3016
3017 case NEG:
3018 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3019 return true;
3020
3021 case MULT:
3022 {
3023 enum machine_mode xmode = GET_MODE (x);
3024 if (xmode == SFmode)
3025 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
8e8c0c04 3026 else if (xmode == DFmode)
fab7adbf 3027 *total = COSTS_N_INSNS (50);
8e8c0c04 3028 else if (xmode == DImode)
3029 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
fab7adbf 3030 else if (TARGET_MUL32)
3031 *total = COSTS_N_INSNS (4);
3032 else if (TARGET_MAC16)
3033 *total = COSTS_N_INSNS (16);
3034 else if (TARGET_MUL16)
3035 *total = COSTS_N_INSNS (12);
3036 else
3037 *total = COSTS_N_INSNS (50);
3038 return true;
3039 }
3040
3041 case DIV:
3042 case MOD:
3043 {
3044 enum machine_mode xmode = GET_MODE (x);
3045 if (xmode == SFmode)
3046 {
3047 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3048 return true;
3049 }
3050 else if (xmode == DFmode)
3051 {
3052 *total = COSTS_N_INSNS (50);
3053 return true;
3054 }
3055 }
dafa59bd 3056 /* Fall through. */
fab7adbf 3057
3058 case UDIV:
3059 case UMOD:
3060 {
3061 enum machine_mode xmode = GET_MODE (x);
3062 if (xmode == DImode)
3063 *total = COSTS_N_INSNS (50);
3064 else if (TARGET_DIV32)
3065 *total = COSTS_N_INSNS (32);
3066 else
3067 *total = COSTS_N_INSNS (50);
3068 return true;
3069 }
3070
3071 case SQRT:
3072 if (GET_MODE (x) == SFmode)
3073 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3074 else
3075 *total = COSTS_N_INSNS (50);
3076 return true;
3077
3078 case SMIN:
3079 case UMIN:
3080 case SMAX:
3081 case UMAX:
3082 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3083 return true;
3084
3085 case SIGN_EXTRACT:
3086 case SIGN_EXTEND:
3087 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3088 return true;
3089
3090 case ZERO_EXTRACT:
3091 case ZERO_EXTEND:
3092 *total = COSTS_N_INSNS (1);
3093 return true;
3094
3095 default:
3096 return false;
3097 }
3098}
3099
6644435d 3100/* Worker function for TARGET_RETURN_IN_MEMORY. */
3101
4fe4af61 3102static bool
3103xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3104{
3105 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3106 > 4 * UNITS_PER_WORD);
3107}
3108
1f3233d1 3109#include "gt-xtensa.h"