/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "toplev.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tree-gimple.h"
#include "df.h"

55/* Enumeration for all of the relational tests, so that we can build
56 arrays indexed by the test type, and not worry about the order
638db43e 57 of EQ, NE, etc. */
03984308 58
59enum internal_test
60{
61 ITEST_EQ,
62 ITEST_NE,
63 ITEST_GT,
64 ITEST_GE,
65 ITEST_LT,
66 ITEST_LE,
67 ITEST_GTU,
68 ITEST_GEU,
69 ITEST_LTU,
70 ITEST_LEU,
71 ITEST_MAX
72};
73
74/* Cached operands, and operator to compare for use in set/branch on
75 condition codes. */
76rtx branch_cmp[2];
77
78/* what type of branch to use */
79enum cmp_type branch_type;
80
81/* Array giving truth value on whether or not a given hard register
82 can support a given mode. */
83char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
84
85/* Current frame size calculated by compute_frame_size. */
86unsigned xtensa_current_frame_size;
87
a46bbb5a 88/* Largest block move to handle in-line. */
89#define LARGEST_MOVE_RATIO 15
90
91/* Define the structure for the machine field in struct function. */
e2500fed 92struct machine_function GTY(())
93{
94 int accesses_prev_frame;
95 bool need_a7_copy;
96 bool vararg_a7;
0d8442b8 97 rtx vararg_a7_copy;
997b8b4d 98 rtx set_frame_ptr_insn;
99};
100
101/* Vector, indexed by hard register number, which contains 1 for a
102 register that is allowable in a candidate for leaf function
638db43e 103 treatment. */
104
105const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
106{
107 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
108 1, 1, 1,
109 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
110 1
111};
112
113/* Map hard register number to register class */
114const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
115{
116 RL_REGS, SP_REG, RL_REGS, RL_REGS,
117 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
118 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
119 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
120 AR_REGS, AR_REGS, BR_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
125 ACC_REG,
126};
127
128static enum internal_test map_test_to_internal_test (enum rtx_code);
129static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
130static rtx gen_float_relational (enum rtx_code, rtx, rtx);
131static rtx gen_conditional_move (rtx);
132static rtx fixup_subreg_mem (rtx);
ffbc8796 133static struct machine_function * xtensa_init_machine_status (void);
586de218 134static bool xtensa_return_in_msb (const_tree);
135static void printx (FILE *, signed int);
136static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
4c45af42 137static rtx xtensa_builtin_saveregs (void);
138static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
139 int) ATTRIBUTE_UNUSED;
140static section *xtensa_select_rtx_section (enum machine_mode, rtx,
141 unsigned HOST_WIDE_INT);
ffbc8796 142static bool xtensa_rtx_costs (rtx, int, int, int *);
c35d187f 143static tree xtensa_build_builtin_va_list (void);
586de218 144static bool xtensa_return_in_memory (const_tree, const_tree);
85d53c1d 145static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
146static void xtensa_init_builtins (void);
147static tree xtensa_fold_builtin (tree, tree, bool);
148static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
b64a1b53 149
150static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
151 REG_ALLOC_ORDER;
152\f
153
154/* This macro generates the assembly code for function exit,
155 on machines that need it. If FUNCTION_EPILOGUE is not defined
156 then individual return instructions are generated for each
157 return statement. Args are same as for FUNCTION_PROLOGUE. */
158
159#undef TARGET_ASM_FUNCTION_EPILOGUE
160#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
161
162/* These hooks specify assembly directives for creating certain kinds
163 of integer object. */
164
165#undef TARGET_ASM_ALIGNED_SI_OP
166#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
167
168#undef TARGET_ASM_SELECT_RTX_SECTION
169#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
03984308 170
171#undef TARGET_DEFAULT_TARGET_FLAGS
172#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
173
174#undef TARGET_RTX_COSTS
175#define TARGET_RTX_COSTS xtensa_rtx_costs
176#undef TARGET_ADDRESS_COST
177#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 178
179#undef TARGET_BUILD_BUILTIN_VA_LIST
180#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
181
182#undef TARGET_EXPAND_BUILTIN_VA_START
183#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start
184
4c45af42 185#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 186#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
4c45af42 187#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 188#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
4c45af42 189#undef TARGET_PROMOTE_PROTOTYPES
586de218 190#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
4c45af42 191
192#undef TARGET_RETURN_IN_MEMORY
193#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
42ba5130 194#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 195#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
196#undef TARGET_MUST_PASS_IN_STACK
197#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
198
199#undef TARGET_EXPAND_BUILTIN_SAVEREGS
200#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
201#undef TARGET_GIMPLIFY_VA_ARG_EXPR
202#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
4c45af42 203
204#undef TARGET_RETURN_IN_MSB
205#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
206
207#undef TARGET_INIT_BUILTINS
208#define TARGET_INIT_BUILTINS xtensa_init_builtins
209#undef TARGET_FOLD_BUILTIN
210#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
211#undef TARGET_EXPAND_BUILTIN
212#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
213
b64a1b53 214struct gcc_target targetm = TARGET_INITIALIZER;
03984308 215
216\f
217/* Functions to test Xtensa immediate operand validity. */
03984308 218
219bool
220xtensa_simm8 (HOST_WIDE_INT v)
221{
222 return v >= -128 && v <= 127;
223}
224
225
226bool
227xtensa_simm8x256 (HOST_WIDE_INT v)
228{
229 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
230}
231
232
233bool
234xtensa_simm12b (HOST_WIDE_INT v)
235{
236 return v >= -2048 && v <= 2047;
237}
238
239
240static bool
241xtensa_uimm8 (HOST_WIDE_INT v)
242{
243 return v >= 0 && v <= 255;
244}
245
246
247static bool
248xtensa_uimm8x2 (HOST_WIDE_INT v)
249{
250 return (v & 1) == 0 && (v >= 0 && v <= 510);
251}
252
253
254static bool
255xtensa_uimm8x4 (HOST_WIDE_INT v)
256{
257 return (v & 3) == 0 && (v >= 0 && v <= 1020);
258}
259
260
261static bool
262xtensa_b4const (HOST_WIDE_INT v)
263{
264 switch (v)
265 {
266 case -1:
267 case 1:
268 case 2:
269 case 3:
270 case 4:
271 case 5:
272 case 6:
273 case 7:
274 case 8:
275 case 10:
276 case 12:
277 case 16:
278 case 32:
279 case 64:
280 case 128:
281 case 256:
8eb1bc5c 282 return true;
03984308 283 }
8eb1bc5c 284 return false;
285}
286
03984308 287
288bool
289xtensa_b4const_or_zero (HOST_WIDE_INT v)
03984308 290{
291 if (v == 0)
292 return true;
293 return xtensa_b4const (v);
294}
295
03984308 296
297bool
298xtensa_b4constu (HOST_WIDE_INT v)
299{
300 switch (v)
301 {
302 case 32768:
303 case 65536:
304 case 2:
305 case 3:
306 case 4:
307 case 5:
308 case 6:
309 case 7:
310 case 8:
311 case 10:
312 case 12:
313 case 16:
314 case 32:
315 case 64:
316 case 128:
317 case 256:
8eb1bc5c 318 return true;
03984308 319 }
8eb1bc5c 320 return false;
321}
322
03984308 323
324bool
325xtensa_mask_immediate (HOST_WIDE_INT v)
03984308 326{
327#define MAX_MASK_SIZE 16
328 int mask_size;
03984308 329
330 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
331 {
332 if ((v & 1) == 0)
333 return false;
334 v = v >> 1;
335 if (v == 0)
336 return true;
337 }
03984308 338
8eb1bc5c 339 return false;
340}
341
03984308 342
03984308 343/* This is just like the standard true_regnum() function except that it
638db43e 344 works even when reg_renumber is not initialized. */
345
346int
ffbc8796 347xt_true_regnum (rtx x)
348{
349 if (GET_CODE (x) == REG)
350 {
351 if (reg_renumber
352 && REGNO (x) >= FIRST_PSEUDO_REGISTER
353 && reg_renumber[REGNO (x)] >= 0)
354 return reg_renumber[REGNO (x)];
355 return REGNO (x);
356 }
357 if (GET_CODE (x) == SUBREG)
358 {
359 int base = xt_true_regnum (SUBREG_REG (x));
360 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
361 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
362 GET_MODE (SUBREG_REG (x)),
363 SUBREG_BYTE (x), GET_MODE (x));
364 }
365 return -1;
366}
367
368
03984308 369int
ffbc8796 370xtensa_valid_move (enum machine_mode mode, rtx *operands)
03984308 371{
372 /* Either the destination or source must be a register, and the
373 MAC16 accumulator doesn't count. */
374
375 if (register_operand (operands[0], mode))
376 {
377 int dst_regnum = xt_true_regnum (operands[0]);
378
638db43e 379 /* The stack pointer can only be assigned with a MOVSP opcode. */
380 if (dst_regnum == STACK_POINTER_REGNUM)
381 return (mode == SImode
382 && register_operand (operands[1], mode)
383 && !ACC_REG_P (xt_true_regnum (operands[1])));
384
385 if (!ACC_REG_P (dst_regnum))
386 return true;
387 }
3437320b 388 if (register_operand (operands[1], mode))
389 {
390 int src_regnum = xt_true_regnum (operands[1]);
391 if (!ACC_REG_P (src_regnum))
392 return true;
393 }
394 return FALSE;
395}
396
397
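/* Return nonzero if OP is a MEM whose address is a base register, or a
   base register plus a word-aligned constant offset in the range 0..60
   (small enough for the narrow load/store encodings). */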
03984308 398int
ffbc8796 399smalloffset_mem_p (rtx op)
400{
401 if (GET_CODE (op) == MEM)
402 {
403 rtx addr = XEXP (op, 0);
404 if (GET_CODE (addr) == REG)
da1f39e4 405 return BASE_REG_P (addr, 0);
406 if (GET_CODE (addr) == PLUS)
407 {
408 rtx offset = XEXP (addr, 0);
8eb1bc5c 409 HOST_WIDE_INT val;
410 if (GET_CODE (offset) != CONST_INT)
411 offset = XEXP (addr, 1);
412 if (GET_CODE (offset) != CONST_INT)
413 return FALSE;
414
415 val = INTVAL (offset);
416 return (val & 3) == 0 && (val >= 0 && val <= 60);
417 }
418 }
419 return FALSE;
420}
421
422
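/* Return nonzero if ADDR refers to an entry in the constant pool, either
   a SYMBOL_REF directly or one offset by a word-aligned constant. */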
03984308 423int
ffbc8796 424constantpool_address_p (rtx addr)
425{
426 rtx sym = addr;
427
428 if (GET_CODE (addr) == CONST)
429 {
430 rtx offset;
431
3bbc2af6 432 /* Only handle (PLUS (SYM, OFFSET)) form. */
433 addr = XEXP (addr, 0);
434 if (GET_CODE (addr) != PLUS)
435 return FALSE;
436
3bbc2af6 437 /* Make sure the address is word aligned. */
438 offset = XEXP (addr, 1);
439 if ((GET_CODE (offset) != CONST_INT)
440 || ((INTVAL (offset) & 3) != 0))
441 return FALSE;
442
443 sym = XEXP (addr, 0);
444 }
445
446 if ((GET_CODE (sym) == SYMBOL_REF)
447 && CONSTANT_POOL_ADDRESS_P (sym))
448 return TRUE;
449 return FALSE;
450}
451
452
453int
ffbc8796 454constantpool_mem_p (rtx op)
03984308 455{
456 if (GET_CODE (op) == SUBREG)
457 op = SUBREG_REG (op);
458 if (GET_CODE (op) == MEM)
459 return constantpool_address_p (XEXP (op, 0));
460 return FALSE;
461}
462
463
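/* Sign-extend SRC into DST by shifting it to the most-significant end of
   a word and then arithmetically shifting it back right. */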
03984308 464void
ffbc8796 465xtensa_extend_reg (rtx dst, rtx src)
466{
467 rtx temp = gen_reg_rtx (SImode);
468 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
469
3bbc2af6 470 /* Generate paradoxical subregs as needed so that the modes match. */
471 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
472 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
473
474 emit_insn (gen_ashlsi3 (temp, src, shift));
475 emit_insn (gen_ashrsi3 (dst, temp, shift));
476}
477
478
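/* Return true if constant offset V is in range for a load or store of MODE. */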
8eb1bc5c 479bool
ffbc8796 480xtensa_mem_offset (unsigned v, enum machine_mode mode)
481{
482 switch (mode)
483 {
484 case BLKmode:
485 /* Handle the worst case for block moves. See xtensa_expand_block_move
486 where we emit an optimized block move operation if the block can be
487 moved in < "move_ratio" pieces. The worst case is when the block is
488 aligned but has a size of (3 mod 4) (does this happen?) so that the
638db43e 489 last piece requires a byte load/store. */
490 return (xtensa_uimm8 (v)
491 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
492
493 case QImode:
494 return xtensa_uimm8 (v);
495
496 case HImode:
497 return xtensa_uimm8x2 (v);
498
499 case DFmode:
500 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
501
502 default:
503 break;
504 }
505
506 return xtensa_uimm8x4 (v);
507}
508
509
ffbc8796 510/* Make normal rtx_code into something we can index from an array. */
511
512static enum internal_test
ffbc8796 513map_test_to_internal_test (enum rtx_code test_code)
514{
515 enum internal_test test = ITEST_MAX;
516
517 switch (test_code)
518 {
519 default: break;
520 case EQ: test = ITEST_EQ; break;
521 case NE: test = ITEST_NE; break;
522 case GT: test = ITEST_GT; break;
523 case GE: test = ITEST_GE; break;
524 case LT: test = ITEST_LT; break;
525 case LE: test = ITEST_LE; break;
526 case GTU: test = ITEST_GTU; break;
527 case GEU: test = ITEST_GEU; break;
528 case LTU: test = ITEST_LTU; break;
529 case LEU: test = ITEST_LEU; break;
530 }
531
532 return test;
533}
534
535
536/* Generate the code to compare two integer values. The return value is
638db43e 537 the comparison expression. */
538
539static rtx
540gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
541 rtx cmp0, /* first operand to compare */
542 rtx cmp1, /* second operand to compare */
543 int *p_invert /* whether branch needs to reverse test */)
03984308 544{
545 struct cmp_info
546 {
03984308 547 enum rtx_code test_code; /* test code to use in insn */
8eb1bc5c 548 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
549 int const_add; /* constant to add (convert LE -> LT) */
550 int reverse_regs; /* reverse registers in test */
551 int invert_const; /* != 0 if invert value if cmp1 is constant */
552 int invert_reg; /* != 0 if invert value if cmp1 is register */
553 int unsignedp; /* != 0 for unsigned comparisons. */
554 };
555
556 static struct cmp_info info[ (int)ITEST_MAX ] = {
557
558 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
559 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
03984308 560
561 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
562 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
563 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
564 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
565
566 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
567 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
568 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
569 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
570 };
571
572 enum internal_test test;
573 enum machine_mode mode;
574 struct cmp_info *p_info;
575
576 test = map_test_to_internal_test (test_code);
177b6be0 577 gcc_assert (test != ITEST_MAX);
578
579 p_info = &info[ (int)test ];
580
581 mode = GET_MODE (cmp0);
582 if (mode == VOIDmode)
583 mode = GET_MODE (cmp1);
584
585 /* Make sure we can handle any constants given to us. */
586 if (GET_CODE (cmp1) == CONST_INT)
587 {
588 HOST_WIDE_INT value = INTVAL (cmp1);
589 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
590
591 /* if the immediate overflows or does not fit in the immediate field,
592 spill it to a register */
593
594 if ((p_info->unsignedp ?
595 (uvalue + p_info->const_add > uvalue) :
596 (value + p_info->const_add > value)) != (p_info->const_add > 0))
597 {
598 cmp1 = force_reg (mode, cmp1);
599 }
600 else if (!(p_info->const_range_p) (value + p_info->const_add))
601 {
602 cmp1 = force_reg (mode, cmp1);
603 }
604 }
605 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
606 {
607 cmp1 = force_reg (mode, cmp1);
608 }
609
610 /* See if we need to invert the result. */
611 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
612 ? p_info->invert_const
613 : p_info->invert_reg);
614
615 /* Comparison to constants, may involve adding 1 to change a LT into LE.
616 Comparison between two registers, may involve switching operands. */
617 if (GET_CODE (cmp1) == CONST_INT)
618 {
619 if (p_info->const_add != 0)
620 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
621
622 }
623 else if (p_info->reverse_regs)
624 {
625 rtx temp = cmp0;
626 cmp0 = cmp1;
627 cmp1 = temp;
628 }
629
1c563bed 630 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
631}
632
633
634/* Generate the code to compare two float values. The return value is
638db43e 635 the comparison expression. */
636
637static rtx
638gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
639 rtx cmp0, /* first operand to compare */
640 rtx cmp1 /* second operand to compare */)
03984308 641{
ffbc8796 642 rtx (*gen_fn) (rtx, rtx, rtx);
643 rtx brtmp;
644 int reverse_regs, invert;
645
646 switch (test_code)
647 {
648 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
649 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
650 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
651 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
652 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
653 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
633e4eb4 654 default:
1c563bed 655 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
656 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
657 }
658
659 if (reverse_regs)
660 {
661 rtx temp = cmp0;
662 cmp0 = cmp1;
663 cmp1 = temp;
664 }
665
666 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
667 emit_insn (gen_fn (brtmp, cmp0, cmp1));
668
1c563bed 669 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
670}
671
672
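/* Expand a conditional branch to the label in OPERANDS[0], using the
   comparison operands cached in branch_cmp[] and branch_type. */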
673void
ffbc8796 674xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
675{
676 enum cmp_type type = branch_type;
677 rtx cmp0 = branch_cmp[0];
678 rtx cmp1 = branch_cmp[1];
679 rtx cmp;
680 int invert;
681 rtx label1, label2;
682
683 switch (type)
684 {
685 case CMP_DF:
686 default:
1c563bed 687 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
688
689 case CMP_SI:
690 invert = FALSE;
691 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
692 break;
693
694 case CMP_SF:
695 if (!TARGET_HARD_FLOAT)
696 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
697 cmp0, cmp1));
698 invert = FALSE;
699 cmp = gen_float_relational (test_code, cmp0, cmp1);
700 break;
701 }
702
703 /* Generate the branch. */
704
705 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
706 label2 = pc_rtx;
707
708 if (invert)
709 {
710 label2 = label1;
711 label1 = pc_rtx;
712 }
713
714 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
715 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
716 label1,
717 label2)));
718}
719
720
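/* Convert the cached comparison into a form usable by the conditional
   move expanders, or return 0 if it cannot be handled. */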
721static rtx
ffbc8796 722gen_conditional_move (rtx cmp)
723{
724 enum rtx_code code = GET_CODE (cmp);
725 rtx op0 = branch_cmp[0];
726 rtx op1 = branch_cmp[1];
727
728 if (branch_type == CMP_SI)
729 {
730 /* Jump optimization calls get_condition() which canonicalizes
731 comparisons like (GE x <const>) to (GT x <const-1>).
732 Transform those comparisons back to GE, since that is the
733 comparison supported in Xtensa. We shouldn't have to
734 transform <LE x const> comparisons, because neither
735 xtensa_expand_conditional_branch() nor get_condition() will
638db43e 736 produce them. */
737
738 if ((code == GT) && (op1 == constm1_rtx))
739 {
740 code = GE;
741 op1 = const0_rtx;
742 }
1c563bed 743 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
744
745 if (boolean_operator (cmp, VOIDmode))
746 {
3bbc2af6 747 /* Swap the operands to make const0 second. */
748 if (op0 == const0_rtx)
749 {
750 op0 = op1;
751 op1 = const0_rtx;
752 }
753
3bbc2af6 754 /* If not comparing against zero, emit a comparison (subtract). */
755 if (op1 != const0_rtx)
756 {
757 op0 = expand_binop (SImode, sub_optab, op0, op1,
758 0, 0, OPTAB_LIB_WIDEN);
759 op1 = const0_rtx;
760 }
761 }
762 else if (branch_operator (cmp, VOIDmode))
763 {
3bbc2af6 764 /* Swap the operands to make const0 second. */
765 if (op0 == const0_rtx)
766 {
767 op0 = op1;
768 op1 = const0_rtx;
769
770 switch (code)
771 {
772 case LT: code = GE; break;
773 case GE: code = LT; break;
177b6be0 774 default: gcc_unreachable ();
775 }
776 }
777
778 if (op1 != const0_rtx)
779 return 0;
780 }
781 else
782 return 0;
783
1c563bed 784 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
785 }
786
787 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
788 return gen_float_relational (code, op0, op1);
789
790 return 0;
791}
792
793
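/* Expand a conditional move: OPERANDS[1] is the comparison, and
   OPERANDS[2]/OPERANDS[3] are the two source values; ISFLT is nonzero
   for a floating-point destination.  Return 1 on success, or 0 to let
   the caller fall back to other code. */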
794int
ffbc8796 795xtensa_expand_conditional_move (rtx *operands, int isflt)
796{
797 rtx cmp;
ffbc8796 798 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
799
800 if (!(cmp = gen_conditional_move (operands[1])))
801 return 0;
802
803 if (isflt)
804 gen_fn = (branch_type == CMP_SI
805 ? gen_movsfcc_internal0
806 : gen_movsfcc_internal1);
807 else
808 gen_fn = (branch_type == CMP_SI
809 ? gen_movsicc_internal0
810 : gen_movsicc_internal1);
811
812 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
813 operands[2], operands[3], cmp));
814 return 1;
815}
816
817
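/* Expand a "set on condition" operation: store 1 or 0 into OPERANDS[0]
   according to the comparison in OPERANDS[1].  Return 1 on success. */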
818int
ffbc8796 819xtensa_expand_scc (rtx *operands)
820{
821 rtx dest = operands[0];
822 rtx cmp = operands[1];
823 rtx one_tmp, zero_tmp;
ffbc8796 824 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
825
826 if (!(cmp = gen_conditional_move (cmp)))
827 return 0;
828
829 one_tmp = gen_reg_rtx (SImode);
830 zero_tmp = gen_reg_rtx (SImode);
831 emit_insn (gen_movsi (one_tmp, const_true_rtx));
832 emit_insn (gen_movsi (zero_tmp, const0_rtx));
833
834 gen_fn = (branch_type == CMP_SI
835 ? gen_movsicc_internal0
836 : gen_movsicc_internal1);
837 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
838 return 1;
839}
840
841
842/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
843 for the output, i.e., the input operands are twice as big as MODE. */
844
845void
ffbc8796 846xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
847{
848 switch (GET_CODE (operands[1]))
849 {
850 case REG:
851 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
852 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
853 break;
854
855 case MEM:
856 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
857 operands[2] = adjust_address (operands[1], mode, 0);
858 break;
859
860 case CONST_INT:
861 case CONST_DOUBLE:
862 split_double (operands[1], &operands[2], &operands[3]);
863 break;
864
865 default:
177b6be0 866 gcc_unreachable ();
867 }
868
869 switch (GET_CODE (operands[0]))
870 {
871 case REG:
872 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
873 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
874 break;
875
876 case MEM:
877 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
878 operands[0] = adjust_address (operands[0], mode, 0);
879 break;
880
881 default:
177b6be0 882 gcc_unreachable ();
883 }
884}
885
886
03984308 887/* Emit insns to move operands[1] into operands[0].
888 Return 1 if we have written out everything that needs to be done to
889 do the move. Otherwise, return 0 and the caller will emit the move
890 normally. */
891
892int
ffbc8796 893xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
894{
895 if (CONSTANT_P (operands[1])
896 && (GET_CODE (operands[1]) != CONST_INT
897 || !xtensa_simm12b (INTVAL (operands[1]))))
898 {
899 if (!TARGET_CONST16)
900 operands[1] = force_const_mem (SImode, operands[1]);
901
902 /* PC-relative loads are always SImode, and CONST16 is only
903 supported in the movsi pattern, so add a SUBREG for any other
904 (smaller) mode. */
905
906 if (mode != SImode)
907 {
908 if (register_operand (operands[0], mode))
909 {
910 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
911 emit_move_insn (operands[0], operands[1]);
912 return 1;
913 }
914 else
915 {
916 operands[1] = force_reg (SImode, operands[1]);
917 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
918 }
919 }
920 }
921
922 if (!(reload_in_progress | reload_completed)
923 && !xtensa_valid_move (mode, operands))
924 operands[1] = force_reg (mode, operands[1]);
03984308 925
997b8b4d 926 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
927
928 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
929 instruction won't be recognized after reload, so we remove the
930 subreg and adjust mem accordingly. */
931 if (reload_in_progress)
932 {
933 operands[0] = fixup_subreg_mem (operands[0]);
934 operands[1] = fixup_subreg_mem (operands[1]);
935 }
936 return 0;
937}
938
f42f5a1b 939
03984308 940static rtx
ffbc8796 941fixup_subreg_mem (rtx x)
942{
943 if (GET_CODE (x) == SUBREG
944 && GET_CODE (SUBREG_REG (x)) == REG
945 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
946 {
947 rtx temp =
948 gen_rtx_SUBREG (GET_MODE (x),
949 reg_equiv_mem [REGNO (SUBREG_REG (x))],
950 SUBREG_BYTE (x));
951 x = alter_subreg (&temp);
952 }
953 return x;
954}
955
956
957/* Check if an incoming argument in a7 is expected to be used soon and
958 if OPND is a register or register pair that includes a7. If so,
959 create a new pseudo and copy a7 into that pseudo at the very
960 beginning of the function, followed by the special "set_frame_ptr"
961 unspec_volatile insn. The return value is either the original
962 operand, if it is not a7, or the new pseudo containing a copy of
963 the incoming argument. This is necessary because the register
964 allocator will ignore conflicts with a7 and may either assign some
965 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
966 the incoming argument in a7. By copying the argument out of a7 as
967 the very first thing, and then immediately following that with an
968 unspec_volatile to keep the scheduler away, we should avoid any
969 problems. Putting the set_frame_ptr insn at the beginning, with
970 only the a7 copy before it, also makes it easier for the prologue
971 expander to initialize the frame pointer after the a7 copy and to
972 fix up the a7 copy to use the stack pointer instead of the frame
973 pointer. */
58db834b 974
975rtx
976xtensa_copy_incoming_a7 (rtx opnd)
58db834b 977{
978 rtx entry_insns = 0;
979 rtx reg, tmp;
980 enum machine_mode mode;
981
982 if (!cfun->machine->need_a7_copy)
983 return opnd;
984
985 /* This function should never be called again once a7 has been copied. */
177b6be0 986 gcc_assert (!cfun->machine->set_frame_ptr_insn);
987
988 mode = GET_MODE (opnd);
989
990 /* The operand using a7 may come in a later instruction, so just return
991 the original operand if it doesn't use a7. */
992 reg = opnd;
993 if (GET_CODE (reg) == SUBREG)
58db834b 994 {
177b6be0 995 gcc_assert (SUBREG_BYTE (reg) == 0);
996 reg = SUBREG_REG (reg);
997 }
998 if (GET_CODE (reg) != REG
999 || REGNO (reg) > A7_REG
1000 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1001 return opnd;
e6aecf8e 1002
997b8b4d 1003 /* 1-word args will always be in a7; 2-word args in a6/a7. */
177b6be0 1004 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
58db834b 1005
997b8b4d 1006 cfun->machine->need_a7_copy = false;
58db834b 1007
1008 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1009 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
58db834b 1010
0d8442b8 1011 start_sequence ();
997b8b4d 1012 tmp = gen_reg_rtx (mode);
58db834b 1013
1014 switch (mode)
1015 {
1016 case DFmode:
1017 case DImode:
1018 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1019 gen_rtx_REG (SImode, A7_REG - 1)));
1020 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1021 gen_raw_REG (SImode, A7_REG)));
1022 break;
1023 case SFmode:
1024 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1025 break;
1026 case SImode:
1027 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1028 break;
1029 case HImode:
1030 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1031 break;
1032 case QImode:
1033 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1034 break;
1035 default:
177b6be0 1036 gcc_unreachable ();
1037 }
1038
1039 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1040 entry_insns = get_insns ();
1041 end_sequence ();
1042
1043 if (cfun->machine->vararg_a7)
1044 {
1045 /* This is called from within builtin_saveregs, which will insert the
1046 saveregs code at the function entry, ahead of anything placed at
1047 the function entry now. Instead, save the sequence to be inserted
1048 at the beginning of the saveregs code. */
1049 cfun->machine->vararg_a7_copy = entry_insns;
1050 }
1051 else
1052 {
1053 /* Put entry_insns after the NOTE that starts the function. If
1054 this is inside a start_sequence, make the outer-level insn
1055 chain current, so the code is placed at the start of the
1056 function. */
1057 push_topmost_sequence ();
1058 /* Do not use entry_of_function() here. This is called from within
1059 expand_function_start, when the CFG still holds GIMPLE. */
1060 emit_insn_after (entry_insns, get_insns ());
1061 pop_topmost_sequence ();
1062 }
1063
1064 return tmp;
1065}
1066
1067
1068/* Try to expand a block move operation to a sequence of RTL move
1069 instructions. If not optimizing, or if the block size is not a
1070 constant, or if the block is too large, the expansion fails and GCC
1071 falls back to calling memcpy().
1072
1073 operands[0] is the destination
1074 operands[1] is the source
1075 operands[2] is the length
1076 operands[3] is the alignment */
1077
1078int
ffbc8796 1079xtensa_expand_block_move (rtx *operands)
03984308 1080{
1081 static const enum machine_mode mode_from_align[] =
1082 {
1083 VOIDmode, QImode, HImode, VOIDmode, SImode,
1084 };
1085
1086 rtx dst_mem = operands[0];
1087 rtx src_mem = operands[1];
1088 HOST_WIDE_INT bytes, align;
03984308 1089 int num_pieces, move_ratio;
1090 rtx temp[2];
1091 enum machine_mode mode[2];
1092 int amount[2];
1093 bool active[2];
1094 int phase = 0;
1095 int next;
1096 int offset_ld = 0;
1097 int offset_st = 0;
1098 rtx x;
03984308 1099
3bbc2af6 1100 /* If this is not a fixed size move, just call memcpy. */
1101 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1102 return 0;
1103
1104 bytes = INTVAL (operands[2]);
1105 align = INTVAL (operands[3]);
1106
3bbc2af6 1107 /* Anything to move? */
03984308 1108 if (bytes <= 0)
7eda7cda 1109 return 0;
1110
1111 if (align > MOVE_MAX)
1112 align = MOVE_MAX;
1113
3bbc2af6 1114 /* Decide whether to expand inline based on the optimization level. */
1115 move_ratio = 4;
1116 if (optimize > 2)
1117 move_ratio = LARGEST_MOVE_RATIO;
3bbc2af6 1118 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
7eda7cda 1119 if (num_pieces > move_ratio)
1120 return 0;
1121
1122 x = XEXP (dst_mem, 0);
1123 if (!REG_P (x))
1124 {
1125 x = force_reg (Pmode, x);
1126 dst_mem = replace_equiv_address (dst_mem, x);
1127 }
03984308 1128
1129 x = XEXP (src_mem, 0);
1130 if (!REG_P (x))
1131 {
1132 x = force_reg (Pmode, x);
1133 src_mem = replace_equiv_address (src_mem, x);
1134 }
03984308 1135
7eda7cda 1136 active[0] = active[1] = false;
03984308 1137
7eda7cda 1138 do
03984308 1139 {
1140 next = phase;
1141 phase ^= 1;
03984308 1142
7eda7cda 1143 if (bytes > 0)
03984308 1144 {
7eda7cda 1145 int next_amount;
03984308 1146
1147 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1148 next_amount = MIN (next_amount, align);
03984308 1149
1150 amount[next] = next_amount;
1151 mode[next] = mode_from_align[next_amount];
1152 temp[next] = gen_reg_rtx (mode[next]);
03984308 1153
1154 x = adjust_address (src_mem, mode[next], offset_ld);
1155 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
03984308 1156
1157 offset_ld += next_amount;
1158 bytes -= next_amount;
1159 active[next] = true;
1160 }
03984308 1161
1162 if (active[phase])
1163 {
1164 active[phase] = false;
1165
1166 x = adjust_address (dst_mem, mode[phase], offset_st);
1167 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
03984308 1168
1169 offset_st += amount[phase];
1170 }
03984308 1171 }
7eda7cda 1172 while (active[next]);
03984308 1173
7eda7cda 1174 return 1;
1175}
1176
1177
1178void
ffbc8796 1179xtensa_expand_nonlocal_goto (rtx *operands)
1180{
1181 rtx goto_handler = operands[1];
1182 rtx containing_fp = operands[3];
1183
1184 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1185 is too big to generate in-line. */
1186
1187 if (GET_CODE (containing_fp) != REG)
1188 containing_fp = force_reg (Pmode, containing_fp);
1189
1190 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1191 0, VOIDmode, 2,
1192 containing_fp, Pmode,
1193 goto_handler, Pmode);
1194}
1195
1196
e2500fed 1197static struct machine_function *
ffbc8796 1198xtensa_init_machine_status (void)
03984308 1199{
e2500fed 1200 return ggc_alloc_cleared (sizeof (struct machine_function));
1201}
1202
1203
1204/* Shift VAL of mode MODE left by COUNT bits. */
1205
1206static inline rtx
1207xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
1208{
1209 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1210 NULL_RTX, 1, OPTAB_DIRECT);
1211 return expand_simple_binop (SImode, ASHIFT, val, count,
1212 NULL_RTX, 1, OPTAB_DIRECT);
1213}
1214
1215
1216/* Structure to hold the initial parameters for a compare_and_swap operation
1217 in HImode and QImode. */
1218
1219struct alignment_context
1220{
1221 rtx memsi; /* SI aligned memory location. */
1222 rtx shift; /* Bit offset with regard to lsb. */
1223 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
1224 rtx modemaski; /* ~modemask */
1225};
1226
1227
1228/* Initialize structure AC for word access to HI and QI mode memory. */
1229
1230static void
1231init_alignment_context (struct alignment_context *ac, rtx mem)
1232{
1233 enum machine_mode mode = GET_MODE (mem);
1234 rtx byteoffset = NULL_RTX;
1235 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1236
1237 if (aligned)
1238 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1239 else
1240 {
1241 /* Alignment is unknown. */
1242 rtx addr, align;
1243
1244 /* Force the address into a register. */
1245 addr = force_reg (Pmode, XEXP (mem, 0));
1246
1247 /* Align it to SImode. */
1248 align = expand_simple_binop (Pmode, AND, addr,
1249 GEN_INT (-GET_MODE_SIZE (SImode)),
1250 NULL_RTX, 1, OPTAB_DIRECT);
1251 /* Generate MEM. */
1252 ac->memsi = gen_rtx_MEM (SImode, align);
1253 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
1254 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
1255 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
1256
1257 byteoffset = expand_simple_binop (Pmode, AND, addr,
1258 GEN_INT (GET_MODE_SIZE (SImode) - 1),
1259 NULL_RTX, 1, OPTAB_DIRECT);
1260 }
1261
1262 /* Calculate shiftcount. */
1263 if (TARGET_BIG_ENDIAN)
1264 {
1265 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
1266 if (!aligned)
1267 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
1268 NULL_RTX, 1, OPTAB_DIRECT);
1269 }
1270 else
1271 {
1272 if (aligned)
1273 ac->shift = NULL_RTX;
1274 else
1275 ac->shift = byteoffset;
1276 }
1277
1278 if (ac->shift != NULL_RTX)
1279 {
1280 /* Shift is the byte count, but we need the bitcount. */
1281 ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
1282 GEN_INT (BITS_PER_UNIT),
1283 NULL_RTX, 1, OPTAB_DIRECT);
1284 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1285 GEN_INT (GET_MODE_MASK (mode)),
1286 ac->shift,
1287 NULL_RTX, 1, OPTAB_DIRECT);
1288 }
1289 else
1290 ac->modemask = GEN_INT (GET_MODE_MASK (mode));
1291
1292 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
1293}
1294
1295
1296/* Expand an atomic compare and swap operation for HImode and QImode.
1297 MEM is the memory location, CMP the old value to compare MEM with
1298 and NEW the value to set if CMP == MEM. */
1299
1300void
1301xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new)
1302{
1303 enum machine_mode mode = GET_MODE (mem);
1304 struct alignment_context ac;
1305 rtx tmp, cmpv, newv, val;
1306 rtx oldval = gen_reg_rtx (SImode);
1307 rtx res = gen_reg_rtx (SImode);
1308 rtx csloop = gen_label_rtx ();
1309 rtx csend = gen_label_rtx ();
1310
1311 init_alignment_context (&ac, mem);
1312
1313 if (ac.shift != NULL_RTX)
1314 {
1315 cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
1316 new = xtensa_expand_mask_and_shift (new, mode, ac.shift);
1317 }
1318
1319 /* Load the surrounding word into VAL with the MEM value masked out. */
1320 val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
1321 ac.modemaski, NULL_RTX, 1,
1322 OPTAB_DIRECT));
1323 emit_label (csloop);
1324
1325 /* Patch CMP and NEW into VAL at correct position. */
1326 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
1327 NULL_RTX, 1, OPTAB_DIRECT));
1328 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
1329 NULL_RTX, 1, OPTAB_DIRECT));
1330
1331 /* Jump to end if we're done. */
1332 emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
1333 emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);
1334
1335 /* Check for changes outside mode. */
1336 emit_move_insn (oldval, val);
1337 tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
1338 val, 1, OPTAB_DIRECT);
1339 if (tmp != val)
1340 emit_move_insn (val, tmp);
1341
1342 /* Loop internal if so. */
1343 emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);
1344
1345 emit_label (csend);
1346
1347 /* Return the correct part of the bitfield. */
1348 convert_move (target,
1349 (ac.shift == NULL_RTX ? res
1350 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
1351 NULL_RTX, 1, OPTAB_DIRECT)),
1352 1);
1353}
1354
1355
1356/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
1357 the default expansion works fine for SImode). MEM is the memory location
1358 and VAL the value to play with. If AFTER is true then store the value
1359 MEM holds after the operation, if AFTER is false then store the value MEM
1360 holds before the operation. If TARGET is zero then discard that value, else
1361 store it to TARGET. */
1362
1363void
1364xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1365 bool after)
1366{
1367 enum machine_mode mode = GET_MODE (mem);
1368 struct alignment_context ac;
1369 rtx csloop = gen_label_rtx ();
1370 rtx cmp, tmp;
1371 rtx old = gen_reg_rtx (SImode);
1372 rtx new = gen_reg_rtx (SImode);
1373 rtx orig = NULL_RTX;
1374
1375 init_alignment_context (&ac, mem);
1376
1377 /* Prepare values before the compare-and-swap loop. */
1378 if (ac.shift != NULL_RTX)
1379 val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
1380 switch (code)
1381 {
1382 case PLUS:
1383 case MINUS:
1384 orig = gen_reg_rtx (SImode);
1385 convert_move (orig, val, 1);
1386 break;
1387
1388 case SET:
1389 case IOR:
1390 case XOR:
1391 break;
1392
1393 case MULT: /* NAND */
1394 case AND:
1395 /* val = "11..1<val>11..1" */
1396 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
1397 NULL_RTX, 1, OPTAB_DIRECT);
1398 break;
1399
1400 default:
1401 gcc_unreachable ();
1402 }
1403
1404 /* Load full word. Subsequent loads are performed by S32C1I. */
1405 cmp = force_reg (SImode, ac.memsi);
1406
1407 emit_label (csloop);
1408 emit_move_insn (old, cmp);
1409
1410 switch (code)
1411 {
1412 case PLUS:
1413 case MINUS:
1414 val = expand_simple_binop (SImode, code, old, orig,
1415 NULL_RTX, 1, OPTAB_DIRECT);
1416 val = expand_simple_binop (SImode, AND, val, ac.modemask,
1417 NULL_RTX, 1, OPTAB_DIRECT);
1418 /* FALLTHRU */
1419 case SET:
1420 tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
1421 NULL_RTX, 1, OPTAB_DIRECT);
1422 tmp = expand_simple_binop (SImode, IOR, tmp, val,
1423 new, 1, OPTAB_DIRECT);
1424 break;
1425
1426 case AND:
1427 case IOR:
1428 case XOR:
1429 tmp = expand_simple_binop (SImode, code, old, val,
1430 new, 1, OPTAB_DIRECT);
1431 break;
1432
1433 case MULT: /* NAND */
1434 tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
1435 NULL_RTX, 1, OPTAB_DIRECT);
1436 tmp = expand_simple_binop (SImode, AND, tmp, val,
1437 new, 1, OPTAB_DIRECT);
1438 break;
1439
1440 default:
1441 gcc_unreachable ();
1442 }
1443
1444 if (tmp != new)
1445 emit_move_insn (new, tmp);
1446 emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new));
1447 emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);
1448
1449 if (target)
1450 {
1451 tmp = (after ? new : cmp);
1452 convert_move (target,
1453 (ac.shift == NULL_RTX ? tmp
1454 : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
1455 NULL_RTX, 1, OPTAB_DIRECT)),
1456 1);
1457 }
1458}
1459
1460
03984308 1461void
ffbc8796 1462xtensa_setup_frame_addresses (void)
03984308 1463{
638db43e 1464 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1465 cfun->machine->accesses_prev_frame = 1;
1466
1467 emit_library_call
1468 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1469 0, VOIDmode, 0);
1470}
1471
1472
1473/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1474 a comment showing where the end of the loop is. However, if there is a
03984308 1475 label or a branch at the end of the loop then we need to place a nop
638db43e 1476 there. If the loop ends with a label we need the nop so that branches
1477 targeting that label will target the nop (and thus remain in the loop),
1478 instead of targeting the instruction after the loop (and thus exiting
638db43e 1479 the loop). If the loop ends with a branch, we need the nop in case the
839a4992 1480 branch is targeting a location inside the loop. When the branch
1481 executes it will cause the loop count to be decremented even if it is
1482 taken (because it is the last instruction in the loop), so we need to
1483 nop after the branch to prevent the loop count from being decremented
638db43e 1484 when the branch is taken. */
1485
1486void
ffbc8796 1487xtensa_emit_loop_end (rtx insn, rtx *operands)
1488{
1489 char done = 0;
1490
1491 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1492 {
1493 switch (GET_CODE (insn))
1494 {
1495 case NOTE:
1496 case BARRIER:
1497 break;
1498
1499 case CODE_LABEL:
0bd0703d 1500 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1501 done = 1;
1502 break;
1503
1504 default:
1505 {
1506 rtx body = PATTERN (insn);
1507
1508 if (GET_CODE (body) == JUMP_INSN)
1509 {
0bd0703d 1510 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1511 done = 1;
1512 }
1513 else if ((GET_CODE (body) != USE)
1514 && (GET_CODE (body) != CLOBBER))
1515 done = 1;
1516 }
1517 break;
1518 }
1519 }
1520
1521 output_asm_insn ("# loop end for %0", operands);
1522}
1523
1524
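/* Output a conditional branch instruction.  INVERTED reverses the sense
   of the test; IMMED is true when the second operand is a constant,
   selecting the immediate (or compare-with-zero) branch forms. */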
1525char *
1526xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1527{
1528 static char result[64];
1529 enum rtx_code code;
1530 const char *op;
1531
1532 code = GET_CODE (operands[3]);
1533 switch (code)
1534 {
1535 case EQ: op = inverted ? "ne" : "eq"; break;
1536 case NE: op = inverted ? "eq" : "ne"; break;
1537 case LT: op = inverted ? "ge" : "lt"; break;
1538 case GE: op = inverted ? "lt" : "ge"; break;
1539 case LTU: op = inverted ? "geu" : "ltu"; break;
1540 case GEU: op = inverted ? "ltu" : "geu"; break;
1541 default: gcc_unreachable ();
1542 }
1543
1544 if (immed)
1545 {
1546 if (INTVAL (operands[1]) == 0)
1547 sprintf (result, "b%sz%s\t%%0, %%2", op,
1548 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1549 else
1550 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1551 }
1552 else
1553 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1554
1555 return result;
1556}
1557
1558
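/* Output a branch-on-bit instruction (BBSI/BBCI for an immediate bit
   number, BBS/BBC for a bit number in a register).  INVERTED reverses
   the sense of the test. */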
1559char *
1560xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1561{
1562 static char result[64];
1563 const char *op;
1564
1565 switch (GET_CODE (operands[3]))
1566 {
1567 case EQ: op = inverted ? "bs" : "bc"; break;
1568 case NE: op = inverted ? "bc" : "bs"; break;
1569 default: gcc_unreachable ();
1570 }
1571
1572 if (immed)
1573 {
1574 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1575 operands[1] = GEN_INT (bitnum);
1576 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1577 }
1578 else
1579 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1580
1581 return result;
1582}
1583
1584
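/* Output a conditional move: MOVT/MOVF when testing a boolean register,
   otherwise MOVEQZ/MOVNEZ/MOVLTZ/MOVGEZ, with a ".s" suffix when the
   destination is a floating-point register. */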
1585char *
1586xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1587{
1588 static char result[64];
1589 enum rtx_code code;
1590 const char *op;
1591
1592 code = GET_CODE (operands[4]);
1593 if (isbool)
1594 {
1595 switch (code)
1596 {
1597 case EQ: op = inverted ? "t" : "f"; break;
1598 case NE: op = inverted ? "f" : "t"; break;
1599 default: gcc_unreachable ();
1600 }
1601 }
1602 else
1603 {
1604 switch (code)
1605 {
1606 case EQ: op = inverted ? "nez" : "eqz"; break;
1607 case NE: op = inverted ? "eqz" : "nez"; break;
1608 case LT: op = inverted ? "gez" : "ltz"; break;
1609 case GE: op = inverted ? "ltz" : "gez"; break;
1610 default: gcc_unreachable ();
1611 }
1612 }
1613
1614 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1615 op, isfp ? ".s" : "", inverted ? 3 : 2);
1616 return result;
1617}
1618
1619
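/* Output a call instruction: CALL8 for an immediate or symbolic target,
   CALLX8 when the target address is in a register. */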
03984308 1620char *
ffbc8796 1621xtensa_emit_call (int callop, rtx *operands)
03984308 1622{
b64a1b53 1623 static char result[64];
1624 rtx tgt = operands[callop];
1625
1626 if (GET_CODE (tgt) == CONST_INT)
1d0ea52e 1627 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1628 else if (register_operand (tgt, VOIDmode))
1629 sprintf (result, "callx8\t%%%d", callop);
1630 else
1631 sprintf (result, "call8\t%%%d", callop);
1632
1633 return result;
1634}
1635
1636
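/* Return true if ADDR is a valid address for MODE: a constant pool
   reference, a base register, or a base register plus an in-range
   constant offset.  STRICT selects the strict form of the base
   register check. */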
1637bool
1638xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
1639{
1640 /* Allow constant pool addresses. */
1641 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1642 && ! TARGET_CONST16 && constantpool_address_p (addr))
1643 return true;
1644
1645 while (GET_CODE (addr) == SUBREG)
1646 addr = SUBREG_REG (addr);
1647
1648 /* Allow base registers. */
1649 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1650 return true;
1651
1652 /* Check for "register + offset" addressing. */
1653 if (GET_CODE (addr) == PLUS)
1654 {
1655 rtx xplus0 = XEXP (addr, 0);
1656 rtx xplus1 = XEXP (addr, 1);
1657 enum rtx_code code0;
1658 enum rtx_code code1;
1659
1660 while (GET_CODE (xplus0) == SUBREG)
1661 xplus0 = SUBREG_REG (xplus0);
1662 code0 = GET_CODE (xplus0);
1663
1664 while (GET_CODE (xplus1) == SUBREG)
1665 xplus1 = SUBREG_REG (xplus1);
1666 code1 = GET_CODE (xplus1);
1667
1668 /* Swap operands if necessary so the register is first. */
1669 if (code0 != REG && code1 == REG)
1670 {
1671 xplus0 = XEXP (addr, 1);
1672 xplus1 = XEXP (addr, 0);
1673 code0 = GET_CODE (xplus0);
1674 code1 = GET_CODE (xplus1);
1675 }
1676
1677 if (code0 == REG && BASE_REG_P (xplus0, strict)
1678 && code1 == CONST_INT
1679 && xtensa_mem_offset (INTVAL (xplus1), mode))
1680 return true;
1681 }
1682
1683 return false;
1684}
1685
1686
1687rtx
1688xtensa_legitimize_address (rtx x,
1689 rtx oldx ATTRIBUTE_UNUSED,
1690 enum machine_mode mode)
1691{
1692 if (GET_CODE (x) == PLUS)
1693 {
1694 rtx plus0 = XEXP (x, 0);
1695 rtx plus1 = XEXP (x, 1);
1696
1697 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1698 {
1699 plus0 = XEXP (x, 1);
1700 plus1 = XEXP (x, 0);
1701 }
1702
1703 /* Try to split up the offset to use an ADDMI instruction. */
1704 if (GET_CODE (plus0) == REG
1705 && GET_CODE (plus1) == CONST_INT
1706 && !xtensa_mem_offset (INTVAL (plus1), mode)
1707 && !xtensa_simm8 (INTVAL (plus1))
1708 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1709 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1710 {
1711 rtx temp = gen_reg_rtx (Pmode);
1712 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1713 emit_insn (gen_rtx_SET (Pmode, temp,
1714 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1715 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1716 }
1717 }
1718
1719 return NULL_RTX;
1720}
1721
1722
b0c6e48f 1723/* Return the debugger register number to use for 'regno'. */
1724
1725int
ffbc8796 1726xtensa_dbx_register_number (int regno)
1727{
1728 int first = -1;
1729
1730 if (GP_REG_P (regno))
1731 {
1732 regno -= GP_REG_FIRST;
1733 first = 0;
1734 }
1735 else if (BR_REG_P (regno))
1736 {
1737 regno -= BR_REG_FIRST;
1738 first = 16;
1739 }
1740 else if (FP_REG_P (regno))
1741 {
1742 regno -= FP_REG_FIRST;
b0c6e48f 1743 first = 48;
633e4eb4 1744 }
1745 else if (ACC_REG_P (regno))
1746 {
1747 first = 0x200; /* Start of Xtensa special registers. */
1748 regno = 16; /* ACCLO is special register 16. */
1749 }
1750
1751 /* When optimizing, we sometimes get asked about pseudo-registers
638db43e 1752 that don't represent hard registers. Return 0 for these. */
1753 if (first == -1)
1754 return 0;
1755
1756 return first + regno;
1757}
1758
1759
1760/* Argument support functions. */
1761
1762/* Initialize CUMULATIVE_ARGS for a function. */
1763
1764void
997b8b4d 1765init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1766{
1767 cum->arg_words = 0;
997b8b4d 1768 cum->incoming = incoming;
1769}
1770
ffbc8796 1771
1772/* Advance the argument to the next argument position. */
1773
1774void
ffbc8796 1775function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1776{
1777 int words, max;
1778 int *arg_words;
1779
1780 arg_words = &cum->arg_words;
1781 max = MAX_ARGS_IN_REGISTERS;
1782
1783 words = (((mode != BLKmode)
1784 ? (int) GET_MODE_SIZE (mode)
1785 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1786
1787 if (*arg_words < max
1788 && (targetm.calls.must_pass_in_stack (mode, type)
1789 || *arg_words + words > max))
1790 *arg_words = max;
1791
1792 *arg_words += words;
1793}
1794
1795
1796/* Return an RTL expression containing the register for the given mode,
368ebcd6 1797 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
ffbc8796 1798 if this is an incoming argument to the current function. */
1799
1800rtx
1801function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1802 int incoming_p)
1803{
1804 int regbase, words, max;
1805 int *arg_words;
1806 int regno;
1807
1808 arg_words = &cum->arg_words;
1809 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1810 max = MAX_ARGS_IN_REGISTERS;
1811
1812 words = (((mode != BLKmode)
1813 ? (int) GET_MODE_SIZE (mode)
1814 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1815
1816 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
822e895c 1817 {
d2348985 1818 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
1819 *arg_words = (*arg_words + align - 1) & -align;
1820 }
1821
1822 if (*arg_words + words > max)
1823 return (rtx)0;
1824
1825 regno = regbase + *arg_words;
03984308 1826
1827 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1828 cfun->machine->need_a7_copy = true;
03984308 1829
997b8b4d 1830 return gen_rtx_REG (mode, regno);
1831}
1832
1833
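/* Return the alignment in bits for an argument of the given MODE and
   TYPE, clamped to lie between PARM_BOUNDARY and STACK_BOUNDARY. */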
1834int
1835function_arg_boundary (enum machine_mode mode, tree type)
1836{
1837 unsigned int alignment;
1838
1839 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1840 if (alignment < PARM_BOUNDARY)
1841 alignment = PARM_BOUNDARY;
1842 if (alignment > STACK_BOUNDARY)
1843 alignment = STACK_BOUNDARY;
1844 return alignment;
1845}
1846
1847
6e5ff6e7 1848static bool
586de218 1849xtensa_return_in_msb (const_tree valtype)
1850{
1851 return (TARGET_BIG_ENDIAN
1852 && AGGREGATE_TYPE_P (valtype)
1853 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1854}
1855
1856
03984308 1857void
ffbc8796 1858override_options (void)
1859{
1860 int regno;
1861 enum machine_mode mode;
1862
1863 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1864 error ("boolean registers required for the floating-point option");
1865
638db43e 1866 /* Set up array giving whether a given register can hold a given mode. */
1867 for (mode = VOIDmode;
1868 mode != MAX_MACHINE_MODE;
1869 mode = (enum machine_mode) ((int) mode + 1))
1870 {
1871 int size = GET_MODE_SIZE (mode);
1872 enum mode_class class = GET_MODE_CLASS (mode);
1873
1874 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1875 {
1876 int temp;
1877
1878 if (ACC_REG_P (regno))
1879 temp = (TARGET_MAC16
1880 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1881 else if (GP_REG_P (regno))
1882 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1883 else if (FP_REG_P (regno))
1884 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1885 else if (BR_REG_P (regno))
1886 temp = (TARGET_BOOLEANS && (mode == CCmode));
1887 else
1888 temp = FALSE;
1889
1890 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1891 }
1892 }
1893
1894 init_machine_status = xtensa_init_machine_status;
03984308 1895
1896 /* Check PIC settings. PIC is only supported when using L32R
1897 instructions, and some targets need to always use PIC. */
1898 if (flag_pic && TARGET_CONST16)
1899 error ("-f%s is not supported with CONST16 instructions",
1900 (flag_pic > 1 ? "PIC" : "pic"));
1901 else if (XTENSA_ALWAYS_PIC)
1902 {
1903 if (TARGET_CONST16)
1904 error ("PIC is required but not supported with CONST16 instructions");
1905 flag_pic = 1;
1906 }
1907 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1908 if (flag_pic > 1)
03984308 1909 flag_pic = 1;
87c8b4be
CT
1910
1911 /* Hot/cold partitioning does not work on this architecture, because of
1912 constant pools (the load instruction cannot necessarily reach that far).
1913 Therefore disable it on this architecture. */
1914 if (flag_reorder_blocks_and_partition)
1915 {
1916 flag_reorder_blocks_and_partition = 0;
1917 flag_reorder_blocks = 1;
1918 }
03984308
BW
1919}
1920
1921
1922/* A C compound statement to output to stdio stream STREAM the
1923 assembler syntax for an instruction operand X. X is an RTL
1924 expression.
1925
1926 CODE is a value that can be used to specify one of several ways
1927 of printing the operand. It is used when identical operands
1928 must be printed differently depending on the context. CODE
1929 comes from the '%' specification that was used to request
1930 printing of the operand. If the specification was just '%DIGIT'
1931 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1932 is the ASCII code for LTR.
1933
1934 If X is a register, this macro should print the register's name.
1935 The names can be found in an array 'reg_names' whose type is
1936 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1937
1938 When the machine description has a specification '%PUNCT' (a '%'
1939 followed by a punctuation character), this macro is called with
1940 a null pointer for X and the punctuation character for CODE.
1941
1942 'a', 'c', 'l', and 'n' are reserved.
633e4eb4 1943
03984308
BW
1944 The Xtensa specific codes are:
1945
1946 'd' CONST_INT, print as signed decimal
1947 'x' CONST_INT, print as signed hexadecimal
1948 'K' CONST_INT, print number of bits in mask for EXTUI
1949 'R' CONST_INT, print (X & 0x1f)
1950 'L' CONST_INT, print ((32 - X) & 0x1f)
1951 'D' REG, print second register of double-word register operand
1952 'N' MEM, print address of next word following a memory operand
1953 'v' MEM, if memory reference is volatile, output a MEMW before it
f42f5a1b
BW
1954 't' any constant, add "@h" suffix for top 16 bits
1955 'b' any constant, add "@l" suffix for bottom 16 bits
03984308
BW
1956*/
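/* A few worked examples, derived from the cases below (illustrative only):
   for X = (const_int 255), '%K' prints "8", the EXTUI field width for the
   mask 0xff; for X = (const_int 8), '%L' prints "24" ((32 - 8) & 0x1f) and
   '%R' prints "8" (8 & 0x1f); for X = (const_int 48), '%x' prints "0x30",
   while small magnitudes such as 5 are printed in decimal by printx().  */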
1957
1958static void
ffbc8796 1959printx (FILE *file, signed int val)
03984308 1960{
ffbc8796 1961 /* Print a hexadecimal value in a nice way. */
03984308
BW
1962 if ((val > -0xa) && (val < 0xa))
1963 fprintf (file, "%d", val);
1964 else if (val < 0)
1965 fprintf (file, "-0x%x", -val);
1966 else
1967 fprintf (file, "0x%x", val);
1968}
1969
1970
1971void
ffbc8796 1972print_operand (FILE *file, rtx x, int letter)
03984308 1973{
f42f5a1b 1974 if (!x)
03984308
BW
1975 error ("PRINT_OPERAND null pointer");
1976
f42f5a1b 1977 switch (letter)
03984308 1978 {
f42f5a1b
BW
1979 case 'D':
1980 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1981 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1982 else
1983 output_operand_lossage ("invalid %%D value");
1984 break;
03984308 1985
f42f5a1b
BW
1986 case 'v':
1987 if (GET_CODE (x) == MEM)
1988 {
1989 /* For a volatile memory reference, emit a MEMW before the
1990 load or store. */
7a29f483 1991 if (MEM_VOLATILE_P (x))
f42f5a1b
BW
1992 fprintf (file, "memw\n\t");
1993 }
1994 else
1995 output_operand_lossage ("invalid %%v value");
1996 break;
03984308 1997
f42f5a1b
BW
1998 case 'N':
1999 if (GET_CODE (x) == MEM
2000 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2001 {
2002 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
2003 output_address (XEXP (x, 0));
2004 }
2005 else
2006 output_operand_lossage ("invalid %%N value");
2007 break;
03984308 2008
f42f5a1b
BW
2009 case 'K':
2010 if (GET_CODE (x) == CONST_INT)
03984308 2011 {
f42f5a1b
BW
2012 int num_bits = 0;
2013 unsigned val = INTVAL (x);
2014 while (val & 1)
2015 {
2016 num_bits += 1;
2017 val = val >> 1;
2018 }
2019 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2020 fatal_insn ("invalid mask", x);
03984308 2021
f42f5a1b
BW
2022 fprintf (file, "%d", num_bits);
2023 }
2024 else
2025 output_operand_lossage ("invalid %%K value");
2026 break;
03984308 2027
f42f5a1b
BW
2028 case 'L':
2029 if (GET_CODE (x) == CONST_INT)
2030 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2031 else
2032 output_operand_lossage ("invalid %%L value");
2033 break;
03984308 2034
f42f5a1b
BW
2035 case 'R':
2036 if (GET_CODE (x) == CONST_INT)
2037 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2038 else
2039 output_operand_lossage ("invalid %%R value");
2040 break;
03984308 2041
f42f5a1b
BW
2042 case 'x':
2043 if (GET_CODE (x) == CONST_INT)
2044 printx (file, INTVAL (x));
2045 else
2046 output_operand_lossage ("invalid %%x value");
2047 break;
03984308 2048
f42f5a1b
BW
2049 case 'd':
2050 if (GET_CODE (x) == CONST_INT)
2051 fprintf (file, "%ld", INTVAL (x));
2052 else
2053 output_operand_lossage ("invalid %%d value");
2054 break;
03984308 2055
f42f5a1b
BW
2056 case 't':
2057 case 'b':
2058 if (GET_CODE (x) == CONST_INT)
2059 {
2060 printx (file, INTVAL (x));
2061 fputs (letter == 't' ? "@h" : "@l", file);
2062 }
2063 else if (GET_CODE (x) == CONST_DOUBLE)
2064 {
2065 REAL_VALUE_TYPE r;
2066 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2067 if (GET_MODE (x) == SFmode)
2068 {
2069 long l;
2070 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2071 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2072 }
2073 else
2074 output_operand_lossage ("invalid %%t/%%b value");
2075 }
2076 else if (GET_CODE (x) == CONST)
2077 {
2078 /* X must be a symbolic constant on ELF. Write an expression
2079 suitable for 'const16' that sets the high or low 16 bits. */
2080 if (GET_CODE (XEXP (x, 0)) != PLUS
2081 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2082 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2083 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2084 output_operand_lossage ("invalid %%t/%%b value");
2085 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2086 fputs (letter == 't' ? "@h" : "@l", file);
2087 /* There must be a non-alphanumeric character between 'h' or 'l'
2088 and the number. The '-' is added by print_operand() already. */
2089 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2090 fputs ("+", file);
2091 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2092 }
2093 else
633e4eb4 2094 {
f42f5a1b
BW
2095 output_addr_const (file, x);
2096 fputs (letter == 't' ? "@h" : "@l", file);
03984308
BW
2097 }
2098 break;
2099
2100 default:
f42f5a1b
BW
2101 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2102 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2103 else if (GET_CODE (x) == MEM)
2104 output_address (XEXP (x, 0));
2105 else if (GET_CODE (x) == CONST_INT)
2106 fprintf (file, "%ld", INTVAL (x));
2107 else
2108 output_addr_const (file, x);
03984308
BW
2109 }
2110}
2111
2112
2113/* A C compound statement to output to stdio stream STREAM the
2114 assembler syntax for an instruction operand that is a memory
fb49053f 2115 reference whose address is ADDR. ADDR is an RTL expression. */
03984308
BW
2116
2117void
ffbc8796 2118print_operand_address (FILE *file, rtx addr)
03984308
BW
2119{
2120 if (!addr)
2121 error ("PRINT_OPERAND_ADDRESS, null pointer");
2122
2123 switch (GET_CODE (addr))
2124 {
2125 default:
2126 fatal_insn ("invalid address", addr);
2127 break;
2128
2129 case REG:
2130 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2131 break;
2132
2133 case PLUS:
2134 {
2135 rtx reg = (rtx)0;
2136 rtx offset = (rtx)0;
2137 rtx arg0 = XEXP (addr, 0);
2138 rtx arg1 = XEXP (addr, 1);
2139
2140 if (GET_CODE (arg0) == REG)
2141 {
2142 reg = arg0;
2143 offset = arg1;
2144 }
2145 else if (GET_CODE (arg1) == REG)
2146 {
2147 reg = arg1;
2148 offset = arg0;
2149 }
2150 else
2151 fatal_insn ("no register in address", addr);
2152
2153 if (CONSTANT_P (offset))
2154 {
2155 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2156 output_addr_const (file, offset);
2157 }
2158 else
2159 fatal_insn ("address offset not a constant", addr);
2160 }
2161 break;
2162
2163 case LABEL_REF:
2164 case SYMBOL_REF:
2165 case CONST_INT:
2166 case CONST:
2167 output_addr_const (file, addr);
2168 break;
2169 }
2170}
2171
2172
da1f39e4
BW
2173bool
2174xtensa_output_addr_const_extra (FILE *fp, rtx x)
2175{
2176 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2177 {
2178 switch (XINT (x, 1))
2179 {
2180 case UNSPEC_PLT:
2181 if (flag_pic)
2182 {
2183 output_addr_const (fp, XVECEXP (x, 0, 0));
2184 fputs ("@PLT", fp);
2185 return true;
2186 }
2187 break;
2188 default:
2189 break;
2190 }
2191 }
2192 return false;
2193}
2194
2195
03984308 2196void
ffbc8796 2197xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
03984308
BW
2198{
2199 long value_long[2];
b216cd4a 2200 REAL_VALUE_TYPE r;
03984308 2201 int size;
74ed13f5 2202 rtx first, second;
03984308
BW
2203
2204 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2205
2206 switch (GET_MODE_CLASS (mode))
2207 {
2208 case MODE_FLOAT:
177b6be0 2209 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
03984308 2210
b216cd4a 2211 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
03984308
BW
2212 switch (mode)
2213 {
2214 case SFmode:
b216cd4a 2215 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
4575a800
BW
2216 if (HOST_BITS_PER_LONG > 32)
2217 value_long[0] &= 0xffffffff;
b216cd4a 2218 fprintf (file, "0x%08lx\n", value_long[0]);
03984308
BW
2219 break;
2220
2221 case DFmode:
b216cd4a 2222 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
4575a800
BW
2223 if (HOST_BITS_PER_LONG > 32)
2224 {
2225 value_long[0] &= 0xffffffff;
2226 value_long[1] &= 0xffffffff;
2227 }
b216cd4a
ZW
2228 fprintf (file, "0x%08lx, 0x%08lx\n",
2229 value_long[0], value_long[1]);
03984308
BW
2230 break;
2231
2232 default:
177b6be0 2233 gcc_unreachable ();
03984308
BW
2234 }
2235
2236 break;
2237
2238 case MODE_INT:
2239 case MODE_PARTIAL_INT:
2240 size = GET_MODE_SIZE (mode);
177b6be0 2241 switch (size)
03984308 2242 {
177b6be0 2243 case 4:
03984308
BW
2244 output_addr_const (file, x);
2245 fputs ("\n", file);
177b6be0
NS
2246 break;
2247
2248 case 8:
74ed13f5
BW
2249 split_double (x, &first, &second);
2250 output_addr_const (file, first);
03984308 2251 fputs (", ", file);
74ed13f5 2252 output_addr_const (file, second);
03984308 2253 fputs ("\n", file);
177b6be0
NS
2254 break;
2255
2256 default:
2257 gcc_unreachable ();
03984308 2258 }
03984308
BW
2259 break;
2260
2261 default:
177b6be0 2262 gcc_unreachable ();
03984308
BW
2263 }
2264}
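/* Example of the emitted assembly (illustrative only): an SFmode constant
   of 1.0 with label number 3 produces

       .literal .LC3, 0x3f800000

   and DFmode or 8-byte integer constants emit two comma-separated words.  */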
2265
2266
2267/* Return the bytes needed to compute the frame pointer from the current
638db43e 2268 stack pointer. */
03984308
BW
2269
2270#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2271#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2272
2273long
ffbc8796 2274compute_frame_size (int size)
03984308 2275{
ffbc8796 2276 /* Add space for the incoming static chain value. */
6de9cd9a 2277 if (cfun->static_chain_decl != NULL)
03984308
BW
2278 size += (1 * UNITS_PER_WORD);
2279
2280 xtensa_current_frame_size =
2281 XTENSA_STACK_ALIGN (size
2282 + current_function_outgoing_args_size
2283 + (WINDOW_SIZE * UNITS_PER_WORD));
2284 return xtensa_current_frame_size;
2285}
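/* Worked example (illustrative, assuming the usual 16-byte STACK_BOUNDARY
   and WINDOW_SIZE of 8 for the windowed ABI): with 20 bytes of locals, no
   outgoing arguments and no static chain, the frame size is
   XTENSA_STACK_ALIGN (20 + 0 + 8 * 4) = XTENSA_STACK_ALIGN (52) = 64.  */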
2286
2287
2288int
ffbc8796 2289xtensa_frame_pointer_required (void)
03984308
BW
2290{
2291 /* The code to expand builtin_frame_addr and builtin_return_addr
2292 currently uses the hard_frame_pointer instead of frame_pointer.
2293 This seems wrong but maybe it's necessary for other architectures.
638db43e 2294 This function is derived from the i386 code. */
03984308
BW
2295
2296 if (cfun->machine->accesses_prev_frame)
2297 return 1;
2298
2299 return 0;
2300}
2301
2302
f42f5a1b 2303void
ffbc8796 2304xtensa_expand_prologue (void)
f42f5a1b
BW
2305{
2306 HOST_WIDE_INT total_size;
2307 rtx size_rtx;
4e6c2193 2308 rtx insn, note_rtx;
18dbd950 2309
f42f5a1b
BW
2310 total_size = compute_frame_size (get_frame_size ());
2311 size_rtx = GEN_INT (total_size);
18dbd950 2312
f42f5a1b 2313 if (total_size < (1 << (12+3)))
35a3be48 2314 insn = emit_insn (gen_entry (size_rtx));
03984308
BW
2315 else
2316 {
f42f5a1b
BW
2317 /* Use a8 as a temporary since a0-a7 may be live. */
2318 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
35a3be48 2319 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
f42f5a1b
BW
2320 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2321 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
4e6c2193 2322 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
03984308
BW
2323 }
2324
f42f5a1b 2325 if (frame_pointer_needed)
03984308 2326 {
997b8b4d 2327 if (cfun->machine->set_frame_ptr_insn)
03984308 2328 {
4e6c2193 2329 rtx first;
03984308 2330
997b8b4d
BW
2331 push_topmost_sequence ();
2332 first = get_insns ();
2333 pop_topmost_sequence ();
03984308 2334
f42f5a1b
BW
2335 /* For all instructions prior to set_frame_ptr_insn, replace
2336 hard_frame_pointer references with stack_pointer. */
2337 for (insn = first;
997b8b4d 2338 insn != cfun->machine->set_frame_ptr_insn;
f42f5a1b
BW
2339 insn = NEXT_INSN (insn))
2340 {
2341 if (INSN_P (insn))
20dca97b
BW
2342 {
2343 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2344 hard_frame_pointer_rtx,
2345 stack_pointer_rtx);
2346 df_insn_rescan (insn);
2347 }
f42f5a1b
BW
2348 }
2349 }
2350 else
4e6c2193
BW
2351 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2352 stack_pointer_rtx));
03984308 2353 }
4e6c2193
BW
2354
2355 /* Create a note to describe the CFA. Because this is only used to set
2356 DW_AT_frame_base for debug info, don't bother tracking changes through
2357 each instruction in the prologue. It just takes up space. */
2358 note_rtx = gen_rtx_SET (VOIDmode, (frame_pointer_needed
2359 ? hard_frame_pointer_rtx
2360 : stack_pointer_rtx),
2361 plus_constant (stack_pointer_rtx, -total_size));
2362 RTX_FRAME_RELATED_P (insn) = 1;
2363 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2364 note_rtx, REG_NOTES (insn));
03984308
BW
2365}
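/* Sketch of the generated prologue (illustrative only): for frames below
   the 32 KB limit of the entry instruction's immediate this is simply

       entry   sp, <total_size>

   while larger frames get a minimal entry followed by an explicit stack
   adjustment through the a8 temporary, as emitted above.  */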
2366
2367
f42f5a1b 2368/* Clear variables at function end. */
03984308
BW
2369
2370void
ffbc8796
BW
2371xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2372 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
03984308 2373{
03984308
BW
2374 xtensa_current_frame_size = 0;
2375}
2376
2377
0c14a54d 2378rtx
ffbc8796 2379xtensa_return_addr (int count, rtx frame)
0c14a54d
BW
2380{
2381 rtx result, retaddr;
2382
2383 if (count == -1)
f42f5a1b 2384 retaddr = gen_rtx_REG (Pmode, A0_REG);
0c14a54d
BW
2385 else
2386 {
2387 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2388 addr = memory_address (Pmode, addr);
2389 retaddr = gen_reg_rtx (Pmode);
2390 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2391 }
2392
2393 /* The 2 most-significant bits of the return address on Xtensa hold
2394 the register window size. To get the real return address, these
2395 bits must be replaced with the high bits from the current PC. */
2396
2397 result = gen_reg_rtx (Pmode);
2398 emit_insn (gen_fix_return_addr (result, retaddr));
2399 return result;
2400}
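/* Conceptually (illustrative only), the fix_return_addr expansion amounts
   to

       result = (retaddr & 0x3fffffff) | (current_pc & 0xc0000000);

   i.e. the two window-size bits are replaced by the corresponding high
   bits of the current PC.  */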
2401
2402
03984308 2403/* Create the va_list data type.
822e895c
BW
2404
2405 This structure is set up by __builtin_saveregs. The __va_reg field
2406 points to a stack-allocated region holding the contents of the
2407 incoming argument registers. The __va_ndx field is an index
2408 initialized to the position of the first unnamed (variable)
2409 argument. This same index is also used to address the arguments
2410 passed in memory. Thus, the __va_stk field is initialized to point
2411 to the position of the first argument in memory, offset to account
2412 for the arguments passed in registers and to account for the size
2413 of the argument registers not being 16-byte aligned.  E.g., there
2414 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2415 for the first stack argument to have the maximal alignment of 16
2416 bytes, so we offset the __va_stk address by 32 bytes so that
2417 __va_stk[32] references the first argument on the stack. */
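/* Illustrative sketch (not part of the original source): the record type
   built below corresponds roughly to the C structure

       struct __va_list_tag
       {
         void *__va_stk;      stack arguments, biased by -32 bytes
         void *__va_reg;      spill area holding the register arguments
         int   __va_ndx;      byte index of the next argument
       };

   with the field roles as described in the comment above.  */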
03984308 2418
c35d187f
RH
2419static tree
2420xtensa_build_builtin_va_list (void)
03984308 2421{
540eaea8 2422 tree f_stk, f_reg, f_ndx, record, type_decl;
03984308 2423
540eaea8
BW
2424 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2425 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
03984308
BW
2426
2427 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2428 ptr_type_node);
2429 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2430 ptr_type_node);
2431 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2432 integer_type_node);
2433
2434 DECL_FIELD_CONTEXT (f_stk) = record;
2435 DECL_FIELD_CONTEXT (f_reg) = record;
2436 DECL_FIELD_CONTEXT (f_ndx) = record;
2437
540eaea8
BW
2438 TREE_CHAIN (record) = type_decl;
2439 TYPE_NAME (record) = type_decl;
03984308
BW
2440 TYPE_FIELDS (record) = f_stk;
2441 TREE_CHAIN (f_stk) = f_reg;
2442 TREE_CHAIN (f_reg) = f_ndx;
2443
2444 layout_type (record);
2445 return record;
2446}
2447
2448
2449/* Save the incoming argument registers on the stack. Returns the
638db43e 2450 address of the saved registers. */
03984308 2451
4c45af42 2452static rtx
ffbc8796 2453xtensa_builtin_saveregs (void)
03984308 2454{
e70312d4 2455 rtx gp_regs;
79e9ebdc 2456 int arg_words = current_function_args_info.arg_words;
03984308 2457 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
03984308 2458
997b8b4d 2459 if (gp_left <= 0)
03984308
BW
2460 return const0_rtx;
2461
3bbc2af6 2462 /* Allocate the general-purpose register space. */
03984308
BW
2463 gp_regs = assign_stack_local
2464 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
540eaea8 2465 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
03984308
BW
2466
2467 /* Now store the incoming registers. */
997b8b4d
BW
2468 cfun->machine->need_a7_copy = true;
2469 cfun->machine->vararg_a7 = true;
e70312d4
BW
2470 move_block_from_reg (GP_ARG_FIRST + arg_words,
2471 adjust_address (gp_regs, BLKmode,
2472 arg_words * UNITS_PER_WORD),
2473 gp_left);
0d8442b8
BW
2474 gcc_assert (cfun->machine->vararg_a7_copy != 0);
2475 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
03984308
BW
2476
2477 return XEXP (gp_regs, 0);
2478}
2479
2480
2481/* Implement `va_start' for varargs and stdarg. We look at the
638db43e 2482 current function to fill in an initial va_list. */
03984308 2483
d7bd8aeb 2484static void
ffbc8796 2485xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
03984308
BW
2486{
2487 tree f_stk, stk;
2488 tree f_reg, reg;
2489 tree f_ndx, ndx;
2490 tree t, u;
2491 int arg_words;
2492
2493 arg_words = current_function_args_info.arg_words;
2494
2495 f_stk = TYPE_FIELDS (va_list_type_node);
2496 f_reg = TREE_CHAIN (f_stk);
2497 f_ndx = TREE_CHAIN (f_reg);
2498
47a25a46
RG
2499 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2500 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2501 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
03984308
BW
2502
2503 /* Call __builtin_saveregs; save the result in __va_reg */
e70312d4
BW
2504 u = make_tree (sizetype, expand_builtin_saveregs ());
2505 u = fold_convert (ptr_type_node, u);
07beea0d 2506 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, reg, u);
03984308
BW
2507 TREE_SIDE_EFFECTS (t) = 1;
2508 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2509
822e895c 2510 /* Set the __va_stk member to ($arg_ptr - 32). */
03984308 2511 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
e70312d4 2512 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
07beea0d 2513 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, stk, u);
03984308
BW
2514 TREE_SIDE_EFFECTS (t) = 1;
2515 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2516
822e895c
BW
2517 /* Set the __va_ndx member. If the first variable argument is on
2518 the stack, adjust __va_ndx by 2 words to account for the extra
2519 alignment offset for __va_stk. */
2520 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2521 arg_words += 2;
e70312d4
BW
2522 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx,
2523 size_int (arg_words * UNITS_PER_WORD));
03984308
BW
2524 TREE_SIDE_EFFECTS (t) = 1;
2525 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2526}
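/* In C-like pseudo-code (illustrative only), the expansion built above is

       ap.__va_reg = __builtin_saveregs ();
       ap.__va_stk = (char *) __arg_ptr - 32;
       ap.__va_ndx = (arg_words
                      + (arg_words >= MAX_ARGS_IN_REGISTERS ? 2 : 0))
                     * UNITS_PER_WORD;

   where __arg_ptr stands for the incoming argument pointer.  */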
2527
2528
2529/* Implement `va_arg'. */
2530
85d53c1d
RH
2531static tree
2532xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2533 tree *post_p ATTRIBUTE_UNUSED)
03984308
BW
2534{
2535 tree f_stk, stk;
2536 tree f_reg, reg;
2537 tree f_ndx, ndx;
85d53c1d
RH
2538 tree type_size, array, orig_ndx, addr, size, va_size, t;
2539 tree lab_false, lab_over, lab_false2;
08b0dc1b
RH
2540 bool indirect;
2541
2542 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2543 if (indirect)
2544 type = build_pointer_type (type);
03984308 2545
3712281f
BW
2546 /* Handle complex values as separate real and imaginary parts. */
2547 if (TREE_CODE (type) == COMPLEX_TYPE)
2548 {
85d53c1d 2549 tree real_part, imag_part;
3712281f 2550
85d53c1d
RH
2551 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2552 pre_p, NULL);
2553 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
3712281f 2554
85d53c1d
RH
2555 imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2556 pre_p, NULL);
2557 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
3712281f 2558
47a25a46 2559 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
3712281f
BW
2560 }
2561
03984308
BW
2562 f_stk = TYPE_FIELDS (va_list_type_node);
2563 f_reg = TREE_CHAIN (f_stk);
2564 f_ndx = TREE_CHAIN (f_reg);
2565
47a25a46
RG
2566 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2567 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2568 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
03984308 2569
85d53c1d
RH
2570 type_size = size_in_bytes (type);
2571 va_size = round_up (type_size, UNITS_PER_WORD);
2572 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
8be56275 2573
03984308 2574
822e895c 2575 /* First align __va_ndx if necessary for this arg:
03984308 2576
85d53c1d 2577 orig_ndx = (AP).__va_ndx;
822e895c 2578 if (__alignof__ (TYPE) > 4 )
85d53c1d 2579 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
822e895c 2580 & -__alignof__ (TYPE)); */
03984308 2581
85d53c1d
RH
2582 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2583
03984308
BW
2584 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2585 {
d2348985 2586 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
85d53c1d 2587
e70312d4
BW
2588 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, size_int (align - 1));
2589 t = build2 (BIT_AND_EXPR, integer_type_node, t, size_int (-align));
07beea0d 2590 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, orig_ndx, t);
85d53c1d 2591 gimplify_and_add (t, pre_p);
03984308
BW
2592 }
2593
2594
2595 /* Increment __va_ndx to point past the argument:
2596
85d53c1d 2597 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
03984308 2598
85d53c1d 2599 t = fold_convert (integer_type_node, va_size);
47a25a46 2600 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
07beea0d 2601 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
85d53c1d 2602 gimplify_and_add (t, pre_p);
03984308
BW
2603
2604
2605 /* Check if the argument is in registers:
2606
bcf88f9b 2607 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
fe984136 2608 && !must_pass_in_stack (type))
ffbc8796 2609 __array = (AP).__va_reg; */
03984308 2610
85d53c1d 2611 array = create_tmp_var (ptr_type_node, NULL);
03984308 2612
85d53c1d 2613 lab_over = NULL;
fe984136 2614 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
bcf88f9b 2615 {
85d53c1d
RH
2616 lab_false = create_artificial_label ();
2617 lab_over = create_artificial_label ();
2618
e70312d4
BW
2619 t = build2 (GT_EXPR, boolean_type_node, ndx,
2620 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2621 t = build3 (COND_EXPR, void_type_node, t,
2622 build1 (GOTO_EXPR, void_type_node, lab_false),
2623 NULL_TREE);
85d53c1d
RH
2624 gimplify_and_add (t, pre_p);
2625
07beea0d 2626 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, reg);
85d53c1d
RH
2627 gimplify_and_add (t, pre_p);
2628
47a25a46 2629 t = build1 (GOTO_EXPR, void_type_node, lab_over);
85d53c1d
RH
2630 gimplify_and_add (t, pre_p);
2631
47a25a46 2632 t = build1 (LABEL_EXPR, void_type_node, lab_false);
85d53c1d 2633 gimplify_and_add (t, pre_p);
bcf88f9b 2634 }
03984308 2635
85d53c1d 2636
03984308
BW
2637 /* ...otherwise, the argument is on the stack (never split between
2638 registers and the stack -- change __va_ndx if necessary):
2639
2640 else
2641 {
822e895c
BW
2642 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2643 (AP).__va_ndx = 32 + __va_size (TYPE);
03984308 2644 __array = (AP).__va_stk;
ffbc8796 2645 } */
03984308 2646
85d53c1d 2647 lab_false2 = create_artificial_label ();
03984308 2648
e70312d4
BW
2649 t = build2 (GT_EXPR, boolean_type_node, orig_ndx,
2650 size_int (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
47a25a46
RG
2651 t = build3 (COND_EXPR, void_type_node, t,
2652 build1 (GOTO_EXPR, void_type_node, lab_false2),
2653 NULL_TREE);
85d53c1d 2654 gimplify_and_add (t, pre_p);
03984308 2655
85d53c1d
RH
2656 t = size_binop (PLUS_EXPR, va_size, size_int (32));
2657 t = fold_convert (integer_type_node, t);
07beea0d 2658 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
85d53c1d 2659 gimplify_and_add (t, pre_p);
03984308 2660
47a25a46 2661 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
85d53c1d 2662 gimplify_and_add (t, pre_p);
03984308 2663
07beea0d 2664 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, stk);
85d53c1d
RH
2665 gimplify_and_add (t, pre_p);
2666
2667 if (lab_over)
2668 {
47a25a46 2669 t = build1 (LABEL_EXPR, void_type_node, lab_over);
85d53c1d
RH
2670 gimplify_and_add (t, pre_p);
2671 }
8be56275 2672
03984308
BW
2673
2674 /* Given the base array pointer (__array) and index to the subsequent
2675 argument (__va_ndx), find the address:
2676
8be56275
BW
2677 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2678 ? sizeof (TYPE)
2679 : __va_size (TYPE))
03984308
BW
2680
2681 The results are endian-dependent because values smaller than one word
ffbc8796 2682 are aligned differently. */
03984308 2683
633e4eb4 2684
85d91d5b 2685 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
8be56275 2686 {
e70312d4
BW
2687 t = fold_build2 (GE_EXPR, boolean_type_node, type_size,
2688 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
47a25a46 2689 t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
85d53c1d 2690 size = t;
8be56275 2691 }
85d53c1d
RH
2692 else
2693 size = va_size;
2694
e70312d4
BW
2695 t = build2 (MINUS_EXPR, sizetype, ndx, size);
2696 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, array, t);
03984308 2697
85d53c1d 2698 addr = fold_convert (build_pointer_type (type), addr);
08b0dc1b 2699 if (indirect)
d6e9821f
RH
2700 addr = build_va_arg_indirect_ref (addr);
2701 return build_va_arg_indirect_ref (addr);
03984308
BW
2702}
2703
2704
09fa8841
BW
2705/* Builtins. */
2706
2707enum xtensa_builtin
2708{
2709 XTENSA_BUILTIN_UMULSIDI3,
2710 XTENSA_BUILTIN_max
2711};
2712
2713
2714static void
2715xtensa_init_builtins (void)
2716{
2717 tree ftype;
2718
2719 ftype = build_function_type_list (unsigned_intDI_type_node,
2720 unsigned_intSI_type_node,
2721 unsigned_intSI_type_node, NULL_TREE);
2722
2723 add_builtin_function ("__builtin_umulsidi3", ftype,
2724 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
2725 "__umulsidi3", NULL_TREE);
2726}
2727
2728
2729static tree
2730xtensa_fold_builtin (tree fndecl, tree arglist, bool ignore ATTRIBUTE_UNUSED)
2731{
2732 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2733 tree arg0, arg1;
2734
2735 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2736 {
2737 arg0 = TREE_VALUE (arglist);
2738 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2739 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2740 || TARGET_MUL32_HIGH)
2741 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
2742 fold_convert (unsigned_intDI_type_node, arg0),
2743 fold_convert (unsigned_intDI_type_node, arg1));
2744 else
2745 return NULL;
2746 }
2747
2748 internal_error ("bad builtin code");
2749 return NULL;
2750}
2751
2752
2753static rtx
2754xtensa_expand_builtin (tree exp, rtx target,
2755 rtx subtarget ATTRIBUTE_UNUSED,
2756 enum machine_mode mode ATTRIBUTE_UNUSED,
2757 int ignore)
2758{
ec3643e8 2759 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
09fa8841
BW
2760 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2761
2762 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
2763 __umulsidi3 function when the Xtensa configuration can directly
2764 implement it. If not, just call the function. */
2765 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2766 return expand_call (exp, target, ignore);
2767
2768 internal_error ("bad builtin code");
2769 return NULL_RTX;
2770}
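/* Usage sketch (illustrative only; the wrapper name is hypothetical and
   this block is not part of the original source).  */
#if 0
unsigned long long
example_umulsidi3 (unsigned int a, unsigned int b)
{
  /* Folds to a single widening multiply when TARGET_MUL32_HIGH is
     available or both operands are constants; otherwise it expands to an
     ordinary call to __umulsidi3.  */
  return __builtin_umulsidi3 (a, b);
}
#endif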
2771
2772
a8cacfd2 2773enum reg_class
ffbc8796 2774xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
a8cacfd2 2775{
89f6025d 2776 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
a8cacfd2
BW
2777 return NO_REGS;
2778
89f6025d
BW
2779 /* Don't use the stack pointer or hard frame pointer for reloads!
2780 The hard frame pointer would normally be OK except that it may
2781 briefly hold an incoming argument in the prologue, and reload
2782 won't know that it is live because the hard frame pointer is
2783 treated specially. */
2784
2785 if (class == AR_REGS || class == GR_REGS)
2786 return RL_REGS;
a8cacfd2
BW
2787
2788 return class;
2789}
2790
2791
03984308 2792enum reg_class
ffbc8796
BW
2793xtensa_secondary_reload_class (enum reg_class class,
2794 enum machine_mode mode ATTRIBUTE_UNUSED,
2795 rtx x, int isoutput)
03984308
BW
2796{
2797 int regno;
2798
2799 if (GET_CODE (x) == SIGN_EXTEND)
2800 x = XEXP (x, 0);
2801 regno = xt_true_regnum (x);
2802
2803 if (!isoutput)
2804 {
2805 if (class == FP_REGS && constantpool_mem_p (x))
89f6025d 2806 return RL_REGS;
03984308
BW
2807 }
2808
2809 if (ACC_REG_P (regno))
89f6025d 2810 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
03984308 2811 if (class == ACC_REG)
89f6025d 2812 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
03984308
BW
2813
2814 return NO_REGS;
2815}
2816
2817
2818void
ffbc8796 2819order_regs_for_local_alloc (void)
03984308
BW
2820{
2821 if (!leaf_function_p ())
2822 {
2823 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2824 FIRST_PSEUDO_REGISTER * sizeof (int));
2825 }
2826 else
2827 {
2828 int i, num_arg_regs;
2829 int nxt = 0;
2830
3bbc2af6
KH
2831 /* Use the AR registers in increasing order (skipping a0 and a1)
2832 but save the incoming argument registers as a last resort. */
03984308
BW
2833 num_arg_regs = current_function_args_info.arg_words;
2834 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2835 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2836 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2837 reg_alloc_order[nxt++] = i + num_arg_regs;
2838 for (i = 0; i < num_arg_regs; i++)
2839 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2840
3bbc2af6 2841 /* List the coprocessor registers in order. */
985d0d50
BW
2842 for (i = 0; i < BR_REG_NUM; i++)
2843 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2844
3bbc2af6 2845 /* List the FP registers in order for now. */
03984308
BW
2846 for (i = 0; i < 16; i++)
2847 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2848
638db43e 2849 /* GCC requires that we list *all* the registers.... */
03984308
BW
2850 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2851 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2852 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2853 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2854
03984308
BW
2855 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2856 }
2857}
2858
2859
01abf342
BW
2860/* Some Xtensa targets support multiple bss sections. If the section
2861 name ends with ".bss", add SECTION_BSS to the flags. */
2862
2863static unsigned int
ffbc8796 2864xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
01abf342
BW
2865{
2866 unsigned int flags = default_section_type_flags (decl, name, reloc);
2867 const char *suffix;
2868
2869 suffix = strrchr (name, '.');
2870 if (suffix && strcmp (suffix, ".bss") == 0)
2871 {
2872 if (!decl || (TREE_CODE (decl) == VAR_DECL
2873 && DECL_INITIAL (decl) == NULL_TREE))
2874 flags |= SECTION_BSS; /* @nobits */
2875 else
d4ee4d25 2876 warning (0, "only uninitialized variables can be placed in a "
01abf342
BW
2877 ".bss section");
2878 }
2879
2880 return flags;
2881}
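/* Usage sketch (illustrative only; the section name ".iram.bss" is just an
   example).  Because the name ends in ".bss" and the variable has no
   initializer, the section is flagged SECTION_BSS (@nobits); adding an
   initializer would trigger the warning above instead.  */
#if 0
int example_scratch[256] __attribute__ ((section (".iram.bss")));
#endif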
2882
2883
b64a1b53
RH
2884/* The literal pool stays with the function. */
2885
d6b5193b 2886static section *
ffbc8796
BW
2887xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2888 rtx x ATTRIBUTE_UNUSED,
2889 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53 2890{
d6b5193b 2891 return function_section (current_function_decl);
b64a1b53 2892}
fb49053f 2893
ffbc8796 2894
3c50106f
RH
2895/* Compute a (partial) cost for rtx X. Return true if the complete
2896 cost has been computed, and false if subexpressions should be
2897 scanned. In either case, *TOTAL contains the cost result. */
2898
2899static bool
ffbc8796 2900xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f
RH
2901{
2902 switch (code)
2903 {
2904 case CONST_INT:
2905 switch (outer_code)
2906 {
2907 case SET:
2908 if (xtensa_simm12b (INTVAL (x)))
2909 {
2910 *total = 4;
2911 return true;
2912 }
2913 break;
2914 case PLUS:
2915 if (xtensa_simm8 (INTVAL (x))
2916 || xtensa_simm8x256 (INTVAL (x)))
2917 {
2918 *total = 0;
2919 return true;
2920 }
2921 break;
2922 case AND:
2923 if (xtensa_mask_immediate (INTVAL (x)))
2924 {
2925 *total = 0;
2926 return true;
2927 }
2928 break;
2929 case COMPARE:
2930 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2931 {
2932 *total = 0;
2933 return true;
2934 }
2935 break;
2936 case ASHIFT:
2937 case ASHIFTRT:
2938 case LSHIFTRT:
2939 case ROTATE:
2940 case ROTATERT:
3bbc2af6 2941 /* No way to tell if X is the 2nd operand so be conservative. */
3c50106f
RH
2942 default: break;
2943 }
2944 if (xtensa_simm12b (INTVAL (x)))
2945 *total = 5;
f42f5a1b
BW
2946 else if (TARGET_CONST16)
2947 *total = COSTS_N_INSNS (2);
3c50106f
RH
2948 else
2949 *total = 6;
2950 return true;
2951
2952 case CONST:
2953 case LABEL_REF:
2954 case SYMBOL_REF:
f42f5a1b
BW
2955 if (TARGET_CONST16)
2956 *total = COSTS_N_INSNS (2);
2957 else
2958 *total = 5;
3c50106f
RH
2959 return true;
2960
2961 case CONST_DOUBLE:
f42f5a1b
BW
2962 if (TARGET_CONST16)
2963 *total = COSTS_N_INSNS (4);
2964 else
2965 *total = 7;
3c50106f
RH
2966 return true;
2967
2968 case MEM:
2969 {
2970 int num_words =
2971 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2972
2973 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2974 *total = COSTS_N_INSNS (num_words);
2975 else
2976 *total = COSTS_N_INSNS (2*num_words);
2977 return true;
2978 }
2979
2980 case FFS:
09fa8841 2981 case CTZ:
3c50106f
RH
2982 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2983 return true;
2984
09fa8841
BW
2985 case CLZ:
2986 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
2987 return true;
2988
3c50106f
RH
2989 case NOT:
2990 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2991 return true;
2992
2993 case AND:
2994 case IOR:
2995 case XOR:
2996 if (GET_MODE (x) == DImode)
2997 *total = COSTS_N_INSNS (2);
2998 else
2999 *total = COSTS_N_INSNS (1);
3000 return true;
3001
3002 case ASHIFT:
3003 case ASHIFTRT:
3004 case LSHIFTRT:
3005 if (GET_MODE (x) == DImode)
3006 *total = COSTS_N_INSNS (50);
3007 else
3008 *total = COSTS_N_INSNS (1);
3009 return true;
3010
3011 case ABS:
3012 {
3013 enum machine_mode xmode = GET_MODE (x);
3014 if (xmode == SFmode)
3015 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3016 else if (xmode == DFmode)
3017 *total = COSTS_N_INSNS (50);
3018 else
3019 *total = COSTS_N_INSNS (4);
3020 return true;
3021 }
3022
3023 case PLUS:
3024 case MINUS:
3025 {
3026 enum machine_mode xmode = GET_MODE (x);
3027 if (xmode == SFmode)
3028 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3029 else if (xmode == DFmode || xmode == DImode)
3030 *total = COSTS_N_INSNS (50);
3031 else
3032 *total = COSTS_N_INSNS (1);
3033 return true;
3034 }
3035
3036 case NEG:
3037 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3038 return true;
3039
3040 case MULT:
3041 {
3042 enum machine_mode xmode = GET_MODE (x);
3043 if (xmode == SFmode)
3044 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
09fa8841 3045 else if (xmode == DFmode)
3c50106f 3046 *total = COSTS_N_INSNS (50);
09fa8841
BW
3047 else if (xmode == DImode)
3048 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3c50106f
RH
3049 else if (TARGET_MUL32)
3050 *total = COSTS_N_INSNS (4);
3051 else if (TARGET_MAC16)
3052 *total = COSTS_N_INSNS (16);
3053 else if (TARGET_MUL16)
3054 *total = COSTS_N_INSNS (12);
3055 else
3056 *total = COSTS_N_INSNS (50);
3057 return true;
3058 }
3059
3060 case DIV:
3061 case MOD:
3062 {
3063 enum machine_mode xmode = GET_MODE (x);
3064 if (xmode == SFmode)
3065 {
3066 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3067 return true;
3068 }
3069 else if (xmode == DFmode)
3070 {
3071 *total = COSTS_N_INSNS (50);
3072 return true;
3073 }
3074 }
3bbc2af6 3075 /* Fall through. */
3c50106f
RH
3076
3077 case UDIV:
3078 case UMOD:
3079 {
3080 enum machine_mode xmode = GET_MODE (x);
3081 if (xmode == DImode)
3082 *total = COSTS_N_INSNS (50);
3083 else if (TARGET_DIV32)
3084 *total = COSTS_N_INSNS (32);
3085 else
3086 *total = COSTS_N_INSNS (50);
3087 return true;
3088 }
3089
3090 case SQRT:
3091 if (GET_MODE (x) == SFmode)
3092 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3093 else
3094 *total = COSTS_N_INSNS (50);
3095 return true;
3096
3097 case SMIN:
3098 case UMIN:
3099 case SMAX:
3100 case UMAX:
3101 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3102 return true;
3103
3104 case SIGN_EXTRACT:
3105 case SIGN_EXTEND:
3106 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3107 return true;
3108
3109 case ZERO_EXTRACT:
3110 case ZERO_EXTEND:
3111 *total = COSTS_N_INSNS (1);
3112 return true;
3113
3114 default:
3115 return false;
3116 }
3117}
3118
bd5bd7ac
KH
3119/* Worker function for TARGET_RETURN_IN_MEMORY. */
3120
4c45af42 3121static bool
586de218 3122xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4c45af42
KH
3123{
3124 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3125 > 4 * UNITS_PER_WORD);
3126}
3127
e2500fed 3128#include "gt-xtensa.h"