[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
f1717362 2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
f6b7ba2b 3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
038d1e19 9Software Foundation; either version 3, or (at your option) any later
f6b7ba2b 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
038d1e19 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f6b7ba2b 20
21#include "config.h"
22#include "system.h"
805e22b2 23#include "coretypes.h"
9ef16211 24#include "backend.h"
c1eb80de 25#include "target.h"
26#include "rtl.h"
9ef16211 27#include "tree.h"
28#include "gimple.h"
c1eb80de 29#include "cfghooks.h"
9ef16211 30#include "df.h"
ad7b10a2 31#include "memmodel.h"
c1eb80de 32#include "tm_p.h"
33#include "stringpool.h"
34#include "optabs.h"
f6b7ba2b 35#include "regs.h"
c1eb80de 36#include "emit-rtl.h"
f6b7ba2b 37#include "recog.h"
c1eb80de 38#include "diagnostic-core.h"
39#include "cfgrtl.h"
f6b7ba2b 40#include "output.h"
b20a8bb4 41#include "fold-const.h"
9ed99284 42#include "stor-layout.h"
43#include "calls.h"
44#include "varasm.h"
d53441c8 45#include "alias.h"
d53441c8 46#include "explow.h"
d53441c8 47#include "expr.h"
f6b7ba2b 48#include "reload.h"
049d6666 49#include "langhooks.h"
a8783bee 50#include "gimplify.h"
f7715905 51#include "builtins.h"
47edca9a 52#include "dumpfile.h"
53#include "hw-doloop.h"
cd2faba8 54#include "rtl-iter.h"
ae79166b 55
0c71fb4f 56/* This file should be included last. */
4b498588 57#include "target-def.h"
f6b7ba2b 58
59/* Enumeration for all of the relational tests, so that we can build
60 arrays indexed by the test type, and not worry about the order
c821cf9c 61 of EQ, NE, etc. */
f6b7ba2b 62
fd63fcf8 63enum internal_test
64{
65 ITEST_EQ,
66 ITEST_NE,
67 ITEST_GT,
68 ITEST_GE,
69 ITEST_LT,
70 ITEST_LE,
71 ITEST_GTU,
72 ITEST_GEU,
73 ITEST_LTU,
74 ITEST_LEU,
75 ITEST_MAX
76};
f6b7ba2b 77
f6b7ba2b 78/* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
81
82/* Current frame size calculated by compute_frame_size. */
83unsigned xtensa_current_frame_size;
b89c671b 84/* Callee-save area size in the current frame calculated by compute_frame_size. */
85int xtensa_callee_save_size;
f6b7ba2b 86
a80259b6 87/* Largest block move to handle in-line. */
f6b7ba2b 88#define LARGEST_MOVE_RATIO 15
89
90/* Define the structure for the machine field in struct function. */
fb1e4f4a 91struct GTY(()) machine_function
f6b7ba2b 92{
93 int accesses_prev_frame;
e060c9df 94 bool need_a7_copy;
95 bool vararg_a7;
a3759617 96 rtx vararg_a7_copy;
bf735bc6 97 rtx_insn *set_frame_ptr_insn;
f6b7ba2b 98};
99
100/* Vector, indexed by hard register number, which contains 1 for a
101 register that is allowable in a candidate for leaf function
c821cf9c 102 treatment. */
f6b7ba2b 103
104const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105{
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 1, 1, 1,
108 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 1
110};
111
4c834714 112static void xtensa_option_override (void);
fd63fcf8 113static enum internal_test map_test_to_internal_test (enum rtx_code);
114static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
115static rtx gen_float_relational (enum rtx_code, rtx, rtx);
3754d046 116static rtx gen_conditional_move (enum rtx_code, machine_mode, rtx, rtx);
fd63fcf8 117static rtx fixup_subreg_mem (rtx);
fd63fcf8 118static struct machine_function * xtensa_init_machine_status (void);
c656b8fd 119static rtx xtensa_legitimize_tls_address (rtx);
3754d046 120static rtx xtensa_legitimize_address (rtx, rtx, machine_mode);
4e27ffd0 121static bool xtensa_mode_dependent_address_p (const_rtx, addr_space_t);
fb80456a 122static bool xtensa_return_in_msb (const_tree);
fd63fcf8 123static void printx (FILE *, signed int);
4fe4af61 124static rtx xtensa_builtin_saveregs (void);
3754d046 125static bool xtensa_legitimate_address_p (machine_mode, rtx, bool);
fd63fcf8 126static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
127 int) ATTRIBUTE_UNUSED;
3754d046 128static section *xtensa_select_rtx_section (machine_mode, rtx,
2f14b1f9 129 unsigned HOST_WIDE_INT);
5ae4887d 130static bool xtensa_rtx_costs (rtx, machine_mode, int, int, int *, bool);
3754d046 131static int xtensa_register_move_cost (machine_mode, reg_class_t,
156d021f 132 reg_class_t);
3754d046 133static int xtensa_memory_move_cost (machine_mode, reg_class_t, bool);
2e15d750 134static tree xtensa_build_builtin_va_list (void);
fb80456a 135static bool xtensa_return_in_memory (const_tree, const_tree);
75a70cf9 136static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
137 gimple_seq *);
3754d046 138static void xtensa_function_arg_advance (cumulative_args_t, machine_mode,
41e01e3e 139 const_tree, bool);
3754d046 140static rtx xtensa_function_arg (cumulative_args_t, machine_mode,
41e01e3e 141 const_tree, bool);
39cba157 142static rtx xtensa_function_incoming_arg (cumulative_args_t,
3754d046 143 machine_mode, const_tree, bool);
b542c964 144static rtx xtensa_function_value (const_tree, const_tree, bool);
3754d046 145static rtx xtensa_libcall_value (machine_mode, const_rtx);
7af7466c 146static bool xtensa_function_value_regno_p (const unsigned int);
3754d046 147static unsigned int xtensa_function_arg_boundary (machine_mode,
bd99ba64 148 const_tree);
8e8c0c04 149static void xtensa_init_builtins (void);
97d67146 150static tree xtensa_fold_builtin (tree, int, tree *, bool);
3754d046 151static rtx xtensa_expand_builtin (tree, rtx, rtx, machine_mode, int);
f912ce81 152static void xtensa_va_start (tree, rtx);
5a1c68c3 153static bool xtensa_frame_pointer_required (void);
974b8df6 154static rtx xtensa_static_chain (const_tree, bool);
269e94f8 155static void xtensa_asm_trampoline_template (FILE *);
156static void xtensa_trampoline_init (rtx, tree, rtx);
77a69f9f 157static bool xtensa_output_addr_const_extra (FILE *, rtx);
3754d046 158static bool xtensa_cannot_force_const_mem (machine_mode, rtx);
bbfbe351 159
d7198e1f 160static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t);
161static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t);
162static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t,
3754d046 163 machine_mode,
d7198e1f 164 struct secondary_reload_info *);
165
5cae3439 166static bool constantpool_address_p (const_rtx addr);
3754d046 167static bool xtensa_legitimate_constant_p (machine_mode, rtx);
47edca9a 168static void xtensa_reorg (void);
169static bool xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
170 unsigned int, bool);
171static const char *xtensa_invalid_within_doloop (const rtx_insn *);
5cae3439 172
f91ed644 173static bool xtensa_member_type_forces_blk (const_tree,
3754d046 174 machine_mode mode);
f91ed644 175
b89c671b 176static void xtensa_conditional_register_usage (void);
f6b7ba2b 177
b89c671b 178\f
f6b7ba2b 179
180/* These hooks specify assembly directives for creating certain kinds
181 of integer object. */
182
183#undef TARGET_ASM_ALIGNED_SI_OP
184#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
185
bbfbe351 186#undef TARGET_ASM_SELECT_RTX_SECTION
187#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
f6b7ba2b 188
41e3a0c7 189#undef TARGET_LEGITIMIZE_ADDRESS
190#define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address
5cae3439 191#undef TARGET_MODE_DEPENDENT_ADDRESS_P
192#define TARGET_MODE_DEPENDENT_ADDRESS_P xtensa_mode_dependent_address_p
41e3a0c7 193
156d021f 194#undef TARGET_REGISTER_MOVE_COST
195#define TARGET_REGISTER_MOVE_COST xtensa_register_move_cost
196#undef TARGET_MEMORY_MOVE_COST
197#define TARGET_MEMORY_MOVE_COST xtensa_memory_move_cost
fab7adbf 198#undef TARGET_RTX_COSTS
199#define TARGET_RTX_COSTS xtensa_rtx_costs
ec0457a8 200#undef TARGET_ADDRESS_COST
d9c5e5f4 201#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
fab7adbf 202
f91ed644 203#undef TARGET_MEMBER_TYPE_FORCES_BLK
204#define TARGET_MEMBER_TYPE_FORCES_BLK xtensa_member_type_forces_blk
205
2e15d750 206#undef TARGET_BUILD_BUILTIN_VA_LIST
207#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
208
8a58ed0a 209#undef TARGET_EXPAND_BUILTIN_VA_START
210#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start
211
3b2411a8 212#undef TARGET_PROMOTE_FUNCTION_MODE
213#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
4fe4af61 214#undef TARGET_PROMOTE_PROTOTYPES
fb80456a 215#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
4fe4af61 216
4fe4af61 217#undef TARGET_RETURN_IN_MEMORY
218#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
b542c964 219#undef TARGET_FUNCTION_VALUE
220#define TARGET_FUNCTION_VALUE xtensa_function_value
7af7466c 221#undef TARGET_LIBCALL_VALUE
222#define TARGET_LIBCALL_VALUE xtensa_libcall_value
223#undef TARGET_FUNCTION_VALUE_REGNO_P
224#define TARGET_FUNCTION_VALUE_REGNO_P xtensa_function_value_regno_p
225
92d40bc4 226#undef TARGET_SPLIT_COMPLEX_ARG
a9f1838b 227#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
0336f0f0 228#undef TARGET_MUST_PASS_IN_STACK
229#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
41e01e3e 230#undef TARGET_FUNCTION_ARG_ADVANCE
231#define TARGET_FUNCTION_ARG_ADVANCE xtensa_function_arg_advance
232#undef TARGET_FUNCTION_ARG
233#define TARGET_FUNCTION_ARG xtensa_function_arg
234#undef TARGET_FUNCTION_INCOMING_ARG
235#define TARGET_FUNCTION_INCOMING_ARG xtensa_function_incoming_arg
bd99ba64 236#undef TARGET_FUNCTION_ARG_BOUNDARY
237#define TARGET_FUNCTION_ARG_BOUNDARY xtensa_function_arg_boundary
4fe4af61 238
239#undef TARGET_EXPAND_BUILTIN_SAVEREGS
240#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
ae79166b 241#undef TARGET_GIMPLIFY_VA_ARG_EXPR
242#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
4fe4af61 243
110f993e 244#undef TARGET_RETURN_IN_MSB
245#define TARGET_RETURN_IN_MSB xtensa_return_in_msb
246
8e8c0c04 247#undef TARGET_INIT_BUILTINS
248#define TARGET_INIT_BUILTINS xtensa_init_builtins
249#undef TARGET_FOLD_BUILTIN
250#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
251#undef TARGET_EXPAND_BUILTIN
252#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
253
d7198e1f 254#undef TARGET_PREFERRED_RELOAD_CLASS
255#define TARGET_PREFERRED_RELOAD_CLASS xtensa_preferred_reload_class
256#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
257#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xtensa_preferred_output_reload_class
258
e0488d87 259#undef TARGET_SECONDARY_RELOAD
260#define TARGET_SECONDARY_RELOAD xtensa_secondary_reload
261
c656b8fd 262#undef TARGET_HAVE_TLS
263#define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS)
264
265#undef TARGET_CANNOT_FORCE_CONST_MEM
7d7d7bd2 266#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_cannot_force_const_mem
c656b8fd 267
e46fbef5 268#undef TARGET_LRA_P
269#define TARGET_LRA_P hook_bool_void_false
270
fd50b071 271#undef TARGET_LEGITIMATE_ADDRESS_P
272#define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p
273
5a1c68c3 274#undef TARGET_FRAME_POINTER_REQUIRED
275#define TARGET_FRAME_POINTER_REQUIRED xtensa_frame_pointer_required
276
974b8df6 277#undef TARGET_STATIC_CHAIN
278#define TARGET_STATIC_CHAIN xtensa_static_chain
269e94f8 279#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
280#define TARGET_ASM_TRAMPOLINE_TEMPLATE xtensa_asm_trampoline_template
281#undef TARGET_TRAMPOLINE_INIT
282#define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init
283
4c834714 284#undef TARGET_OPTION_OVERRIDE
285#define TARGET_OPTION_OVERRIDE xtensa_option_override
286
77a69f9f 287#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
288#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA xtensa_output_addr_const_extra
289
ca316360 290#undef TARGET_LEGITIMATE_CONSTANT_P
291#define TARGET_LEGITIMATE_CONSTANT_P xtensa_legitimate_constant_p
292
47edca9a 293#undef TARGET_MACHINE_DEPENDENT_REORG
294#define TARGET_MACHINE_DEPENDENT_REORG xtensa_reorg
295
296#undef TARGET_CAN_USE_DOLOOP_P
297#define TARGET_CAN_USE_DOLOOP_P xtensa_can_use_doloop_p
298
299#undef TARGET_INVALID_WITHIN_DOLOOP
300#define TARGET_INVALID_WITHIN_DOLOOP xtensa_invalid_within_doloop
301
b89c671b 302#undef TARGET_CONDITIONAL_REGISTER_USAGE
303#define TARGET_CONDITIONAL_REGISTER_USAGE xtensa_conditional_register_usage
304
bbfbe351 305struct gcc_target targetm = TARGET_INITIALIZER;
f6b7ba2b 306
eb472ecb 307\f
308/* Functions to test Xtensa immediate operand validity. */
f6b7ba2b 309
7d0f7bf8 310bool
311xtensa_simm8 (HOST_WIDE_INT v)
312{
313 return v >= -128 && v <= 127;
314}
315
316
317bool
318xtensa_simm8x256 (HOST_WIDE_INT v)
319{
320 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
321}
322
323
324bool
325xtensa_simm12b (HOST_WIDE_INT v)
326{
327 return v >= -2048 && v <= 2047;
328}
329
330
331static bool
332xtensa_uimm8 (HOST_WIDE_INT v)
333{
334 return v >= 0 && v <= 255;
335}
336
337
338static bool
339xtensa_uimm8x2 (HOST_WIDE_INT v)
340{
341 return (v & 1) == 0 && (v >= 0 && v <= 510);
342}
343
344
345static bool
346xtensa_uimm8x4 (HOST_WIDE_INT v)
347{
348 return (v & 3) == 0 && (v >= 0 && v <= 1020);
349}
350
351
352static bool
353xtensa_b4const (HOST_WIDE_INT v)
f6b7ba2b 354{
355 switch (v)
356 {
7d0f7bf8 357 case -1:
358 case 1:
f6b7ba2b 359 case 2:
360 case 3:
361 case 4:
362 case 5:
363 case 6:
364 case 7:
365 case 8:
366 case 10:
367 case 12:
368 case 16:
369 case 32:
370 case 64:
371 case 128:
372 case 256:
7d0f7bf8 373 return true;
f6b7ba2b 374 }
7d0f7bf8 375 return false;
f6b7ba2b 376}
377
f6b7ba2b 378
7d0f7bf8 379bool
380xtensa_b4const_or_zero (HOST_WIDE_INT v)
f6b7ba2b 381{
7d0f7bf8 382 if (v == 0)
383 return true;
384 return xtensa_b4const (v);
f6b7ba2b 385}
386
f6b7ba2b 387
7d0f7bf8 388bool
389xtensa_b4constu (HOST_WIDE_INT v)
f6b7ba2b 390{
391 switch (v)
392 {
7d0f7bf8 393 case 32768:
394 case 65536:
f6b7ba2b 395 case 2:
396 case 3:
397 case 4:
398 case 5:
399 case 6:
400 case 7:
401 case 8:
402 case 10:
403 case 12:
404 case 16:
405 case 32:
406 case 64:
407 case 128:
408 case 256:
7d0f7bf8 409 return true;
f6b7ba2b 410 }
7d0f7bf8 411 return false;
f6b7ba2b 412}
413
f6b7ba2b 414
7d0f7bf8 415bool
416xtensa_mask_immediate (HOST_WIDE_INT v)
f6b7ba2b 417{
7d0f7bf8 418#define MAX_MASK_SIZE 16
419 int mask_size;
f6b7ba2b 420
7d0f7bf8 421 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
422 {
423 if ((v & 1) == 0)
424 return false;
425 v = v >> 1;
426 if (v == 0)
427 return true;
428 }
f6b7ba2b 429
7d0f7bf8 430 return false;
f6b7ba2b 431}
432
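/* For reference, a few values accepted or rejected by the checks above
   (derived from the code, not exhaustive): xtensa_simm8 accepts -128..127;
   xtensa_simm8x256 accepts multiples of 256 in -32768..32512, so 512 and
   32512 pass while 100 and 32768 do not; xtensa_mask_immediate accepts
   contiguous low-order masks of 1 to 16 bits such as 0x1, 0xff and 0xffff,
   but rejects 0x6 (bit 0 clear) and 0x1ffff (17 bits).  */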
f6b7ba2b 433
f6b7ba2b 434/* This is just like the standard true_regnum() function except that it
c821cf9c 435 works even when reg_renumber is not initialized. */
f6b7ba2b 436
437int
fd63fcf8 438xt_true_regnum (rtx x)
f6b7ba2b 439{
440 if (GET_CODE (x) == REG)
441 {
442 if (reg_renumber
443 && REGNO (x) >= FIRST_PSEUDO_REGISTER
444 && reg_renumber[REGNO (x)] >= 0)
445 return reg_renumber[REGNO (x)];
446 return REGNO (x);
447 }
448 if (GET_CODE (x) == SUBREG)
449 {
450 int base = xt_true_regnum (SUBREG_REG (x));
451 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
452 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
453 GET_MODE (SUBREG_REG (x)),
454 SUBREG_BYTE (x), GET_MODE (x));
455 }
456 return -1;
457}
458
459
f6b7ba2b 460int
3754d046 461xtensa_valid_move (machine_mode mode, rtx *operands)
f6b7ba2b 462{
fc12fa10 463 /* Either the destination or source must be a register, and the
464 MAC16 accumulator doesn't count. */
465
466 if (register_operand (operands[0], mode))
467 {
468 int dst_regnum = xt_true_regnum (operands[0]);
469
a2acdfa1 470 if (xtensa_tls_referenced_p (operands[1]))
471 return FALSE;
472
c821cf9c 473 /* The stack pointer can only be assigned with a MOVSP opcode. */
fc12fa10 474 if (dst_regnum == STACK_POINTER_REGNUM)
b89c671b 475 return !TARGET_WINDOWED_ABI
476 || (mode == SImode
477 && register_operand (operands[1], mode)
478 && !ACC_REG_P (xt_true_regnum (operands[1])));
fc12fa10 479
480 if (!ACC_REG_P (dst_regnum))
481 return true;
482 }
141e2ef6 483 if (register_operand (operands[1], mode))
fc12fa10 484 {
485 int src_regnum = xt_true_regnum (operands[1]);
486 if (!ACC_REG_P (src_regnum))
487 return true;
488 }
f6b7ba2b 489 return FALSE;
490}
491
492
f6b7ba2b 493int
fd63fcf8 494smalloffset_mem_p (rtx op)
f6b7ba2b 495{
496 if (GET_CODE (op) == MEM)
497 {
498 rtx addr = XEXP (op, 0);
499 if (GET_CODE (addr) == REG)
771b6086 500 return BASE_REG_P (addr, 0);
f6b7ba2b 501 if (GET_CODE (addr) == PLUS)
502 {
503 rtx offset = XEXP (addr, 0);
7d0f7bf8 504 HOST_WIDE_INT val;
f6b7ba2b 505 if (GET_CODE (offset) != CONST_INT)
506 offset = XEXP (addr, 1);
507 if (GET_CODE (offset) != CONST_INT)
508 return FALSE;
7d0f7bf8 509
510 val = INTVAL (offset);
511 return (val & 3) == 0 && (val >= 0 && val <= 60);
f6b7ba2b 512 }
513 }
514 return FALSE;
515}
516
517
5cae3439 518static bool
519constantpool_address_p (const_rtx addr)
f6b7ba2b 520{
5cae3439 521 const_rtx sym = addr;
f6b7ba2b 522
523 if (GET_CODE (addr) == CONST)
524 {
525 rtx offset;
526
dafa59bd 527 /* Only handle (PLUS (SYM, OFFSET)) form. */
f6b7ba2b 528 addr = XEXP (addr, 0);
529 if (GET_CODE (addr) != PLUS)
5cae3439 530 return false;
f6b7ba2b 531
dafa59bd 532 /* Make sure the address is word aligned. */
f6b7ba2b 533 offset = XEXP (addr, 1);
5cae3439 534 if ((!CONST_INT_P (offset))
f6b7ba2b 535 || ((INTVAL (offset) & 3) != 0))
5cae3439 536 return false;
f6b7ba2b 537
538 sym = XEXP (addr, 0);
539 }
540
541 if ((GET_CODE (sym) == SYMBOL_REF)
542 && CONSTANT_POOL_ADDRESS_P (sym))
5cae3439 543 return true;
544 return false;
f6b7ba2b 545}
546
547
548int
fd63fcf8 549constantpool_mem_p (rtx op)
f6b7ba2b 550{
b0e603fe 551 if (GET_CODE (op) == SUBREG)
552 op = SUBREG_REG (op);
f6b7ba2b 553 if (GET_CODE (op) == MEM)
554 return constantpool_address_p (XEXP (op, 0));
555 return FALSE;
556}
557
558
c656b8fd 559/* Return TRUE if X is a thread-local symbol. */
560
561static bool
562xtensa_tls_symbol_p (rtx x)
563{
564 if (! TARGET_HAVE_TLS)
565 return false;
566
567 return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
568}
569
570
f6b7ba2b 571void
fd63fcf8 572xtensa_extend_reg (rtx dst, rtx src)
f6b7ba2b 573{
574 rtx temp = gen_reg_rtx (SImode);
575 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
576
dafa59bd 577 /* Generate paradoxical subregs as needed so that the modes match. */
f6b7ba2b 578 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
579 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
580
581 emit_insn (gen_ashlsi3 (temp, src, shift));
582 emit_insn (gen_ashrsi3 (dst, temp, shift));
583}
584
585
7d0f7bf8 586bool
3754d046 587xtensa_mem_offset (unsigned v, machine_mode mode)
f6b7ba2b 588{
589 switch (mode)
590 {
591 case BLKmode:
592 /* Handle the worst case for block moves. See xtensa_expand_block_move
593 where we emit an optimized block move operation if the block can be
594 moved in < "move_ratio" pieces. The worst case is when the block is
595 aligned but has a size of (3 mod 4) (does this happen?) so that the
c821cf9c 596 last piece requires a byte load/store. */
afb26b4b 597 return (xtensa_uimm8 (v)
598 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
f6b7ba2b 599
600 case QImode:
601 return xtensa_uimm8 (v);
602
603 case HImode:
604 return xtensa_uimm8x2 (v);
605
606 case DFmode:
607 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
608
609 default:
610 break;
611 }
612
613 return xtensa_uimm8x4 (v);
614}
615
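/* For reference (derived from the checks above): an SImode offset must be a
   multiple of 4 in 0..1020, and a DFmode offset additionally needs offset+4
   in range, i.e. 0..1016.  The BLKmode case requires both V and
   V + MOVE_MAX * LARGEST_MOVE_RATIO to be valid 8-bit offsets; assuming
   MOVE_MAX is 4 on this target, that limits a BLKmode offset to 0..195.  */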
616
fd63fcf8 617/* Make normal rtx_code into something we can index from an array. */
f6b7ba2b 618
619static enum internal_test
fd63fcf8 620map_test_to_internal_test (enum rtx_code test_code)
f6b7ba2b 621{
622 enum internal_test test = ITEST_MAX;
623
624 switch (test_code)
625 {
626 default: break;
627 case EQ: test = ITEST_EQ; break;
628 case NE: test = ITEST_NE; break;
629 case GT: test = ITEST_GT; break;
630 case GE: test = ITEST_GE; break;
631 case LT: test = ITEST_LT; break;
632 case LE: test = ITEST_LE; break;
633 case GTU: test = ITEST_GTU; break;
634 case GEU: test = ITEST_GEU; break;
635 case LTU: test = ITEST_LTU; break;
636 case LEU: test = ITEST_LEU; break;
637 }
638
639 return test;
640}
641
642
643/* Generate the code to compare two integer values. The return value is
c821cf9c 644 the comparison expression. */
f6b7ba2b 645
646static rtx
fd63fcf8 647gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
648 rtx cmp0, /* first operand to compare */
649 rtx cmp1, /* second operand to compare */
650 int *p_invert /* whether branch needs to reverse test */)
f6b7ba2b 651{
fd63fcf8 652 struct cmp_info
653 {
f6b7ba2b 654 enum rtx_code test_code; /* test code to use in insn */
7d0f7bf8 655 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
f6b7ba2b 656 int const_add; /* constant to add (convert LE -> LT) */
657 int reverse_regs; /* reverse registers in test */
658 int invert_const; /* != 0 if invert value if cmp1 is constant */
659 int invert_reg; /* != 0 if invert value if cmp1 is register */
660 int unsignedp; /* != 0 for unsigned comparisons. */
661 };
662
663 static struct cmp_info info[ (int)ITEST_MAX ] = {
664
7d0f7bf8 665 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
666 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
f6b7ba2b 667
7d0f7bf8 668 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
669 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
670 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
671 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
f6b7ba2b 672
673 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
674 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
675 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
676 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
677 };
678
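  /* For example, following the table above: "x > 5" with a constant operand
     uses const_add and invert_const, so it is emitted as the inverse of
     "x < 6"; "x > y" with y in a register uses reverse_regs and becomes
     "y < x".  */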
679 enum internal_test test;
3754d046 680 machine_mode mode;
f6b7ba2b 681 struct cmp_info *p_info;
682
683 test = map_test_to_internal_test (test_code);
cd3d4fe0 684 gcc_assert (test != ITEST_MAX);
f6b7ba2b 685
686 p_info = &info[ (int)test ];
687
688 mode = GET_MODE (cmp0);
689 if (mode == VOIDmode)
690 mode = GET_MODE (cmp1);
691
692 /* Make sure we can handle any constants given to us. */
693 if (GET_CODE (cmp1) == CONST_INT)
694 {
695 HOST_WIDE_INT value = INTVAL (cmp1);
696 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
697
698 /* if the immediate overflows or does not fit in the immediate field,
699 spill it to a register */
700
701 if ((p_info->unsignedp ?
702 (uvalue + p_info->const_add > uvalue) :
703 (value + p_info->const_add > value)) != (p_info->const_add > 0))
704 {
705 cmp1 = force_reg (mode, cmp1);
706 }
707 else if (!(p_info->const_range_p) (value + p_info->const_add))
708 {
709 cmp1 = force_reg (mode, cmp1);
710 }
711 }
712 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
713 {
714 cmp1 = force_reg (mode, cmp1);
715 }
716
717 /* See if we need to invert the result. */
718 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
719 ? p_info->invert_const
720 : p_info->invert_reg);
721
722 /* Comparison to constants, may involve adding 1 to change a LT into LE.
723 Comparison between two registers, may involve switching operands. */
724 if (GET_CODE (cmp1) == CONST_INT)
725 {
726 if (p_info->const_add != 0)
727 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
728
729 }
730 else if (p_info->reverse_regs)
731 {
732 rtx temp = cmp0;
733 cmp0 = cmp1;
734 cmp1 = temp;
735 }
736
29bb088d 737 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
f6b7ba2b 738}
739
740
741/* Generate the code to compare two float values. The return value is
c821cf9c 742 the comparison expression. */
f6b7ba2b 743
744static rtx
fd63fcf8 745gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
746 rtx cmp0, /* first operand to compare */
747 rtx cmp1 /* second operand to compare */)
f6b7ba2b 748{
fd63fcf8 749 rtx (*gen_fn) (rtx, rtx, rtx);
f6b7ba2b 750 rtx brtmp;
751 int reverse_regs, invert;
752
753 switch (test_code)
754 {
755 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
756 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
757 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
758 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
759 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
760 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
a22be2c5 761 case UNEQ: reverse_regs = 0; invert = 0; gen_fn = gen_suneq_sf; break;
762 case LTGT: reverse_regs = 0; invert = 1; gen_fn = gen_suneq_sf; break;
763 case UNLE: reverse_regs = 0; invert = 0; gen_fn = gen_sunle_sf; break;
764 case UNGT: reverse_regs = 1; invert = 0; gen_fn = gen_sunlt_sf; break;
765 case UNLT: reverse_regs = 0; invert = 0; gen_fn = gen_sunlt_sf; break;
766 case UNGE: reverse_regs = 1; invert = 0; gen_fn = gen_sunle_sf; break;
767 case UNORDERED:
768 reverse_regs = 0; invert = 0; gen_fn = gen_sunordered_sf; break;
769 case ORDERED:
770 reverse_regs = 0; invert = 1; gen_fn = gen_sunordered_sf; break;
de071186 771 default:
29bb088d 772 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
f6b7ba2b 773 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
774 }
775
776 if (reverse_regs)
777 {
778 rtx temp = cmp0;
779 cmp0 = cmp1;
780 cmp1 = temp;
781 }
782
783 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
784 emit_insn (gen_fn (brtmp, cmp0, cmp1));
785
29bb088d 786 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
f6b7ba2b 787}
788
789
790void
3754d046 791xtensa_expand_conditional_branch (rtx *operands, machine_mode mode)
f6b7ba2b 792{
74f4459c 793 enum rtx_code test_code = GET_CODE (operands[0]);
794 rtx cmp0 = operands[1];
795 rtx cmp1 = operands[2];
f6b7ba2b 796 rtx cmp;
797 int invert;
798 rtx label1, label2;
799
74f4459c 800 switch (mode)
f6b7ba2b 801 {
74f4459c 802 case DFmode:
f6b7ba2b 803 default:
29bb088d 804 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
f6b7ba2b 805
74f4459c 806 case SImode:
f6b7ba2b 807 invert = FALSE;
808 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
809 break;
810
74f4459c 811 case SFmode:
f6b7ba2b 812 if (!TARGET_HARD_FLOAT)
771b6086 813 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
814 cmp0, cmp1));
f6b7ba2b 815 invert = FALSE;
816 cmp = gen_float_relational (test_code, cmp0, cmp1);
817 break;
818 }
819
820 /* Generate the branch. */
821
74f4459c 822 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
f6b7ba2b 823 label2 = pc_rtx;
824
825 if (invert)
826 {
827 label2 = label1;
828 label1 = pc_rtx;
829 }
830
d1f9b275 831 emit_jump_insn (gen_rtx_SET (pc_rtx,
f6b7ba2b 832 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
833 label1,
834 label2)));
835}
836
837
838static rtx
3754d046 839gen_conditional_move (enum rtx_code code, machine_mode mode,
74f4459c 840 rtx op0, rtx op1)
f6b7ba2b 841{
74f4459c 842 if (mode == SImode)
f6b7ba2b 843 {
74f4459c 844 rtx cmp;
845
f6b7ba2b 846 /* Jump optimization calls get_condition() which canonicalizes
847 comparisons like (GE x <const>) to (GT x <const-1>).
848 Transform those comparisons back to GE, since that is the
849 comparison supported in Xtensa. We shouldn't have to
850 transform <LE x const> comparisons, because neither
851 xtensa_expand_conditional_branch() nor get_condition() will
c821cf9c 852 produce them. */
f6b7ba2b 853
854 if ((code == GT) && (op1 == constm1_rtx))
855 {
856 code = GE;
857 op1 = const0_rtx;
858 }
29bb088d 859 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
f6b7ba2b 860
861 if (boolean_operator (cmp, VOIDmode))
862 {
dafa59bd 863 /* Swap the operands to make const0 second. */
f6b7ba2b 864 if (op0 == const0_rtx)
865 {
866 op0 = op1;
867 op1 = const0_rtx;
868 }
869
dafa59bd 870 /* If not comparing against zero, emit a comparison (subtract). */
f6b7ba2b 871 if (op1 != const0_rtx)
872 {
873 op0 = expand_binop (SImode, sub_optab, op0, op1,
874 0, 0, OPTAB_LIB_WIDEN);
875 op1 = const0_rtx;
876 }
877 }
878 else if (branch_operator (cmp, VOIDmode))
879 {
dafa59bd 880 /* Swap the operands to make const0 second. */
f6b7ba2b 881 if (op0 == const0_rtx)
882 {
883 op0 = op1;
884 op1 = const0_rtx;
885
886 switch (code)
887 {
888 case LT: code = GE; break;
889 case GE: code = LT; break;
cd3d4fe0 890 default: gcc_unreachable ();
f6b7ba2b 891 }
892 }
893
894 if (op1 != const0_rtx)
895 return 0;
896 }
897 else
898 return 0;
899
29bb088d 900 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
f6b7ba2b 901 }
902
74f4459c 903 if (TARGET_HARD_FLOAT && mode == SFmode)
f6b7ba2b 904 return gen_float_relational (code, op0, op1);
905
906 return 0;
907}
908
909
910int
fd63fcf8 911xtensa_expand_conditional_move (rtx *operands, int isflt)
f6b7ba2b 912{
74f4459c 913 rtx dest = operands[0];
914 rtx cmp = operands[1];
3754d046 915 machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
fd63fcf8 916 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
f6b7ba2b 917
74f4459c 918 if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
919 XEXP (cmp, 0), XEXP (cmp, 1))))
f6b7ba2b 920 return 0;
921
922 if (isflt)
74f4459c 923 gen_fn = (cmp_mode == SImode
f6b7ba2b 924 ? gen_movsfcc_internal0
925 : gen_movsfcc_internal1);
926 else
74f4459c 927 gen_fn = (cmp_mode == SImode
f6b7ba2b 928 ? gen_movsicc_internal0
929 : gen_movsicc_internal1);
930
74f4459c 931 emit_insn (gen_fn (dest, XEXP (cmp, 0), operands[2], operands[3], cmp));
f6b7ba2b 932 return 1;
933}
934
935
936int
3754d046 937xtensa_expand_scc (rtx operands[4], machine_mode cmp_mode)
f6b7ba2b 938{
939 rtx dest = operands[0];
74f4459c 940 rtx cmp;
f6b7ba2b 941 rtx one_tmp, zero_tmp;
fd63fcf8 942 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
f6b7ba2b 943
74f4459c 944 if (!(cmp = gen_conditional_move (GET_CODE (operands[1]), cmp_mode,
945 operands[2], operands[3])))
f6b7ba2b 946 return 0;
947
948 one_tmp = gen_reg_rtx (SImode);
949 zero_tmp = gen_reg_rtx (SImode);
950 emit_insn (gen_movsi (one_tmp, const_true_rtx));
951 emit_insn (gen_movsi (zero_tmp, const0_rtx));
952
74f4459c 953 gen_fn = (cmp_mode == SImode
f6b7ba2b 954 ? gen_movsicc_internal0
955 : gen_movsicc_internal1);
956 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
957 return 1;
958}
959
960
de071186 961/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
962 for the output, i.e., the input operands are twice as big as MODE. */
963
964void
3754d046 965xtensa_split_operand_pair (rtx operands[4], machine_mode mode)
de071186 966{
967 switch (GET_CODE (operands[1]))
968 {
969 case REG:
970 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
971 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
972 break;
973
974 case MEM:
975 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
976 operands[2] = adjust_address (operands[1], mode, 0);
977 break;
978
979 case CONST_INT:
980 case CONST_DOUBLE:
981 split_double (operands[1], &operands[2], &operands[3]);
982 break;
983
984 default:
cd3d4fe0 985 gcc_unreachable ();
de071186 986 }
987
988 switch (GET_CODE (operands[0]))
989 {
990 case REG:
991 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
992 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
993 break;
994
995 case MEM:
996 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
997 operands[0] = adjust_address (operands[0], mode, 0);
998 break;
999
1000 default:
cd3d4fe0 1001 gcc_unreachable ();
de071186 1002 }
1003}
1004
1005
f6b7ba2b 1006/* Emit insns to move operands[1] into operands[0].
f6b7ba2b 1007 Return 1 if we have written out everything that needs to be done to
1008 do the move. Otherwise, return 0 and the caller will emit the move
1009 normally. */
1010
1011int
3754d046 1012xtensa_emit_move_sequence (rtx *operands, machine_mode mode)
f6b7ba2b 1013{
c656b8fd 1014 rtx src = operands[1];
1015
1016 if (CONSTANT_P (src)
1017 && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
f6b7ba2b 1018 {
c656b8fd 1019 rtx dst = operands[0];
1020
1021 if (xtensa_tls_referenced_p (src))
1022 {
1023 rtx addend = NULL;
1024
1025 if (GET_CODE (src) == CONST && GET_CODE (XEXP (src, 0)) == PLUS)
1026 {
1027 addend = XEXP (XEXP (src, 0), 1);
1028 src = XEXP (XEXP (src, 0), 0);
1029 }
1030
1031 src = xtensa_legitimize_tls_address (src);
1032 if (addend)
1033 {
1034 src = gen_rtx_PLUS (mode, src, addend);
1035 src = force_operand (src, dst);
1036 }
1037 emit_move_insn (dst, src);
1038 return 1;
1039 }
1040
a2acdfa1 1041 if (! TARGET_AUTO_LITPOOLS && ! TARGET_CONST16)
c656b8fd 1042 {
1043 src = force_const_mem (SImode, src);
1044 operands[1] = src;
1045 }
afb26b4b 1046
1047 /* PC-relative loads are always SImode, and CONST16 is only
1048 supported in the movsi pattern, so add a SUBREG for any other
1049 (smaller) mode. */
1050
1051 if (mode != SImode)
1052 {
c656b8fd 1053 if (register_operand (dst, mode))
afb26b4b 1054 {
c656b8fd 1055 emit_move_insn (simplify_gen_subreg (SImode, dst, mode, 0), src);
afb26b4b 1056 return 1;
1057 }
1058 else
1059 {
c656b8fd 1060 src = force_reg (SImode, src);
1061 src = gen_lowpart_SUBREG (mode, src);
1062 operands[1] = src;
afb26b4b 1063 }
1064 }
f6b7ba2b 1065 }
1066
e060c9df 1067 if (!(reload_in_progress | reload_completed)
1068 && !xtensa_valid_move (mode, operands))
1069 operands[1] = force_reg (mode, operands[1]);
f6b7ba2b 1070
e060c9df 1071 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
f6b7ba2b 1072
1073 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
c821cf9c 1074 instruction won't be recognized after reload, so we remove the
1075 subreg and adjust mem accordingly. */
f6b7ba2b 1076 if (reload_in_progress)
1077 {
1078 operands[0] = fixup_subreg_mem (operands[0]);
1079 operands[1] = fixup_subreg_mem (operands[1]);
1080 }
1081 return 0;
1082}
1083
afb26b4b 1084
f6b7ba2b 1085static rtx
fd63fcf8 1086fixup_subreg_mem (rtx x)
f6b7ba2b 1087{
1088 if (GET_CODE (x) == SUBREG
1089 && GET_CODE (SUBREG_REG (x)) == REG
1090 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1091 {
1092 rtx temp =
1093 gen_rtx_SUBREG (GET_MODE (x),
1c654ff1 1094 reg_equiv_mem (REGNO (SUBREG_REG (x))),
f6b7ba2b 1095 SUBREG_BYTE (x));
c6a6cdaa 1096 x = alter_subreg (&temp, true);
f6b7ba2b 1097 }
1098 return x;
1099}
1100
1101
e060c9df 1102/* Check if an incoming argument in a7 is expected to be used soon and
1103 if OPND is a register or register pair that includes a7. If so,
1104 create a new pseudo and copy a7 into that pseudo at the very
1105 beginning of the function, followed by the special "set_frame_ptr"
1106 unspec_volatile insn. The return value is either the original
1107 operand, if it is not a7, or the new pseudo containing a copy of
1108 the incoming argument. This is necessary because the register
1109 allocator will ignore conflicts with a7 and may either assign some
1110 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1111 the incoming argument in a7. By copying the argument out of a7 as
1112 the very first thing, and then immediately following that with an
1113 unspec_volatile to keep the scheduler away, we should avoid any
1114 problems. Putting the set_frame_ptr insn at the beginning, with
1115 only the a7 copy before it, also makes it easier for the prologue
1116 expander to initialize the frame pointer after the a7 copy and to
1117 fix up the a7 copy to use the stack pointer instead of the frame
1118 pointer. */
78d6a4ed 1119
e060c9df 1120rtx
1121xtensa_copy_incoming_a7 (rtx opnd)
78d6a4ed 1122{
e060c9df 1123 rtx entry_insns = 0;
1124 rtx reg, tmp;
3754d046 1125 machine_mode mode;
e060c9df 1126
1127 if (!cfun->machine->need_a7_copy)
1128 return opnd;
1129
1130 /* This function should never be called again once a7 has been copied. */
cd3d4fe0 1131 gcc_assert (!cfun->machine->set_frame_ptr_insn);
e060c9df 1132
1133 mode = GET_MODE (opnd);
1134
1135 /* The operand using a7 may come in a later instruction, so just return
1136 the original operand if it doesn't use a7. */
1137 reg = opnd;
1138 if (GET_CODE (reg) == SUBREG)
78d6a4ed 1139 {
cd3d4fe0 1140 gcc_assert (SUBREG_BYTE (reg) == 0);
e060c9df 1141 reg = SUBREG_REG (reg);
1142 }
1143 if (GET_CODE (reg) != REG
1144 || REGNO (reg) > A7_REG
1145 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1146 return opnd;
2aac53ce 1147
e060c9df 1148 /* 1-word args will always be in a7; 2-word args in a6/a7. */
cd3d4fe0 1149 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
78d6a4ed 1150
e060c9df 1151 cfun->machine->need_a7_copy = false;
78d6a4ed 1152
e060c9df 1153 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1154 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
78d6a4ed 1155
a3759617 1156 start_sequence ();
e060c9df 1157 tmp = gen_reg_rtx (mode);
78d6a4ed 1158
e060c9df 1159 switch (mode)
1160 {
1161 case DFmode:
1162 case DImode:
a0a73743 1163 /* Copy the value out of A7 here but keep the first word in A6 until
1164 after the set_frame_ptr insn. Otherwise, the register allocator
1165 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
1166 value. */
e060c9df 1167 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1168 gen_raw_REG (SImode, A7_REG)));
1169 break;
1170 case SFmode:
1171 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1172 break;
1173 case SImode:
1174 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1175 break;
1176 case HImode:
1177 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1178 break;
1179 case QImode:
1180 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1181 break;
1182 default:
cd3d4fe0 1183 gcc_unreachable ();
78d6a4ed 1184 }
1185
e060c9df 1186 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
a0a73743 1187
1188 /* For DF and DI mode arguments, copy the incoming value in A6 now. */
1189 if (mode == DFmode || mode == DImode)
1190 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1191 gen_rtx_REG (SImode, A7_REG - 1)));
e060c9df 1192 entry_insns = get_insns ();
1193 end_sequence ();
1194
1195 if (cfun->machine->vararg_a7)
1196 {
a3759617 1197 /* This is called from within builtin_saveregs, which will insert the
1198 saveregs code at the function entry, ahead of anything placed at
1199 the function entry now. Instead, save the sequence to be inserted
1200 at the beginning of the saveregs code. */
1201 cfun->machine->vararg_a7_copy = entry_insns;
e060c9df 1202 }
1203 else
1204 {
1205 /* Put entry_insns after the NOTE that starts the function. If
1206 this is inside a start_sequence, make the outer-level insn
1207 chain current, so the code is placed at the start of the
1208 function. */
1209 push_topmost_sequence ();
a3759617 1210 /* Do not use entry_of_function() here. This is called from within
1211 expand_function_start, when the CFG still holds GIMPLE. */
e060c9df 1212 emit_insn_after (entry_insns, get_insns ());
1213 pop_topmost_sequence ();
1214 }
1215
1216 return tmp;
78d6a4ed 1217}
1218
1219
a80259b6 1220/* Try to expand a block move operation to a sequence of RTL move
1221 instructions. If not optimizing, or if the block size is not a
1222 constant, or if the block is too large, the expansion fails and GCC
1223 falls back to calling memcpy().
f6b7ba2b 1224
1225 operands[0] is the destination
1226 operands[1] is the source
1227 operands[2] is the length
1228 operands[3] is the alignment */
1229
1230int
fd63fcf8 1231xtensa_expand_block_move (rtx *operands)
f6b7ba2b 1232{
3754d046 1233 static const machine_mode mode_from_align[] =
986ef67a 1234 {
1235 VOIDmode, QImode, HImode, VOIDmode, SImode,
1236 };
1237
1238 rtx dst_mem = operands[0];
1239 rtx src_mem = operands[1];
1240 HOST_WIDE_INT bytes, align;
f6b7ba2b 1241 int num_pieces, move_ratio;
986ef67a 1242 rtx temp[2];
3754d046 1243 machine_mode mode[2];
986ef67a 1244 int amount[2];
1245 bool active[2];
1246 int phase = 0;
1247 int next;
1248 int offset_ld = 0;
1249 int offset_st = 0;
1250 rtx x;
f6b7ba2b 1251
dafa59bd 1252 /* If this is not a fixed size move, just call memcpy. */
f6b7ba2b 1253 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1254 return 0;
1255
986ef67a 1256 bytes = INTVAL (operands[2]);
1257 align = INTVAL (operands[3]);
1258
dafa59bd 1259 /* Anything to move? */
f6b7ba2b 1260 if (bytes <= 0)
986ef67a 1261 return 0;
f6b7ba2b 1262
1263 if (align > MOVE_MAX)
1264 align = MOVE_MAX;
1265
dafa59bd 1266 /* Decide whether to expand inline based on the optimization level. */
f6b7ba2b 1267 move_ratio = 4;
1268 if (optimize > 2)
1269 move_ratio = LARGEST_MOVE_RATIO;
dafa59bd 1270 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
986ef67a 1271 if (num_pieces > move_ratio)
f6b7ba2b 1272 return 0;
1273
986ef67a 1274 x = XEXP (dst_mem, 0);
1275 if (!REG_P (x))
1276 {
1277 x = force_reg (Pmode, x);
1278 dst_mem = replace_equiv_address (dst_mem, x);
1279 }
f6b7ba2b 1280
986ef67a 1281 x = XEXP (src_mem, 0);
1282 if (!REG_P (x))
1283 {
1284 x = force_reg (Pmode, x);
1285 src_mem = replace_equiv_address (src_mem, x);
1286 }
f6b7ba2b 1287
986ef67a 1288 active[0] = active[1] = false;
f6b7ba2b 1289
986ef67a 1290 do
f6b7ba2b 1291 {
986ef67a 1292 next = phase;
1293 phase ^= 1;
f6b7ba2b 1294
986ef67a 1295 if (bytes > 0)
f6b7ba2b 1296 {
986ef67a 1297 int next_amount;
f6b7ba2b 1298
986ef67a 1299 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1300 next_amount = MIN (next_amount, align);
f6b7ba2b 1301
986ef67a 1302 amount[next] = next_amount;
1303 mode[next] = mode_from_align[next_amount];
1304 temp[next] = gen_reg_rtx (mode[next]);
f6b7ba2b 1305
986ef67a 1306 x = adjust_address (src_mem, mode[next], offset_ld);
d1f9b275 1307 emit_insn (gen_rtx_SET (temp[next], x));
f6b7ba2b 1308
986ef67a 1309 offset_ld += next_amount;
1310 bytes -= next_amount;
1311 active[next] = true;
1312 }
f6b7ba2b 1313
986ef67a 1314 if (active[phase])
1315 {
1316 active[phase] = false;
1317
1318 x = adjust_address (dst_mem, mode[phase], offset_st);
d1f9b275 1319 emit_insn (gen_rtx_SET (x, temp[phase]));
f6b7ba2b 1320
986ef67a 1321 offset_st += amount[phase];
1322 }
f6b7ba2b 1323 }
986ef67a 1324 while (active[next]);
f6b7ba2b 1325
986ef67a 1326 return 1;
f6b7ba2b 1327}
1328
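/* For example, a 7-byte copy with 4-byte alignment is expanded above into
   load/store pairs of 4, 2 and 1 bytes.  The loads and stores are
   interleaved by the "phase" double-buffering in the loop, so each store
   uses the temporary that was loaded on the previous iteration.  */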
1329
1330void
fd63fcf8 1331xtensa_expand_nonlocal_goto (rtx *operands)
f6b7ba2b 1332{
1333 rtx goto_handler = operands[1];
1334 rtx containing_fp = operands[3];
1335
dafa59bd 1336 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1337 is too big to generate in-line. */
f6b7ba2b 1338
1339 if (GET_CODE (containing_fp) != REG)
1340 containing_fp = force_reg (Pmode, containing_fp);
1341
f6b7ba2b 1342 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
51538c49 1343 LCT_NORMAL, VOIDmode, 2,
f6b7ba2b 1344 containing_fp, Pmode,
1345 goto_handler, Pmode);
1346}
1347
1348
1f3233d1 1349static struct machine_function *
fd63fcf8 1350xtensa_init_machine_status (void)
f6b7ba2b 1351{
25a27413 1352 return ggc_cleared_alloc<machine_function> ();
f6b7ba2b 1353}
1354
1355
4690907f 1356/* Shift VAL of mode MODE left by COUNT bits. */
1357
1358static inline rtx
3754d046 1359xtensa_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
4690907f 1360{
1361 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1362 NULL_RTX, 1, OPTAB_DIRECT);
1363 return expand_simple_binop (SImode, ASHIFT, val, count,
1364 NULL_RTX, 1, OPTAB_DIRECT);
1365}
1366
1367
1368/* Structure to hold the initial parameters for a compare_and_swap operation
1369 in HImode and QImode. */
1370
1371struct alignment_context
1372{
1373 rtx memsi; /* SI aligned memory location. */
1374 rtx shift; /* Bit offset with regard to lsb. */
1375 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
1376 rtx modemaski; /* ~modemask */
1377};
1378
1379
1380/* Initialize structure AC for word access to HI and QI mode memory. */
1381
1382static void
1383init_alignment_context (struct alignment_context *ac, rtx mem)
1384{
3754d046 1385 machine_mode mode = GET_MODE (mem);
4690907f 1386 rtx byteoffset = NULL_RTX;
1387 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1388
1389 if (aligned)
1390 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1391 else
1392 {
1393 /* Alignment is unknown. */
1394 rtx addr, align;
1395
1396 /* Force the address into a register. */
1397 addr = force_reg (Pmode, XEXP (mem, 0));
1398
1399 /* Align it to SImode. */
1400 align = expand_simple_binop (Pmode, AND, addr,
1401 GEN_INT (-GET_MODE_SIZE (SImode)),
1402 NULL_RTX, 1, OPTAB_DIRECT);
1403 /* Generate MEM. */
1404 ac->memsi = gen_rtx_MEM (SImode, align);
1405 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
1406 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
1407 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
1408
1409 byteoffset = expand_simple_binop (Pmode, AND, addr,
1410 GEN_INT (GET_MODE_SIZE (SImode) - 1),
1411 NULL_RTX, 1, OPTAB_DIRECT);
1412 }
1413
1414 /* Calculate shiftcount. */
1415 if (TARGET_BIG_ENDIAN)
1416 {
1417 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
1418 if (!aligned)
1419 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
1420 NULL_RTX, 1, OPTAB_DIRECT);
1421 }
1422 else
1423 {
1424 if (aligned)
1425 ac->shift = NULL_RTX;
1426 else
1427 ac->shift = byteoffset;
1428 }
1429
1430 if (ac->shift != NULL_RTX)
1431 {
1432 /* Shift is the byte count, but we need the bitcount. */
64ab0923 1433 gcc_assert (exact_log2 (BITS_PER_UNIT) >= 0);
1434 ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift,
1435 GEN_INT (exact_log2 (BITS_PER_UNIT)),
4690907f 1436 NULL_RTX, 1, OPTAB_DIRECT);
1437 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1438 GEN_INT (GET_MODE_MASK (mode)),
1439 ac->shift,
1440 NULL_RTX, 1, OPTAB_DIRECT);
1441 }
1442 else
1443 ac->modemask = GEN_INT (GET_MODE_MASK (mode));
1444
1445 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
1446}
1447
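/* A worked example of the unaligned, little-endian case (the address value
   is hypothetical; the arithmetic follows the code above): for a QImode
   access whose address evaluates to 0x1003, memsi covers the word at
   0x1000, byteoffset is 3, shift is 3 * 8 = 24 bits, modemask is
   0xff << 24 and modemaski is its complement, so the byte of interest
   occupies bits 24..31 of the containing word.  */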
1448
1449/* Expand an atomic compare and swap operation for HImode and QImode.
1450 MEM is the memory location, CMP the old value to compare MEM with
8deb3959 1451 and NEW_RTX the value to set if CMP == MEM. */
4690907f 1452
1453void
8deb3959 1454xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
4690907f 1455{
3754d046 1456 machine_mode mode = GET_MODE (mem);
4690907f 1457 struct alignment_context ac;
1458 rtx tmp, cmpv, newv, val;
1459 rtx oldval = gen_reg_rtx (SImode);
1460 rtx res = gen_reg_rtx (SImode);
bf735bc6 1461 rtx_code_label *csloop = gen_label_rtx ();
1462 rtx_code_label *csend = gen_label_rtx ();
4690907f 1463
1464 init_alignment_context (&ac, mem);
1465
1466 if (ac.shift != NULL_RTX)
1467 {
1468 cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
8deb3959 1469 new_rtx = xtensa_expand_mask_and_shift (new_rtx, mode, ac.shift);
4690907f 1470 }
1471
1472 /* Load the surrounding word into VAL with the MEM value masked out. */
1473 val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
1474 ac.modemaski, NULL_RTX, 1,
1475 OPTAB_DIRECT));
1476 emit_label (csloop);
1477
8deb3959 1478 /* Patch CMP and NEW_RTX into VAL at correct position. */
4690907f 1479 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
1480 NULL_RTX, 1, OPTAB_DIRECT));
8deb3959 1481 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
4690907f 1482 NULL_RTX, 1, OPTAB_DIRECT));
1483
1484 /* Jump to end if we're done. */
1485 emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
1486 emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);
1487
1488 /* Check for changes outside mode. */
1489 emit_move_insn (oldval, val);
1490 tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
1491 val, 1, OPTAB_DIRECT);
1492 if (tmp != val)
1493 emit_move_insn (val, tmp);
1494
1495 /* Loop internal if so. */
1496 emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);
1497
1498 emit_label (csend);
1499
1500 /* Return the correct part of the bitfield. */
1501 convert_move (target,
1502 (ac.shift == NULL_RTX ? res
1503 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
1504 NULL_RTX, 1, OPTAB_DIRECT)),
1505 1);
1506}
1507
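/* A rough sketch (pseudo-C, not literal RTL) of the sequence built above
   for a sub-word compare-and-swap, where "cas_si" stands for the
   sync_compare_and_swapsi pattern:

     cmp  = (cmp & mode_mask) << shift;
     new  = (new & mode_mask) << shift;
     val  = *word & ~modemask;
   loop:
     cmpv = cmp | val;  newv = new | val;
     res  = cas_si (word, cmpv, newv);
     if (res == cmpv) goto done;
     old = val;  val = res & ~modemask;
     if (old != val) goto loop;        // surrounding bytes changed; retry
   done:
     result = (res >> shift) & mode_mask;  */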
1508
1509/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
1510 the default expansion works fine for SImode). MEM is the memory location
1511 and VAL the value to play with. If AFTER is true then store the value
1512 MEM holds after the operation, if AFTER is false then store the value MEM
1513 holds before the operation. If TARGET is zero then discard that value, else
1514 store it to TARGET. */
1515
1516void
1517xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1518 bool after)
1519{
3754d046 1520 machine_mode mode = GET_MODE (mem);
4690907f 1521 struct alignment_context ac;
bf735bc6 1522 rtx_code_label *csloop = gen_label_rtx ();
4690907f 1523 rtx cmp, tmp;
1524 rtx old = gen_reg_rtx (SImode);
8deb3959 1525 rtx new_rtx = gen_reg_rtx (SImode);
4690907f 1526 rtx orig = NULL_RTX;
1527
1528 init_alignment_context (&ac, mem);
1529
1530 /* Prepare values before the compare-and-swap loop. */
1531 if (ac.shift != NULL_RTX)
1532 val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
1533 switch (code)
1534 {
1535 case PLUS:
1536 case MINUS:
1537 orig = gen_reg_rtx (SImode);
1538 convert_move (orig, val, 1);
1539 break;
1540
1541 case SET:
1542 case IOR:
1543 case XOR:
1544 break;
1545
1546 case MULT: /* NAND */
1547 case AND:
1548 /* val = "11..1<val>11..1" */
1549 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
1550 NULL_RTX, 1, OPTAB_DIRECT);
1551 break;
1552
1553 default:
1554 gcc_unreachable ();
1555 }
1556
1557 /* Load full word. Subsequent loads are performed by S32C1I. */
1558 cmp = force_reg (SImode, ac.memsi);
1559
1560 emit_label (csloop);
1561 emit_move_insn (old, cmp);
1562
1563 switch (code)
1564 {
1565 case PLUS:
1566 case MINUS:
1567 val = expand_simple_binop (SImode, code, old, orig,
1568 NULL_RTX, 1, OPTAB_DIRECT);
1569 val = expand_simple_binop (SImode, AND, val, ac.modemask,
1570 NULL_RTX, 1, OPTAB_DIRECT);
1571 /* FALLTHRU */
1572 case SET:
1573 tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
1574 NULL_RTX, 1, OPTAB_DIRECT);
1575 tmp = expand_simple_binop (SImode, IOR, tmp, val,
8deb3959 1576 new_rtx, 1, OPTAB_DIRECT);
4690907f 1577 break;
1578
1579 case AND:
1580 case IOR:
1581 case XOR:
1582 tmp = expand_simple_binop (SImode, code, old, val,
8deb3959 1583 new_rtx, 1, OPTAB_DIRECT);
4690907f 1584 break;
1585
1586 case MULT: /* NAND */
1587 tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
1588 NULL_RTX, 1, OPTAB_DIRECT);
1589 tmp = expand_simple_binop (SImode, AND, tmp, val,
8deb3959 1590 new_rtx, 1, OPTAB_DIRECT);
4690907f 1591 break;
1592
1593 default:
1594 gcc_unreachable ();
1595 }
1596
8deb3959 1597 if (tmp != new_rtx)
1598 emit_move_insn (new_rtx, tmp);
1599 emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new_rtx));
4690907f 1600 emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);
1601
1602 if (target)
1603 {
8deb3959 1604 tmp = (after ? new_rtx : cmp);
4690907f 1605 convert_move (target,
1606 (ac.shift == NULL_RTX ? tmp
1607 : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
1608 NULL_RTX, 1, OPTAB_DIRECT)),
1609 1);
1610 }
1611}
1612
1613
f6b7ba2b 1614void
fd63fcf8 1615xtensa_setup_frame_addresses (void)
f6b7ba2b 1616{
5a1c68c3 1617 /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true. */
f6b7ba2b 1618 cfun->machine->accesses_prev_frame = 1;
1619
b89c671b 1620 if (TARGET_WINDOWED_ABI)
1621 emit_library_call
1622 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1623 LCT_NORMAL, VOIDmode, 0);
f6b7ba2b 1624}
1625
1626
c821cf9c 1627/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1628 a comment showing where the end of the loop is. However, if there is a
f6b7ba2b 1629 label or a branch at the end of the loop then we need to place a nop
c821cf9c 1630 there. If the loop ends with a label we need the nop so that branches
efee20da 1631 targeting that label will target the nop (and thus remain in the loop),
1632 instead of targeting the instruction after the loop (and thus exiting
c821cf9c 1633 the loop). If the loop ends with a branch, we need the nop in case the
efee20da 1634 branch is targeting a location inside the loop. When the branch
f6b7ba2b 1635 executes it will cause the loop count to be decremented even if it is
1636 taken (because it is the last instruction in the loop), so we need to
1637 nop after the branch to prevent the loop count from being decremented
c821cf9c 1638 when the branch is taken. */
f6b7ba2b 1639
1640void
bf735bc6 1641xtensa_emit_loop_end (rtx_insn *insn, rtx *operands)
f6b7ba2b 1642{
1643 char done = 0;
1644
1645 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1646 {
1647 switch (GET_CODE (insn))
1648 {
1649 case NOTE:
1650 case BARRIER:
1651 break;
1652
1653 case CODE_LABEL:
2af1591e 1654 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
f6b7ba2b 1655 done = 1;
1656 break;
1657
1658 default:
1659 {
1660 rtx body = PATTERN (insn);
1661
aa90bb35 1662 if (JUMP_P (body))
f6b7ba2b 1663 {
2af1591e 1664 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
f6b7ba2b 1665 done = 1;
1666 }
1667 else if ((GET_CODE (body) != USE)
1668 && (GET_CODE (body) != CLOBBER))
1669 done = 1;
1670 }
1671 break;
1672 }
1673 }
1674
47edca9a 1675 output_asm_insn ("%1_LEND:", operands);
f6b7ba2b 1676}
1677
1678
3c0ca649 1679char *
1680xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1681{
1682 static char result[64];
1683 enum rtx_code code;
1684 const char *op;
1685
1686 code = GET_CODE (operands[3]);
1687 switch (code)
1688 {
1689 case EQ: op = inverted ? "ne" : "eq"; break;
1690 case NE: op = inverted ? "eq" : "ne"; break;
1691 case LT: op = inverted ? "ge" : "lt"; break;
1692 case GE: op = inverted ? "lt" : "ge"; break;
1693 case LTU: op = inverted ? "geu" : "ltu"; break;
1694 case GEU: op = inverted ? "ltu" : "geu"; break;
1695 default: gcc_unreachable ();
1696 }
1697
1698 if (immed)
1699 {
1700 if (INTVAL (operands[1]) == 0)
1701 sprintf (result, "b%sz%s\t%%0, %%2", op,
1702 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1703 else
1704 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1705 }
1706 else
1707 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1708
1709 return result;
1710}
1711
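/* For illustration (register and label names hypothetical): with a2 as
   operand 0 and .L5 as the target label, an EQ test against the constant 0
   prints as "beqz\ta2, .L5" (with a ".n" suffix under TARGET_DENSITY), an
   LT test against the constant 4 as "blti\ta2, 4, .L5", and a GEU test
   against a register as "bgeu\ta2, a3, .L5".  */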
1712
1713char *
1714xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1715{
1716 static char result[64];
1717 const char *op;
1718
1719 switch (GET_CODE (operands[3]))
1720 {
1721 case EQ: op = inverted ? "bs" : "bc"; break;
1722 case NE: op = inverted ? "bc" : "bs"; break;
1723 default: gcc_unreachable ();
1724 }
1725
1726 if (immed)
1727 {
1728 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1729 operands[1] = GEN_INT (bitnum);
1730 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1731 }
1732 else
1733 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1734
1735 return result;
1736}
1737
1738
1739char *
1740xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1741{
1742 static char result[64];
1743 enum rtx_code code;
1744 const char *op;
1745
1746 code = GET_CODE (operands[4]);
1747 if (isbool)
1748 {
1749 switch (code)
1750 {
1751 case EQ: op = inverted ? "t" : "f"; break;
1752 case NE: op = inverted ? "f" : "t"; break;
1753 default: gcc_unreachable ();
1754 }
1755 }
1756 else
1757 {
1758 switch (code)
1759 {
1760 case EQ: op = inverted ? "nez" : "eqz"; break;
1761 case NE: op = inverted ? "eqz" : "nez"; break;
1762 case LT: op = inverted ? "gez" : "ltz"; break;
1763 case GE: op = inverted ? "ltz" : "gez"; break;
1764 default: gcc_unreachable ();
1765 }
1766 }
1767
1768 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1769 op, isfp ? ".s" : "", inverted ? 3 : 2);
1770 return result;
1771}
1772
1773
f6b7ba2b 1774char *
fd63fcf8 1775xtensa_emit_call (int callop, rtx *operands)
f6b7ba2b 1776{
bbfbe351 1777 static char result[64];
f6b7ba2b 1778 rtx tgt = operands[callop];
1779
1780 if (GET_CODE (tgt) == CONST_INT)
b89c671b 1781 sprintf (result, "call%d\t0x%lx", WINDOW_SIZE, INTVAL (tgt));
f6b7ba2b 1782 else if (register_operand (tgt, VOIDmode))
b89c671b 1783 sprintf (result, "callx%d\t%%%d", WINDOW_SIZE, callop);
f6b7ba2b 1784 else
b89c671b 1785 sprintf (result, "call%d\t%%%d", WINDOW_SIZE, callop);
f6b7ba2b 1786
1787 return result;
1788}
1789
1790
771b6086 1791bool
3754d046 1792xtensa_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
771b6086 1793{
1794 /* Allow constant pool addresses. */
1795 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
c656b8fd 1796 && ! TARGET_CONST16 && constantpool_address_p (addr)
1797 && ! xtensa_tls_referenced_p (addr))
771b6086 1798 return true;
1799
1800 while (GET_CODE (addr) == SUBREG)
1801 addr = SUBREG_REG (addr);
1802
1803 /* Allow base registers. */
1804 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1805 return true;
1806
1807 /* Check for "register + offset" addressing. */
1808 if (GET_CODE (addr) == PLUS)
1809 {
1810 rtx xplus0 = XEXP (addr, 0);
1811 rtx xplus1 = XEXP (addr, 1);
1812 enum rtx_code code0;
1813 enum rtx_code code1;
1814
1815 while (GET_CODE (xplus0) == SUBREG)
1816 xplus0 = SUBREG_REG (xplus0);
1817 code0 = GET_CODE (xplus0);
1818
1819 while (GET_CODE (xplus1) == SUBREG)
1820 xplus1 = SUBREG_REG (xplus1);
1821 code1 = GET_CODE (xplus1);
1822
1823 /* Swap operands if necessary so the register is first. */
1824 if (code0 != REG && code1 == REG)
1825 {
1826 xplus0 = XEXP (addr, 1);
1827 xplus1 = XEXP (addr, 0);
1828 code0 = GET_CODE (xplus0);
1829 code1 = GET_CODE (xplus1);
1830 }
1831
1832 if (code0 == REG && BASE_REG_P (xplus0, strict)
1833 && code1 == CONST_INT
1834 && xtensa_mem_offset (INTVAL (xplus1), mode))
1835 return true;
1836 }
1837
1838 return false;
1839}
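
/* Illustrative examples (register numbers invented; the offset ranges
   are those accepted by xtensa_mem_offset): (reg a3) is always a valid
   address; (plus (reg a3) (const_int 8)) is valid for SImode because
   l32i accepts offsets 0..1020 in multiples of 4, while HImode allows
   0..510 in multiples of 2 and QImode 0..255; (plus (reg a3)
   (const_int 2048)) is rejected here and must first be legitimized.  */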
1840
1841
c656b8fd 1842/* Construct the SYMBOL_REF for the _TLS_MODULE_BASE_ symbol. */
1843
1844static GTY(()) rtx xtensa_tls_module_base_symbol;
1845
1846static rtx
1847xtensa_tls_module_base (void)
1848{
1849 if (! xtensa_tls_module_base_symbol)
1850 {
1851 xtensa_tls_module_base_symbol =
1852 gen_rtx_SYMBOL_REF (Pmode, "_TLS_MODULE_BASE_");
1853 SYMBOL_REF_FLAGS (xtensa_tls_module_base_symbol)
1854 |= TLS_MODEL_GLOBAL_DYNAMIC << SYMBOL_FLAG_TLS_SHIFT;
1855 }
1856
1857 return xtensa_tls_module_base_symbol;
1858}
1859
1860
bf735bc6 1861static rtx_insn *
c656b8fd 1862xtensa_call_tls_desc (rtx sym, rtx *retp)
1863{
0ae28a22 1864 rtx fn, arg, a_io;
bf735bc6 1865 rtx_insn *call_insn, *insns;
c656b8fd 1866
1867 start_sequence ();
1868 fn = gen_reg_rtx (Pmode);
1869 arg = gen_reg_rtx (Pmode);
0ae28a22 1870 a_io = gen_rtx_REG (Pmode, WINDOW_SIZE + 2);
c656b8fd 1871
1872 emit_insn (gen_tls_func (fn, sym));
1873 emit_insn (gen_tls_arg (arg, sym));
0ae28a22 1874 emit_move_insn (a_io, arg);
1875 call_insn = emit_call_insn (gen_tls_call (a_io, fn, sym, const1_rtx));
1876 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), a_io);
c656b8fd 1877 insns = get_insns ();
1878 end_sequence ();
1879
0ae28a22 1880 *retp = a_io;
c656b8fd 1881 return insns;
1882}
1883
1884
1885static rtx
1886xtensa_legitimize_tls_address (rtx x)
1887{
1888 unsigned int model = SYMBOL_REF_TLS_MODEL (x);
bf735bc6 1889 rtx dest, tp, ret, modbase, base, addend;
1890 rtx_insn *insns;
c656b8fd 1891
1892 dest = gen_reg_rtx (Pmode);
1893 switch (model)
1894 {
1895 case TLS_MODEL_GLOBAL_DYNAMIC:
1896 insns = xtensa_call_tls_desc (x, &ret);
1897 emit_libcall_block (insns, dest, ret, x);
1898 break;
1899
1900 case TLS_MODEL_LOCAL_DYNAMIC:
1901 base = gen_reg_rtx (Pmode);
1902 modbase = xtensa_tls_module_base ();
1903 insns = xtensa_call_tls_desc (modbase, &ret);
1904 emit_libcall_block (insns, base, ret, modbase);
1905 addend = force_reg (SImode, gen_sym_DTPOFF (x));
1906 emit_insn (gen_addsi3 (dest, base, addend));
1907 break;
1908
1909 case TLS_MODEL_INITIAL_EXEC:
1910 case TLS_MODEL_LOCAL_EXEC:
1911 tp = gen_reg_rtx (SImode);
badaa04c 1912 emit_insn (gen_get_thread_pointersi (tp));
c656b8fd 1913 addend = force_reg (SImode, gen_sym_TPOFF (x));
1914 emit_insn (gen_addsi3 (dest, tp, addend));
1915 break;
1916
1917 default:
1918 gcc_unreachable ();
1919 }
1920
1921 return dest;
1922}
1923
1924
771b6086 1925rtx
1926xtensa_legitimize_address (rtx x,
1927 rtx oldx ATTRIBUTE_UNUSED,
3754d046 1928 machine_mode mode)
771b6086 1929{
c656b8fd 1930 if (xtensa_tls_symbol_p (x))
1931 return xtensa_legitimize_tls_address (x);
1932
771b6086 1933 if (GET_CODE (x) == PLUS)
1934 {
1935 rtx plus0 = XEXP (x, 0);
1936 rtx plus1 = XEXP (x, 1);
1937
1938 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1939 {
1940 plus0 = XEXP (x, 1);
1941 plus1 = XEXP (x, 0);
1942 }
1943
1944 /* Try to split up the offset to use an ADDMI instruction. */
1945 if (GET_CODE (plus0) == REG
1946 && GET_CODE (plus1) == CONST_INT
1947 && !xtensa_mem_offset (INTVAL (plus1), mode)
1948 && !xtensa_simm8 (INTVAL (plus1))
1949 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1950 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1951 {
1952 rtx temp = gen_reg_rtx (Pmode);
1953 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
d1f9b275 1954 emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, plus0,
1955 addmi_offset)));
771b6086 1956 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1957 }
1958 }
1959
41e3a0c7 1960 return x;
771b6086 1961}
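
/* Worked example (register numbers invented for illustration): for the
   address (plus (reg a3) (const_int 0x1234)), 0x1234 is neither a valid
   load/store offset nor a simm8, but 0x1234 & ~0xff == 0x1200 is a valid
   ADDMI immediate and 0x1234 & 0xff == 0x34 is a valid memory offset, so
   the address is rewritten roughly as

	addmi	a8, a3, 0x1200
	l32i	a2, a8, 0x34  */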
1962
5cae3439 1963/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
1964
1965 Treat constant-pool references as "mode dependent" since they can
1966 only be accessed with SImode loads. This works around a bug in the
1967 combiner where a constant pool reference is temporarily converted
1968 to an HImode load, which is then assumed to zero-extend based on
1969 our definition of LOAD_EXTEND_OP. This is wrong because the high
1970 bits of a 16-bit value in the constant pool are now sign-extended
1971 by default. */
1972
1973static bool
4e27ffd0 1974xtensa_mode_dependent_address_p (const_rtx addr,
1975 addr_space_t as ATTRIBUTE_UNUSED)
5cae3439 1976{
1977 return constantpool_address_p (addr);
1978}
771b6086 1979
c656b8fd 1980/* Return TRUE if X contains any TLS symbol references. */
1981
1982bool
1983xtensa_tls_referenced_p (rtx x)
1984{
1985 if (! TARGET_HAVE_TLS)
1986 return false;
1987
cd2faba8 1988 subrtx_iterator::array_type array;
1989 FOR_EACH_SUBRTX (iter, array, x, ALL)
1990 {
1991 const_rtx x = *iter;
1992 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
1993 return true;
1994
1995 /* Ignore TLS references that have already been legitimized. */
1996 if (GET_CODE (x) == UNSPEC)
1997 switch (XINT (x, 1))
1998 {
1999 case UNSPEC_TPOFF:
2000 case UNSPEC_DTPOFF:
2001 case UNSPEC_TLS_FUNC:
2002 case UNSPEC_TLS_ARG:
2003 case UNSPEC_TLS_CALL:
2004 iter.skip_subrtxes ();
2005 break;
2006 default:
2007 break;
2008 }
2009 }
2010 return false;
c656b8fd 2011}
2012
2013
7d7d7bd2 2014/* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2015
2016static bool
3754d046 2017xtensa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
7d7d7bd2 2018{
2019 return xtensa_tls_referenced_p (x);
2020}
2021
2022
b68eeca9 2023/* Return the debugger register number to use for 'regno'. */
f6b7ba2b 2024
2025int
fd63fcf8 2026xtensa_dbx_register_number (int regno)
f6b7ba2b 2027{
2028 int first = -1;
de071186 2029
2030 if (GP_REG_P (regno))
2031 {
2032 regno -= GP_REG_FIRST;
2033 first = 0;
2034 }
2035 else if (BR_REG_P (regno))
2036 {
2037 regno -= BR_REG_FIRST;
2038 first = 16;
2039 }
2040 else if (FP_REG_P (regno))
2041 {
2042 regno -= FP_REG_FIRST;
b68eeca9 2043 first = 48;
de071186 2044 }
f6b7ba2b 2045 else if (ACC_REG_P (regno))
2046 {
b68eeca9 2047 first = 0x200; /* Start of Xtensa special registers. */
2048 regno = 16; /* ACCLO is special register 16. */
f6b7ba2b 2049 }
2050
2051 /* When optimizing, we sometimes get asked about pseudo-registers
c821cf9c 2052 that don't represent hard registers. Return 0 for these. */
f6b7ba2b 2053 if (first == -1)
2054 return 0;
2055
2056 return first + regno;
2057}
2058
2059
2060/* Argument support functions. */
2061
2062/* Initialize CUMULATIVE_ARGS for a function. */
2063
2064void
e060c9df 2065init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
f6b7ba2b 2066{
2067 cum->arg_words = 0;
e060c9df 2068 cum->incoming = incoming;
f6b7ba2b 2069}
2070
fd63fcf8 2071
f6b7ba2b 2072/* Advance the argument to the next argument position. */
2073
41e01e3e 2074static void
3754d046 2075xtensa_function_arg_advance (cumulative_args_t cum, machine_mode mode,
41e01e3e 2076 const_tree type, bool named ATTRIBUTE_UNUSED)
f6b7ba2b 2077{
2078 int words, max;
2079 int *arg_words;
2080
39cba157 2081 arg_words = &get_cumulative_args (cum)->arg_words;
f6b7ba2b 2082 max = MAX_ARGS_IN_REGISTERS;
2083
2084 words = (((mode != BLKmode)
2085 ? (int) GET_MODE_SIZE (mode)
2086 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2087
ea2981b9 2088 if (*arg_words < max
2089 && (targetm.calls.must_pass_in_stack (mode, type)
2090 || *arg_words + words > max))
f6b7ba2b 2091 *arg_words = max;
2092
2093 *arg_words += words;
2094}
2095
2096
2097/* Return an RTL expression containing the register for the given mode,
751e10d1 2098   or 0 if the argument is to be passed on the stack.  INCOMING_P is true

fd63fcf8 2099 if this is an incoming argument to the current function. */
f6b7ba2b 2100
41e01e3e 2101static rtx
3754d046 2102xtensa_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
41e01e3e 2103 const_tree type, bool incoming_p)
f6b7ba2b 2104{
39cba157 2105 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
f6b7ba2b 2106 int regbase, words, max;
2107 int *arg_words;
2108 int regno;
f6b7ba2b 2109
2110 arg_words = &cum->arg_words;
2111 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
2112 max = MAX_ARGS_IN_REGISTERS;
2113
2114 words = (((mode != BLKmode)
2115 ? (int) GET_MODE_SIZE (mode)
2116 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2117
2118 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
9276fdff 2119 {
81c44390 2120 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
9276fdff 2121 *arg_words = (*arg_words + align - 1) & -align;
2122 }
f6b7ba2b 2123
2124 if (*arg_words + words > max)
2125 return (rtx)0;
2126
2127 regno = regbase + *arg_words;
f6b7ba2b 2128
e060c9df 2129 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
b89c671b 2130 cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
f6b7ba2b 2131
e060c9df 2132 return gen_rtx_REG (mode, regno);
f6b7ba2b 2133}
2134
41e01e3e 2135/* Implement TARGET_FUNCTION_ARG. */
2136
2137static rtx
3754d046 2138xtensa_function_arg (cumulative_args_t cum, machine_mode mode,
41e01e3e 2139 const_tree type, bool named ATTRIBUTE_UNUSED)
2140{
2141 return xtensa_function_arg_1 (cum, mode, type, false);
2142}
2143
2144/* Implement TARGET_FUNCTION_INCOMING_ARG. */
2145
2146static rtx
3754d046 2147xtensa_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
41e01e3e 2148 const_tree type, bool named ATTRIBUTE_UNUSED)
2149{
2150 return xtensa_function_arg_1 (cum, mode, type, true);
2151}
f6b7ba2b 2152
bd99ba64 2153static unsigned int
3754d046 2154xtensa_function_arg_boundary (machine_mode mode, const_tree type)
81c44390 2155{
2156 unsigned int alignment;
2157
2158 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2159 if (alignment < PARM_BOUNDARY)
2160 alignment = PARM_BOUNDARY;
2161 if (alignment > STACK_BOUNDARY)
2162 alignment = STACK_BOUNDARY;
2163 return alignment;
2164}
2165
2166
110f993e 2167static bool
fb80456a 2168xtensa_return_in_msb (const_tree valtype)
110f993e 2169{
2170 return (TARGET_BIG_ENDIAN
2171 && AGGREGATE_TYPE_P (valtype)
2172 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
2173}
2174
2175
4c834714 2176static void
2177xtensa_option_override (void)
f6b7ba2b 2178{
2179 int regno;
3754d046 2180 machine_mode mode;
f6b7ba2b 2181
2182 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2183 error ("boolean registers required for the floating-point option");
2184
c821cf9c 2185 /* Set up array giving whether a given register can hold a given mode. */
f6b7ba2b 2186 for (mode = VOIDmode;
2187 mode != MAX_MACHINE_MODE;
3754d046 2188 mode = (machine_mode) ((int) mode + 1))
f6b7ba2b 2189 {
2190 int size = GET_MODE_SIZE (mode);
8deb3959 2191 enum mode_class mclass = GET_MODE_CLASS (mode);
f6b7ba2b 2192
2193 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2194 {
2195 int temp;
2196
2197 if (ACC_REG_P (regno))
afb26b4b 2198 temp = (TARGET_MAC16
8deb3959 2199 && (mclass == MODE_INT) && (size <= UNITS_PER_WORD));
f6b7ba2b 2200 else if (GP_REG_P (regno))
2201 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
2202 else if (FP_REG_P (regno))
2203 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
2204 else if (BR_REG_P (regno))
2205 temp = (TARGET_BOOLEANS && (mode == CCmode));
2206 else
2207 temp = FALSE;
2208
2209 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
2210 }
2211 }
2212
2213 init_machine_status = xtensa_init_machine_status;
f6b7ba2b 2214
afb26b4b 2215 /* Check PIC settings. PIC is only supported when using L32R
2216 instructions, and some targets need to always use PIC. */
2217 if (flag_pic && TARGET_CONST16)
2218 error ("-f%s is not supported with CONST16 instructions",
2219 (flag_pic > 1 ? "PIC" : "pic"));
0cd4f29f 2220 else if (TARGET_FORCE_NO_PIC)
2221 flag_pic = 0;
afb26b4b 2222 else if (XTENSA_ALWAYS_PIC)
2223 {
2224 if (TARGET_CONST16)
2225 error ("PIC is required but not supported with CONST16 instructions");
2226 flag_pic = 1;
2227 }
2228 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
2229 if (flag_pic > 1)
f6b7ba2b 2230 flag_pic = 1;
0d1c6644 2231 if (flag_pic && !flag_pie)
2232 flag_shlib = 1;
1897b881 2233
 2234  /* Hot/cold partitioning does not work on this architecture because of
 2235     constant pools (the load instruction cannot necessarily reach that
 2236     far), so disable it.  */
2237 if (flag_reorder_blocks_and_partition)
2238 {
2239 flag_reorder_blocks_and_partition = 0;
2240 flag_reorder_blocks = 1;
2241 }
f6b7ba2b 2242}
2243
f6b7ba2b 2244/* A C compound statement to output to stdio stream STREAM the
2245 assembler syntax for an instruction operand X. X is an RTL
2246 expression.
2247
2248 CODE is a value that can be used to specify one of several ways
2249 of printing the operand. It is used when identical operands
2250 must be printed differently depending on the context. CODE
2251 comes from the '%' specification that was used to request
2252 printing of the operand. If the specification was just '%DIGIT'
2253 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
2254 is the ASCII code for LTR.
2255
2256 If X is a register, this macro should print the register's name.
2257 The names can be found in an array 'reg_names' whose type is
2258 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
2259
2260 When the machine description has a specification '%PUNCT' (a '%'
2261 followed by a punctuation character), this macro is called with
2262 a null pointer for X and the punctuation character for CODE.
2263
2264 'a', 'c', 'l', and 'n' are reserved.
de071186 2265
f6b7ba2b 2266 The Xtensa specific codes are:
2267
2268 'd' CONST_INT, print as signed decimal
2269 'x' CONST_INT, print as signed hexadecimal
2270 'K' CONST_INT, print number of bits in mask for EXTUI
2271 'R' CONST_INT, print (X & 0x1f)
2272 'L' CONST_INT, print ((32 - X) & 0x1f)
2273 'D' REG, print second register of double-word register operand
2274 'N' MEM, print address of next word following a memory operand
2275 'v' MEM, if memory reference is volatile, output a MEMW before it
afb26b4b 2276 't' any constant, add "@h" suffix for top 16 bits
2277 'b' any constant, add "@l" suffix for bottom 16 bits
f6b7ba2b 2278*/
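
/* Example (the template and operands below are invented for illustration
   and are not taken from xtensa.md): printing "extui\t%0, %1, %R2, %K3"
   with operand 2 = (const_int 35) and operand 3 = (const_int 0xff) emits
   "extui\ta2, a3, 3, 8" -- %R masks the shift count to 5 bits and %K
   prints the width of the low-order mask.  */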
2279
2280static void
fd63fcf8 2281printx (FILE *file, signed int val)
f6b7ba2b 2282{
fd63fcf8 2283 /* Print a hexadecimal value in a nice way. */
f6b7ba2b 2284 if ((val > -0xa) && (val < 0xa))
2285 fprintf (file, "%d", val);
2286 else if (val < 0)
2287 fprintf (file, "-0x%x", -val);
2288 else
2289 fprintf (file, "0x%x", val);
2290}
2291
2292
2293void
fd63fcf8 2294print_operand (FILE *file, rtx x, int letter)
f6b7ba2b 2295{
afb26b4b 2296 if (!x)
f6b7ba2b 2297 error ("PRINT_OPERAND null pointer");
2298
afb26b4b 2299 switch (letter)
f6b7ba2b 2300 {
afb26b4b 2301 case 'D':
2302 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2303 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
2304 else
2305 output_operand_lossage ("invalid %%D value");
2306 break;
f6b7ba2b 2307
afb26b4b 2308 case 'v':
2309 if (GET_CODE (x) == MEM)
2310 {
2311 /* For a volatile memory reference, emit a MEMW before the
2312 load or store. */
2c613040 2313 if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
afb26b4b 2314 fprintf (file, "memw\n\t");
2315 }
2316 else
2317 output_operand_lossage ("invalid %%v value");
2318 break;
f6b7ba2b 2319
afb26b4b 2320 case 'N':
2321 if (GET_CODE (x) == MEM
2322 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2323 {
2324 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
3c047fe9 2325 output_address (GET_MODE (x), XEXP (x, 0));
afb26b4b 2326 }
2327 else
2328 output_operand_lossage ("invalid %%N value");
2329 break;
f6b7ba2b 2330
afb26b4b 2331 case 'K':
2332 if (GET_CODE (x) == CONST_INT)
f6b7ba2b 2333 {
afb26b4b 2334 int num_bits = 0;
2335 unsigned val = INTVAL (x);
2336 while (val & 1)
2337 {
2338 num_bits += 1;
2339 val = val >> 1;
2340 }
2341 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2342 fatal_insn ("invalid mask", x);
f6b7ba2b 2343
afb26b4b 2344 fprintf (file, "%d", num_bits);
2345 }
2346 else
2347 output_operand_lossage ("invalid %%K value");
2348 break;
f6b7ba2b 2349
afb26b4b 2350 case 'L':
2351 if (GET_CODE (x) == CONST_INT)
2352 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2353 else
2354 output_operand_lossage ("invalid %%L value");
2355 break;
f6b7ba2b 2356
afb26b4b 2357 case 'R':
2358 if (GET_CODE (x) == CONST_INT)
2359 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2360 else
2361 output_operand_lossage ("invalid %%R value");
2362 break;
f6b7ba2b 2363
afb26b4b 2364 case 'x':
2365 if (GET_CODE (x) == CONST_INT)
2366 printx (file, INTVAL (x));
2367 else
2368 output_operand_lossage ("invalid %%x value");
2369 break;
f6b7ba2b 2370
afb26b4b 2371 case 'd':
2372 if (GET_CODE (x) == CONST_INT)
2373 fprintf (file, "%ld", INTVAL (x));
2374 else
2375 output_operand_lossage ("invalid %%d value");
2376 break;
f6b7ba2b 2377
afb26b4b 2378 case 't':
2379 case 'b':
2380 if (GET_CODE (x) == CONST_INT)
2381 {
2382 printx (file, INTVAL (x));
2383 fputs (letter == 't' ? "@h" : "@l", file);
2384 }
2385 else if (GET_CODE (x) == CONST_DOUBLE)
2386 {
afb26b4b 2387 if (GET_MODE (x) == SFmode)
2388 {
2389 long l;
945f7b03 2390 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
afb26b4b 2391 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2392 }
2393 else
2394 output_operand_lossage ("invalid %%t/%%b value");
2395 }
2396 else if (GET_CODE (x) == CONST)
2397 {
2398 /* X must be a symbolic constant on ELF. Write an expression
2399 suitable for 'const16' that sets the high or low 16 bits. */
2400 if (GET_CODE (XEXP (x, 0)) != PLUS
2401 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2402 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2403 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2404 output_operand_lossage ("invalid %%t/%%b value");
2405 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2406 fputs (letter == 't' ? "@h" : "@l", file);
2407 /* There must be a non-alphanumeric character between 'h' or 'l'
2408 and the number. The '-' is added by print_operand() already. */
2409 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2410 fputs ("+", file);
2411 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2412 }
2413 else
de071186 2414 {
afb26b4b 2415 output_addr_const (file, x);
2416 fputs (letter == 't' ? "@h" : "@l", file);
f6b7ba2b 2417 }
2418 break;
2419
a2acdfa1 2420 case 'y':
2421 if (GET_CODE (x) == CONST_DOUBLE &&
2422 GET_MODE (x) == SFmode)
2423 {
a2acdfa1 2424 long l;
945f7b03 2425 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
a2acdfa1 2426 fprintf (file, "0x%08lx", l);
2427 break;
2428 }
2429
2430 /* fall through */
2431
f6b7ba2b 2432 default:
afb26b4b 2433 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2434 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2435 else if (GET_CODE (x) == MEM)
3c047fe9 2436 output_address (GET_MODE (x), XEXP (x, 0));
afb26b4b 2437 else if (GET_CODE (x) == CONST_INT)
2438 fprintf (file, "%ld", INTVAL (x));
2439 else
2440 output_addr_const (file, x);
f6b7ba2b 2441 }
2442}
2443
2444
2445/* A C compound statement to output to stdio stream STREAM the
2446 assembler syntax for an instruction operand that is a memory
7811991d 2447 reference whose address is ADDR. ADDR is an RTL expression. */
f6b7ba2b 2448
2449void
fd63fcf8 2450print_operand_address (FILE *file, rtx addr)
f6b7ba2b 2451{
2452 if (!addr)
2453 error ("PRINT_OPERAND_ADDRESS, null pointer");
2454
2455 switch (GET_CODE (addr))
2456 {
2457 default:
2458 fatal_insn ("invalid address", addr);
2459 break;
2460
2461 case REG:
2462 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2463 break;
2464
2465 case PLUS:
2466 {
2467 rtx reg = (rtx)0;
2468 rtx offset = (rtx)0;
2469 rtx arg0 = XEXP (addr, 0);
2470 rtx arg1 = XEXP (addr, 1);
2471
2472 if (GET_CODE (arg0) == REG)
2473 {
2474 reg = arg0;
2475 offset = arg1;
2476 }
2477 else if (GET_CODE (arg1) == REG)
2478 {
2479 reg = arg1;
2480 offset = arg0;
2481 }
2482 else
2483 fatal_insn ("no register in address", addr);
2484
2485 if (CONSTANT_P (offset))
2486 {
2487 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2488 output_addr_const (file, offset);
2489 }
2490 else
2491 fatal_insn ("address offset not a constant", addr);
2492 }
2493 break;
2494
2495 case LABEL_REF:
2496 case SYMBOL_REF:
2497 case CONST_INT:
2498 case CONST:
2499 output_addr_const (file, addr);
2500 break;
2501 }
2502}
2503
77a69f9f 2504/* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
f6b7ba2b 2505
77a69f9f 2506static bool
771b6086 2507xtensa_output_addr_const_extra (FILE *fp, rtx x)
2508{
2509 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2510 {
2511 switch (XINT (x, 1))
2512 {
c656b8fd 2513 case UNSPEC_TPOFF:
2514 output_addr_const (fp, XVECEXP (x, 0, 0));
2515 fputs ("@TPOFF", fp);
2516 return true;
2517 case UNSPEC_DTPOFF:
2518 output_addr_const (fp, XVECEXP (x, 0, 0));
2519 fputs ("@DTPOFF", fp);
2520 return true;
771b6086 2521 case UNSPEC_PLT:
2522 if (flag_pic)
2523 {
2524 output_addr_const (fp, XVECEXP (x, 0, 0));
2525 fputs ("@PLT", fp);
2526 return true;
2527 }
2528 break;
2529 default:
2530 break;
2531 }
2532 }
2533 return false;
2534}
2535
2536
f6b7ba2b 2537void
3754d046 2538xtensa_output_literal (FILE *file, rtx x, machine_mode mode, int labelno)
f6b7ba2b 2539{
2540 long value_long[2];
f6b7ba2b 2541 int size;
c9876a47 2542 rtx first, second;
f6b7ba2b 2543
2544 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2545
2546 switch (GET_MODE_CLASS (mode))
2547 {
2548 case MODE_FLOAT:
cd3d4fe0 2549 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
f6b7ba2b 2550
f6b7ba2b 2551 switch (mode)
2552 {
2553 case SFmode:
945f7b03 2554 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x),
2555 value_long[0]);
205710bf 2556 if (HOST_BITS_PER_LONG > 32)
2557 value_long[0] &= 0xffffffff;
badfe841 2558 fprintf (file, "0x%08lx\n", value_long[0]);
f6b7ba2b 2559 break;
2560
2561 case DFmode:
945f7b03 2562 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x),
2563 value_long);
205710bf 2564 if (HOST_BITS_PER_LONG > 32)
2565 {
2566 value_long[0] &= 0xffffffff;
2567 value_long[1] &= 0xffffffff;
2568 }
badfe841 2569 fprintf (file, "0x%08lx, 0x%08lx\n",
2570 value_long[0], value_long[1]);
f6b7ba2b 2571 break;
2572
2573 default:
cd3d4fe0 2574 gcc_unreachable ();
f6b7ba2b 2575 }
2576
2577 break;
2578
2579 case MODE_INT:
2580 case MODE_PARTIAL_INT:
2581 size = GET_MODE_SIZE (mode);
cd3d4fe0 2582 switch (size)
f6b7ba2b 2583 {
cd3d4fe0 2584 case 4:
f6b7ba2b 2585 output_addr_const (file, x);
2586 fputs ("\n", file);
cd3d4fe0 2587 break;
2588
2589 case 8:
c9876a47 2590 split_double (x, &first, &second);
2591 output_addr_const (file, first);
f6b7ba2b 2592 fputs (", ", file);
c9876a47 2593 output_addr_const (file, second);
f6b7ba2b 2594 fputs ("\n", file);
cd3d4fe0 2595 break;
2596
2597 default:
2598 gcc_unreachable ();
f6b7ba2b 2599 }
f6b7ba2b 2600 break;
2601
2602 default:
cd3d4fe0 2603 gcc_unreachable ();
f6b7ba2b 2604 }
2605}
2606
b89c671b 2607static bool
 2608xtensa_call_save_reg (int regno)
2609{
2610 if (TARGET_WINDOWED_ABI)
2611 return false;
2612
2613 if (regno == A0_REG)
 2614    return crtl->profile || !crtl->is_leaf || crtl->calls_eh_return
 2615	   || df_regs_ever_live_p (regno);
2616
2617 if (crtl->calls_eh_return && regno >= 2 && regno < 4)
2618 return true;
2619
 2620  return !fixed_regs[regno] && !call_used_regs[regno]
 2621	 && df_regs_ever_live_p (regno);
2622}
f6b7ba2b 2623
2624/* Return the bytes needed to compute the frame pointer from the current
c821cf9c 2625 stack pointer. */
f6b7ba2b 2626
2627#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2628#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
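
/* A worked example (with this port's 128-bit STACK_BOUNDARY, STACK_BYTES
   is 16): the macro rounds a byte count up to the next 16-byte multiple,
   e.g. XTENSA_STACK_ALIGN (0) == 0, XTENSA_STACK_ALIGN (20) == 32 and
   XTENSA_STACK_ALIGN (32) == 32.  */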
2629
2630long
fd63fcf8 2631compute_frame_size (int size)
f6b7ba2b 2632{
b89c671b 2633 int regno;
2634
fd63fcf8 2635 /* Add space for the incoming static chain value. */
4ee9c684 2636 if (cfun->static_chain_decl != NULL)
f6b7ba2b 2637 size += (1 * UNITS_PER_WORD);
2638
b89c671b 2639 xtensa_callee_save_size = 0;
2640 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2641 {
 2642      if (xtensa_call_save_reg (regno))
2643 xtensa_callee_save_size += UNITS_PER_WORD;
2644 }
2645
f6b7ba2b 2646 xtensa_current_frame_size =
2647 XTENSA_STACK_ALIGN (size
b89c671b 2648 + xtensa_callee_save_size
abe32cce 2649 + crtl->outgoing_args_size
f6b7ba2b 2650 + (WINDOW_SIZE * UNITS_PER_WORD));
b89c671b 2651 xtensa_callee_save_size = XTENSA_STACK_ALIGN (xtensa_callee_save_size);
f6b7ba2b 2652 return xtensa_current_frame_size;
2653}
2654
2655
5a1c68c3 2656bool
fd63fcf8 2657xtensa_frame_pointer_required (void)
f6b7ba2b 2658{
2659 /* The code to expand builtin_frame_addr and builtin_return_addr
2660 currently uses the hard_frame_pointer instead of frame_pointer.
2661 This seems wrong but maybe it's necessary for other architectures.
c821cf9c 2662 This function is derived from the i386 code. */
f6b7ba2b 2663
2664 if (cfun->machine->accesses_prev_frame)
5a1c68c3 2665 return true;
f6b7ba2b 2666
5a1c68c3 2667 return false;
f6b7ba2b 2668}
2669
2670
57ffde16 2671/* Minimum frame: register save area (4 words) plus static chain (1 word),
 2672   with the total size rounded up to a multiple of 128 bits, i.e. 8 words.  */
2673#define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2674
afb26b4b 2675void
fd63fcf8 2676xtensa_expand_prologue (void)
afb26b4b 2677{
2678 HOST_WIDE_INT total_size;
b89c671b 2679 rtx_insn *insn = NULL;
bf735bc6 2680 rtx note_rtx;
2efea8c0 2681
b89c671b 2682
afb26b4b 2683 total_size = compute_frame_size (get_frame_size ());
2efea8c0 2684
38a4a274 2685 if (flag_stack_usage_info)
2686 current_function_static_stack_size = total_size;
2687
b89c671b 2688 if (TARGET_WINDOWED_ABI)
2689 {
2690 if (total_size < (1 << (12+3)))
2691 insn = emit_insn (gen_entry (GEN_INT (total_size)));
2692 else
2693 {
2694 /* Use a8 as a temporary since a0-a7 may be live. */
2695 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2696 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
2697 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2698 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2699 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
2700 }
2701 }
f6b7ba2b 2702 else
2703 {
b89c671b 2704 int regno;
2705 HOST_WIDE_INT offset = 0;
2706
2707 /* -128 is a limit of single addi instruction. */
2708 if (total_size > 0 && total_size <= 128)
2709 {
2710 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2711 GEN_INT (-total_size)));
2712 RTX_FRAME_RELATED_P (insn) = 1;
d1f9b275 2713 note_rtx = gen_rtx_SET (stack_pointer_rtx,
b89c671b 2714 plus_constant (Pmode, stack_pointer_rtx,
2715 -total_size));
2716 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2717 offset = total_size - UNITS_PER_WORD;
2718 }
2719 else if (xtensa_callee_save_size)
2720 {
 2721	      /* 1020 is the maximal s32i offset.  If the frame is bigger than
 2722	       * that, move sp to the end of the callee-saved register save area,
 2723	       * perform the saves, and then move sp to its final location.  */
2724 if (total_size > 1024)
2725 {
2726 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2727 GEN_INT (-xtensa_callee_save_size)));
2728 RTX_FRAME_RELATED_P (insn) = 1;
d1f9b275 2729 note_rtx = gen_rtx_SET (stack_pointer_rtx,
b89c671b 2730 plus_constant (Pmode, stack_pointer_rtx,
2731 -xtensa_callee_save_size));
2732 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2733 offset = xtensa_callee_save_size - UNITS_PER_WORD;
2734 }
2735 else
2736 {
2737 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2738 emit_move_insn (tmp_reg, GEN_INT (total_size));
2739 insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
2740 stack_pointer_rtx, tmp_reg));
2741 RTX_FRAME_RELATED_P (insn) = 1;
d1f9b275 2742 note_rtx = gen_rtx_SET (stack_pointer_rtx,
b89c671b 2743 plus_constant (Pmode, stack_pointer_rtx,
2744 -total_size));
2745 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2746 offset = total_size - UNITS_PER_WORD;
2747 }
2748 }
2749
2750 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2751 {
 2752	  if (xtensa_call_save_reg (regno))
2753 {
2754 rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
2755 rtx mem = gen_frame_mem (SImode, x);
2756 rtx reg = gen_rtx_REG (SImode, regno);
2757
2758 offset -= UNITS_PER_WORD;
2759 insn = emit_move_insn (mem, reg);
2760 RTX_FRAME_RELATED_P (insn) = 1;
2761 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
d1f9b275 2762 gen_rtx_SET (mem, reg));
b89c671b 2763 }
2764 }
2765 if (total_size > 1024)
2766 {
2767 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2768 emit_move_insn (tmp_reg, GEN_INT (total_size -
2769 xtensa_callee_save_size));
2770 insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
2771 stack_pointer_rtx, tmp_reg));
2772 RTX_FRAME_RELATED_P (insn) = 1;
d1f9b275 2773 note_rtx = gen_rtx_SET (stack_pointer_rtx,
b89c671b 2774 plus_constant (Pmode, stack_pointer_rtx,
2775 xtensa_callee_save_size -
2776 total_size));
2777 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2778 }
f6b7ba2b 2779 }
2780
afb26b4b 2781 if (frame_pointer_needed)
f6b7ba2b 2782 {
e060c9df 2783 if (cfun->machine->set_frame_ptr_insn)
f6b7ba2b 2784 {
bf735bc6 2785 rtx_insn *first;
f6b7ba2b 2786
e060c9df 2787 push_topmost_sequence ();
2788 first = get_insns ();
2789 pop_topmost_sequence ();
f6b7ba2b 2790
afb26b4b 2791 /* For all instructions prior to set_frame_ptr_insn, replace
2792 hard_frame_pointer references with stack_pointer. */
2793 for (insn = first;
e060c9df 2794 insn != cfun->machine->set_frame_ptr_insn;
afb26b4b 2795 insn = NEXT_INSN (insn))
2796 {
2797 if (INSN_P (insn))
c3e2d63e 2798 {
2799 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2800 hard_frame_pointer_rtx,
2801 stack_pointer_rtx);
2802 df_insn_rescan (insn);
2803 }
afb26b4b 2804 }
2805 }
2806 else
b89c671b 2807 {
2808 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2809 stack_pointer_rtx));
2810 if (!TARGET_WINDOWED_ABI)
2811 {
d1f9b275 2812 note_rtx = gen_rtx_SET (hard_frame_pointer_rtx,
b89c671b 2813 stack_pointer_rtx);
2814 RTX_FRAME_RELATED_P (insn) = 1;
2815 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2816 }
2817 }
2818 }
f6b7ba2b 2819
b89c671b 2820 if (TARGET_WINDOWED_ABI)
2821 {
2822 /* Create a note to describe the CFA. Because this is only used to set
2823 DW_AT_frame_base for debug info, don't bother tracking changes through
2824 each instruction in the prologue. It just takes up space. */
d1f9b275 2825 note_rtx = gen_rtx_SET ((frame_pointer_needed
2826 ? hard_frame_pointer_rtx
2827 : stack_pointer_rtx),
b89c671b 2828 plus_constant (Pmode, stack_pointer_rtx,
2829 -total_size));
2830 RTX_FRAME_RELATED_P (insn) = 1;
2831 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2832 }
2833}
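
/* Rough sketch of the call0-ABI frame laid out above (illustrative only;
   the windowed ABI uses the ENTRY instruction instead):

	incoming sp ->	+--------------------------------+
			| callee-saved registers         |  a0 (if saved) occupies
			| (xtensa_callee_save_size bytes)|  the highest slot
			+--------------------------------+
			| locals, spills and outgoing    |
			| argument area                  |
	new sp ->	+--------------------------------+  total_size bytes below
							     the incoming sp  */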
f6b7ba2b 2834
2835void
b89c671b 2836xtensa_expand_epilogue (void)
f6b7ba2b 2837{
b89c671b 2838 if (!TARGET_WINDOWED_ABI)
2839 {
2840 int regno;
2841 HOST_WIDE_INT offset;
2842
2843 if (xtensa_current_frame_size > (frame_pointer_needed ? 127 : 1024))
2844 {
2845 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2846 emit_move_insn (tmp_reg, GEN_INT (xtensa_current_frame_size -
2847 xtensa_callee_save_size));
2848 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_needed ?
2849 hard_frame_pointer_rtx : stack_pointer_rtx,
2850 tmp_reg));
2851 offset = xtensa_callee_save_size - UNITS_PER_WORD;
2852 }
2853 else
2854 {
2855 if (frame_pointer_needed)
2856 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
2857 offset = xtensa_current_frame_size - UNITS_PER_WORD;
2858 }
2859
 2860      /* Prevent the eh_return update of the saved a0 slot from being
 2861	 reordered with the load of a0 back from the save area.  */
2862 if (crtl->calls_eh_return)
2863 emit_insn (gen_blockage ());
2864
2865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2866 {
 2867	  if (xtensa_call_save_reg (regno))
2868 {
2869 rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
2870
2871 offset -= UNITS_PER_WORD;
2872 emit_move_insn (gen_rtx_REG (SImode, regno),
2873 gen_frame_mem (SImode, x));
2874 }
2875 }
2876
2877 if (xtensa_current_frame_size > 0)
2878 {
 2879	  if (frame_pointer_needed /* always reachable with addi */
 2880	      || xtensa_current_frame_size > 1024
 2881	      || xtensa_current_frame_size <= 127)
2882 {
2883 if (xtensa_current_frame_size <= 127)
2884 offset = xtensa_current_frame_size;
2885 else
2886 offset = xtensa_callee_save_size;
2887
2888 emit_insn (gen_addsi3 (stack_pointer_rtx,
2889 stack_pointer_rtx,
2890 GEN_INT (offset)));
2891 }
2892 else
2893 {
2894 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2895 emit_move_insn (tmp_reg, GEN_INT (xtensa_current_frame_size));
2896 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2897 tmp_reg));
2898 }
2899 }
2900
2901 if (crtl->calls_eh_return)
2902 emit_insn (gen_add3_insn (stack_pointer_rtx,
2903 stack_pointer_rtx,
2904 EH_RETURN_STACKADJ_RTX));
2905 }
f6b7ba2b 2906 xtensa_current_frame_size = 0;
b89c671b 2907 xtensa_callee_save_size = 0;
2908 emit_jump_insn (gen_return ());
f6b7ba2b 2909}
2910
b89c671b 2911void
2912xtensa_set_return_address (rtx address, rtx scratch)
2913{
2914 HOST_WIDE_INT total_size = compute_frame_size (get_frame_size ());
2915 rtx frame = frame_pointer_needed ?
2916 hard_frame_pointer_rtx : stack_pointer_rtx;
2917 rtx a0_addr = plus_constant (Pmode, frame,
2918 total_size - UNITS_PER_WORD);
d1f9b275 2919 rtx note = gen_rtx_SET (gen_frame_mem (SImode, a0_addr),
b89c671b 2920 gen_rtx_REG (SImode, A0_REG));
2921 rtx insn;
2922
2923 if (total_size > 1024) {
2924 emit_move_insn (scratch, GEN_INT (total_size - UNITS_PER_WORD));
2925 emit_insn (gen_addsi3 (scratch, frame, scratch));
2926 a0_addr = scratch;
2927 }
2928
2929 insn = emit_move_insn (gen_frame_mem (SImode, a0_addr), address);
2930 RTX_FRAME_RELATED_P (insn) = 1;
2931 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
2932}
f6b7ba2b 2933
43326cf7 2934rtx
fd63fcf8 2935xtensa_return_addr (int count, rtx frame)
43326cf7 2936{
57ffde16 2937 rtx result, retaddr, curaddr, label;
43326cf7 2938
b89c671b 2939 if (!TARGET_WINDOWED_ABI)
2940 {
2941 if (count != 0)
2942 return const0_rtx;
2943
2944 return get_hard_reg_initial_val (Pmode, A0_REG);
2945 }
2946
43326cf7 2947 if (count == -1)
afb26b4b 2948 retaddr = gen_rtx_REG (Pmode, A0_REG);
43326cf7 2949 else
2950 {
29c05e22 2951 rtx addr = plus_constant (Pmode, frame, -4 * UNITS_PER_WORD);
43326cf7 2952 addr = memory_address (Pmode, addr);
2953 retaddr = gen_reg_rtx (Pmode);
2954 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2955 }
2956
2957 /* The 2 most-significant bits of the return address on Xtensa hold
2958 the register window size. To get the real return address, these
57ffde16 2959 bits must be replaced with the high bits from some address in the
2960 code. */
2961
2962 /* Get the 2 high bits of a local label in the code. */
2963 curaddr = gen_reg_rtx (Pmode);
2964 label = gen_label_rtx ();
2965 emit_label (label);
2966 LABEL_PRESERVE_P (label) = 1;
2967 emit_move_insn (curaddr, gen_rtx_LABEL_REF (Pmode, label));
2968 emit_insn (gen_lshrsi3 (curaddr, curaddr, GEN_INT (30)));
2969 emit_insn (gen_ashlsi3 (curaddr, curaddr, GEN_INT (30)));
2970
2971 /* Clear the 2 high bits of the return address. */
43326cf7 2972 result = gen_reg_rtx (Pmode);
57ffde16 2973 emit_insn (gen_ashlsi3 (result, retaddr, GEN_INT (2)));
2974 emit_insn (gen_lshrsi3 (result, result, GEN_INT (2)));
2975
2976 /* Combine them to get the result. */
2977 emit_insn (gen_iorsi3 (result, result, curaddr));
43326cf7 2978 return result;
2979}
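
/* The computation above, expressed as plain C for illustration only (the
   function emits RTL rather than executing this): the windowed return
   address keeps bits 29..0, and the top two bits come from an address in
   the currently executing code.  */
#if 0
static unsigned int
xtensa_windowed_ra_example (unsigned int retaddr, unsigned int code_addr)
{
  /* Clear the 2 high (window size) bits of the return address and
     substitute the high bits of a nearby code address.  */
  return (retaddr & 0x3fffffffu) | (code_addr & 0xc0000000u);
}
#endif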
2980
f91ed644 2981/* Disable the use of word-sized or smaller complex modes for structures,
2982 and for function arguments in particular, where they cause problems with
2983 register a7. The xtensa_copy_incoming_a7 function assumes that there is
2984 a single reference to an argument in a7, but with small complex modes the
2985 real and imaginary components may be extracted separately, leading to two
2986 uses of the register, only one of which would be replaced. */
2987
2988static bool
3754d046 2989xtensa_member_type_forces_blk (const_tree, machine_mode mode)
f91ed644 2990{
2991 return mode == CQImode || mode == CHImode;
2992}
43326cf7 2993
f6b7ba2b 2994/* Create the va_list data type.
9276fdff 2995
2996 This structure is set up by __builtin_saveregs. The __va_reg field
2997 points to a stack-allocated region holding the contents of the
2998 incoming argument registers. The __va_ndx field is an index
2999 initialized to the position of the first unnamed (variable)
3000 argument. This same index is also used to address the arguments
3001 passed in memory. Thus, the __va_stk field is initialized to point
3002 to the position of the first argument in memory offset to account
3003 for the arguments passed in registers and to account for the size
3004 of the argument registers not being 16-byte aligned. E.G., there
 3005   of the argument registers not being 16-byte aligned.  E.g., there
3006 for the first stack argument to have the maximal alignment of 16
3007 bytes, so we offset the __va_stk address by 32 bytes so that
3008 __va_stk[32] references the first argument on the stack. */
f6b7ba2b 3009
2e15d750 3010static tree
3011xtensa_build_builtin_va_list (void)
f6b7ba2b 3012{
049d6666 3013 tree f_stk, f_reg, f_ndx, record, type_decl;
f6b7ba2b 3014
049d6666 3015 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
54e46243 3016 type_decl = build_decl (BUILTINS_LOCATION,
3017 TYPE_DECL, get_identifier ("__va_list_tag"), record);
f6b7ba2b 3018
54e46243 3019 f_stk = build_decl (BUILTINS_LOCATION,
3020 FIELD_DECL, get_identifier ("__va_stk"),
f6b7ba2b 3021 ptr_type_node);
54e46243 3022 f_reg = build_decl (BUILTINS_LOCATION,
3023 FIELD_DECL, get_identifier ("__va_reg"),
f6b7ba2b 3024 ptr_type_node);
54e46243 3025 f_ndx = build_decl (BUILTINS_LOCATION,
3026 FIELD_DECL, get_identifier ("__va_ndx"),
f6b7ba2b 3027 integer_type_node);
3028
3029 DECL_FIELD_CONTEXT (f_stk) = record;
3030 DECL_FIELD_CONTEXT (f_reg) = record;
3031 DECL_FIELD_CONTEXT (f_ndx) = record;
3032
bc907808 3033 TYPE_STUB_DECL (record) = type_decl;
049d6666 3034 TYPE_NAME (record) = type_decl;
f6b7ba2b 3035 TYPE_FIELDS (record) = f_stk;
1767a056 3036 DECL_CHAIN (f_stk) = f_reg;
3037 DECL_CHAIN (f_reg) = f_ndx;
f6b7ba2b 3038
3039 layout_type (record);
3040 return record;
3041}
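
/* For illustration only, the record built above corresponds roughly to
   the following C declaration (the real type is constructed as a tree,
   not declared in C).  */
#if 0
typedef struct
{
  void *__va_stk;	/* stack argument area, biased by -32 bytes	  */
  void *__va_reg;	/* save area for the incoming argument registers */
  int __va_ndx;		/* byte index of the next anonymous argument	  */
} __va_list_tag;
#endif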
3042
3043
3044/* Save the incoming argument registers on the stack. Returns the
c821cf9c 3045 address of the saved registers. */
f6b7ba2b 3046
4fe4af61 3047static rtx
fd63fcf8 3048xtensa_builtin_saveregs (void)
f6b7ba2b 3049{
d8002fbc 3050 rtx gp_regs;
abe32cce 3051 int arg_words = crtl->args.info.arg_words;
f6b7ba2b 3052 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
f6b7ba2b 3053
e060c9df 3054 if (gp_left <= 0)
f6b7ba2b 3055 return const0_rtx;
3056
dafa59bd 3057 /* Allocate the general-purpose register space. */
f6b7ba2b 3058 gp_regs = assign_stack_local
3059 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
049d6666 3060 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
f6b7ba2b 3061
3062 /* Now store the incoming registers. */
b89c671b 3063 cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
e060c9df 3064 cfun->machine->vararg_a7 = true;
d8002fbc 3065 move_block_from_reg (GP_ARG_FIRST + arg_words,
3066 adjust_address (gp_regs, BLKmode,
3067 arg_words * UNITS_PER_WORD),
3068 gp_left);
b89c671b 3069 if (cfun->machine->vararg_a7_copy != 0)
3070 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
f6b7ba2b 3071
3072 return XEXP (gp_regs, 0);
3073}
3074
3075
3076/* Implement `va_start' for varargs and stdarg. We look at the
c821cf9c 3077 current function to fill in an initial va_list. */
f6b7ba2b 3078
8a58ed0a 3079static void
fd63fcf8 3080xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
f6b7ba2b 3081{
3082 tree f_stk, stk;
3083 tree f_reg, reg;
3084 tree f_ndx, ndx;
3085 tree t, u;
3086 int arg_words;
3087
abe32cce 3088 arg_words = crtl->args.info.arg_words;
f6b7ba2b 3089
3090 f_stk = TYPE_FIELDS (va_list_type_node);
1767a056 3091 f_reg = DECL_CHAIN (f_stk);
3092 f_ndx = DECL_CHAIN (f_reg);
f6b7ba2b 3093
ed03eadb 3094 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
b4b5d826 3095 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
3096 f_reg, NULL_TREE);
3097 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
3098 f_ndx, NULL_TREE);
f6b7ba2b 3099
3100 /* Call __builtin_saveregs; save the result in __va_reg */
d8002fbc 3101 u = make_tree (sizetype, expand_builtin_saveregs ());
3102 u = fold_convert (ptr_type_node, u);
75a70cf9 3103 t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
f6b7ba2b 3104 TREE_SIDE_EFFECTS (t) = 1;
3105 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3106
9276fdff 3107 /* Set the __va_stk member to ($arg_ptr - 32). */
f6b7ba2b 3108 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2cc66f2a 3109 u = fold_build_pointer_plus_hwi (u, -32);
75a70cf9 3110 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
f6b7ba2b 3111 TREE_SIDE_EFFECTS (t) = 1;
3112 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3113
9276fdff 3114 /* Set the __va_ndx member. If the first variable argument is on
3115 the stack, adjust __va_ndx by 2 words to account for the extra
3116 alignment offset for __va_stk. */
3117 if (arg_words >= MAX_ARGS_IN_REGISTERS)
3118 arg_words += 2;
75a70cf9 3119 t = build2 (MODIFY_EXPR, integer_type_node, ndx,
1bdc4996 3120 build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
f6b7ba2b 3121 TREE_SIDE_EFFECTS (t) = 1;
3122 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3123}
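
/* Worked example: if the named arguments occupy two words, __va_ndx is
   initialized to 8 and the first anonymous argument is fetched from
   __va_reg + 8 (the spilled a4).  If the named arguments already fill
   all six registers, the extra two words make __va_ndx start at 32, and
   because __va_stk was biased by -32 above, __va_stk + 32 is exactly the
   first argument passed on the stack.  */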
3124
3125
3126/* Implement `va_arg'. */
3127
ae79166b 3128static tree
75a70cf9 3129xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
3130 gimple_seq *post_p ATTRIBUTE_UNUSED)
f6b7ba2b 3131{
3132 tree f_stk, stk;
3133 tree f_reg, reg;
3134 tree f_ndx, ndx;
ae79166b 3135 tree type_size, array, orig_ndx, addr, size, va_size, t;
3136 tree lab_false, lab_over, lab_false2;
2cd7bb84 3137 bool indirect;
3138
3139 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
3140 if (indirect)
3141 type = build_pointer_type (type);
f6b7ba2b 3142
abeadffe 3143 /* Handle complex values as separate real and imaginary parts. */
3144 if (TREE_CODE (type) == COMPLEX_TYPE)
3145 {
ae79166b 3146 tree real_part, imag_part;
abeadffe 3147
ae79166b 3148 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
3149 pre_p, NULL);
3150 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
abeadffe 3151
b4b5d826 3152 imag_part = xtensa_gimplify_va_arg_expr (unshare_expr (valist),
3153 TREE_TYPE (type),
ae79166b 3154 pre_p, NULL);
3155 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
abeadffe 3156
ed03eadb 3157 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
abeadffe 3158 }
3159
f6b7ba2b 3160 f_stk = TYPE_FIELDS (va_list_type_node);
1767a056 3161 f_reg = DECL_CHAIN (f_stk);
3162 f_ndx = DECL_CHAIN (f_reg);
f6b7ba2b 3163
b4b5d826 3164 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist,
3165 f_stk, NULL_TREE);
3166 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
3167 f_reg, NULL_TREE);
3168 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
3169 f_ndx, NULL_TREE);
f6b7ba2b 3170
ae79166b 3171 type_size = size_in_bytes (type);
3172 va_size = round_up (type_size, UNITS_PER_WORD);
3173 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
dd52a190 3174
f6b7ba2b 3175
9276fdff 3176 /* First align __va_ndx if necessary for this arg:
f6b7ba2b 3177
ae79166b 3178 orig_ndx = (AP).__va_ndx;
9276fdff 3179 if (__alignof__ (TYPE) > 4 )
ae79166b 3180 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
9276fdff 3181 & -__alignof__ (TYPE)); */
f6b7ba2b 3182
ae79166b 3183 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
3184
f6b7ba2b 3185 if (TYPE_ALIGN (type) > BITS_PER_WORD)
3186 {
81c44390 3187 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
ae79166b 3188
b4b5d826 3189 t = build2 (PLUS_EXPR, integer_type_node, unshare_expr (orig_ndx),
1bdc4996 3190 build_int_cst (integer_type_node, align - 1));
3191 t = build2 (BIT_AND_EXPR, integer_type_node, t,
3192 build_int_cst (integer_type_node, -align));
b4b5d826 3193 gimplify_assign (unshare_expr (orig_ndx), t, pre_p);
f6b7ba2b 3194 }
3195
3196
3197 /* Increment __va_ndx to point past the argument:
3198
ae79166b 3199 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
f6b7ba2b 3200
ae79166b 3201 t = fold_convert (integer_type_node, va_size);
ed03eadb 3202 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
b4b5d826 3203 gimplify_assign (unshare_expr (ndx), t, pre_p);
f6b7ba2b 3204
3205
3206 /* Check if the argument is in registers:
3207
89d4bc5e 3208 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
0336f0f0 3209 && !must_pass_in_stack (type))
fd63fcf8 3210 __array = (AP).__va_reg; */
f6b7ba2b 3211
98cfaaca 3212 array = create_tmp_var (ptr_type_node);
f6b7ba2b 3213
ae79166b 3214 lab_over = NULL;
0336f0f0 3215 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
89d4bc5e 3216 {
54e46243 3217 lab_false = create_artificial_label (UNKNOWN_LOCATION);
3218 lab_over = create_artificial_label (UNKNOWN_LOCATION);
ae79166b 3219
b4b5d826 3220 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (ndx),
1bdc4996 3221 build_int_cst (integer_type_node,
3222 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
ed03eadb 3223 t = build3 (COND_EXPR, void_type_node, t,
3224 build1 (GOTO_EXPR, void_type_node, lab_false),
3225 NULL_TREE);
ae79166b 3226 gimplify_and_add (t, pre_p);
3227
b4b5d826 3228 gimplify_assign (unshare_expr (array), reg, pre_p);
ae79166b 3229
ed03eadb 3230 t = build1 (GOTO_EXPR, void_type_node, lab_over);
ae79166b 3231 gimplify_and_add (t, pre_p);
3232
ed03eadb 3233 t = build1 (LABEL_EXPR, void_type_node, lab_false);
ae79166b 3234 gimplify_and_add (t, pre_p);
89d4bc5e 3235 }
f6b7ba2b 3236
ae79166b 3237
f6b7ba2b 3238 /* ...otherwise, the argument is on the stack (never split between
3239 registers and the stack -- change __va_ndx if necessary):
3240
3241 else
3242 {
9276fdff 3243 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
3244 (AP).__va_ndx = 32 + __va_size (TYPE);
f6b7ba2b 3245 __array = (AP).__va_stk;
fd63fcf8 3246 } */
f6b7ba2b 3247
54e46243 3248 lab_false2 = create_artificial_label (UNKNOWN_LOCATION);
f6b7ba2b 3249
b4b5d826 3250 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (orig_ndx),
1bdc4996 3251 build_int_cst (integer_type_node,
3252 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
ed03eadb 3253 t = build3 (COND_EXPR, void_type_node, t,
3254 build1 (GOTO_EXPR, void_type_node, lab_false2),
3255 NULL_TREE);
ae79166b 3256 gimplify_and_add (t, pre_p);
f6b7ba2b 3257
b4b5d826 3258 t = size_binop (PLUS_EXPR, unshare_expr (va_size), size_int (32));
ae79166b 3259 t = fold_convert (integer_type_node, t);
b4b5d826 3260 gimplify_assign (unshare_expr (ndx), t, pre_p);
f6b7ba2b 3261
ed03eadb 3262 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
ae79166b 3263 gimplify_and_add (t, pre_p);
f6b7ba2b 3264
75a70cf9 3265 gimplify_assign (array, stk, pre_p);
ae79166b 3266
3267 if (lab_over)
3268 {
ed03eadb 3269 t = build1 (LABEL_EXPR, void_type_node, lab_over);
ae79166b 3270 gimplify_and_add (t, pre_p);
3271 }
dd52a190 3272
f6b7ba2b 3273
3274 /* Given the base array pointer (__array) and index to the subsequent
3275 argument (__va_ndx), find the address:
3276
dd52a190 3277 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
3278 ? sizeof (TYPE)
3279 : __va_size (TYPE))
f6b7ba2b 3280
3281 The results are endian-dependent because values smaller than one word
fd63fcf8 3282 are aligned differently. */
f6b7ba2b 3283
de071186 3284
ea2981b9 3285 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
dd52a190 3286 {
b4b5d826 3287 t = fold_build2 (GE_EXPR, boolean_type_node, unshare_expr (type_size),
d8002fbc 3288 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
b4b5d826 3289 t = fold_build3 (COND_EXPR, sizetype, t, unshare_expr (va_size),
3290 unshare_expr (type_size));
ae79166b 3291 size = t;
dd52a190 3292 }
ae79166b 3293 else
b4b5d826 3294 size = unshare_expr (va_size);
ae79166b 3295
b4b5d826 3296 t = fold_convert (sizetype, unshare_expr (ndx));
1bdc4996 3297 t = build2 (MINUS_EXPR, sizetype, t, size);
2cc66f2a 3298 addr = fold_build_pointer_plus (unshare_expr (array), t);
f6b7ba2b 3299
ae79166b 3300 addr = fold_convert (build_pointer_type (type), addr);
2cd7bb84 3301 if (indirect)
063f5fdd 3302 addr = build_va_arg_indirect_ref (addr);
3303 return build_va_arg_indirect_ref (addr);
f6b7ba2b 3304}
3305
3306
8e8c0c04 3307/* Builtins. */
3308
3309enum xtensa_builtin
3310{
3311 XTENSA_BUILTIN_UMULSIDI3,
3312 XTENSA_BUILTIN_max
3313};
3314
3315
3316static void
3317xtensa_init_builtins (void)
3318{
c656b8fd 3319 tree ftype, decl;
8e8c0c04 3320
3321 ftype = build_function_type_list (unsigned_intDI_type_node,
3322 unsigned_intSI_type_node,
3323 unsigned_intSI_type_node, NULL_TREE);
3324
c656b8fd 3325 decl = add_builtin_function ("__builtin_umulsidi3", ftype,
3326 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
3327 "__umulsidi3", NULL_TREE);
3328 TREE_NOTHROW (decl) = 1;
3329 TREE_READONLY (decl) = 1;
8e8c0c04 3330}
3331
3332
3333static tree
97d67146 3334xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
3335 bool ignore ATTRIBUTE_UNUSED)
8e8c0c04 3336{
3337 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3338 tree arg0, arg1;
3339
c656b8fd 3340 switch (fcode)
8e8c0c04 3341 {
c656b8fd 3342 case XTENSA_BUILTIN_UMULSIDI3:
97d67146 3343 arg0 = args[0];
3344 arg1 = args[1];
8e8c0c04 3345 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3346 || TARGET_MUL32_HIGH)
3347 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
3348 fold_convert (unsigned_intDI_type_node, arg0),
3349 fold_convert (unsigned_intDI_type_node, arg1));
c656b8fd 3350 break;
3351
c656b8fd 3352 default:
3353 internal_error ("bad builtin code");
3354 break;
8e8c0c04 3355 }
3356
8e8c0c04 3357 return NULL;
3358}
3359
3360
3361static rtx
3362xtensa_expand_builtin (tree exp, rtx target,
3363 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 3364 machine_mode mode ATTRIBUTE_UNUSED,
8e8c0c04 3365 int ignore)
3366{
d4c45216 3367 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8e8c0c04 3368 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
c656b8fd 3369
3370 switch (fcode)
3371 {
3372 case XTENSA_BUILTIN_UMULSIDI3:
3373 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
3374 __umulsidi3 function when the Xtensa configuration can directly
3375 implement it. If not, just call the function. */
3376 return expand_call (exp, target, ignore);
8e8c0c04 3377
c656b8fd 3378 default:
3379 internal_error ("bad builtin code");
3380 }
8e8c0c04 3381 return NULL_RTX;
3382}
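
/* Typical use of the builtin registered above, from user code
   (illustrative):

     unsigned long long
     umul32x32 (unsigned int a, unsigned int b)
     {
       return __builtin_umulsidi3 (a, b);
     }

   When the configuration has MUL32_HIGH (or both operands are constant)
   the call folds into a plain widening multiply; otherwise it expands
   into a call to the __umulsidi3 library function.  */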
3383
d7198e1f 3384/* Worker function for TARGET_PREFERRED_RELOAD_CLASS. */
8e8c0c04 3385
d7198e1f 3386static reg_class_t
3387xtensa_preferred_reload_class (rtx x, reg_class_t rclass)
fc12fa10 3388{
d7198e1f 3389 if (CONSTANT_P (x) && CONST_DOUBLE_P (x))
fc12fa10 3390 return NO_REGS;
3391
a8332086 3392 /* Don't use the stack pointer or hard frame pointer for reloads!
3393 The hard frame pointer would normally be OK except that it may
3394 briefly hold an incoming argument in the prologue, and reload
3395 won't know that it is live because the hard frame pointer is
3396 treated specially. */
3397
8deb3959 3398 if (rclass == AR_REGS || rclass == GR_REGS)
a8332086 3399 return RL_REGS;
fc12fa10 3400
8deb3959 3401 return rclass;
fc12fa10 3402}
3403
d7198e1f 3404/* Worker function for TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
3405
3406static reg_class_t
3407xtensa_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
3408 reg_class_t rclass)
3409{
3410 /* Don't use the stack pointer or hard frame pointer for reloads!
3411 The hard frame pointer would normally be OK except that it may
3412 briefly hold an incoming argument in the prologue, and reload
3413 won't know that it is live because the hard frame pointer is
3414 treated specially. */
3415
3416 if (rclass == AR_REGS || rclass == GR_REGS)
3417 return RL_REGS;
3418
3419 return rclass;
3420}
3421
3422/* Worker function for TARGET_SECONDARY_RELOAD. */
fc12fa10 3423
d7198e1f 3424static reg_class_t
964229b7 3425xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
3754d046 3426 machine_mode mode, secondary_reload_info *sri)
f6b7ba2b 3427{
3428 int regno;
3429
e0488d87 3430 if (in_p && constantpool_mem_p (x))
f6b7ba2b 3431 {
e0488d87 3432 if (rclass == FP_REGS)
a8332086 3433 return RL_REGS;
e0488d87 3434
3435 if (mode == QImode)
3436 sri->icode = CODE_FOR_reloadqi_literal;
3437 else if (mode == HImode)
3438 sri->icode = CODE_FOR_reloadhi_literal;
f6b7ba2b 3439 }
3440
e0488d87 3441 regno = xt_true_regnum (x);
f6b7ba2b 3442 if (ACC_REG_P (regno))
8deb3959 3443 return ((rclass == GR_REGS || rclass == RL_REGS) ? NO_REGS : RL_REGS);
3444 if (rclass == ACC_REG)
a8332086 3445 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
f6b7ba2b 3446
3447 return NO_REGS;
3448}
3449
3450
3451void
fd63fcf8 3452order_regs_for_local_alloc (void)
f6b7ba2b 3453{
3454 if (!leaf_function_p ())
3455 {
b89c671b 3456 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
3457 REG_ALLOC_ORDER;
3458 static const int reg_nonleaf_alloc_order_call0[FIRST_PSEUDO_REGISTER] =
3459 {
3460 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 12, 13, 14, 15,
3461 18,
3462 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
3463 0, 1, 16, 17,
3464 35,
3465 };
3466
3467 memcpy (reg_alloc_order, TARGET_WINDOWED_ABI ?
3468 reg_nonleaf_alloc_order : reg_nonleaf_alloc_order_call0,
f6b7ba2b 3469 FIRST_PSEUDO_REGISTER * sizeof (int));
3470 }
3471 else
3472 {
3473 int i, num_arg_regs;
3474 int nxt = 0;
3475
dafa59bd 3476 /* Use the AR registers in increasing order (skipping a0 and a1)
3477 but save the incoming argument registers for a last resort. */
abe32cce 3478 num_arg_regs = crtl->args.info.arg_words;
f6b7ba2b 3479 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
3480 num_arg_regs = MAX_ARGS_IN_REGISTERS;
3481 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
3482 reg_alloc_order[nxt++] = i + num_arg_regs;
3483 for (i = 0; i < num_arg_regs; i++)
3484 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
3485
dafa59bd 3486 /* List the coprocessor registers in order. */
bef09eef 3487 for (i = 0; i < BR_REG_NUM; i++)
3488 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
3489
dafa59bd 3490 /* List the FP registers in order for now. */
f6b7ba2b 3491 for (i = 0; i < 16; i++)
3492 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
3493
c821cf9c 3494 /* GCC requires that we list *all* the registers.... */
f6b7ba2b 3495 reg_alloc_order[nxt++] = 0; /* a0 = return address */
3496 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
3497 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
3498 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
3499
f6b7ba2b 3500 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
3501 }
3502}
3503
3504
5f4442bc 3505/* Some Xtensa targets support multiple bss sections. If the section
3506 name ends with ".bss", add SECTION_BSS to the flags. */
3507
3508static unsigned int
fd63fcf8 3509xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
5f4442bc 3510{
3511 unsigned int flags = default_section_type_flags (decl, name, reloc);
3512 const char *suffix;
3513
3514 suffix = strrchr (name, '.');
3515 if (suffix && strcmp (suffix, ".bss") == 0)
3516 {
3517 if (!decl || (TREE_CODE (decl) == VAR_DECL
3518 && DECL_INITIAL (decl) == NULL_TREE))
3519 flags |= SECTION_BSS; /* @nobits */
3520 else
c3ceba8e 3521 warning (0, "only uninitialized variables can be placed in a "
5f4442bc 3522 ".bss section");
3523 }
3524
3525 return flags;
3526}
3527
3528
bbfbe351 3529/* The literal pool stays with the function. */
3530
2f14b1f9 3531static section *
3754d046 3532xtensa_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
fd63fcf8 3533 rtx x ATTRIBUTE_UNUSED,
3534 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
bbfbe351 3535{
2f14b1f9 3536 return function_section (current_function_decl);
bbfbe351 3537}
7811991d 3538
156d021f 3539/* Worker function for TARGET_REGISTER_MOVE_COST. */
3540
3541static int
3754d046 3542xtensa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
156d021f 3543 reg_class_t from, reg_class_t to)
3544{
3545 if (from == to && from != BR_REGS && to != BR_REGS)
3546 return 2;
3547 else if (reg_class_subset_p (from, AR_REGS)
3548 && reg_class_subset_p (to, AR_REGS))
3549 return 2;
3550 else if (reg_class_subset_p (from, AR_REGS) && to == ACC_REG)
3551 return 3;
3552 else if (from == ACC_REG && reg_class_subset_p (to, AR_REGS))
3553 return 3;
3554 else
3555 return 10;
3556}
3557
3558/* Worker function for TARGET_MEMORY_MOVE_COST. */
3559
3560static int
3754d046 3561xtensa_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
156d021f 3562 reg_class_t rclass ATTRIBUTE_UNUSED,
3563 bool in ATTRIBUTE_UNUSED)
3564{
3565 return 4;
3566}
fd63fcf8 3567
fab7adbf 3568/* Compute a (partial) cost for rtx X. Return true if the complete
3569 cost has been computed, and false if subexpressions should be
3570 scanned. In either case, *TOTAL contains the cost result. */
3571
3572static bool
5ae4887d 3573xtensa_rtx_costs (rtx x, machine_mode mode, int outer_code,
3574 int opno ATTRIBUTE_UNUSED,
20d892d1 3575 int *total, bool speed ATTRIBUTE_UNUSED)
fab7adbf 3576{
5ae4887d 3577 int code = GET_CODE (x);
3578
fab7adbf 3579 switch (code)
3580 {
3581 case CONST_INT:
3582 switch (outer_code)
3583 {
3584 case SET:
3585 if (xtensa_simm12b (INTVAL (x)))
3586 {
3587 *total = 4;
3588 return true;
3589 }
3590 break;
3591 case PLUS:
3592 if (xtensa_simm8 (INTVAL (x))
3593 || xtensa_simm8x256 (INTVAL (x)))
3594 {
3595 *total = 0;
3596 return true;
3597 }
3598 break;
3599 case AND:
3600 if (xtensa_mask_immediate (INTVAL (x)))
3601 {
3602 *total = 0;
3603 return true;
3604 }
3605 break;
3606 case COMPARE:
3607 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
3608 {
3609 *total = 0;
3610 return true;
3611 }
3612 break;
3613 case ASHIFT:
3614 case ASHIFTRT:
3615 case LSHIFTRT:
3616 case ROTATE:
3617 case ROTATERT:
dafa59bd 3618 /* No way to tell if X is the 2nd operand so be conservative. */
fab7adbf 3619 default: break;
3620 }
3621 if (xtensa_simm12b (INTVAL (x)))
3622 *total = 5;
afb26b4b 3623 else if (TARGET_CONST16)
3624 *total = COSTS_N_INSNS (2);
fab7adbf 3625 else
3626 *total = 6;
3627 return true;
3628
3629 case CONST:
3630 case LABEL_REF:
3631 case SYMBOL_REF:
afb26b4b 3632 if (TARGET_CONST16)
3633 *total = COSTS_N_INSNS (2);
3634 else
3635 *total = 5;
fab7adbf 3636 return true;
3637
3638 case CONST_DOUBLE:
afb26b4b 3639 if (TARGET_CONST16)
3640 *total = COSTS_N_INSNS (4);
3641 else
3642 *total = 7;
fab7adbf 3643 return true;
3644
3645 case MEM:
3646 {
3647 int num_words =
5ae4887d 3648 (GET_MODE_SIZE (mode) > UNITS_PER_WORD) ? 2 : 1;
fab7adbf 3649
5ae4887d 3650 if (memory_address_p (mode, XEXP ((x), 0)))
fab7adbf 3651 *total = COSTS_N_INSNS (num_words);
3652 else
3653 *total = COSTS_N_INSNS (2*num_words);
3654 return true;
3655 }
3656
3657 case FFS:
8e8c0c04 3658 case CTZ:
fab7adbf 3659 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
3660 return true;
3661
8e8c0c04 3662 case CLZ:
3663 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
3664 return true;
3665
fab7adbf 3666 case NOT:
5ae4887d 3667 *total = COSTS_N_INSNS (mode == DImode ? 3 : 2);
fab7adbf 3668 return true;
3669
3670 case AND:
3671 case IOR:
3672 case XOR:
5ae4887d 3673 if (mode == DImode)
fab7adbf 3674 *total = COSTS_N_INSNS (2);
3675 else
3676 *total = COSTS_N_INSNS (1);
3677 return true;
3678
3679 case ASHIFT:
3680 case ASHIFTRT:
3681 case LSHIFTRT:
5ae4887d 3682 if (mode == DImode)
fab7adbf 3683 *total = COSTS_N_INSNS (50);
3684 else
3685 *total = COSTS_N_INSNS (1);
3686 return true;
3687
3688 case ABS:
3689 {
5ae4887d 3690 if (mode == SFmode)
fab7adbf 3691 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
5ae4887d 3692 else if (mode == DFmode)
fab7adbf 3693 *total = COSTS_N_INSNS (50);
3694 else
3695 *total = COSTS_N_INSNS (4);
3696 return true;
3697 }
3698
3699 case PLUS:
3700 case MINUS:
3701 {
5ae4887d 3702 if (mode == SFmode)
fab7adbf 3703 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
5ae4887d 3704 else if (mode == DFmode || mode == DImode)
fab7adbf 3705 *total = COSTS_N_INSNS (50);
3706 else
3707 *total = COSTS_N_INSNS (1);
3708 return true;
3709 }
3710
3711 case NEG:
5ae4887d 3712 *total = COSTS_N_INSNS (mode == DImode ? 4 : 2);
fab7adbf 3713 return true;
3714
3715 case MULT:
3716 {
5ae4887d 3717 if (mode == SFmode)
fab7adbf 3718 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
5ae4887d 3719 else if (mode == DFmode)
fab7adbf 3720 *total = COSTS_N_INSNS (50);
5ae4887d 3721 else if (mode == DImode)
8e8c0c04 3722 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
fab7adbf 3723 else if (TARGET_MUL32)
3724 *total = COSTS_N_INSNS (4);
3725 else if (TARGET_MAC16)
3726 *total = COSTS_N_INSNS (16);
3727 else if (TARGET_MUL16)
3728 *total = COSTS_N_INSNS (12);
3729 else
3730 *total = COSTS_N_INSNS (50);
3731 return true;
3732 }
3733
3734 case DIV:
3735 case MOD:
3736 {
5ae4887d 3737 if (mode == SFmode)
fab7adbf 3738 {
3739 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3740 return true;
3741 }
5ae4887d 3742 else if (mode == DFmode)
fab7adbf 3743 {
3744 *total = COSTS_N_INSNS (50);
3745 return true;
3746 }
3747 }
dafa59bd 3748 /* Fall through. */
fab7adbf 3749
3750 case UDIV:
3751 case UMOD:
3752 {
5ae4887d 3753 if (mode == DImode)
fab7adbf 3754 *total = COSTS_N_INSNS (50);
3755 else if (TARGET_DIV32)
3756 *total = COSTS_N_INSNS (32);
3757 else
3758 *total = COSTS_N_INSNS (50);
3759 return true;
3760 }
3761
3762 case SQRT:
5ae4887d 3763 if (mode == SFmode)
fab7adbf 3764 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3765 else
3766 *total = COSTS_N_INSNS (50);
3767 return true;
3768
3769 case SMIN:
3770 case UMIN:
3771 case SMAX:
3772 case UMAX:
3773 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3774 return true;
3775
3776 case SIGN_EXTRACT:
3777 case SIGN_EXTEND:
3778 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3779 return true;
3780
3781 case ZERO_EXTRACT:
3782 case ZERO_EXTEND:
3783 *total = COSTS_N_INSNS (1);
3784 return true;
3785
3786 default:
3787 return false;
3788 }
3789}
3790
6644435d 3791/* Worker function for TARGET_RETURN_IN_MEMORY. */
3792
4fe4af61 3793static bool
fb80456a 3794xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4fe4af61 3795{
3796 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3797 > 4 * UNITS_PER_WORD);
3798}
3799
b542c964 3800/* Worker function for TARGET_FUNCTION_VALUE. */
3801
3802rtx
3803xtensa_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
3804 bool outgoing)
3805{
3806 return gen_rtx_REG ((INTEGRAL_TYPE_P (valtype)
3807 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
3808 ? SImode : TYPE_MODE (valtype),
3809 outgoing ? GP_OUTGOING_RETURN : GP_RETURN);
3810}
57ffde16 3811
7af7466c 3812/* Worker function for TARGET_LIBCALL_VALUE. */
3813
3814static rtx
3754d046 3815xtensa_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
7af7466c 3816{
3817 return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT
3818 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3819 ? SImode : mode, GP_RETURN);
3820}
3821
 3822/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
3823
3824static bool
3825xtensa_function_value_regno_p (const unsigned int regno)
3826{
3827 return (regno == GP_RETURN);
3828}
3829
974b8df6 3830/* The static chain is passed in memory. Provide rtx giving 'mem'
3831 expressions that denote where they are stored. */
3832
3833static rtx
8d54d6a0 3834xtensa_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
974b8df6 3835{
b89c671b 3836 if (TARGET_WINDOWED_ABI)
3837 {
3838 rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx;
3839 return gen_frame_mem (Pmode, plus_constant (Pmode, base,
3840 -5 * UNITS_PER_WORD));
3841 }
3842 else
3843 return gen_rtx_REG (Pmode, A8_REG);
974b8df6 3844}
3845
3846
57ffde16 3847/* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3848 instruction with a minimal stack frame in order to get some free
3849 registers. Once the actual call target is known, the proper stack frame
3850 size is extracted from the ENTRY instruction at the target and the
3851 current frame is adjusted to match. The trampoline then transfers
3852 control to the instruction following the ENTRY at the target. Note:
3853 this assumes that the target begins with an ENTRY instruction. */
3854
269e94f8 3855static void
3856xtensa_asm_trampoline_template (FILE *stream)
57ffde16 3857{
3858 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3859
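  /* Disable assembler transformations (e.g. relaxation) so that the size
     and layout of the trampoline template stay fixed.  */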
3860 fprintf (stream, "\t.begin no-transform\n");
57ffde16 3861
b89c671b 3862 if (TARGET_WINDOWED_ABI)
57ffde16 3863 {
b89c671b 3864 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);
57ffde16 3865
b89c671b 3866 if (use_call0)
3867 {
3868 /* Save the return address. */
3869 fprintf (stream, "\tmov\ta10, a0\n");
57ffde16 3870
b89c671b 3871 /* Use a CALL0 instruction to skip past the constants and in the
3872 process get the PC into A0. This allows PC-relative access to
3873 the constants without relying on L32R. */
3874 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3875 }
3876 else
3877 fprintf (stream, "\tj\t.Lskipconsts\n");
57ffde16 3878
b89c671b 3879 fprintf (stream, "\t.align\t4\n");
3880 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3881 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3882 fprintf (stream, ".Lskipconsts:\n");
3883
3884 /* Load the static chain and function address from the trampoline. */
3885 if (use_call0)
3886 {
3887 fprintf (stream, "\taddi\ta0, a0, 3\n");
3888 fprintf (stream, "\tl32i\ta9, a0, 0\n");
3889 fprintf (stream, "\tl32i\ta8, a0, 4\n");
3890 }
3891 else
3892 {
3893 fprintf (stream, "\tl32r\ta9, .Lchainval\n");
3894 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
3895 }
3896
3897 /* Store the static chain. */
3898 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);
3899
3900 /* Set the proper stack pointer value. */
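      /* The ENTRY word at the call target encodes its frame size as a
         12-bit immediate in units of 8 bytes; extract it and scale it.  */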
3901 fprintf (stream, "\tl32i\ta9, a8, 0\n");
3902 fprintf (stream, "\textui\ta9, a9, %d, 12\n",
3903 TARGET_BIG_ENDIAN ? 8 : 12);
3904 fprintf (stream, "\tslli\ta9, a9, 3\n");
3905 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
3906 fprintf (stream, "\tsub\ta9, sp, a9\n");
3907 fprintf (stream, "\tmovsp\tsp, a9\n");
3908
3909 if (use_call0)
3910 /* Restore the return address. */
3911 fprintf (stream, "\tmov\ta0, a10\n");
3912
3913 /* Jump to the instruction following the ENTRY. */
3914 fprintf (stream, "\taddi\ta8, a8, 3\n");
3915 fprintf (stream, "\tjx\ta8\n");
3916
3917 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3918 if (use_call0)
3919 fprintf (stream, "\t.byte\t0\n");
3920 else
3921 fprintf (stream, "\tnop\n");
57ffde16 3922 }
3923 else
3924 {
b89c671b 3925 if (use_call0)
3926 {
3927 /* Save the return address. */
3928 fprintf (stream, "\tmov\ta10, a0\n");
57ffde16 3929
b89c671b 3930 /* Use a CALL0 instruction to skip past the constants and in the
3931 process get the PC into A0. This allows PC-relative access to
3932 the constants without relying on L32R. */
3933 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3934 }
3935 else
3936 fprintf (stream, "\tj\t.Lskipconsts\n");
57ffde16 3937
b89c671b 3938 fprintf (stream, "\t.align\t4\n");
3939 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3940 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3941 fprintf (stream, ".Lskipconsts:\n");
57ffde16 3942
b89c671b 3943 /* Load the static chain and function address from the trampoline. */
3944 if (use_call0)
3945 {
3946 fprintf (stream, "\taddi\ta0, a0, 3\n");
3947 fprintf (stream, "\tl32i\ta8, a0, 0\n");
3948 fprintf (stream, "\tl32i\ta9, a0, 4\n");
3949 fprintf (stream, "\tmov\ta0, a10\n");
3950 }
3951 else
3952 {
3953 fprintf (stream, "\tl32r\ta8, .Lchainval\n");
3954 fprintf (stream, "\tl32r\ta9, .Lfnaddr\n");
3955 }
3956 fprintf (stream, "\tjx\ta9\n");
57ffde16 3957
b89c671b 3958 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3959 if (use_call0)
3960 fprintf (stream, "\t.byte\t0\n");
3961 else
3962 fprintf (stream, "\tnop\n");
3963 }
57ffde16 3964 fprintf (stream, "\t.end no-transform\n");
3965}
3966
269e94f8 3967static void
3968xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain)
57ffde16 3969{
269e94f8 3970 rtx func = XEXP (DECL_RTL (fndecl), 0);
57ffde16 3971 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
b89c671b 3972 int chain_off;
3973 int func_off;
3974
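  /* chain_off and func_off are the byte offsets of the .Lchainval and
     .Lfnaddr words within the trampoline template; they depend on the ABI
     and on whether the CALL0 or L32R variant of the template is used.  */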
3975 if (TARGET_WINDOWED_ABI)
3976 {
3977 chain_off = use_call0 ? 12 : 8;
3978 func_off = use_call0 ? 16 : 12;
3979 }
3980 else
3981 {
3982 chain_off = use_call0 ? 8 : 4;
3983 func_off = use_call0 ? 12 : 8;
3984 }
269e94f8 3985
3986 emit_block_move (m_tramp, assemble_trampoline_template (),
3987 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3988
3989 emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain);
3990 emit_move_insn (adjust_address (m_tramp, SImode, func_off), func);
57ffde16 3991 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
51538c49 3992 LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode);
57ffde16 3993}
3994
ca316360 3995/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3996
3997static bool
3754d046 3998xtensa_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
ca316360 3999{
4000 return !xtensa_tls_referenced_p (x);
4001}
57ffde16 4002
47edca9a 4003/* Implement TARGET_CAN_USE_DOLOOP_P. */
4004
4005static bool
4006xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
4007 unsigned int loop_depth, bool entered_at_top)
4008{
4009 /* Considering limitations in the hardware, only use doloop
4010 for innermost loops which must be entered from the top. */
4011 if (loop_depth > 1 || !entered_at_top)
4012 return false;
4013
4014 return true;
4015}
4016
 4017/* Return NULL if INSN is valid within a low-overhead loop.
 4018   Otherwise return a string describing why doloop cannot be applied. */
4019
4020static const char *
4021xtensa_invalid_within_doloop (const rtx_insn *insn)
4022{
4023 if (CALL_P (insn))
4024 return "Function call in the loop.";
4025
4026 if (JUMP_P (insn) && INSN_CODE (insn) == CODE_FOR_return)
4027 return "Return from a call instruction in the loop.";
4028
4029 return NULL;
4030}
4031
 4032/* Attempt to convert LOOP into a zero-overhead hardware loop;
        return true on success. */
4033
833dfc8b 4034#if TARGET_LOOPS
4035
47edca9a 4036static bool
4037hwloop_optimize (hwloop_info loop)
4038{
4039 int i;
4040 edge entry_edge;
4041 basic_block entry_bb;
4042 rtx iter_reg;
4043 rtx_insn *insn, *seq, *entry_after;
4044
4045 if (loop->depth > 1)
4046 {
4047 if (dump_file)
4048 fprintf (dump_file, ";; loop %d is not innermost\n",
4049 loop->loop_no);
4050 return false;
4051 }
4052
4053 if (!loop->incoming_dest)
4054 {
4055 if (dump_file)
4056 fprintf (dump_file, ";; loop %d has more than one entry\n",
4057 loop->loop_no);
4058 return false;
4059 }
4060
4061 if (loop->incoming_dest != loop->head)
4062 {
4063 if (dump_file)
4064 fprintf (dump_file, ";; loop %d is not entered from head\n",
4065 loop->loop_no);
4066 return false;
4067 }
4068
4069 if (loop->has_call || loop->has_asm)
4070 {
4071 if (dump_file)
4072 fprintf (dump_file, ";; loop %d has invalid insn\n",
4073 loop->loop_no);
4074 return false;
4075 }
4076
 4077  /* Give up if the iteration register is used inside the loop body
          or outside the loop. */
4078 if (loop->iter_reg_used || loop->iter_reg_used_outside)
4079 {
4080 if (dump_file)
4081 fprintf (dump_file, ";; loop %d uses iterator\n",
4082 loop->loop_no);
4083 return false;
4084 }
4085
4086 /* Check if start_label appears before doloop_end. */
4087 insn = loop->start_label;
4088 while (insn && insn != loop->loop_end)
4089 insn = NEXT_INSN (insn);
4090
4091 if (!insn)
4092 {
4093 if (dump_file)
4094 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
4095 loop->loop_no);
4096 return false;
4097 }
4098
4099 /* Get the loop iteration register. */
4100 iter_reg = loop->iter_reg;
4101
4102 gcc_assert (REG_P (iter_reg));
4103
4104 entry_edge = NULL;
4105
4106 FOR_EACH_VEC_SAFE_ELT (loop->incoming, i, entry_edge)
4107 if (entry_edge->flags & EDGE_FALLTHRU)
4108 break;
4109
4110 if (entry_edge == NULL)
4111 return false;
4112
4113 /* Place the zero_cost_loop_start instruction before the loop. */
4114 entry_bb = entry_edge->src;
4115
4116 start_sequence ();
4117
4118 insn = emit_insn (gen_zero_cost_loop_start (loop->iter_reg,
4119 loop->start_label,
4120 loop->iter_reg));
4121
4122 seq = get_insns ();
4123
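  /* If the entry block has other successors or the loop has multiple
     incoming edges, put the loop setup sequence in a new basic block of
     its own; otherwise append it at the end of the entry block, skipping
     any trailing notes and debug insns.  */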
4124 if (!single_succ_p (entry_bb) || vec_safe_length (loop->incoming) > 1)
4125 {
4126 basic_block new_bb;
4127 edge e;
4128 edge_iterator ei;
4129
4130 emit_insn_before (seq, BB_HEAD (loop->head));
4131 seq = emit_label_before (gen_label_rtx (), seq);
4132 new_bb = create_basic_block (seq, insn, entry_bb);
4133 FOR_EACH_EDGE (e, ei, loop->incoming)
4134 {
4135 if (!(e->flags & EDGE_FALLTHRU))
4136 redirect_edge_and_branch_force (e, new_bb);
4137 else
4138 redirect_edge_succ (e, new_bb);
4139 }
4140
4141 make_edge (new_bb, loop->head, 0);
4142 }
4143 else
4144 {
4145 entry_after = BB_END (entry_bb);
4146 while (DEBUG_INSN_P (entry_after)
4147 || (NOTE_P (entry_after)
4148 && NOTE_KIND (entry_after) != NOTE_INSN_BASIC_BLOCK))
4149 entry_after = PREV_INSN (entry_after);
4150
4151 emit_insn_after (seq, entry_after);
4152 }
4153
4154 end_sequence ();
4155
4156 return true;
4157}
4158
4159/* A callback for the hw-doloop pass. Called when a loop we have discovered
4160 turns out not to be optimizable; we have to split the loop_end pattern into
4161 a subtract and a test. */
4162
4163static void
4164hwloop_fail (hwloop_info loop)
4165{
4166 rtx test;
4167 rtx_insn *insn = loop->loop_end;
4168
4169 emit_insn_before (gen_addsi3 (loop->iter_reg,
4170 loop->iter_reg,
4171 constm1_rtx),
4172 loop->loop_end);
4173
4174 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
4175 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
4176 loop->iter_reg, const0_rtx,
4177 loop->start_label),
4178 loop->loop_end);
4179
4180 JUMP_LABEL (insn) = loop->start_label;
4181 LABEL_NUSES (loop->start_label)++;
4182 delete_insn (loop->loop_end);
4183}
4184
4185/* A callback for the hw-doloop pass. This function examines INSN; if
4186 it is a doloop_end pattern we recognize, return the reg rtx for the
4187 loop counter. Otherwise, return NULL_RTX. */
4188
4189static rtx
4190hwloop_pattern_reg (rtx_insn *insn)
4191{
4192 rtx reg;
4193
4194 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
4195 return NULL_RTX;
4196
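  /* The second element of the loop_end PARALLEL is the SET of the
     iteration counter; its destination gives the counter register.  */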
4197 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
4198 if (!REG_P (reg))
4199 return NULL_RTX;
4200
4201 return reg;
4202}
4203
4204
4205static struct hw_doloop_hooks xtensa_doloop_hooks =
4206{
4207 hwloop_pattern_reg,
4208 hwloop_optimize,
4209 hwloop_fail
4210};
4211
4212/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4213 and tries to rewrite the RTL of these loops so that proper Xtensa
4214 hardware loops are generated. */
4215
4216static void
4217xtensa_reorg_loops (void)
4218{
4219 reorg_loops (false, &xtensa_doloop_hooks);
4220}
833dfc8b 4221#else
4222static inline void
4223xtensa_reorg_loops (void)
4224{
4225}
4226#endif
47edca9a 4227
4228/* Implement the TARGET_MACHINE_DEPENDENT_REORG pass. */
4229
4230static void
4231xtensa_reorg (void)
4232{
4233 /* We are freeing block_for_insn in the toplev to keep compatibility
4234 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4235 compute_bb_for_insn ();
4236
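  /* The hw-doloop analysis below relies on up-to-date dataflow
     information.  */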
4237 df_analyze ();
4238
4239 /* Doloop optimization. */
4240 xtensa_reorg_loops ();
4241}
4242
b89c671b 4243/* Update register usage after having seen the compiler flags. */
4244
4245static void
4246xtensa_conditional_register_usage (void)
4247{
4248 unsigned i, c_mask;
4249
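  /* Entries greater than 1 in CALL_USED_REGISTERS encode an ABI mask:
     bit 1 marks registers that are call-used under the windowed ABI,
     bit 2 those that are call-used under the call0 ABI.  */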
4250 c_mask = TARGET_WINDOWED_ABI ? (1 << 1) : (1 << 2);
4251
4252 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4253 {
4254 /* Set/reset conditionally defined registers from
4255 CALL_USED_REGISTERS initializer. */
4256 if (call_used_regs[i] > 1)
4257 call_used_regs[i] = !!(call_used_regs[i] & c_mask);
4258 }
4259
4260 /* Remove hard FP register from the preferred reload registers set. */
4261 CLEAR_HARD_REG_BIT (reg_class_contents[(int)RL_REGS],
4262 HARD_FRAME_POINTER_REGNUM);
4263}
4264
 4265/* Map hard register number to register class. */
4266
4267enum reg_class xtensa_regno_to_class (int regno)
4268{
4269 static const enum reg_class regno_to_class[FIRST_PSEUDO_REGISTER] =
4270 {
4271 RL_REGS, SP_REG, RL_REGS, RL_REGS,
4272 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4273 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4274 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4275 AR_REGS, AR_REGS, BR_REGS,
4276 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4277 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4278 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4279 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4280 ACC_REG,
4281 };
4282
4283 if (regno == HARD_FRAME_POINTER_REGNUM)
4284 return GR_REGS;
4285 else
4286 return regno_to_class[regno];
4287}
4288
1f3233d1 4289#include "gt-xtensa.h"