]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/xtensa/xtensa.c
* tree-dump.c (dequeue_and_dump): Use CONSTRUCTOR_ELTS,
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
CommitLineData
f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
389269e4 2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
f6b7ba2b 3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
f6b7ba2b 26#include "rtl.h"
27#include "regs.h"
f6b7ba2b 28#include "hard-reg-set.h"
29#include "basic-block.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "insn-attr.h"
35#include "insn-codes.h"
36#include "recog.h"
37#include "output.h"
38#include "tree.h"
39#include "expr.h"
40#include "flags.h"
41#include "reload.h"
42#include "tm_p.h"
43#include "function.h"
44#include "toplev.h"
45#include "optabs.h"
bbfbe351 46#include "output.h"
f6b7ba2b 47#include "libfuncs.h"
160b2123 48#include "ggc.h"
f6b7ba2b 49#include "target.h"
50#include "target-def.h"
049d6666 51#include "langhooks.h"
f6b7ba2b 52
/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc. */

enum internal_test {
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX	/* number of tests; also used as "unrecognized" marker */
  };
70
/* Cached operands, and operator to compare for use in set/branch on
   condition codes.  NOTE(review): presumably filled in by the compare
   expanders in xtensa.md and consumed by the conditional branch/move
   expanders below -- confirm against the machine description.  */
rtx branch_cmp[2];

/* what type of branch to use */
enum cmp_type branch_type;

/* Array giving truth value on whether or not a given hard register
   can support a given mode. */
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Current frame size calculated by compute_frame_size. */
unsigned xtensa_current_frame_size;

/* Tables of ld/st opcode names for block moves */
const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on pieces per block move; used by the worst-case BLKmode
   offset check in xtensa_mem_offset.  */
#define LARGEST_MOVE_RATIO 15

/* Define the structure for the machine field in struct function. */
struct machine_function GTY(())
{
  /* NOTE(review): presumably nonzero when the function reads the
     caller's frame -- confirm where this is set.  */
  int accesses_prev_frame;
  /* Set once the incoming argument in a7 has been copied out; tested
     in xtensa_copy_incoming_a7 so the copy is emitted only once.  */
  bool incoming_a7_copied;
};
96
/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  Every hard register is allowed (all entries are 1).  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};
108
/* Map hard register number to register class.  Register 1 is the
   stack pointer (SP_REG); registers 19-34 are the FP coprocessor
   registers; register 35 is the MAC16 accumulator.  */
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
{
  RL_REGS,	SP_REG,		RL_REGS,	RL_REGS,	/* 0-3 */
  RL_REGS,	RL_REGS,	RL_REGS,	GR_REGS,	/* 4-7 */
  RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,	/* 8-11 */
  RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,	/* 12-15 */
  AR_REGS,	AR_REGS,	BR_REGS,			/* 16-18 */
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,	/* 19-22 */
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,	/* 23-26 */
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,	/* 27-30 */
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,	/* 31-34 */
  ACC_REG,							/* 35 */
};
123
/* Map register constraint character to register class.  Every entry
   starts out as NO_REGS; NOTE(review): the real constraint-letter
   mappings are presumably installed at run time (e.g., during option
   override/initialization) -- confirm where this table is written.  */
enum reg_class xtensa_char_to_class[256] =
{
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
  NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
};
192
/* Forward declarations for the static helper functions and target
   hooks defined in this file.  */
static int b4const_or_zero PARAMS ((int));
static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
static rtx gen_conditional_move PARAMS ((rtx));
static rtx fixup_subreg_mem PARAMS ((rtx x));
static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
static struct machine_function * xtensa_init_machine_status PARAMS ((void));
static void printx PARAMS ((FILE *, signed int));
static unsigned int xtensa_multibss_section_type_flags
  PARAMS ((tree, const char *, int));
static void xtensa_select_rtx_section
  PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));

/* File-scope state shared by the functions below.  */
static rtx frame_size_const;		/* NOTE(review): presumably the cached
					   frame-size constant rtx -- confirm */
static int current_function_arg_words;	/* NOTE(review): presumably running
					   count of argument words -- confirm */
/* Register allocation order used for non-leaf functions, taken from
   the target's REG_ALLOC_ORDER definition.  */
static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
  REG_ALLOC_ORDER;
212\f
/* This macro generates the assembly code for function entry.
   FILE is a stdio stream to output the code to.
   SIZE is an int: how many units of temporary storage to allocate.
   Refer to the array 'regs_ever_live' to determine which registers
   to save; 'regs_ever_live[I]' is nonzero if register number I
   is ever used in the function. This macro is responsible for
   knowing which registers should not be saved even if used. */

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue

/* This macro generates the assembly code for function exit,
   on machines that need it. If FUNCTION_EPILOGUE is not defined
   then individual return instructions are generated for each
   return statement. Args are same as for FUNCTION_PROLOGUE. */

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue

/* These hooks specify assembly directives for creating certain kinds
   of integer object. */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

/* Choose the output section for constant-pool entries; the hook is
   declared above and defined later in this file.  */
#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xtensa_rtx_costs
/* NOTE(review): hook_int_rtx_0 presumably reports a cost of 0 for all
   addresses -- confirm against hooks.c.  */
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

/* Initialize the GCC target structure from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
247\f
f6b7ba2b 248
249/*
250 * Functions to test Xtensa immediate operand validity.
251 */
252
/* Return nonzero if V is one of the constants accepted by the
   unsigned-branch "b4constu" immediate encoding.  */

int
xtensa_b4constu (v)
     int v;
{
  if (v >= 2 && v <= 8)
    return 1;

  switch (v)
    {
    case 10: case 12: case 16: case 32: case 64:
    case 128: case 256: case 32768: case 65536:
      return 1;
    default:
      return 0;
    }
}
279
/* Return nonzero if V is a signed 8-bit value scaled by 256
   (the ADDMI-style immediate range).  */

int
xtensa_simm8x256 (v)
     int v;
{
  if (v & 0xff)
    return 0;
  return v >= -32768 && v <= 32512;
}
286
/* Return nonzero if V fits the ADDI.N immediate field: -1, or 1..15.  */

int
xtensa_ai4const (v)
     int v;
{
  return v == -1 || (v > 0 && v < 16);
}
293
/* Return nonzero if V is in the signed MOVI.N-style range -32..95.  */

int
xtensa_simm7 (v)
     int v;
{
  return ! (v < -32 || v > 95);
}
300
/* Return nonzero if V is one of the constants accepted by the
   signed-branch "b4const" immediate encoding.  */

int
xtensa_b4const (v)
     int v;
{
  if (v == -1 || (v >= 1 && v <= 8))
    return 1;

  switch (v)
    {
    case 10: case 12: case 16: case 32: case 64:
    case 128: case 256:
      return 1;
    default:
      return 0;
    }
}
327
/* Return nonzero if V fits in a signed 8-bit immediate field.  */

int
xtensa_simm8 (v)
     int v;
{
  return ! (v < -128 || v > 127);
}
334
/* Return nonzero if V is in the range 7..22 (the "tp7" encoding).  */

int
xtensa_tp7 (v)
     int v;
{
  return v <= 22 && v >= 7;
}
341
/* Return nonzero if V is a valid L32I.N/S32I.N offset: a multiple of
   4 in the range 0..60.  */

int
xtensa_lsi4x4 (v)
     int v;
{
  if (v < 0 || v > 60)
    return 0;
  return (v & 3) == 0;
}
348
/* Return nonzero if V fits in the signed 12-bit MOVI immediate field.  */

int
xtensa_simm12b (v)
     int v;
{
  return ! (v < -2048 || v > 2047);
}
355
/* Return nonzero if V is an unsigned 8-bit value (0..255).  */

int
xtensa_uimm8 (v)
     int v;
{
  return (unsigned) v <= 255;
}
362
/* Return nonzero if V is an even value in 0..510 (an unsigned 8-bit
   field scaled by 2, as used for 16-bit loads/stores).  */

int
xtensa_uimm8x2 (v)
     int v;
{
  if (v & 1)
    return 0;
  return (unsigned) v <= 510;
}
369
/* Return nonzero if V is a multiple of 4 in 0..1020 (an unsigned 8-bit
   field scaled by 4, as used for 32-bit loads/stores).  */

int
xtensa_uimm8x4 (v)
     int v;
{
  if (v & 3)
    return 0;
  return (unsigned) v <= 1020;
}
376
377
/* This is just like the standard true_regnum() function except that it
   works even when reg_renumber is not initialized. */

int
xt_true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      /* A pseudo that reload has assigned to a hard register: report
	 the hard register number.  */
      if (reg_renumber
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = xt_true_regnum (SUBREG_REG (x));
      /* Only apply the subreg offset when the inner value resolved to
	 a hard register; for unallocated pseudos fall through.  */
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
					   GET_MODE (SUBREG_REG (x)),
					   SUBREG_BYTE (x), GET_MODE (x));
    }
  /* Not a (renumbered) register: no hard register number available.  */
  return -1;
}
403
404
405int
406add_operand (op, mode)
407 rtx op;
408 enum machine_mode mode;
409{
410 if (GET_CODE (op) == CONST_INT)
411 return (xtensa_simm8 (INTVAL (op)) ||
412 xtensa_simm8x256 (INTVAL (op)));
413
414 return register_operand (op, mode);
415}
416
417
418int
419arith_operand (op, mode)
420 rtx op;
421 enum machine_mode mode;
422{
423 if (GET_CODE (op) == CONST_INT)
424 return xtensa_simm8 (INTVAL (op));
425
426 return register_operand (op, mode);
427}
428
429
430int
431nonimmed_operand (op, mode)
432 rtx op;
433 enum machine_mode mode;
434{
435 /* We cannot use the standard nonimmediate_operand() predicate because
436 it includes constant pool memory operands. */
437
438 if (memory_operand (op, mode))
439 return !constantpool_address_p (XEXP (op, 0));
440
441 return register_operand (op, mode);
442}
443
444
445int
446mem_operand (op, mode)
447 rtx op;
448 enum machine_mode mode;
449{
450 /* We cannot use the standard memory_operand() predicate because
451 it includes constant pool memory operands. */
452
453 if (memory_operand (op, mode))
454 return !constantpool_address_p (XEXP (op, 0));
455
456 return FALSE;
457}
458
459
/* Validate the operand combination for a move in MODE.  */

int
xtensa_valid_move (mode, operands)
     enum machine_mode mode;
     rtx *operands;
{
  /* Either the destination or source must be a register, and the
     MAC16 accumulator doesn't count. */

  if (register_operand (operands[0], mode))
    {
      int dst_regnum = xt_true_regnum (operands[0]);

      /* The stack pointer can only be assigned with a MOVSP opcode. */
      if (dst_regnum == STACK_POINTER_REGNUM)
	return (mode == SImode
		&& register_operand (operands[1], mode)
		&& !ACC_REG_P (xt_true_regnum (operands[1])));

      /* Any non-accumulator register destination is fine.  */
      if (!ACC_REG_P (dst_regnum))
	return true;
    }
  /* Otherwise require a non-accumulator register source.  */
  if (register_operand (operands[1], mode))
    {
      int src_regnum = xt_true_regnum (operands[1]);
      if (!ACC_REG_P (src_regnum))
	return true;
    }
  return FALSE;
}
489
490
491int
492mask_operand (op, mode)
493 rtx op;
494 enum machine_mode mode;
495{
496 if (GET_CODE (op) == CONST_INT)
497 return xtensa_mask_immediate (INTVAL (op));
498
499 return register_operand (op, mode);
500}
501
502
503int
504extui_fldsz_operand (op, mode)
505 rtx op;
506 enum machine_mode mode ATTRIBUTE_UNUSED;
507{
508 return ((GET_CODE (op) == CONST_INT)
509 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
510}
511
512
513int
514sext_operand (op, mode)
515 rtx op;
516 enum machine_mode mode;
517{
518 if (TARGET_SEXT)
519 return nonimmed_operand (op, mode);
520 return mem_operand (op, mode);
521}
522
523
524int
525sext_fldsz_operand (op, mode)
526 rtx op;
527 enum machine_mode mode ATTRIBUTE_UNUSED;
528{
529 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
530}
531
532
533int
534lsbitnum_operand (op, mode)
535 rtx op;
536 enum machine_mode mode ATTRIBUTE_UNUSED;
537{
538 if (GET_CODE (op) == CONST_INT)
539 {
540 return (BITS_BIG_ENDIAN
541 ? (INTVAL (op) == BITS_PER_WORD-1)
542 : (INTVAL (op) == 0));
543 }
544 return FALSE;
545}
546
547
548static int
549b4const_or_zero (v)
550 int v;
551{
552 if (v == 0)
553 return TRUE;
554 return xtensa_b4const (v);
555}
556
557
558int
559branch_operand (op, mode)
560 rtx op;
561 enum machine_mode mode;
562{
563 if (GET_CODE (op) == CONST_INT)
564 return b4const_or_zero (INTVAL (op));
565
566 return register_operand (op, mode);
567}
568
569
570int
571ubranch_operand (op, mode)
572 rtx op;
573 enum machine_mode mode;
574{
575 if (GET_CODE (op) == CONST_INT)
576 return xtensa_b4constu (INTVAL (op));
577
578 return register_operand (op, mode);
579}
580
581
582int
583call_insn_operand (op, mode)
584 rtx op;
585 enum machine_mode mode ATTRIBUTE_UNUSED;
586{
587 if ((GET_CODE (op) == REG)
588 && (op != arg_pointer_rtx)
589 && ((REGNO (op) < FRAME_POINTER_REGNUM)
590 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
591 return TRUE;
592
593 if (CONSTANT_ADDRESS_P (op))
594 {
595 /* Direct calls only allowed to static functions with PIC. */
be2f664f 596 return (!flag_pic
597 || (GET_CODE (op) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op)));
f6b7ba2b 598 }
599
600 return FALSE;
601}
602
603
604int
605move_operand (op, mode)
606 rtx op;
607 enum machine_mode mode;
608{
609 if (register_operand (op, mode))
610 return TRUE;
611
612 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
613 result in 0/1. */
614 if (GET_CODE (op) == CONSTANT_P_RTX)
615 return TRUE;
616
617 if (GET_CODE (op) == CONST_INT)
618 return xtensa_simm12b (INTVAL (op));
619
620 if (GET_CODE (op) == MEM)
621 return memory_address_p (mode, XEXP (op, 0));
622
623 return FALSE;
624}
625
626
627int
628smalloffset_mem_p (op)
629 rtx op;
630{
631 if (GET_CODE (op) == MEM)
632 {
633 rtx addr = XEXP (op, 0);
634 if (GET_CODE (addr) == REG)
635 return REG_OK_FOR_BASE_P (addr);
636 if (GET_CODE (addr) == PLUS)
637 {
638 rtx offset = XEXP (addr, 0);
639 if (GET_CODE (offset) != CONST_INT)
640 offset = XEXP (addr, 1);
641 if (GET_CODE (offset) != CONST_INT)
642 return FALSE;
643 return xtensa_lsi4x4 (INTVAL (offset));
644 }
645 }
646 return FALSE;
647}
648
649
650int
651smalloffset_double_mem_p (op)
652 rtx op;
653{
654 if (!smalloffset_mem_p (op))
655 return FALSE;
656 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
657}
658
659
660int
661constantpool_address_p (addr)
662 rtx addr;
663{
664 rtx sym = addr;
665
666 if (GET_CODE (addr) == CONST)
667 {
668 rtx offset;
669
670 /* only handle (PLUS (SYM, OFFSET)) form */
671 addr = XEXP (addr, 0);
672 if (GET_CODE (addr) != PLUS)
673 return FALSE;
674
675 /* make sure the address is word aligned */
676 offset = XEXP (addr, 1);
677 if ((GET_CODE (offset) != CONST_INT)
678 || ((INTVAL (offset) & 3) != 0))
679 return FALSE;
680
681 sym = XEXP (addr, 0);
682 }
683
684 if ((GET_CODE (sym) == SYMBOL_REF)
685 && CONSTANT_POOL_ADDRESS_P (sym))
686 return TRUE;
687 return FALSE;
688}
689
690
691int
692constantpool_mem_p (op)
693 rtx op;
694{
695 if (GET_CODE (op) == MEM)
696 return constantpool_address_p (XEXP (op, 0));
697 return FALSE;
698}
699
700
701int
702non_const_move_operand (op, mode)
703 rtx op;
704 enum machine_mode mode;
705{
706 if (register_operand (op, mode))
707 return 1;
708 if (GET_CODE (op) == SUBREG)
709 op = SUBREG_REG (op);
710 if (GET_CODE (op) == MEM)
711 return memory_address_p (mode, XEXP (op, 0));
712 return FALSE;
713}
714
715
/* Accept the floating point constant 1 in the appropriate mode. */

int
const_float_1_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE d;
  /* The 1.0 constants are built lazily and cached for the whole run.  */
  static REAL_VALUE_TYPE onedf;
  static REAL_VALUE_TYPE onesf;
  static int one_initialized;

  /* Only CONST_DOUBLEs of matching SFmode/DFmode qualify.  */
  if ((GET_CODE (op) != CONST_DOUBLE)
      || (mode != GET_MODE (op))
      || (mode != DFmode && mode != SFmode))
    return FALSE;

  REAL_VALUE_FROM_CONST_DOUBLE (d, op);

  if (! one_initialized)
    {
      onedf = REAL_VALUE_ATOF ("1.0", DFmode);
      onesf = REAL_VALUE_ATOF ("1.0", SFmode);
      one_initialized = TRUE;
    }

  if (mode == DFmode)
    return REAL_VALUES_EQUAL (d, onedf);
  else
    return REAL_VALUES_EQUAL (d, onesf);
}
747
748
749int
750fpmem_offset_operand (op, mode)
751 rtx op;
752 enum machine_mode mode ATTRIBUTE_UNUSED;
753{
754 if (GET_CODE (op) == CONST_INT)
755 return xtensa_mem_offset (INTVAL (op), SFmode);
756 return 0;
757}
758
759
/* Sign-extend SRC into DST by shifting it all the way left and then
   arithmetic-shifting it back right by the same amount.  */

void
xtensa_extend_reg (dst, src)
     rtx dst;
     rtx src;
{
  rtx temp = gen_reg_rtx (SImode);
  /* Shift count = word size minus the width of SRC's mode.  */
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));

  /* generate paradoxical subregs as needed so that the modes match */
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);

  emit_insn (gen_ashlsi3 (temp, src, shift));
  emit_insn (gen_ashrsi3 (dst, temp, shift));
}
775
776
/* Load constant SRC into DST by forcing SRC into the constant pool and
   loading it from there, inserting SUBREGs when DST is not SImode.  */

void
xtensa_load_constant (dst, src)
     rtx dst;
     rtx src;
{
  enum machine_mode mode = GET_MODE (dst);
  src = force_const_mem (SImode, src);

  /* PC-relative loads are always SImode so we have to add a SUBREG if that
     is not the desired mode */

  if (mode != SImode)
    {
      if (register_operand (dst, mode))
	dst = simplify_gen_subreg (SImode, dst, mode, 0);
      else
	{
	  /* DST is not a register: load through an SImode temporary
	     and narrow the temporary to MODE instead.  */
	  src = force_reg (SImode, src);
	  src = gen_lowpart_SUBREG (mode, src);
	}
    }

  emit_move_insn (dst, src);
}
801
802
803int
804branch_operator (x, mode)
805 rtx x;
806 enum machine_mode mode;
807{
808 if (GET_MODE (x) != mode)
809 return FALSE;
810
811 switch (GET_CODE (x))
812 {
813 case EQ:
814 case NE:
815 case LT:
816 case GE:
817 return TRUE;
818 default:
819 break;
820 }
821 return FALSE;
822}
823
824
825int
826ubranch_operator (x, mode)
827 rtx x;
828 enum machine_mode mode;
829{
830 if (GET_MODE (x) != mode)
831 return FALSE;
832
833 switch (GET_CODE (x))
834 {
835 case LTU:
836 case GEU:
837 return TRUE;
838 default:
839 break;
840 }
841 return FALSE;
842}
843
844
845int
846boolean_operator (x, mode)
847 rtx x;
848 enum machine_mode mode;
849{
850 if (GET_MODE (x) != mode)
851 return FALSE;
852
853 switch (GET_CODE (x))
854 {
855 case EQ:
856 case NE:
857 return TRUE;
858 default:
859 break;
860 }
861 return FALSE;
862}
863
864
/* Return nonzero if V is a valid mask immediate: between 1 and
   MAX_MASK_SIZE consecutive low-order one bits, i.e.
   v == (1 << n) - 1 for 1 <= n <= MAX_MASK_SIZE.  The old loop
   right-shifted V while it could still be negative, which is
   implementation-defined behavior in C; do the check in unsigned
   arithmetic instead.  */

int
xtensa_mask_immediate (v)
     int v;
{
#define MAX_MASK_SIZE 16
  unsigned int mask = (unsigned int) v;

  /* "mask & (mask + 1)" is zero exactly when mask is of the form
     2^n - 1; then bound the number of bits by MAX_MASK_SIZE.  */
  return (mask != 0
	  && (mask & (mask + 1)) == 0
	  && mask <= ((1u << MAX_MASK_SIZE) - 1));
}
883
884
/* Return nonzero if V is a valid constant offset from a base register
   for a memory access in MODE.  */

int
xtensa_mem_offset (v, mode)
     unsigned v;
     enum machine_mode mode;
{
  switch (mode)
    {
    case BLKmode:
      /* Handle the worst case for block moves. See xtensa_expand_block_move
	 where we emit an optimized block move operation if the block can be
	 moved in < "move_ratio" pieces. The worst case is when the block is
	 aligned but has a size of (3 mod 4) (does this happen?) so that the
	 last piece requires a byte load/store. */
      return (xtensa_uimm8 (v) &&
	      xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));

    case QImode:
      return xtensa_uimm8 (v);

    case HImode:
      return xtensa_uimm8x2 (v);

    case DFmode:
      /* A double is accessed as two word loads/stores, so both the
	 offset and offset + 4 must be in range.  */
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));

    default:
      break;
    }

  /* Everything else (SImode, SFmode, ...) uses word access.  */
  return xtensa_uimm8x4 (v);
}
916
917
918/* Make normal rtx_code into something we can index from an array */
919
920static enum internal_test
921map_test_to_internal_test (test_code)
922 enum rtx_code test_code;
923{
924 enum internal_test test = ITEST_MAX;
925
926 switch (test_code)
927 {
928 default: break;
929 case EQ: test = ITEST_EQ; break;
930 case NE: test = ITEST_NE; break;
931 case GT: test = ITEST_GT; break;
932 case GE: test = ITEST_GE; break;
933 case LT: test = ITEST_LT; break;
934 case LE: test = ITEST_LE; break;
935 case GTU: test = ITEST_GTU; break;
936 case GEU: test = ITEST_GEU; break;
937 case LTU: test = ITEST_LTU; break;
938 case LEU: test = ITEST_LEU; break;
939 }
940
941 return test;
942}
943
944
945/* Generate the code to compare two integer values. The return value is
946 the comparison expression. */
947
static rtx
gen_int_relational (test_code, cmp0, cmp1, p_invert)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
     int *p_invert;		/* whether branch needs to reverse its test */
{
  struct cmp_info {
    enum rtx_code test_code;	/* test code to use in insn */
    int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons. */
  };

  /* Indexed by enum internal_test; maps each test onto the EQ/NE/LT/GE
     (or LTU/GEU) comparisons the hardware actually provides.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* EQ */
    { NE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* NE */

    { LT,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* GT */
    { GE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* GE */
    { LT,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* LT */
    { GE,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* LE */

    { LTU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* GTU */
    { GEU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* GEU */
    { LTU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* LTU */
    { GEU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  if (test == ITEST_MAX)
    abort ();

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us. */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      /* NOTE(review): for the signed case this computes value + const_add
	 in signed arithmetic, which is itself undefined behavior when
	 value is HOST_WIDE_INT_MAX and const_add is 1 -- consider doing
	 the overflow check entirely in unsigned arithmetic.  */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result. */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands. */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
}
1042
1043
1044/* Generate the code to compare two float values. The return value is
1045 the comparison expression. */
1046
static rtx
gen_float_relational (test_code, cmp0, cmp1)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
{
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
  rtx brtmp;
  int reverse_regs, invert;

  /* Only EQ, LT, and LE comparisons exist in hardware: NE is EQ with
     the result inverted, and GT/GE are LT/LE with the operands
     swapped.  */
  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  /* Compare into the floating-point condition-code register, then
     return a test of that register against zero.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}
1082
1083
/* Expand a conditional branch.  OPERANDS[0] is the target label;
   TEST_CODE is the comparison code.  The values being compared were
   cached in branch_cmp[] and branch_type by the preceding compare.  */

void
xtensa_expand_conditional_branch (operands, test_code)
     rtx *operands;
     enum rtx_code test_code;
{
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (type)
    {
    case CMP_DF:
    default:
      /* No hardware support for DFmode comparisons.  */
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));

    case CMP_SI:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case CMP_SF:
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch. */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  label2 = pc_rtx;

  /* An inverted test branches to the fall-through instead.  */
  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
						     label1,
						     label2)));
}
1131
1132
/* Convert the comparison CMP, together with the operands cached in
   branch_cmp[] and branch_type, into a comparison rtx usable by the
   conditional move patterns.  Return 0 if it cannot be handled.  */

static rtx
gen_conditional_move (cmp)
     rtx cmp;
{
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];

  if (branch_type == CMP_SI)
    {
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa. We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them. */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* if not comparing against zero, emit a comparison (subtract) */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      /* Swapping the operands reverses the sense of the
		 comparison.  */
	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: abort ();
		}
	    }

	  /* LT/GE can only be tested against zero.  */
	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);

  return 0;
}
1205
1206
/* Expand a conditional move.  OPERANDS[0] is the destination,
   OPERANDS[1] the comparison, OPERANDS[2]/OPERANDS[3] the two source
   values; ISFLT is nonzero for a floating-point destination.
   Return 1 on success, 0 if the comparison cannot be handled.  */

int
xtensa_expand_conditional_move (operands, isflt)
     rtx *operands;
     int isflt;
{
  rtx cmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (operands[1])))
    return 0;

  /* Pick the pattern: internal0 for integer (CMP_SI) comparisons,
     internal1 otherwise.  */
  if (isflt)
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsfcc_internal0
	      : gen_movsfcc_internal1);
  else
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsicc_internal0
	      : gen_movsicc_internal1);

  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
		     operands[2], operands[3], cmp));
  return 1;
}
1231
1232
/* Expand a "set on condition code" operation: set OPERANDS[0] to the
   "true" value (const_true_rtx) or zero depending on the comparison
   OPERANDS[1], implemented via a conditional move.  Return 1 on
   success, 0 if the comparison cannot be handled.  */

int
xtensa_expand_scc (operands)
     rtx *operands;
{
  rtx dest = operands[0];
  rtx cmp = operands[1];
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (cmp)))
    return 0;

  /* Materialize the two possible result values in registers.  */
  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));

  gen_fn = (branch_type == CMP_SI
	    ? gen_movsicc_internal0
	    : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
  return 1;
}
1256
1257
1258/* Emit insns to move operands[1] into operands[0].
1259
1260 Return 1 if we have written out everything that needs to be done to
1261 do the move. Otherwise, return 0 and the caller will emit the move
1262 normally. */
1263
int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Constants that cannot be loaded with a single MOVI (simm12b) must
     come from the constant pool.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      /* Force the source into a register when the combination of
	 operands is not directly valid (see xtensa_valid_move).  */
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);

      /* Special-case copying an incoming argument out of a7; see
	 xtensa_copy_incoming_a7.  */
      if (xtensa_copy_incoming_a7 (operands, mode))
	return 1;
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload. So we remove the
     subreg and adjust mem accordingly. */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1297
1298static rtx
1299fixup_subreg_mem (x)
1300 rtx x;
1301{
1302 if (GET_CODE (x) == SUBREG
1303 && GET_CODE (SUBREG_REG (x)) == REG
1304 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1305 {
1306 rtx temp =
1307 gen_rtx_SUBREG (GET_MODE (x),
1308 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1309 SUBREG_BYTE (x));
1310 x = alter_subreg (&temp);
1311 }
1312 return x;
1313}
1314
1315
78d6a4ed 1316/* Check if this move is copying an incoming argument in a7. If so,
1317 emit the move, followed by the special "set_frame_ptr"
1318 unspec_volatile insn, at the very beginning of the function. This
1319 is necessary because the register allocator will ignore conflicts
1320 with a7 and may assign some other pseudo to a7. If that pseudo was
1321 assigned prior to this move, it would clobber the incoming argument
1322 in a7. By copying the argument out of a7 as the very first thing,
1323 and then immediately following that with an unspec_volatile to keep
1324 the scheduler away, we should avoid any problems. */
1325
bool
xtensa_copy_incoming_a7 (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Only intercept the *first* copy out of a7; see the note below.  */
  if (a7_overlap_mentioned_p (operands[1])
      && !cfun->machine->incoming_a7_copied)
    {
      rtx mov;
      /* Select the move pattern matching the operand mode.  */
      switch (mode)
	{
	case DFmode:
	  mov = gen_movdf_internal (operands[0], operands[1]);
	  break;
	case SFmode:
	  mov = gen_movsf_internal (operands[0], operands[1]);
	  break;
	case DImode:
	  mov = gen_movdi_internal (operands[0], operands[1]);
	  break;
	case SImode:
	  mov = gen_movsi_internal (operands[0], operands[1]);
	  break;
	case HImode:
	  mov = gen_movhi_internal (operands[0], operands[1]);
	  break;
	case QImode:
	  mov = gen_movqi_internal (operands[0], operands[1]);
	  break;
	default:
	  abort ();
	}

      /* Insert the instructions before any other argument copies.
	 (The set_frame_ptr insn comes _after_ the move, so push it
	 out first.) */
      push_topmost_sequence ();
      emit_insn_after (gen_set_frame_ptr (), get_insns ());
      emit_insn_after (mov, get_insns ());
      pop_topmost_sequence ();

      /* Ideally the incoming argument in a7 would only be copied
	 once, since propagating a7 into the body of a function
	 will almost certainly lead to errors.  However, there is
	 at least one harmless case (in GCSE) where the original
	 copy from a7 is changed to copy into a new pseudo.  Thus,
	 we use a flag to only do this special treatment for the
	 first copy of a7.  */

      cfun->machine->incoming_a7_copied = true;

      return 1;
    }

  return 0;
}
1382
1383
f6b7ba2b 1384/* Try to expand a block move operation to an RTL block move instruction.
1385 If not optimizing or if the block size is not a constant or if the
1386 block is small, the expansion fails and GCC falls back to calling
1387 memcpy().
1388
1389 operands[0] is the destination
1390 operands[1] is the source
1391 operands[2] is the length
1392 operands[3] is the alignment */
1393
1394int
1395xtensa_expand_block_move (operands)
1396 rtx *operands;
1397{
1398 rtx dest = operands[0];
1399 rtx src = operands[1];
1400 int bytes = INTVAL (operands[2]);
1401 int align = XINT (operands[3], 0);
1402 int num_pieces, move_ratio;
1403
1404 /* If this is not a fixed size move, just call memcpy */
1405 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1406 return 0;
1407
1408 /* Anything to move? */
1409 if (bytes <= 0)
1410 return 1;
1411
1412 if (align > MOVE_MAX)
1413 align = MOVE_MAX;
1414
1415 /* decide whether to expand inline based on the optimization level */
1416 move_ratio = 4;
1417 if (optimize > 2)
1418 move_ratio = LARGEST_MOVE_RATIO;
1419 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1420 if (num_pieces >= move_ratio)
1421 return 0;
1422
160b2123 1423 /* make sure the memory addresses are valid */
9c56a8c5 1424 operands[0] = validize_mem (dest);
1425 operands[1] = validize_mem (src);
f6b7ba2b 1426
1427 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1428 operands[2], operands[3]));
1429 return 1;
1430}
1431
1432
1433/* Emit a sequence of instructions to implement a block move, trying
1434 to hide load delay slots as much as possible. Load N values into
1435 temporary registers, store those N values, and repeat until the
1436 complete block has been moved. N=delay_slots+1 */
1437
/* One queued load or store: the assembler template string plus its
   two operands (temporary register and memory reference).  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1442
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;
     rtx *tmpregs;
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  /* Each transfer item is at most one aligned word; each chunk holds
     delay_slots+1 items so the loads can hide their delay slots.  */
  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      /* Queue up one chunk of load/store pairs.  */
      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1533
1534
/* Return the widest integer mode that is no larger than ITEM_SIZE
   bytes and for which load and store opcodes are available (see the
   xtensa_ld_opcodes/xtensa_st_opcodes tables set up in
   override_options).  Aborts if no integer mode fits at all.  */

static enum machine_mode
xtensa_find_mode_for_size (item_size)
     unsigned item_size;
{
  enum machine_mode mode, tmode;

  while (1)
    {
      mode = VOIDmode;

      /* find mode closest to but not bigger than item_size */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) <= item_size)
	  mode = tmode;
      if (mode == VOIDmode)
	abort ();

      item_size = GET_MODE_SIZE (mode);

      if (xtensa_ld_opcodes[(int) mode]
	  && xtensa_st_opcodes[(int) mode])
	break;

      /* cannot load & store this mode; try something smaller */
      item_size -= 1;
    }

  return mode;
}
1565
1566
1567void
1568xtensa_expand_nonlocal_goto (operands)
1569 rtx *operands;
1570{
1571 rtx goto_handler = operands[1];
1572 rtx containing_fp = operands[3];
1573
1574 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1575 is too big to generate in-line */
1576
1577 if (GET_CODE (containing_fp) != REG)
1578 containing_fp = force_reg (Pmode, containing_fp);
1579
1580 goto_handler = replace_rtx (copy_rtx (goto_handler),
1581 virtual_stack_vars_rtx,
1582 containing_fp);
1583
1584 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1585 0, VOIDmode, 2,
1586 containing_fp, Pmode,
1587 goto_handler, Pmode);
1588}
1589
1590
1f3233d1 1591static struct machine_function *
1592xtensa_init_machine_status ()
f6b7ba2b 1593{
1f3233d1 1594 return ggc_alloc_cleared (sizeof (struct machine_function));
f6b7ba2b 1595}
1596
1597
/* Called when __builtin_frame_address or __builtin_return_address is
   used with a nonzero count: force a frame pointer and make the
   register windows of all caller frames visible in memory.  */

void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
  cfun->machine->accesses_prev_frame = 1;

  /* Spill all live register windows to the stack (libgcc helper) so
     parent frames can be inspected.  */
  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1608
1609
/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
   a comment showing where the end of the loop is.  However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there.  If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop).  If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop.  When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need to
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken. */
1622
1623void
1624xtensa_emit_loop_end (insn, operands)
1625 rtx insn;
1626 rtx *operands;
1627{
1628 char done = 0;
1629
1630 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1631 {
1632 switch (GET_CODE (insn))
1633 {
1634 case NOTE:
1635 case BARRIER:
1636 break;
1637
1638 case CODE_LABEL:
1639 output_asm_insn ("nop.n", operands);
1640 done = 1;
1641 break;
1642
1643 default:
1644 {
1645 rtx body = PATTERN (insn);
1646
1647 if (GET_CODE (body) == JUMP_INSN)
1648 {
1649 output_asm_insn ("nop.n", operands);
1650 done = 1;
1651 }
1652 else if ((GET_CODE (body) != USE)
1653 && (GET_CODE (body) != CLOBBER))
1654 done = 1;
1655 }
1656 break;
1657 }
1658 }
1659
1660 output_asm_insn ("# loop end for %0", operands);
1661}
1662
1663
1664char *
1665xtensa_emit_call (callop, operands)
1666 int callop;
1667 rtx *operands;
1668{
bbfbe351 1669 static char result[64];
f6b7ba2b 1670 rtx tgt = operands[callop];
1671
1672 if (GET_CODE (tgt) == CONST_INT)
a6169f06 1673 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
f6b7ba2b 1674 else if (register_operand (tgt, VOIDmode))
1675 sprintf (result, "callx8\t%%%d", callop);
1676 else
1677 sprintf (result, "call8\t%%%d", callop);
1678
1679 return result;
1680}
1681
1682
1683/* Return the stabs register number to use for 'regno'. */
1684
1685int
1686xtensa_dbx_register_number (regno)
1687 int regno;
1688{
1689 int first = -1;
1690
1691 if (GP_REG_P (regno)) {
1692 regno -= GP_REG_FIRST;
1693 first = 0;
1694 }
1695 else if (BR_REG_P (regno)) {
1696 regno -= BR_REG_FIRST;
1697 first = 16;
1698 }
1699 else if (FP_REG_P (regno)) {
1700 regno -= FP_REG_FIRST;
1701 /* The current numbering convention is that TIE registers are
1702 numbered in libcc order beginning with 256. We can't guarantee
1703 that the FP registers will come first, so the following is just
1704 a guess. It seems like we should make a special case for FP
1705 registers and give them fixed numbers < 256. */
1706 first = 256;
1707 }
1708 else if (ACC_REG_P (regno))
1709 {
1710 first = 0;
1711 regno = -1;
1712 }
1713
1714 /* When optimizing, we sometimes get asked about pseudo-registers
1715 that don't represent hard registers. Return 0 for these. */
1716 if (first == -1)
1717 return 0;
1718
1719 return first + regno;
1720}
1721
1722
1723/* Argument support functions. */
1724
1725/* Initialize CUMULATIVE_ARGS for a function. */
1726
/* Initialize CUMULATIVE_ARGS for a function: argument scanning starts
   at word index zero.  FNTYPE and LIBNAME are unused on this port.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  cum->arg_words = 0;
}
1735
1736/* Advance the argument to the next argument position. */
1737
1738void
1739function_arg_advance (cum, mode, type)
1740 CUMULATIVE_ARGS *cum; /* current arg information */
1741 enum machine_mode mode; /* current arg mode */
1742 tree type; /* type of the argument or 0 if lib support */
1743{
1744 int words, max;
1745 int *arg_words;
1746
1747 arg_words = &cum->arg_words;
1748 max = MAX_ARGS_IN_REGISTERS;
1749
1750 words = (((mode != BLKmode)
1751 ? (int) GET_MODE_SIZE (mode)
1752 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1753
1754 if ((*arg_words + words > max) && (*arg_words < max))
1755 *arg_words = max;
1756
1757 *arg_words += words;
1758}
1759
1760
1761/* Return an RTL expression containing the register for the given mode,
1762 or 0 if the argument is to be passed on the stack. */
1763
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Size of the argument in words, rounded up.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Arguments with more than word alignment start at an even word.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* No room left in registers: passed on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL. */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  /* gen_raw_REG gives a REG distinct from hard_frame_pointer_rtx even
     when regno is A7_REG.  */
  return gen_raw_REG (result_mode, regno);
}
1819
1820
/* Validate option combinations and initialize per-target tables:
   block-move opcode names, constraint-letter register classes, the
   hard-regno/mode table, the machine_function allocator, and the PIC
   default.  Called once after option parsing.  */

void
override_options ()
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; letters for optional
     features map to NO_REGS when the feature is disabled.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
    flag_pic = 1;
}
1883
1884
1885/* A C compound statement to output to stdio stream STREAM the
1886 assembler syntax for an instruction operand X. X is an RTL
1887 expression.
1888
1889 CODE is a value that can be used to specify one of several ways
1890 of printing the operand. It is used when identical operands
1891 must be printed differently depending on the context. CODE
1892 comes from the '%' specification that was used to request
1893 printing of the operand. If the specification was just '%DIGIT'
1894 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1895 is the ASCII code for LTR.
1896
1897 If X is a register, this macro should print the register's name.
1898 The names can be found in an array 'reg_names' whose type is
1899 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1900
1901 When the machine description has a specification '%PUNCT' (a '%'
1902 followed by a punctuation character), this macro is called with
1903 a null pointer for X and the punctuation character for CODE.
1904
1905 'a', 'c', 'l', and 'n' are reserved.
1906
1907 The Xtensa specific codes are:
1908
1909 'd' CONST_INT, print as signed decimal
1910 'x' CONST_INT, print as signed hexadecimal
1911 'K' CONST_INT, print number of bits in mask for EXTUI
1912 'R' CONST_INT, print (X & 0x1f)
1913 'L' CONST_INT, print ((32 - X) & 0x1f)
1914 'D' REG, print second register of double-word register operand
1915 'N' MEM, print address of next word following a memory operand
1916 'v' MEM, if memory reference is volatile, output a MEMW before it
1917*/
1918
/* Print VAL to FILE in a human-friendly form: small magnitudes
   (|val| < 10) in decimal, everything else in hexadecimal with an
   explicit leading minus for negative values.  The negation for the
   negative case is done in unsigned arithmetic so that INT_MIN does
   not trigger signed-overflow undefined behavior.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1932
1933
void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;		/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D' selects the second register of a double-word operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /* For a volatile memory reference, emit a MEMW before the
	 load or store.  */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	{
	  /* 'N': address of the word following this memory operand.  */
	  enum machine_mode mode;
	  switch (GET_MODE (op))
	    {
	    case DFmode: mode = SFmode; break;
	    case DImode: mode = SImode; break;
	    default: abort ();
	    }
	  op = adjust_address (op, mode, 4);
	}

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  {
	    /* 'K': number of set low-order bits; the value must be a
	       contiguous low-order mask of 1-16 bits (for EXTUI).  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  /* 'L': (32 - X) mod 32, for right-shift-by-complement.  */
	  fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  /* 'R': X mod 32 (shift amounts).  */
	  fprintf (file, "%ld", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  fprintf (file, "%ld", INTVAL (op));
	  break;

	}
      break;

    default:
      output_addr_const (file, op);
    }
}
2025
2026
2027/* A C compound statement to output to stdio stream STREAM the
2028 assembler syntax for an instruction operand that is a memory
7811991d 2029 reference whose address is ADDR. ADDR is an RTL expression. */
f6b7ba2b 2030
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      /* Bare register: print as "reg, 0" (base + zero offset).  */
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
	/* Base register plus constant offset, in either order.  */
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      output_addr_const (file, addr);
      break;
    }
}
2087
2088
f6b7ba2b 2089void
2090xtensa_output_literal (file, x, mode, labelno)
2091 FILE *file;
2092 rtx x;
2093 enum machine_mode mode;
2094 int labelno;
2095{
2096 long value_long[2];
badfe841 2097 REAL_VALUE_TYPE r;
f6b7ba2b 2098 int size;
2099
2100 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2101
2102 switch (GET_MODE_CLASS (mode))
2103 {
2104 case MODE_FLOAT:
2105 if (GET_CODE (x) != CONST_DOUBLE)
2106 abort ();
2107
badfe841 2108 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
f6b7ba2b 2109 switch (mode)
2110 {
2111 case SFmode:
badfe841 2112 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2113 fprintf (file, "0x%08lx\n", value_long[0]);
f6b7ba2b 2114 break;
2115
2116 case DFmode:
badfe841 2117 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2118 fprintf (file, "0x%08lx, 0x%08lx\n",
2119 value_long[0], value_long[1]);
f6b7ba2b 2120 break;
2121
2122 default:
2123 abort ();
2124 }
2125
2126 break;
2127
2128 case MODE_INT:
2129 case MODE_PARTIAL_INT:
2130 size = GET_MODE_SIZE (mode);
2131 if (size == 4)
2132 {
2133 output_addr_const (file, x);
2134 fputs ("\n", file);
2135 }
2136 else if (size == 8)
2137 {
2138 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2139 fputs (", ", file);
2140 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2141 fputs ("\n", file);
2142 }
2143 else
2144 abort ();
2145 break;
2146
2147 default:
2148 abort ();
2149 }
2150}
2151
2152
2153/* Return the bytes needed to compute the frame pointer from the current
2154 stack pointer. */
2155
2156#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2157#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2158
/* Compute the total frame size for SIZE bytes of local variables and
   cache it in xtensa_current_frame_size.  The frame also includes the
   outgoing argument area, the register-window save area, and (when
   needed) a slot for the static chain, rounded up to the stack
   alignment.  */

long
compute_frame_size (size)
     int size;	/* # of var. bytes allocated */
{
  /* add space for the incoming static chain value */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2173
2174
2175int
2176xtensa_frame_pointer_required ()
2177{
2178 /* The code to expand builtin_frame_addr and builtin_return_addr
2179 currently uses the hard_frame_pointer instead of frame_pointer.
2180 This seems wrong but maybe it's necessary for other architectures.
2181 This function is derived from the i386 code. */
2182
2183 if (cfun->machine->accesses_prev_frame)
2184 return 1;
2185
2186 return 0;
2187}
2188
2189
/* Machine-dependent reorg pass: materialize a constant-pool entry for
   large frame sizes and make sure the hard frame pointer is set up
   (and not referenced before it is valid) when one is needed.  */

void
xtensa_reorg (first)
     rtx first;
{
  rtx insn, set_frame_ptr_insn = 0;

  /* Frames needing more than the 15-bit "entry" immediate (1 << 15,
     checked as 12+3 for the 8-byte-scaled 12-bit field) get the size
     from the constant pool instead; see xtensa_function_prologue.  */
  unsigned long tsize = compute_frame_size (get_frame_size ());
  if (tsize < (1 << (12+3)))
    frame_size_const = 0;
  else
    {
      frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;

      /* make sure the constant is used so it doesn't get eliminated
	 from the constant pool */
      emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
    }

  if (!frame_pointer_needed)
    return;

  /* Search all instructions, looking for the insn that sets up the
     frame pointer.  This search will fail if the function does not
     have an incoming argument in $a7, but in that case, we can just
     set up the frame pointer at the very beginning of the
     function. */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (!INSN_P (insn))
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == SET
	  && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
	  && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
	{
	  set_frame_ptr_insn = insn;
	  break;
	}
    }

  if (set_frame_ptr_insn)
    {
      /* for all instructions prior to set_frame_ptr_insn, replace
	 hard_frame_pointer references with stack_pointer */
      for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
					  hard_frame_pointer_rtx,
					  stack_pointer_rtx);
	}
    }
  else
    {
      /* emit the frame pointer move immediately after the NOTE that starts
	 the function */
      emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
				  stack_pointer_rtx), first);
    }
}
2254
2255
2256/* Set up the stack and frame (if desired) for the function. */
2257
/* Set up the stack and frame (if desired) for the function. */

void
xtensa_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  /* Emit the .frame debug directive naming the frame base register.  */
  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  /* Small frames fit in the "entry" instruction's immediate field;
     larger ones allocate 16 bytes and then adjust sp explicitly using
     the constant-pool size set up by xtensa_reorg.  */
  if (tsize < (1 << (12+3)))
    {
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2286
2287
2288/* Do any necessary cleanup after a function to restore
2289 stack, frame, and regs. */
2290
/* Do any necessary cleanup after a function to restore
   stack, frame, and regs.  Emits the window-return instruction unless
   the function ends in a barrier (i.e., control cannot fall off the
   end), and resets the cached frame size.  */

void
xtensa_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rtx insn = get_last_insn ();
  /* If the last insn was a BARRIER, we don't have to write anything. */
  if (GET_CODE (insn) == NOTE)
    insn = prev_nonnote_insn (insn);
  if (insn == 0 || GET_CODE (insn) != BARRIER)
    fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");

  xtensa_current_frame_size = 0;
}
2305
2306
43326cf7 2307rtx
2308xtensa_return_addr (count, frame)
2309 int count;
2310 rtx frame;
2311{
2312 rtx result, retaddr;
2313
2314 if (count == -1)
2315 retaddr = gen_rtx_REG (Pmode, 0);
2316 else
2317 {
2318 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2319 addr = memory_address (Pmode, addr);
2320 retaddr = gen_reg_rtx (Pmode);
2321 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2322 }
2323
2324 /* The 2 most-significant bits of the return address on Xtensa hold
2325 the register window size. To get the real return address, these
2326 bits must be replaced with the high bits from the current PC. */
2327
2328 result = gen_reg_rtx (Pmode);
2329 emit_insn (gen_fix_return_addr (result, retaddr));
2330 return result;
2331}
2332
2333
f6b7ba2b 2334/* Create the va_list data type.
2335 This structure is set up by __builtin_saveregs. The __va_reg
2336 field points to a stack-allocated region holding the contents of the
2337 incoming argument registers. The __va_ndx field is an index initialized
2338 to the position of the first unnamed (variable) argument. This same index
2339 is also used to address the arguments passed in memory. Thus, the
2340 __va_stk field is initialized to point to the position of the first
2341 argument in memory offset to account for the arguments passed in
2342 registers. E.G., if there are 6 argument registers, and each register is
2343 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2344 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2345 argument word N for N >= 6. */
2346
/* Build the va_list record type: { void *__va_stk; void *__va_reg;
   int __va_ndx; }.  See the comment above for the field semantics.  */

tree
xtensa_build_va_list ()
{
  tree f_stk, f_reg, f_ndx, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
		      ptr_type_node);
  f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
		      ptr_type_node);
  f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
		      integer_type_node);

  DECL_FIELD_CONTEXT (f_stk) = record;
  DECL_FIELD_CONTEXT (f_reg) = record;
  DECL_FIELD_CONTEXT (f_ndx) = record;

  /* Chain the fields into the record and attach the type name.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_stk;
  TREE_CHAIN (f_stk) = f_reg;
  TREE_CHAIN (f_reg) = f_ndx;

  layout_type (record);
  return record;
}
2375
2376
2377/* Save the incoming argument registers on the stack. Returns the
2378 address of the saved registers. */
2379
/* Save the incoming argument registers on the stack.  Returns the
   address of the saved registers. */

rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  /* All argument registers already consumed by named arguments.  */
  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  set_mem_alias_set (gp_regs, get_varargs_alias_set ());

  /* Now store the incoming registers.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7). */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  return XEXP (gp_regs, 0);
}
2413
2414
/* Implement `va_start' for varargs and stdarg.  We look at the
   current function to fill in an initial va_list.  */

void
xtensa_va_start (valist, nextarg)
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  /* Number of argument words used by named parameters.  */
  arg_words = current_function_args_info.arg_words;

  /* The three va_list fields in declaration order (see
     xtensa_build_va_list): __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
                   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member: the byte index of the first unnamed
     argument.  The same index addresses both the register save area
     and (offset via __va_stk) the overflow area on the stack.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2460
2461
/* Implement `va_arg'.  Emits RTL that computes the address of the
   next variable argument of TYPE from the va_list VALIST, advancing
   __va_ndx past the argument.  Returns a register holding that
   address.  */

rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* The three va_list fields in declaration order (see
     xtensa_build_va_list): __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
                     fold (build (TRUNC_DIV_EXPR, sizetype,
                                  fold (build (PLUS_EXPR, sizetype,
                                               type_size,
                                               size_int (UNITS_PER_WORD - 1))),
                                  size_int (UNITS_PER_WORD))),
                     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
                   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
                   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
               make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
         && !MUST_PASS_IN_STACK (type))
        __array = (AP).__va_reg;
  */

  array = gen_reg_rtx (Pmode);

  /* lab_over stays NULL_RTX when the register branch is not emitted,
     so the label is only defined if something jumps to it.  */
  lab_over = NULL_RTX;
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
                                            EXPAND_NORMAL),
                               GEN_INT (MAX_ARGS_IN_REGISTERS
                                        * UNITS_PER_WORD),
                               GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
        emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
         if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
             (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
         __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
                           GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
                           GE, const1_rtx, SImode, 0, lab_false2);

  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
               build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
                                ? sizeof (TYPE)
                                : __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
                                            EXPAND_NORMAL),
                               GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
                               GE, const1_rtx, SImode, 0, lab_use_va_size);

      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
        emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
                     make_tree (ptr_type_node, array),
                     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
                     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2635
2636
fc12fa10 2637enum reg_class
a8332086 2638xtensa_preferred_reload_class (x, class, isoutput)
fc12fa10 2639 rtx x;
2640 enum reg_class class;
a8332086 2641 int isoutput;
fc12fa10 2642{
a8332086 2643 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
fc12fa10 2644 return NO_REGS;
2645
a8332086 2646 /* Don't use the stack pointer or hard frame pointer for reloads!
2647 The hard frame pointer would normally be OK except that it may
2648 briefly hold an incoming argument in the prologue, and reload
2649 won't know that it is live because the hard frame pointer is
2650 treated specially. */
2651
2652 if (class == AR_REGS || class == GR_REGS)
2653 return RL_REGS;
fc12fa10 2654
2655 return class;
2656}
2657
2658
f6b7ba2b 2659enum reg_class
2660xtensa_secondary_reload_class (class, mode, x, isoutput)
2661 enum reg_class class;
2662 enum machine_mode mode ATTRIBUTE_UNUSED;
2663 rtx x;
2664 int isoutput;
2665{
2666 int regno;
2667
2668 if (GET_CODE (x) == SIGN_EXTEND)
2669 x = XEXP (x, 0);
2670 regno = xt_true_regnum (x);
2671
2672 if (!isoutput)
2673 {
2674 if (class == FP_REGS && constantpool_mem_p (x))
a8332086 2675 return RL_REGS;
f6b7ba2b 2676 }
2677
2678 if (ACC_REG_P (regno))
a8332086 2679 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
f6b7ba2b 2680 if (class == ACC_REG)
a8332086 2681 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
f6b7ba2b 2682
2683 return NO_REGS;
2684}
2685
2686
2687void
2688order_regs_for_local_alloc ()
2689{
2690 if (!leaf_function_p ())
2691 {
2692 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2693 FIRST_PSEUDO_REGISTER * sizeof (int));
2694 }
2695 else
2696 {
2697 int i, num_arg_regs;
2698 int nxt = 0;
2699
2700 /* use the AR registers in increasing order (skipping a0 and a1)
2701 but save the incoming argument registers for a last resort */
2702 num_arg_regs = current_function_args_info.arg_words;
2703 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2704 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2705 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2706 reg_alloc_order[nxt++] = i + num_arg_regs;
2707 for (i = 0; i < num_arg_regs; i++)
2708 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2709
bef09eef 2710 /* list the coprocessor registers in order */
2711 for (i = 0; i < BR_REG_NUM; i++)
2712 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2713
f6b7ba2b 2714 /* list the FP registers in order for now */
2715 for (i = 0; i < 16; i++)
2716 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2717
2718 /* GCC requires that we list *all* the registers.... */
2719 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2720 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2721 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2722 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2723
f6b7ba2b 2724 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2725 }
2726}
2727
2728
2729/* A customized version of reg_overlap_mentioned_p that only looks for
2730 references to a7 (as opposed to hard_frame_pointer_rtx). */
2731
2732int
2733a7_overlap_mentioned_p (x)
2734 rtx x;
2735{
2736 int i, j;
2737 unsigned int x_regno;
2738 const char *fmt;
2739
2740 if (GET_CODE (x) == REG)
2741 {
2742 x_regno = REGNO (x);
2743 return (x != hard_frame_pointer_rtx
2744 && x_regno < A7_REG + 1
2745 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2746 }
2747
2748 if (GET_CODE (x) == SUBREG
2749 && GET_CODE (SUBREG_REG (x)) == REG
2750 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2751 {
2752 x_regno = subreg_regno (x);
2753 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2754 && x_regno < A7_REG + 1
2755 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2756 }
2757
2758 /* X does not match, so try its subexpressions. */
2759 fmt = GET_RTX_FORMAT (GET_CODE (x));
2760 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2761 {
2762 if (fmt[i] == 'e')
2763 {
2764 if (a7_overlap_mentioned_p (XEXP (x, i)))
2765 return 1;
2766 }
2767 else if (fmt[i] == 'E')
2768 {
2769 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2770 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2771 return 1;
2772 }
2773 }
2774
2775 return 0;
2776}
bbfbe351 2777
5f4442bc 2778
2779/* Some Xtensa targets support multiple bss sections. If the section
2780 name ends with ".bss", add SECTION_BSS to the flags. */
2781
2782static unsigned int
2783xtensa_multibss_section_type_flags (decl, name, reloc)
2784 tree decl;
2785 const char *name;
2786 int reloc;
2787{
2788 unsigned int flags = default_section_type_flags (decl, name, reloc);
2789 const char *suffix;
2790
2791 suffix = strrchr (name, '.');
2792 if (suffix && strcmp (suffix, ".bss") == 0)
2793 {
2794 if (!decl || (TREE_CODE (decl) == VAR_DECL
2795 && DECL_INITIAL (decl) == NULL_TREE))
2796 flags |= SECTION_BSS; /* @nobits */
2797 else
2798 warning ("only uninitialized variables can be placed in a "
2799 ".bss section");
2800 }
2801
2802 return flags;
2803}
2804
2805
/* The literal pool stays with the function.  */

static void
xtensa_select_rtx_section (mode, x, align)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx x ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  /* Emit constant-pool entries into the current function's section
     rather than a shared read-only data section, so each literal
     pool stays close to the code that references it.  */
  function_section (current_function_decl);
}
7811991d 2816
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE is the rtx code of X; OUTER_CODE is the code of the
   containing expression, used to cost constants by context.  */

static bool
xtensa_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code;
     int *total;
{
  switch (code)
    {
    case CONST_INT:
      /* An immediate is free when it fits the operand field of the
         instruction it appears in, so dispatch on the outer code.  */
      switch (outer_code)
	{
	case SET:
	  if (xtensa_simm12b (INTVAL (x)))
	    {
	      *total = 4;
	      return true;
	    }
	  break;
	case PLUS:
	  if (xtensa_simm8 (INTVAL (x))
	      || xtensa_simm8x256 (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case AND:
	  if (xtensa_mask_immediate (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case COMPARE:
	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATE:
	case ROTATERT:
	  /* no way to tell if X is the 2nd operand so be conservative */
	default: break;
	}
      /* Constant doesn't fit an immediate field: it must be loaded,
         either with a MOVI (simm12b) or from the constant pool.  */
      if (xtensa_simm12b (INTVAL (x)))
	*total = 5;
      else
	*total = 6;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 5;
      return true;

    case CONST_DOUBLE:
      *total = 7;
      return true;

    case MEM:
      {
	/* Multi-word accesses cost one load/store per word; double
	   that if the address itself needs to be reloaded.  */
	int num_words =
	  (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;

	if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
	  *total = COSTS_N_INSNS (num_words);
	else
	  *total = COSTS_N_INSNS (2*num_words);
	return true;
      }

    case FFS:
      /* Cheap only when the NSA (normalization shift amount) option
         provides a hardware count-leading-zeros instruction.  */
      *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
      return true;

    case NOT:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
      return true;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode shifts are done by a library call, hence the large
         cost.  */
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (50);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ABS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (4);
	return true;
      }

    case PLUS:
    case MINUS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (1);
	return true;
      }

    case NEG:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
      return true;

    case MULT:
      {
	/* Integer multiply cost depends on which of the optional
	   multiplier units is configured.  */
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_MUL32)
	  *total = COSTS_N_INSNS (4);
	else if (TARGET_MAC16)
	  *total = COSTS_N_INSNS (16);
	else if (TARGET_MUL16)
	  *total = COSTS_N_INSNS (12);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case DIV:
    case MOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  {
	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
	    return true;
	  }
	else if (xmode == DFmode)
	  {
	    *total = COSTS_N_INSNS (50);
	    return true;
	  }
      }
      /* fall through */

    case UDIV:
    case UMOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_DIV32)
	  *total = COSTS_N_INSNS (32);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
      else
	*total = COSTS_N_INSNS (50);
      return true;

    case SMIN:
    case UMIN:
    case SMAX:
    case UMAX:
      *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
      return true;

    case SIGN_EXTRACT:
    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
      return true;

    case ZERO_EXTRACT:
    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      /* Not handled here; let the caller scan subexpressions.  */
      return false;
    }
}
3028
1f3233d1 3029#include "gt-xtensa.h"