f6b7ba2b 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "regs.h"
26#include "machmode.h"
27#include "hard-reg-set.h"
28#include "basic-block.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
43#include "toplev.h"
44#include "optabs.h"
bbfbe351 45#include "output.h"
f6b7ba2b 46#include "libfuncs.h"
160b2123 47#include "ggc.h"
f6b7ba2b 48#include "target.h"
49#include "target-def.h"
50
51/* Enumeration for all of the relational tests, so that we can build
52 arrays indexed by the test type, and not worry about the order
53 of EQ, NE, etc. */
54
55enum internal_test {
56 ITEST_EQ,
57 ITEST_NE,
58 ITEST_GT,
59 ITEST_GE,
60 ITEST_LT,
61 ITEST_LE,
62 ITEST_GTU,
63 ITEST_GEU,
64 ITEST_LTU,
65 ITEST_LEU,
66 ITEST_MAX
67 };
68
69/* Cached operands, and operator to compare for use in set/branch on
70 condition codes. */
71rtx branch_cmp[2];
72
73/* what type of branch to use */
74enum cmp_type branch_type;
75
76/* Array giving truth value on whether or not a given hard register
77 can support a given mode. */
78char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
79
80/* Current frame size calculated by compute_frame_size. */
81unsigned xtensa_current_frame_size;
82
83/* Tables of ld/st opcode names for block moves */
84const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
85const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
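/* Largest number of pieces an inline block move will ever be split into;
   see xtensa_expand_block_move() and the BLKmode worst case handled in
   xtensa_mem_offset(). */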
86#define LARGEST_MOVE_RATIO 15
87
88/* Define the structure for the machine field in struct function. */
1f3233d1 89struct machine_function GTY(())
f6b7ba2b 90{
91 int accesses_prev_frame;
92};
93
94/* Vector, indexed by hard register number, which contains 1 for a
95 register that is allowable in a candidate for leaf function
96 treatment. */
97
98const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
99{
100 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
101 1, 1, 1,
102 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
103 1
104};
105
106/* Map hard register number to register class */
107const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
108{
109 GR_REGS, SP_REG, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
113 AR_REGS, AR_REGS, BR_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 ACC_REG,
119};
120
121/* Map register constraint character to register class. */
122enum reg_class xtensa_char_to_class[256] =
123{
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188};
189
bbfbe351 190static int b4const_or_zero PARAMS ((int));
191static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
192static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
193static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
194static rtx gen_conditional_move PARAMS ((rtx));
195static rtx fixup_subreg_mem PARAMS ((rtx x));
196static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
160b2123 197static struct machine_function * xtensa_init_machine_status PARAMS ((void));
bbfbe351 198static void printx PARAMS ((FILE *, signed int));
199static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
200 unsigned HOST_WIDE_INT));
7811991d 201static void xtensa_encode_section_info PARAMS ((tree, int));
bbfbe351 202
203static rtx frame_size_const;
204static int current_function_arg_words;
205static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
206 REG_ALLOC_ORDER;
207\f
f6b7ba2b 208/* This macro generates the assembly code for function entry.
209 FILE is a stdio stream to output the code to.
210 SIZE is an int: how many units of temporary storage to allocate.
211 Refer to the array 'regs_ever_live' to determine which registers
212 to save; 'regs_ever_live[I]' is nonzero if register number I
213 is ever used in the function. This macro is responsible for
214 knowing which registers should not be saved even if used. */
215
216#undef TARGET_ASM_FUNCTION_PROLOGUE
217#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
218
219/* This macro generates the assembly code for function exit,
220 on machines that need it. If FUNCTION_EPILOGUE is not defined
221 then individual return instructions are generated for each
222 return statement. Args are same as for FUNCTION_PROLOGUE. */
223
224#undef TARGET_ASM_FUNCTION_EPILOGUE
225#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
226
227/* These hooks specify assembly directives for creating certain kinds
228 of integer object. */
229
230#undef TARGET_ASM_ALIGNED_SI_OP
231#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
232
bbfbe351 233#undef TARGET_ASM_SELECT_RTX_SECTION
234#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
7811991d 235#undef TARGET_ENCODE_SECTION_INFO
236#define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
f6b7ba2b 237
bbfbe351 238struct gcc_target targetm = TARGET_INITIALIZER;
239\f
f6b7ba2b 240
241/*
242 * Functions to test Xtensa immediate operand validity.
243 */
244
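/* Each predicate below mirrors the immediate field of a particular Xtensa
   instruction encoding. For example, xtensa_b4constu() enumerates exactly
   the values accepted by the unsigned branch-immediate forms (see
   ubranch_operand() below), so a constant such as 9 or 33 must instead
   live in a register before it can be used in such a branch. */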
245int
246xtensa_b4constu (v)
247 int v;
248{
249 switch (v)
250 {
251 case 32768:
252 case 65536:
253 case 2:
254 case 3:
255 case 4:
256 case 5:
257 case 6:
258 case 7:
259 case 8:
260 case 10:
261 case 12:
262 case 16:
263 case 32:
264 case 64:
265 case 128:
266 case 256:
267 return 1;
268 }
269 return 0;
270}
271
272int
273xtensa_simm8x256 (v)
274 int v;
275{
276 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
277}
278
279int
280xtensa_ai4const (v)
281 int v;
282{
283 return (v == -1 || (v >= 1 && v <= 15));
284}
285
286int
287xtensa_simm7 (v)
288 int v;
289{
290 return v >= -32 && v <= 95;
291}
292
293int
294xtensa_b4const (v)
295 int v;
296{
297 switch (v)
298 {
299 case -1:
300 case 1:
301 case 2:
302 case 3:
303 case 4:
304 case 5:
305 case 6:
306 case 7:
307 case 8:
308 case 10:
309 case 12:
310 case 16:
311 case 32:
312 case 64:
313 case 128:
314 case 256:
315 return 1;
316 }
317 return 0;
318}
319
320int
321xtensa_simm8 (v)
322 int v;
323{
324 return v >= -128 && v <= 127;
325}
326
327int
328xtensa_tp7 (v)
329 int v;
330{
331 return (v >= 7 && v <= 22);
332}
333
334int
335xtensa_lsi4x4 (v)
336 int v;
337{
338 return (v & 3) == 0 && (v >= 0 && v <= 60);
339}
340
341int
342xtensa_simm12b (v)
343 int v;
344{
345 return v >= -2048 && v <= 2047;
346}
347
348int
349xtensa_uimm8 (v)
350 int v;
351{
352 return v >= 0 && v <= 255;
353}
354
355int
356xtensa_uimm8x2 (v)
357 int v;
358{
359 return (v & 1) == 0 && (v >= 0 && v <= 510);
360}
361
362int
363xtensa_uimm8x4 (v)
364 int v;
365{
366 return (v & 3) == 0 && (v >= 0 && v <= 1020);
367}
368
369
370/* This is just like the standard true_regnum() function except that it
371 works even when reg_renumber is not initialized. */
372
373int
374xt_true_regnum (x)
375 rtx x;
376{
377 if (GET_CODE (x) == REG)
378 {
379 if (reg_renumber
380 && REGNO (x) >= FIRST_PSEUDO_REGISTER
381 && reg_renumber[REGNO (x)] >= 0)
382 return reg_renumber[REGNO (x)];
383 return REGNO (x);
384 }
385 if (GET_CODE (x) == SUBREG)
386 {
387 int base = xt_true_regnum (SUBREG_REG (x));
388 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
389 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
390 GET_MODE (SUBREG_REG (x)),
391 SUBREG_BYTE (x), GET_MODE (x));
392 }
393 return -1;
394}
395
396
397int
398add_operand (op, mode)
399 rtx op;
400 enum machine_mode mode;
401{
402 if (GET_CODE (op) == CONST_INT)
403 return (xtensa_simm8 (INTVAL (op)) ||
404 xtensa_simm8x256 (INTVAL (op)));
405
406 return register_operand (op, mode);
407}
408
409
410int
411arith_operand (op, mode)
412 rtx op;
413 enum machine_mode mode;
414{
415 if (GET_CODE (op) == CONST_INT)
416 return xtensa_simm8 (INTVAL (op));
417
418 return register_operand (op, mode);
419}
420
421
422int
423nonimmed_operand (op, mode)
424 rtx op;
425 enum machine_mode mode;
426{
427 /* We cannot use the standard nonimmediate_operand() predicate because
428 it includes constant pool memory operands. */
429
430 if (memory_operand (op, mode))
431 return !constantpool_address_p (XEXP (op, 0));
432
433 return register_operand (op, mode);
434}
435
436
437int
438mem_operand (op, mode)
439 rtx op;
440 enum machine_mode mode;
441{
442 /* We cannot use the standard memory_operand() predicate because
443 it includes constant pool memory operands. */
444
445 if (memory_operand (op, mode))
446 return !constantpool_address_p (XEXP (op, 0));
447
448 return FALSE;
449}
450
451
452int
fc12fa10 453xtensa_valid_move (mode, operands)
f6b7ba2b 454 enum machine_mode mode;
fc12fa10 455 rtx *operands;
f6b7ba2b 456{
fc12fa10 457 /* Either the destination or source must be a register, and the
458 MAC16 accumulator doesn't count. */
459
460 if (register_operand (operands[0], mode))
461 {
462 int dst_regnum = xt_true_regnum (operands[0]);
463
464 /* The stack pointer can only be assigned with a MOVSP opcode. */
465 if (dst_regnum == STACK_POINTER_REGNUM)
466 return (mode == SImode
467 && register_operand (operands[1], mode)
468 && !ACC_REG_P (xt_true_regnum (operands[1])));
469
470 if (!ACC_REG_P (dst_regnum))
471 return true;
472 }
141e2ef6 473 if (register_operand (operands[1], mode))
fc12fa10 474 {
475 int src_regnum = xt_true_regnum (operands[1]);
476 if (!ACC_REG_P (src_regnum))
477 return true;
478 }
f6b7ba2b 479 return FALSE;
480}
481
482
483int
484mask_operand (op, mode)
485 rtx op;
486 enum machine_mode mode;
487{
488 if (GET_CODE (op) == CONST_INT)
489 return xtensa_mask_immediate (INTVAL (op));
490
491 return register_operand (op, mode);
492}
493
494
495int
496extui_fldsz_operand (op, mode)
497 rtx op;
498 enum machine_mode mode ATTRIBUTE_UNUSED;
499{
500 return ((GET_CODE (op) == CONST_INT)
501 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
502}
503
504
505int
506sext_operand (op, mode)
507 rtx op;
508 enum machine_mode mode;
509{
510 if (TARGET_SEXT)
511 return nonimmed_operand (op, mode);
512 return mem_operand (op, mode);
513}
514
515
516int
517sext_fldsz_operand (op, mode)
518 rtx op;
519 enum machine_mode mode ATTRIBUTE_UNUSED;
520{
521 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
522}
523
524
525int
526lsbitnum_operand (op, mode)
527 rtx op;
528 enum machine_mode mode ATTRIBUTE_UNUSED;
529{
530 if (GET_CODE (op) == CONST_INT)
531 {
532 return (BITS_BIG_ENDIAN
533 ? (INTVAL (op) == BITS_PER_WORD-1)
534 : (INTVAL (op) == 0));
535 }
536 return FALSE;
537}
538
539
540static int
541b4const_or_zero (v)
542 int v;
543{
544 if (v == 0)
545 return TRUE;
546 return xtensa_b4const (v);
547}
548
549
550int
551branch_operand (op, mode)
552 rtx op;
553 enum machine_mode mode;
554{
555 if (GET_CODE (op) == CONST_INT)
556 return b4const_or_zero (INTVAL (op));
557
558 return register_operand (op, mode);
559}
560
561
562int
563ubranch_operand (op, mode)
564 rtx op;
565 enum machine_mode mode;
566{
567 if (GET_CODE (op) == CONST_INT)
568 return xtensa_b4constu (INTVAL (op));
569
570 return register_operand (op, mode);
571}
572
573
574int
575call_insn_operand (op, mode)
576 rtx op;
577 enum machine_mode mode ATTRIBUTE_UNUSED;
578{
579 if ((GET_CODE (op) == REG)
580 && (op != arg_pointer_rtx)
581 && ((REGNO (op) < FRAME_POINTER_REGNUM)
582 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
583 return TRUE;
584
585 if (CONSTANT_ADDRESS_P (op))
586 {
587 /* Direct calls only allowed to static functions with PIC. */
588 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
589 && SYMBOL_REF_FLAG (op)));
590 }
591
592 return FALSE;
593}
594
595
596int
597move_operand (op, mode)
598 rtx op;
599 enum machine_mode mode;
600{
601 if (register_operand (op, mode))
602 return TRUE;
603
604 /* Accept CONSTANT_P_RTX, since it will be eliminated by CSE1 and
605 replaced with 0 or 1. */
606 if (GET_CODE (op) == CONSTANT_P_RTX)
607 return TRUE;
608
609 if (GET_CODE (op) == CONST_INT)
610 return xtensa_simm12b (INTVAL (op));
611
612 if (GET_CODE (op) == MEM)
613 return memory_address_p (mode, XEXP (op, 0));
614
615 return FALSE;
616}
617
618
619int
620smalloffset_mem_p (op)
621 rtx op;
622{
623 if (GET_CODE (op) == MEM)
624 {
625 rtx addr = XEXP (op, 0);
626 if (GET_CODE (addr) == REG)
627 return REG_OK_FOR_BASE_P (addr);
628 if (GET_CODE (addr) == PLUS)
629 {
630 rtx offset = XEXP (addr, 0);
631 if (GET_CODE (offset) != CONST_INT)
632 offset = XEXP (addr, 1);
633 if (GET_CODE (offset) != CONST_INT)
634 return FALSE;
635 return xtensa_lsi4x4 (INTVAL (offset));
636 }
637 }
638 return FALSE;
639}
640
641
642int
643smalloffset_double_mem_p (op)
644 rtx op;
645{
646 if (!smalloffset_mem_p (op))
647 return FALSE;
648 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
649}
650
651
652int
653constantpool_address_p (addr)
654 rtx addr;
655{
656 rtx sym = addr;
657
658 if (GET_CODE (addr) == CONST)
659 {
660 rtx offset;
661
662 /* only handle (PLUS (SYM, OFFSET)) form */
663 addr = XEXP (addr, 0);
664 if (GET_CODE (addr) != PLUS)
665 return FALSE;
666
667 /* make sure the address is word aligned */
668 offset = XEXP (addr, 1);
669 if ((GET_CODE (offset) != CONST_INT)
670 || ((INTVAL (offset) & 3) != 0))
671 return FALSE;
672
673 sym = XEXP (addr, 0);
674 }
675
676 if ((GET_CODE (sym) == SYMBOL_REF)
677 && CONSTANT_POOL_ADDRESS_P (sym))
678 return TRUE;
679 return FALSE;
680}
681
682
683int
684constantpool_mem_p (op)
685 rtx op;
686{
687 if (GET_CODE (op) == MEM)
688 return constantpool_address_p (XEXP (op, 0));
689 return FALSE;
690}
691
692
693int
694non_const_move_operand (op, mode)
695 rtx op;
696 enum machine_mode mode;
697{
698 if (register_operand (op, mode))
699 return 1;
700 if (GET_CODE (op) == SUBREG)
701 op = SUBREG_REG (op);
702 if (GET_CODE (op) == MEM)
703 return memory_address_p (mode, XEXP (op, 0));
704 return FALSE;
705}
706
707
708/* Accept the floating point constant 1 in the appropriate mode. */
709
710int
711const_float_1_operand (op, mode)
712 rtx op;
713 enum machine_mode mode;
714{
715 REAL_VALUE_TYPE d;
716 static REAL_VALUE_TYPE onedf;
717 static REAL_VALUE_TYPE onesf;
718 static int one_initialized;
719
720 if ((GET_CODE (op) != CONST_DOUBLE)
721 || (mode != GET_MODE (op))
722 || (mode != DFmode && mode != SFmode))
723 return FALSE;
724
725 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
726
727 if (! one_initialized)
728 {
729 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
730 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
731 one_initialized = TRUE;
732 }
733
734 if (mode == DFmode)
735 return REAL_VALUES_EQUAL (d, onedf);
736 else
737 return REAL_VALUES_EQUAL (d, onesf);
738}
739
740
741int
742fpmem_offset_operand (op, mode)
743 rtx op;
744 enum machine_mode mode ATTRIBUTE_UNUSED;
745{
746 if (GET_CODE (op) == CONST_INT)
747 return xtensa_mem_offset (INTVAL (op), SFmode);
748 return 0;
749}
750
751
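/* Sign-extend SRC into DST by shifting the value to the top of an SImode
   register and then arithmetic-shifting it back down; e.g., an HImode
   source is shifted left by 16 and then right by 16. */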
752void
753xtensa_extend_reg (dst, src)
754 rtx dst;
755 rtx src;
756{
757 rtx temp = gen_reg_rtx (SImode);
758 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
759
760 /* generate paradoxical subregs as needed so that the modes match */
761 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
762 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
763
764 emit_insn (gen_ashlsi3 (temp, src, shift));
765 emit_insn (gen_ashrsi3 (dst, temp, shift));
766}
767
768
769void
770xtensa_load_constant (dst, src)
771 rtx dst;
772 rtx src;
773{
774 enum machine_mode mode = GET_MODE (dst);
775 src = force_const_mem (SImode, src);
776
777 /* PC-relative loads are always SImode so we have to add a SUBREG if that
778 is not the desired mode */
779
780 if (mode != SImode)
781 {
782 if (register_operand (dst, mode))
783 dst = simplify_gen_subreg (SImode, dst, mode, 0);
784 else
785 {
786 src = force_reg (SImode, src);
787 src = gen_lowpart_SUBREG (mode, src);
788 }
789 }
790
791 emit_move_insn (dst, src);
792}
793
794
795int
796branch_operator (x, mode)
797 rtx x;
798 enum machine_mode mode;
799{
800 if (GET_MODE (x) != mode)
801 return FALSE;
802
803 switch (GET_CODE (x))
804 {
805 case EQ:
806 case NE:
807 case LT:
808 case GE:
809 return TRUE;
810 default:
811 break;
812 }
813 return FALSE;
814}
815
816
817int
818ubranch_operator (x, mode)
819 rtx x;
820 enum machine_mode mode;
821{
822 if (GET_MODE (x) != mode)
823 return FALSE;
824
825 switch (GET_CODE (x))
826 {
827 case LTU:
828 case GEU:
829 return TRUE;
830 default:
831 break;
832 }
833 return FALSE;
834}
835
836
837int
838boolean_operator (x, mode)
839 rtx x;
840 enum machine_mode mode;
841{
842 if (GET_MODE (x) != mode)
843 return FALSE;
844
845 switch (GET_CODE (x))
846 {
847 case EQ:
848 case NE:
849 return TRUE;
850 default:
851 break;
852 }
853 return FALSE;
854}
855
856
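/* Return nonzero if V is a mask of 1 to 16 consecutive low-order one
   bits, i.e., a value usable as an EXTUI extraction mask (see
   extui_fldsz_operand() above). */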
857int
858xtensa_mask_immediate (v)
859 int v;
860{
861#define MAX_MASK_SIZE 16
862 int mask_size;
863
864 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
865 {
866 if ((v & 1) == 0)
867 return FALSE;
868 v = v >> 1;
869 if (v == 0)
870 return TRUE;
871 }
872
873 return FALSE;
874}
875
876
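/* Return nonzero if offset V is valid for a memory access of the given
   MODE: an unsigned 8-bit offset scaled by the access size for QImode,
   HImode, and word-sized modes, with DFmode and BLKmode also requiring
   that the following word(s) remain addressable. */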
877int
878xtensa_mem_offset (v, mode)
879 unsigned v;
880 enum machine_mode mode;
881{
882 switch (mode)
883 {
884 case BLKmode:
885 /* Handle the worst case for block moves. See xtensa_expand_block_move
886 where we emit an optimized block move operation if the block can be
887 moved in < "move_ratio" pieces. The worst case is when the block is
888 aligned but has a size of (3 mod 4) (does this happen?) so that the
889 last piece requires a byte load/store. */
890 return (xtensa_uimm8 (v) &&
891 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
892
893 case QImode:
894 return xtensa_uimm8 (v);
895
896 case HImode:
897 return xtensa_uimm8x2 (v);
898
899 case DFmode:
900 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
901
902 default:
903 break;
904 }
905
906 return xtensa_uimm8x4 (v);
907}
908
909
910/* Make normal rtx_code into something we can index from an array */
911
912static enum internal_test
913map_test_to_internal_test (test_code)
914 enum rtx_code test_code;
915{
916 enum internal_test test = ITEST_MAX;
917
918 switch (test_code)
919 {
920 default: break;
921 case EQ: test = ITEST_EQ; break;
922 case NE: test = ITEST_NE; break;
923 case GT: test = ITEST_GT; break;
924 case GE: test = ITEST_GE; break;
925 case LT: test = ITEST_LT; break;
926 case LE: test = ITEST_LE; break;
927 case GTU: test = ITEST_GTU; break;
928 case GEU: test = ITEST_GEU; break;
929 case LTU: test = ITEST_LTU; break;
930 case LEU: test = ITEST_LEU; break;
931 }
932
933 return test;
934}
935
936
937/* Generate the code to compare two integer values. The return value is
938 the comparison expression. */
939
940static rtx
941gen_int_relational (test_code, cmp0, cmp1, p_invert)
942 enum rtx_code test_code; /* relational test (EQ, etc) */
943 rtx cmp0; /* first operand to compare */
944 rtx cmp1; /* second operand to compare */
945 int *p_invert; /* whether branch needs to reverse its test */
946{
947 struct cmp_info {
948 enum rtx_code test_code; /* test code to use in insn */
949 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
950 int const_add; /* constant to add (convert LE -> LT) */
951 int reverse_regs; /* reverse registers in test */
952 int invert_const; /* != 0 if invert value if cmp1 is constant */
953 int invert_reg; /* != 0 if invert value if cmp1 is register */
954 int unsignedp; /* != 0 for unsigned comparisons. */
955 };
956
957 static struct cmp_info info[ (int)ITEST_MAX ] = {
958
959 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
960 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
961
962 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
963 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
964 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
965 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
966
967 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
968 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
969 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
970 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
971 };
972
973 enum internal_test test;
974 enum machine_mode mode;
975 struct cmp_info *p_info;
976
977 test = map_test_to_internal_test (test_code);
978 if (test == ITEST_MAX)
979 abort ();
980
981 p_info = &info[ (int)test ];
982
983 mode = GET_MODE (cmp0);
984 if (mode == VOIDmode)
985 mode = GET_MODE (cmp1);
986
987 /* Make sure we can handle any constants given to us. */
988 if (GET_CODE (cmp1) == CONST_INT)
989 {
990 HOST_WIDE_INT value = INTVAL (cmp1);
991 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
992
993 /* if the immediate overflows or does not fit in the immediate field,
994 spill it to a register */
995
996 if ((p_info->unsignedp ?
997 (uvalue + p_info->const_add > uvalue) :
998 (value + p_info->const_add > value)) != (p_info->const_add > 0))
999 {
1000 cmp1 = force_reg (mode, cmp1);
1001 }
1002 else if (!(p_info->const_range_p) (value + p_info->const_add))
1003 {
1004 cmp1 = force_reg (mode, cmp1);
1005 }
1006 }
1007 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1008 {
1009 cmp1 = force_reg (mode, cmp1);
1010 }
1011
1012 /* See if we need to invert the result. */
1013 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1014 ? p_info->invert_const
1015 : p_info->invert_reg);
1016
1017 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1018 Comparison between two registers, may involve switching operands. */
1019 if (GET_CODE (cmp1) == CONST_INT)
1020 {
1021 if (p_info->const_add != 0)
1022 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1023
1024 }
1025 else if (p_info->reverse_regs)
1026 {
1027 rtx temp = cmp0;
1028 cmp0 = cmp1;
1029 cmp1 = temp;
1030 }
1031
1032 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1033}
1034
1035
1036/* Generate the code to compare two float values. The return value is
1037 the comparison expression. */
1038
1039static rtx
1040gen_float_relational (test_code, cmp0, cmp1)
1041 enum rtx_code test_code; /* relational test (EQ, etc) */
1042 rtx cmp0; /* first operand to compare */
1043 rtx cmp1; /* second operand to compare */
1044{
1045 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1046 rtx brtmp;
1047 int reverse_regs, invert;
1048
1049 switch (test_code)
1050 {
1051 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1052 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1053 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1054 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1055 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1056 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1057 default:
1058 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1059 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1060 }
1061
1062 if (reverse_regs)
1063 {
1064 rtx temp = cmp0;
1065 cmp0 = cmp1;
1066 cmp1 = temp;
1067 }
1068
1069 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1070 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1071
1072 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1073}
1074
1075
1076void
1077xtensa_expand_conditional_branch (operands, test_code)
1078 rtx *operands;
1079 enum rtx_code test_code;
1080{
1081 enum cmp_type type = branch_type;
1082 rtx cmp0 = branch_cmp[0];
1083 rtx cmp1 = branch_cmp[1];
1084 rtx cmp;
1085 int invert;
1086 rtx label1, label2;
1087
1088 switch (type)
1089 {
1090 case CMP_DF:
1091 default:
1092 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1093
1094 case CMP_SI:
1095 invert = FALSE;
1096 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1097 break;
1098
1099 case CMP_SF:
1100 if (!TARGET_HARD_FLOAT)
1101 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1102 invert = FALSE;
1103 cmp = gen_float_relational (test_code, cmp0, cmp1);
1104 break;
1105 }
1106
1107 /* Generate the branch. */
1108
1109 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1110 label2 = pc_rtx;
1111
1112 if (invert)
1113 {
1114 label2 = label1;
1115 label1 = pc_rtx;
1116 }
1117
1118 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1119 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1120 label1,
1121 label2)));
1122}
1123
1124
1125static rtx
1126gen_conditional_move (cmp)
1127 rtx cmp;
1128{
1129 enum rtx_code code = GET_CODE (cmp);
1130 rtx op0 = branch_cmp[0];
1131 rtx op1 = branch_cmp[1];
1132
1133 if (branch_type == CMP_SI)
1134 {
1135 /* Jump optimization calls get_condition() which canonicalizes
1136 comparisons like (GE x <const>) to (GT x <const-1>).
1137 Transform those comparisons back to GE, since that is the
1138 comparison supported in Xtensa. We shouldn't have to
1139 transform <LE x const> comparisons, because neither
1140 xtensa_expand_conditional_branch() nor get_condition() will
1141 produce them. */
1142
1143 if ((code == GT) && (op1 == constm1_rtx))
1144 {
1145 code = GE;
1146 op1 = const0_rtx;
1147 }
1148 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1149
1150 if (boolean_operator (cmp, VOIDmode))
1151 {
1152 /* swap the operands to make const0 second */
1153 if (op0 == const0_rtx)
1154 {
1155 op0 = op1;
1156 op1 = const0_rtx;
1157 }
1158
1159 /* if not comparing against zero, emit a comparison (subtract) */
1160 if (op1 != const0_rtx)
1161 {
1162 op0 = expand_binop (SImode, sub_optab, op0, op1,
1163 0, 0, OPTAB_LIB_WIDEN);
1164 op1 = const0_rtx;
1165 }
1166 }
1167 else if (branch_operator (cmp, VOIDmode))
1168 {
1169 /* swap the operands to make const0 second */
1170 if (op0 == const0_rtx)
1171 {
1172 op0 = op1;
1173 op1 = const0_rtx;
1174
1175 switch (code)
1176 {
1177 case LT: code = GE; break;
1178 case GE: code = LT; break;
1179 default: abort ();
1180 }
1181 }
1182
1183 if (op1 != const0_rtx)
1184 return 0;
1185 }
1186 else
1187 return 0;
1188
1189 return gen_rtx (code, VOIDmode, op0, op1);
1190 }
1191
1192 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1193 return gen_float_relational (code, op0, op1);
1194
1195 return 0;
1196}
1197
1198
1199int
1200xtensa_expand_conditional_move (operands, isflt)
1201 rtx *operands;
1202 int isflt;
1203{
1204 rtx cmp;
1205 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1206
1207 if (!(cmp = gen_conditional_move (operands[1])))
1208 return 0;
1209
1210 if (isflt)
1211 gen_fn = (branch_type == CMP_SI
1212 ? gen_movsfcc_internal0
1213 : gen_movsfcc_internal1);
1214 else
1215 gen_fn = (branch_type == CMP_SI
1216 ? gen_movsicc_internal0
1217 : gen_movsicc_internal1);
1218
1219 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1220 operands[2], operands[3], cmp));
1221 return 1;
1222}
1223
1224
1225int
1226xtensa_expand_scc (operands)
1227 rtx *operands;
1228{
1229 rtx dest = operands[0];
1230 rtx cmp = operands[1];
1231 rtx one_tmp, zero_tmp;
1232 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1233
1234 if (!(cmp = gen_conditional_move (cmp)))
1235 return 0;
1236
1237 one_tmp = gen_reg_rtx (SImode);
1238 zero_tmp = gen_reg_rtx (SImode);
1239 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1240 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1241
1242 gen_fn = (branch_type == CMP_SI
1243 ? gen_movsicc_internal0
1244 : gen_movsicc_internal1);
1245 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1246 return 1;
1247}
1248
1249
1250/* Emit insns to move operands[1] into operands[0].
1251
1252 Return 1 if we have written out everything that needs to be done to
1253 do the move. Otherwise, return 0 and the caller will emit the move
1254 normally. */
1255
1256int
1257xtensa_emit_move_sequence (operands, mode)
1258 rtx *operands;
1259 enum machine_mode mode;
1260{
1261 if (CONSTANT_P (operands[1])
1262 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1263 && (GET_CODE (operands[1]) != CONST_INT
1264 || !xtensa_simm12b (INTVAL (operands[1]))))
1265 {
1266 xtensa_load_constant (operands[0], operands[1]);
1267 return 1;
1268 }
1269
1270 if (!(reload_in_progress | reload_completed))
1271 {
fc12fa10 1272 if (!xtensa_valid_move (mode, operands))
f6b7ba2b 1273 operands[1] = force_reg (mode, operands[1]);
1274
1275 /* Check if this move is copying an incoming argument in a7. If
1276 so, emit the move, followed by the special "set_frame_ptr"
1277 unspec_volatile insn, at the very beginning of the function.
1278 This is necessary because the register allocator will ignore
1279 conflicts with a7 and may assign some other pseudo to a7. If
1280 that pseudo was assigned prior to this move, it would clobber
1281 the incoming argument in a7. By copying the argument out of
1282 a7 as the very first thing, and then immediately following
1283 that with an unspec_volatile to keep the scheduler away, we
1284 should avoid any problems. */
1285
1286 if (a7_overlap_mentioned_p (operands[1]))
1287 {
1288 rtx mov;
1289 switch (mode)
1290 {
1291 case SImode:
1292 mov = gen_movsi_internal (operands[0], operands[1]);
1293 break;
1294 case HImode:
1295 mov = gen_movhi_internal (operands[0], operands[1]);
1296 break;
1297 case QImode:
1298 mov = gen_movqi_internal (operands[0], operands[1]);
1299 break;
1300 default:
1301 abort ();
1302 }
1303
1304 /* Insert the instructions before any other argument copies.
1305 (The set_frame_ptr insn comes _after_ the move, so push it
1306 out first.) */
1307 push_topmost_sequence ();
1308 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1309 emit_insn_after (mov, get_insns ());
1310 pop_topmost_sequence ();
1311
1312 return 1;
1313 }
1314 }
1315
1316 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1317 instruction won't be recognized after reload. So we remove the
1318 subreg and adjust mem accordingly. */
1319 if (reload_in_progress)
1320 {
1321 operands[0] = fixup_subreg_mem (operands[0]);
1322 operands[1] = fixup_subreg_mem (operands[1]);
1323 }
1324 return 0;
1325}
1326
1327static rtx
1328fixup_subreg_mem (x)
1329 rtx x;
1330{
1331 if (GET_CODE (x) == SUBREG
1332 && GET_CODE (SUBREG_REG (x)) == REG
1333 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1334 {
1335 rtx temp =
1336 gen_rtx_SUBREG (GET_MODE (x),
1337 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1338 SUBREG_BYTE (x));
1339 x = alter_subreg (&temp);
1340 }
1341 return x;
1342}
1343
1344
1345/* Try to expand a block move operation to an RTL block move instruction.
1346 If not optimizing, or if the block size is not a constant, or if the
1347 block is too large to move inline, the expansion fails and GCC falls
1348 back to calling memcpy().
1349
1350 operands[0] is the destination
1351 operands[1] is the source
1352 operands[2] is the length
1353 operands[3] is the alignment */
1354
1355int
1356xtensa_expand_block_move (operands)
1357 rtx *operands;
1358{
1359 rtx dest = operands[0];
1360 rtx src = operands[1];
1361 int bytes = INTVAL (operands[2]);
1362 int align = XINT (operands[3], 0);
1363 int num_pieces, move_ratio;
1364
1365 /* If this is not a fixed size move, just call memcpy */
1366 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1367 return 0;
1368
1369 /* Anything to move? */
1370 if (bytes <= 0)
1371 return 1;
1372
1373 if (align > MOVE_MAX)
1374 align = MOVE_MAX;
1375
1376 /* decide whether to expand inline based on the optimization level */
1377 move_ratio = 4;
1378 if (optimize > 2)
1379 move_ratio = LARGEST_MOVE_RATIO;
1380 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1381 if (num_pieces >= move_ratio)
1382 return 0;
1383
160b2123 1384 /* make sure the memory addresses are valid */
9c56a8c5 1385 operands[0] = validize_mem (dest);
1386 operands[1] = validize_mem (src);
f6b7ba2b 1387
1388 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1389 operands[2], operands[3]));
1390 return 1;
1391}
1392
1393
1394/* Emit a sequence of instructions to implement a block move, trying
1395 to hide load delay slots as much as possible. Load N values into
1396 temporary registers, store those N values, and repeat until the
1397 complete block has been moved. N=delay_slots+1 */
1398
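/* For example, with two load delay slots and word-aligned operands, each
   iteration of the loop below records up to three l32i loads into the
   temporary registers and then emits the three matching s32i stores. */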
1399struct meminsnbuf {
1400 char template[30];
1401 rtx operands[2];
1402};
1403
1404void
1405xtensa_emit_block_move (operands, tmpregs, delay_slots)
1406 rtx *operands;
1407 rtx *tmpregs;
1408 int delay_slots;
1409{
1410 rtx dest = operands[0];
1411 rtx src = operands[1];
1412 int bytes = INTVAL (operands[2]);
1413 int align = XINT (operands[3], 0);
1414 rtx from_addr = XEXP (src, 0);
1415 rtx to_addr = XEXP (dest, 0);
1416 int from_struct = MEM_IN_STRUCT_P (src);
1417 int to_struct = MEM_IN_STRUCT_P (dest);
1418 int offset = 0;
1419 int chunk_size, item_size;
1420 struct meminsnbuf *ldinsns, *stinsns;
1421 const char *ldname, *stname;
1422 enum machine_mode mode;
1423
1424 if (align > MOVE_MAX)
1425 align = MOVE_MAX;
1426 item_size = align;
1427 chunk_size = delay_slots + 1;
1428
1429 ldinsns = (struct meminsnbuf *)
1430 alloca (chunk_size * sizeof (struct meminsnbuf));
1431 stinsns = (struct meminsnbuf *)
1432 alloca (chunk_size * sizeof (struct meminsnbuf));
1433
1434 mode = xtensa_find_mode_for_size (item_size);
1435 item_size = GET_MODE_SIZE (mode);
1436 ldname = xtensa_ld_opcodes[(int) mode];
1437 stname = xtensa_st_opcodes[(int) mode];
1438
1439 while (bytes > 0)
1440 {
1441 int n;
1442
1443 for (n = 0; n < chunk_size; n++)
1444 {
1445 rtx addr, mem;
1446
1447 if (bytes == 0)
1448 {
1449 chunk_size = n;
1450 break;
1451 }
1452
1453 if (bytes < item_size)
1454 {
1455 /* find a smaller item_size which we can load & store */
1456 item_size = bytes;
1457 mode = xtensa_find_mode_for_size (item_size);
1458 item_size = GET_MODE_SIZE (mode);
1459 ldname = xtensa_ld_opcodes[(int) mode];
1460 stname = xtensa_st_opcodes[(int) mode];
1461 }
1462
1463 /* record the load instruction opcode and operands */
1464 addr = plus_constant (from_addr, offset);
1465 mem = gen_rtx_MEM (mode, addr);
1466 if (! memory_address_p (mode, addr))
1467 abort ();
1468 MEM_IN_STRUCT_P (mem) = from_struct;
1469 ldinsns[n].operands[0] = tmpregs[n];
1470 ldinsns[n].operands[1] = mem;
1471 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1472
1473 /* record the store instruction opcode and operands */
1474 addr = plus_constant (to_addr, offset);
1475 mem = gen_rtx_MEM (mode, addr);
1476 if (! memory_address_p (mode, addr))
1477 abort ();
1478 MEM_IN_STRUCT_P (mem) = to_struct;
1479 stinsns[n].operands[0] = tmpregs[n];
1480 stinsns[n].operands[1] = mem;
1481 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1482
1483 offset += item_size;
1484 bytes -= item_size;
1485 }
1486
1487 /* now output the loads followed by the stores */
1488 for (n = 0; n < chunk_size; n++)
1489 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1490 for (n = 0; n < chunk_size; n++)
1491 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1492 }
1493}
1494
1495
1496static enum machine_mode
1497xtensa_find_mode_for_size (item_size)
1498 unsigned item_size;
1499{
1500 enum machine_mode mode, tmode;
1501
1502 while (1)
1503 {
1504 mode = VOIDmode;
1505
1506 /* find mode closest to but not bigger than item_size */
1507 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1508 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1509 if (GET_MODE_SIZE (tmode) <= item_size)
1510 mode = tmode;
1511 if (mode == VOIDmode)
1512 abort ();
1513
1514 item_size = GET_MODE_SIZE (mode);
1515
1516 if (xtensa_ld_opcodes[(int) mode]
1517 && xtensa_st_opcodes[(int) mode])
1518 break;
1519
1520 /* cannot load & store this mode; try something smaller */
1521 item_size -= 1;
1522 }
1523
1524 return mode;
1525}
1526
1527
1528void
1529xtensa_expand_nonlocal_goto (operands)
1530 rtx *operands;
1531{
1532 rtx goto_handler = operands[1];
1533 rtx containing_fp = operands[3];
1534
1535 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1536 is too big to generate in-line */
1537
1538 if (GET_CODE (containing_fp) != REG)
1539 containing_fp = force_reg (Pmode, containing_fp);
1540
1541 goto_handler = replace_rtx (copy_rtx (goto_handler),
1542 virtual_stack_vars_rtx,
1543 containing_fp);
1544
1545 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1546 0, VOIDmode, 2,
1547 containing_fp, Pmode,
1548 goto_handler, Pmode);
1549}
1550
1551
1f3233d1 1552static struct machine_function *
1553xtensa_init_machine_status ()
f6b7ba2b 1554{
1f3233d1 1555 return ggc_alloc_cleared (sizeof (struct machine_function));
f6b7ba2b 1556}
1557
1558
1559void
1560xtensa_setup_frame_addresses ()
1561{
1562 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1563 cfun->machine->accesses_prev_frame = 1;
1564
1565 emit_library_call
1566 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1567 0, VOIDmode, 0);
1568}
1569
1570
1571/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1572 a comment showing where the end of the loop is. However, if there is a
1573 label or a branch at the end of the loop then we need to place a nop
1574 there. If the loop ends with a label we need the nop so that branches
1575 targeting that label will target the nop (and thus remain in the loop),
1576 instead of targeting the instruction after the loop (and thus exiting
1577 the loop). If the loop ends with a branch, we need the nop in case the
1578 branch is targeting a location inside the loop. When the branch
1579 executes it will cause the loop count to be decremented even if it is
1580 taken (because it is the last instruction in the loop), so we need to
1581 nop after the branch to prevent the loop count from being decremented
1582 when the branch is taken. */
1583
1584void
1585xtensa_emit_loop_end (insn, operands)
1586 rtx insn;
1587 rtx *operands;
1588{
1589 char done = 0;
1590
1591 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1592 {
1593 switch (GET_CODE (insn))
1594 {
1595 case NOTE:
1596 case BARRIER:
1597 break;
1598
1599 case CODE_LABEL:
1600 output_asm_insn ("nop.n", operands);
1601 done = 1;
1602 break;
1603
1604 default:
1605 {
1606 rtx body = PATTERN (insn);
1607
1608 if (GET_CODE (insn) == JUMP_INSN)
1609 {
1610 output_asm_insn ("nop.n", operands);
1611 done = 1;
1612 }
1613 else if ((GET_CODE (body) != USE)
1614 && (GET_CODE (body) != CLOBBER))
1615 done = 1;
1616 }
1617 break;
1618 }
1619 }
1620
1621 output_asm_insn ("# loop end for %0", operands);
1622}
1623
1624
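/* Return the assembler template for a call: "call8" for a constant or
   symbolic target, "callx8" when the target is in a register. CALLOP is
   the index of the call target in OPERANDS. */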
1625char *
1626xtensa_emit_call (callop, operands)
1627 int callop;
1628 rtx *operands;
1629{
bbfbe351 1630 static char result[64];
f6b7ba2b 1631 rtx tgt = operands[callop];
1632
1633 if (GET_CODE (tgt) == CONST_INT)
1634 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1635 else if (register_operand (tgt, VOIDmode))
1636 sprintf (result, "callx8\t%%%d", callop);
1637 else
1638 sprintf (result, "call8\t%%%d", callop);
1639
1640 return result;
1641}
1642
1643
1644/* Return the stabs register number to use for 'regno'. */
1645
1646int
1647xtensa_dbx_register_number (regno)
1648 int regno;
1649{
1650 int first = -1;
1651
1652 if (GP_REG_P (regno)) {
1653 regno -= GP_REG_FIRST;
1654 first = 0;
1655 }
1656 else if (BR_REG_P (regno)) {
1657 regno -= BR_REG_FIRST;
1658 first = 16;
1659 }
1660 else if (FP_REG_P (regno)) {
1661 regno -= FP_REG_FIRST;
1662 /* The current numbering convention is that TIE registers are
1663 numbered in libcc order beginning with 256. We can't guarantee
1664 that the FP registers will come first, so the following is just
1665 a guess. It seems like we should make a special case for FP
1666 registers and give them fixed numbers < 256. */
1667 first = 256;
1668 }
1669 else if (ACC_REG_P (regno))
1670 {
1671 first = 0;
1672 regno = -1;
1673 }
1674
1675 /* When optimizing, we sometimes get asked about pseudo-registers
1676 that don't represent hard registers. Return 0 for these. */
1677 if (first == -1)
1678 return 0;
1679
1680 return first + regno;
1681}
1682
1683
1684/* Argument support functions. */
1685
1686/* Initialize CUMULATIVE_ARGS for a function. */
1687
1688void
1689init_cumulative_args (cum, fntype, libname)
1690 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1691 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1692 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1693{
1694 cum->arg_words = 0;
1695}
1696
1697/* Advance the argument to the next argument position. */
1698
1699void
1700function_arg_advance (cum, mode, type)
1701 CUMULATIVE_ARGS *cum; /* current arg information */
1702 enum machine_mode mode; /* current arg mode */
1703 tree type; /* type of the argument or 0 if lib support */
1704{
1705 int words, max;
1706 int *arg_words;
1707
1708 arg_words = &cum->arg_words;
1709 max = MAX_ARGS_IN_REGISTERS;
1710
1711 words = (((mode != BLKmode)
1712 ? (int) GET_MODE_SIZE (mode)
1713 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1714
1715 if ((*arg_words + words > max) && (*arg_words < max))
1716 *arg_words = max;
1717
1718 *arg_words += words;
1719}
1720
1721
1722/* Return an RTL expression containing the register for the given mode,
1723 or 0 if the argument is to be passed on the stack. */
1724
1725rtx
1726function_arg (cum, mode, type, incoming_p)
1727 CUMULATIVE_ARGS *cum; /* current arg information */
1728 enum machine_mode mode; /* current arg mode */
1729 tree type; /* type of the argument or 0 if lib support */
1730 int incoming_p; /* computing the incoming registers? */
1731{
1732 int regbase, words, max;
1733 int *arg_words;
1734 int regno;
1735 enum machine_mode result_mode;
1736
1737 arg_words = &cum->arg_words;
1738 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1739 max = MAX_ARGS_IN_REGISTERS;
1740
1741 words = (((mode != BLKmode)
1742 ? (int) GET_MODE_SIZE (mode)
1743 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1744
1745 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1746 *arg_words += (*arg_words & 1);
1747
1748 if (*arg_words + words > max)
1749 return (rtx)0;
1750
1751 regno = regbase + *arg_words;
1752 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1753
1754 /* We need to make sure that references to a7 are represented with
1755 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1756 modes bigger than 2 words (because we only have patterns for
1757 modes of 2 words or smaller), we can't control the expansion
1758 unless we explicitly list the individual registers in a PARALLEL. */
1759
1760 if ((mode == BLKmode || words > 2)
1761 && regno < A7_REG
1762 && regno + words > A7_REG)
1763 {
1764 rtx result;
1765 int n;
1766
1767 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1768 for (n = 0; n < words; n++)
1769 {
1770 XVECEXP (result, 0, n) =
1771 gen_rtx_EXPR_LIST (VOIDmode,
1772 gen_raw_REG (SImode, regno + n),
1773 GEN_INT (n * UNITS_PER_WORD));
1774 }
1775 return result;
1776 }
1777
1778 return gen_raw_REG (result_mode, regno);
1779}
1780
1781
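/* Validate option combinations and initialize the per-target tables: the
   block-move opcode names, the constraint-character register classes, and
   xtensa_hard_regno_mode_ok[][]; also normalize the PIC flags. */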
1782void
1783override_options ()
1784{
1785 int regno;
1786 enum machine_mode mode;
1787
1788 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1789 error ("boolean registers required for the floating-point option");
1790
1791 /* set up the tables of ld/st opcode names for block moves */
1792 xtensa_ld_opcodes[(int) SImode] = "l32i";
1793 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1794 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1795 xtensa_st_opcodes[(int) SImode] = "s32i";
1796 xtensa_st_opcodes[(int) HImode] = "s16i";
1797 xtensa_st_opcodes[(int) QImode] = "s8i";
1798
1799 xtensa_char_to_class['q'] = SP_REG;
1800 xtensa_char_to_class['a'] = GR_REGS;
1801 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1802 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1803 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1804 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1805 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1806 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1808
1809 /* Set up array giving whether a given register can hold a given mode. */
1810 for (mode = VOIDmode;
1811 mode != MAX_MACHINE_MODE;
1812 mode = (enum machine_mode) ((int) mode + 1))
1813 {
1814 int size = GET_MODE_SIZE (mode);
1815 enum mode_class class = GET_MODE_CLASS (mode);
1816
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 {
1819 int temp;
1820
1821 if (ACC_REG_P (regno))
1822 temp = (TARGET_MAC16 &&
1823 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1824 else if (GP_REG_P (regno))
1825 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1826 else if (FP_REG_P (regno))
1827 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1828 else if (BR_REG_P (regno))
1829 temp = (TARGET_BOOLEANS && (mode == CCmode));
1830 else
1831 temp = FALSE;
1832
1833 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1834 }
1835 }
1836
1837 init_machine_status = xtensa_init_machine_status;
f6b7ba2b 1838
1839 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1840 some targets need to always use PIC. */
1841 if (XTENSA_ALWAYS_PIC)
1842 {
1843 if (flag_pic)
1844 warning ("-f%s ignored (all code is position independent)",
1845 (flag_pic > 1 ? "PIC" : "pic"));
1846 flag_pic = 1;
1847 }
1848 if (flag_pic > 1)
1849 flag_pic = 1;
1850}
1851
1852
1853/* A C compound statement to output to stdio stream STREAM the
1854 assembler syntax for an instruction operand X. X is an RTL
1855 expression.
1856
1857 CODE is a value that can be used to specify one of several ways
1858 of printing the operand. It is used when identical operands
1859 must be printed differently depending on the context. CODE
1860 comes from the '%' specification that was used to request
1861 printing of the operand. If the specification was just '%DIGIT'
1862 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1863 is the ASCII code for LTR.
1864
1865 If X is a register, this macro should print the register's name.
1866 The names can be found in an array 'reg_names' whose type is
1867 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1868
1869 When the machine description has a specification '%PUNCT' (a '%'
1870 followed by a punctuation character), this macro is called with
1871 a null pointer for X and the punctuation character for CODE.
1872
1873 'a', 'c', 'l', and 'n' are reserved.
1874
1875 The Xtensa specific codes are:
1876
1877 'd' CONST_INT, print as signed decimal
1878 'x' CONST_INT, print as signed hexadecimal
1879 'K' CONST_INT, print number of bits in mask for EXTUI
1880 'R' CONST_INT, print (X & 0x1f)
1881 'L' CONST_INT, print ((32 - X) & 0x1f)
1882 'D' REG, print second register of double-word register operand
1883 'N' MEM, print address of next word following a memory operand
1884 'v' MEM, if memory reference is volatile, output a MEMW before it
1885*/
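/* For example, an insn template might use "%K2" to print the field width
   of an EXTUI mask operand, or prefix a load with "%v1" to emit a MEMW
   when operand 1 is a volatile memory reference. (The operand numbers
   here are illustrative, not taken from the machine description.) */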
1886
1887static void
1888printx (file, val)
1889 FILE *file;
1890 signed int val;
1891{
1892 /* print a hexadecimal value in a nice way */
1893 if ((val > -0xa) && (val < 0xa))
1894 fprintf (file, "%d", val);
1895 else if (val < 0)
1896 fprintf (file, "-0x%x", -val);
1897 else
1898 fprintf (file, "0x%x", val);
1899}
1900
1901
1902void
1903print_operand (file, op, letter)
1904 FILE *file; /* file to write to */
1905 rtx op; /* operand to print */
1906 int letter; /* %<letter> or 0 */
1907{
1908 enum rtx_code code;
1909
1910 if (! op)
1911 error ("PRINT_OPERAND null pointer");
1912
1913 code = GET_CODE (op);
1914 switch (code)
1915 {
1916 case REG:
1917 case SUBREG:
1918 {
1919 int regnum = xt_true_regnum (op);
1920 if (letter == 'D')
1921 regnum++;
1922 fprintf (file, "%s", reg_names[regnum]);
1923 break;
1924 }
1925
1926 case MEM:
aac632cd 1927 /* For a volatile memory reference, emit a MEMW before the
1928 load or store. */
f6b7ba2b 1929 if (letter == 'v')
1930 {
1931 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1932 fprintf (file, "memw\n\t");
1933 break;
1934 }
1935 else if (letter == 'N')
aac632cd 1936 {
1937 enum machine_mode mode;
1938 switch (GET_MODE (op))
1939 {
1940 case DFmode: mode = SFmode; break;
1941 case DImode: mode = SImode; break;
1942 default: abort ();
1943 }
1944 op = adjust_address (op, mode, 4);
1945 }
f6b7ba2b 1946
1947 output_address (XEXP (op, 0));
1948 break;
1949
1950 case CONST_INT:
1951 switch (letter)
1952 {
1953 case 'K':
1954 {
1955 int num_bits = 0;
1956 unsigned val = INTVAL (op);
1957 while (val & 1)
1958 {
1959 num_bits += 1;
1960 val = val >> 1;
1961 }
1962 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1963 fatal_insn ("invalid mask", op);
1964
1965 fprintf (file, "%d", num_bits);
1966 break;
1967 }
1968
1969 case 'L':
1970 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1971 break;
1972
1973 case 'R':
1974 fprintf (file, "%d", INTVAL (op) & 0x1f);
1975 break;
1976
1977 case 'x':
1978 printx (file, INTVAL (op));
1979 break;
1980
1981 case 'd':
1982 default:
1983 fprintf (file, "%d", INTVAL (op));
1984 break;
1985
1986 }
1987 break;
1988
1989 default:
1990 output_addr_const (file, op);
1991 }
1992}
1993
1994
1995/* A C compound statement to output to stdio stream STREAM the
1996 assembler syntax for an instruction operand that is a memory
7811991d 1997 reference whose address is ADDR. ADDR is an RTL expression. */
f6b7ba2b 1998
1999void
2000print_operand_address (file, addr)
2001 FILE *file;
2002 rtx addr;
2003{
2004 if (!addr)
2005 error ("PRINT_OPERAND_ADDRESS, null pointer");
2006
2007 switch (GET_CODE (addr))
2008 {
2009 default:
2010 fatal_insn ("invalid address", addr);
2011 break;
2012
2013 case REG:
2014 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2015 break;
2016
2017 case PLUS:
2018 {
2019 rtx reg = (rtx)0;
2020 rtx offset = (rtx)0;
2021 rtx arg0 = XEXP (addr, 0);
2022 rtx arg1 = XEXP (addr, 1);
2023
2024 if (GET_CODE (arg0) == REG)
2025 {
2026 reg = arg0;
2027 offset = arg1;
2028 }
2029 else if (GET_CODE (arg1) == REG)
2030 {
2031 reg = arg1;
2032 offset = arg0;
2033 }
2034 else
2035 fatal_insn ("no register in address", addr);
2036
2037 if (CONSTANT_P (offset))
2038 {
2039 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2040 output_addr_const (file, offset);
2041 }
2042 else
2043 fatal_insn ("address offset not a constant", addr);
2044 }
2045 break;
2046
2047 case LABEL_REF:
2048 case SYMBOL_REF:
2049 case CONST_INT:
2050 case CONST:
2051 output_addr_const (file, addr);
2052 break;
2053 }
2054}
2055
2056
2057/* Emit either a label, .comm, or .lcomm directive. */
2058
2059void
2060xtensa_declare_object (file, name, init_string, final_string, size)
2061 FILE *file;
2062 char *name;
2063 char *init_string;
2064 char *final_string;
2065 int size;
2066{
2067 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2068 assemble_name (file, name);
2069 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2070}
2071
2072
2073void
2074xtensa_output_literal (file, x, mode, labelno)
2075 FILE *file;
2076 rtx x;
2077 enum machine_mode mode;
2078 int labelno;
2079{
2080 long value_long[2];
badfe841 2081 REAL_VALUE_TYPE r;
f6b7ba2b 2082 int size;
2083
2084 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2085
2086 switch (GET_MODE_CLASS (mode))
2087 {
2088 case MODE_FLOAT:
2089 if (GET_CODE (x) != CONST_DOUBLE)
2090 abort ();
2091
badfe841 2092 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
f6b7ba2b 2093 switch (mode)
2094 {
2095 case SFmode:
badfe841 2096 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2097 fprintf (file, "0x%08lx\n", value_long[0]);
f6b7ba2b 2098 break;
2099
2100 case DFmode:
badfe841 2101 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2102 fprintf (file, "0x%08lx, 0x%08lx\n",
2103 value_long[0], value_long[1]);
f6b7ba2b 2104 break;
2105
2106 default:
2107 abort ();
2108 }
2109
2110 break;
2111
2112 case MODE_INT:
2113 case MODE_PARTIAL_INT:
2114 size = GET_MODE_SIZE (mode);
2115 if (size == 4)
2116 {
2117 output_addr_const (file, x);
2118 fputs ("\n", file);
2119 }
2120 else if (size == 8)
2121 {
2122 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2123 fputs (", ", file);
2124 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2125 fputs ("\n", file);
2126 }
2127 else
2128 abort ();
2129 break;
2130
2131 default:
2132 abort ();
2133 }
2134}
2135
2136
2137/* Return the bytes needed to compute the frame pointer from the current
2138 stack pointer. */
2139
2140#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2141#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
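/* For example, assuming the usual 128-bit STACK_BOUNDARY, STACK_BYTES is
   16 and XTENSA_STACK_ALIGN rounds a 20-byte request up to 32 bytes. */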
2142
2143long
2144compute_frame_size (size)
2145 int size; /* # of var. bytes allocated */
2146{
2147 /* add space for the incoming static chain value */
2148 if (current_function_needs_context)
2149 size += (1 * UNITS_PER_WORD);
2150
2151 xtensa_current_frame_size =
2152 XTENSA_STACK_ALIGN (size
2153 + current_function_outgoing_args_size
2154 + (WINDOW_SIZE * UNITS_PER_WORD));
2155 return xtensa_current_frame_size;
2156}
2157
2158
2159int
2160xtensa_frame_pointer_required ()
2161{
2162 /* The code to expand builtin_frame_addr and builtin_return_addr
2163 currently uses the hard_frame_pointer instead of frame_pointer.
2164 This seems wrong but maybe it's necessary for other architectures.
2165 This function is derived from the i386 code. */
2166
2167 if (cfun->machine->accesses_prev_frame)
2168 return 1;
2169
2170 return 0;
2171}
2172
2173
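/* Machine-dependent reorg pass: decide whether the frame size needs a
   literal-pool constant, and if a frame pointer is required, fix up
   references to it in the insns that run before it is set up.  */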
2174void
2175xtensa_reorg (first)
2176 rtx first;
2177{
2178 rtx insn, set_frame_ptr_insn = 0;
2179
2180 unsigned long tsize = compute_frame_size (get_frame_size ());
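  /* The ENTRY instruction encodes its stack adjustment as a 12-bit
     immediate scaled by 8, so frames of 32K bytes or more must load
     the adjustment from the literal pool instead.  */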
2181 if (tsize < (1 << (12+3)))
2182 frame_size_const = 0;
2183 else
2184 {
2185 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2186
2187 /* make sure the constant is used so it doesn't get eliminated
2188 from the constant pool */
2189 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2190 }
2191
2192 if (!frame_pointer_needed)
2193 return;
2194
2195 /* Search all instructions, looking for the insn that sets up the
2196 frame pointer. This search will fail if the function does not
2197 have an incoming argument in $a7, but in that case, we can just
2198 set up the frame pointer at the very beginning of the
2199 function. */
2200
2201 for (insn = first; insn; insn = NEXT_INSN (insn))
2202 {
2203 rtx pat;
2204
2205 if (!INSN_P (insn))
2206 continue;
2207
2208 pat = PATTERN (insn);
2209 if (GET_CODE (pat) == UNSPEC_VOLATILE
2210 && (XINT (pat, 1) == UNSPECV_SET_FP))
2211 {
2212 set_frame_ptr_insn = insn;
2213 break;
2214 }
2215 }
2216
2217 if (set_frame_ptr_insn)
2218 {
2219 /* for all instructions prior to set_frame_ptr_insn, replace
2220 hard_frame_pointer references with stack_pointer */
2221 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2222 {
2223 if (INSN_P (insn))
2224 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2225 hard_frame_pointer_rtx,
2226 stack_pointer_rtx);
2227 }
2228 }
2229 else
2230 {
2231 /* emit the frame pointer move immediately after the NOTE that starts
2232 the function */
2233 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2234 stack_pointer_rtx), first);
2235 }
2236}
2237
2238
2239/* Set up the stack and frame (if desired) for the function. */
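/* For illustration: a 32-byte frame without a frame pointer produces

	.frame	sp, 32
	entry	sp, 32

   while a frame too large for the ENTRY immediate produces roughly

	entry	sp, 16
	l32r	a8, <frame size literal>
	sub	a8, sp, a8
	movsp	sp, a8
*/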
2240
2241void
2242xtensa_function_prologue (file, size)
2243 FILE *file;
2244 int size ATTRIBUTE_UNUSED;
2245{
2246 unsigned long tsize = compute_frame_size (get_frame_size ());
2247
2248 if (frame_pointer_needed)
2249 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2250 else
2251 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2252
2253
2254 if (tsize < (1 << (12+3)))
2255 {
2256 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2257 }
2258 else
2259 {
2260 fprintf (file, "\tentry\tsp, 16\n");
2261
2262 /* use a8 as a temporary since a0-a7 may be live */
2263 fprintf (file, "\tl32r\ta8, ");
2264 print_operand (file, frame_size_const, 0);
2265 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2266 fprintf (file, "\tmovsp\tsp, a8\n");
2267 }
2268}
2269
2270
2271/* Do any necessary cleanup after a function to restore
2272 stack, frame, and regs. */
2273
2274void
2275xtensa_function_epilogue (file, size)
2276 FILE *file;
2277 int size ATTRIBUTE_UNUSED;
2278{
2279 rtx insn = get_last_insn ();
2280 /* If the last insn was a BARRIER, we don't have to write anything. */
2281 if (GET_CODE (insn) == NOTE)
2282 insn = prev_nonnote_insn (insn);
2283 if (insn == 0 || GET_CODE (insn) != BARRIER)
2284 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2285
2286 xtensa_current_frame_size = 0;
2287}
2288
2289
2290/* Create the va_list data type.
2291 This structure is set up by __builtin_saveregs. The __va_reg
2292 field points to a stack-allocated region holding the contents of the
2293 incoming argument registers. The __va_ndx field is an index initialized
2294 to the position of the first unnamed (variable) argument. This same index
2295 is also used to address the arguments passed in memory. Thus, the
2296 __va_stk field is initialized to point to the position of the first
2297 argument in memory, offset to account for the arguments passed in
2298 registers. E.g., if there are 6 argument registers, and each register is
2299 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2300 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2301 argument word N for N >= 6. */
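/* Sketch only: the record built below corresponds roughly to the
   following C declaration (the type name here is hypothetical),
   assuming 32-bit pointers and integers:

       typedef struct {
         void *__va_stk;        position of arguments passed in memory
         void *__va_reg;        saved copy of the argument registers
         int   __va_ndx;        byte index of the next argument
       } __xtensa_va_list;
*/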
2302
2303tree
2304xtensa_build_va_list (void)
2305{
2306 tree f_stk, f_reg, f_ndx, record;
2307
2308 record = make_node (RECORD_TYPE);
2309
2310 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2311 ptr_type_node);
2312 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2313 ptr_type_node);
2314 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2315 integer_type_node);
2316
2317 DECL_FIELD_CONTEXT (f_stk) = record;
2318 DECL_FIELD_CONTEXT (f_reg) = record;
2319 DECL_FIELD_CONTEXT (f_ndx) = record;
2320
2321 TYPE_FIELDS (record) = f_stk;
2322 TREE_CHAIN (f_stk) = f_reg;
2323 TREE_CHAIN (f_reg) = f_ndx;
2324
2325 layout_type (record);
2326 return record;
2327}
2328
2329
2330/* Save the incoming argument registers on the stack. Returns the
2331 address of the saved registers. */
2332
2333rtx
2334xtensa_builtin_saveregs ()
2335{
2336 rtx gp_regs, dest;
2337 int arg_words = current_function_arg_words;
2338 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2339 int i;
2340
2341 if (gp_left == 0)
2342 return const0_rtx;
2343
2344 /* allocate the general-purpose register space */
2345 gp_regs = assign_stack_local
2346 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2347 MEM_IN_STRUCT_P (gp_regs) = 1;
2348 RTX_UNCHANGING_P (gp_regs) = 1;
2349 RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;
2350
2351 /* Now store the incoming registers. */
2352 dest = change_address (gp_regs, SImode,
2353 plus_constant (XEXP (gp_regs, 0),
2354 arg_words * UNITS_PER_WORD));
2355
2356 /* Note: Don't use move_block_from_reg() here because the incoming
2357 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2358 Instead, call gen_raw_REG() directly so that we get a distinct
2359 instance of (REG:SI 7). */
2360 for (i = 0; i < gp_left; i++)
2361 {
2362 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2363 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2364 }
2365
2366 return XEXP (gp_regs, 0);
2367}
2368
2369
2370/* Implement `va_start' for varargs and stdarg. We look at the
2371 current function to fill in an initial va_list. */
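/* Roughly, in C terms (with __incoming_args standing for
   virtual_incoming_args_rtx and AP for the va_list being initialized):

       AP.__va_reg = __builtin_saveregs ();
       AP.__va_stk = __incoming_args - MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD;
       AP.__va_ndx = arg_words * UNITS_PER_WORD;
*/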
2372
2373void
2374xtensa_va_start (stdarg_p, valist, nextarg)
2375 int stdarg_p ATTRIBUTE_UNUSED;
2376 tree valist;
2377 rtx nextarg ATTRIBUTE_UNUSED;
2378{
2379 tree f_stk, stk;
2380 tree f_reg, reg;
2381 tree f_ndx, ndx;
2382 tree t, u;
2383 int arg_words;
2384
2385 arg_words = current_function_args_info.arg_words;
2386
2387 f_stk = TYPE_FIELDS (va_list_type_node);
2388 f_reg = TREE_CHAIN (f_stk);
2389 f_ndx = TREE_CHAIN (f_reg);
2390
2391 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2392 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2393 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2394
2395 /* Call __builtin_saveregs; save the result in __va_reg */
2396 current_function_arg_words = arg_words;
2397 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2398 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2399 TREE_SIDE_EFFECTS (t) = 1;
2400 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2401
2402 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2403 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2404 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2405 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2406 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2407 TREE_SIDE_EFFECTS (t) = 1;
2408 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2409
2410 /* Set the __va_ndx member. */
2411 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2412 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2413 TREE_SIDE_EFFECTS (t) = 1;
2414 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2415}
2416
2417
2418/* Implement `va_arg'. */
2419
2420rtx
2421xtensa_va_arg (valist, type)
2422 tree valist, type;
2423{
2424 tree f_stk, stk;
2425 tree f_reg, reg;
2426 tree f_ndx, ndx;
dd52a190 2427 tree tmp, addr_tree, type_size;
2428 rtx array, orig_ndx, r, addr, size, va_size;
f6b7ba2b 2429 rtx lab_false, lab_over, lab_false2;
2430
f6b7ba2b 2431 f_stk = TYPE_FIELDS (va_list_type_node);
2432 f_reg = TREE_CHAIN (f_stk);
2433 f_ndx = TREE_CHAIN (f_reg);
2434
2435 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2436 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2437 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2438
dd52a190 2439 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2440
2441 va_size = gen_reg_rtx (SImode);
2442 tmp = fold (build (MULT_EXPR, sizetype,
2443 fold (build (TRUNC_DIV_EXPR, sizetype,
2444 fold (build (PLUS_EXPR, sizetype,
2445 type_size,
2446 size_int (UNITS_PER_WORD - 1))),
2447 size_int (UNITS_PER_WORD))),
2448 size_int (UNITS_PER_WORD)));
2449 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2450 if (r != va_size)
2451 emit_move_insn (va_size, r);
2452
f6b7ba2b 2453
2454 /* First align __va_ndx to a double word boundary if necessary for this arg:
2455
2456 if (__alignof__ (TYPE) > 4)
2457 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2458 */
2459
2460 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2461 {
2462 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2463 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2464 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2465 build_int_2 (-2 * UNITS_PER_WORD, -1));
2466 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2467 TREE_SIDE_EFFECTS (tmp) = 1;
2468 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2469 }
2470
2471
2472 /* Increment __va_ndx to point past the argument:
2473
2474 orig_ndx = (AP).__va_ndx;
2475 (AP).__va_ndx += __va_size (TYPE);
2476 */
2477
2478 orig_ndx = gen_reg_rtx (SImode);
2479 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2480 if (r != orig_ndx)
2481 emit_move_insn (orig_ndx, r);
2482
dd52a190 2483 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2484 make_tree (intSI_type_node, va_size));
f6b7ba2b 2485 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2486 TREE_SIDE_EFFECTS (tmp) = 1;
2487 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2488
2489
2490 /* Check if the argument is in registers:
2491
89d4bc5e 2492 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2493 && !MUST_PASS_IN_STACK (type))
f6b7ba2b 2494 __array = (AP).__va_reg;
2495 */
2496
f6b7ba2b 2497 array = gen_reg_rtx (Pmode);
2498
4588bbd8 2499 lab_over = NULL_RTX;
89d4bc5e 2500 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2501 {
2502 lab_false = gen_label_rtx ();
2503 lab_over = gen_label_rtx ();
2504
2505 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2506 EXPAND_NORMAL),
2507 GEN_INT (MAX_ARGS_IN_REGISTERS
2508 * UNITS_PER_WORD),
2509 GT, const1_rtx, SImode, 0, lab_false);
2510
2511 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2512 if (r != array)
2513 emit_move_insn (array, r);
2514
2515 emit_jump_insn (gen_jump (lab_over));
2516 emit_barrier ();
2517 emit_label (lab_false);
2518 }
f6b7ba2b 2519
2520 /* ...otherwise, the argument is on the stack (never split between
2521 registers and the stack -- change __va_ndx if necessary):
2522
2523 else
2524 {
2525 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2526 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2527 __array = (AP).__va_stk;
2528 }
2529 */
2530
2531 lab_false2 = gen_label_rtx ();
2532 emit_cmp_and_jump_insns (orig_ndx,
2533 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2534 GE, const1_rtx, SImode, 0, lab_false2);
2535
dd52a190 2536 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2537 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
f6b7ba2b 2538 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2539 TREE_SIDE_EFFECTS (tmp) = 1;
2540 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2541
2542 emit_label (lab_false2);
2543
2544 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2545 if (r != array)
2546 emit_move_insn (array, r);
2547
4588bbd8 2548 if (lab_over != NULL_RTX)
89d4bc5e 2549 emit_label (lab_over);
dd52a190 2550
f6b7ba2b 2551
2552 /* Given the base array pointer (__array) and index to the subsequent
2553 argument (__va_ndx), find the address:
2554
dd52a190 2555 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2556 ? sizeof (TYPE)
2557 : __va_size (TYPE))
f6b7ba2b 2558
2559 The results are endian-dependent because values smaller than one word
2560 are aligned differently.
2561 */
2562
dd52a190 2563 size = gen_reg_rtx (SImode);
2564 emit_move_insn (size, va_size);
2565
2566 if (BYTES_BIG_ENDIAN)
2567 {
2568 rtx lab_use_va_size = gen_label_rtx ();
2569
2570 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2571 EXPAND_NORMAL),
2572 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2573 GE, const1_rtx, SImode, 0, lab_use_va_size);
2574
2575 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2576 if (r != size)
2577 emit_move_insn (size, r);
2578
2579 emit_label (lab_use_va_size);
2580 }
f6b7ba2b 2581
2582 addr_tree = build (PLUS_EXPR, ptr_type_node,
2583 make_tree (ptr_type_node, array),
2584 ndx);
dd52a190 2585 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2586 make_tree (intSI_type_node, size));
f6b7ba2b 2587 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2588 addr = copy_to_reg (addr);
2589 return addr;
2590}
2591
2592
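/* Return the class to use when reloading X into a register of class
   CLASS: CONST_DOUBLE constants are kept out of registers entirely,
   and AR_REGS is narrowed to GR_REGS so that reloads never use the
   stack pointer.  */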
fc12fa10 2593enum reg_class
2594xtensa_preferred_reload_class (x, class)
2595 rtx x;
2596 enum reg_class class;
2597{
2598 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2599 return NO_REGS;
2600
2601 /* Don't use sp for reloads! */
2602 if (class == AR_REGS)
2603 return GR_REGS;
2604
2605 return class;
2606}
2607
2608
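/* Return the secondary register class needed to copy X into a register
   of class CLASS (or out of it, for an output reload): constant-pool
   loads into FP_REGS and moves between the MAC16 accumulator and
   anything other than an AR register go through GR_REGS.  */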
f6b7ba2b 2609enum reg_class
2610xtensa_secondary_reload_class (class, mode, x, isoutput)
2611 enum reg_class class;
2612 enum machine_mode mode ATTRIBUTE_UNUSED;
2613 rtx x;
2614 int isoutput;
2615{
2616 int regno;
2617
2618 if (GET_CODE (x) == SIGN_EXTEND)
2619 x = XEXP (x, 0);
2620 regno = xt_true_regnum (x);
2621
2622 if (!isoutput)
2623 {
2624 if (class == FP_REGS && constantpool_mem_p (x))
2625 return GR_REGS;
2626 }
2627
2628 if (ACC_REG_P (regno))
2629 return (class == GR_REGS ? NO_REGS : GR_REGS);
2630 if (class == ACC_REG)
2631 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2632
2633 return NO_REGS;
2634}
2635
2636
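/* Set up reg_alloc_order for the current function: non-leaf functions
   use the fixed reg_nonleaf_alloc_order, while leaf-function candidates
   prefer the lower AR registers and keep the incoming argument
   registers as a last resort.  */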
2637void
2638order_regs_for_local_alloc ()
2639{
2640 if (!leaf_function_p ())
2641 {
2642 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2643 FIRST_PSEUDO_REGISTER * sizeof (int));
2644 }
2645 else
2646 {
2647 int i, num_arg_regs;
2648 int nxt = 0;
2649
2650 /* use the AR registers in increasing order (skipping a0 and a1)
2651 but save the incoming argument registers as a last resort */
2652 num_arg_regs = current_function_args_info.arg_words;
2653 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2654 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2655 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2656 reg_alloc_order[nxt++] = i + num_arg_regs;
2657 for (i = 0; i < num_arg_regs; i++)
2658 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2659
2660 /* list the FP registers in order for now */
2661 for (i = 0; i < 16; i++)
2662 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2663
2664 /* GCC requires that we list *all* the registers.... */
2665 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2666 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2667 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2668 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2669
2670 /* list the coprocessor registers in order */
2671 for (i = 0; i < BR_REG_NUM; i++)
2672 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2673
2674 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2675 }
2676}
2677
2678
2679/* A customized version of reg_overlap_mentioned_p that only looks for
2680 references to a7 (as opposed to hard_frame_pointer_rtx). */
2681
2682int
2683a7_overlap_mentioned_p (x)
2684 rtx x;
2685{
2686 int i, j;
2687 unsigned int x_regno;
2688 const char *fmt;
2689
2690 if (GET_CODE (x) == REG)
2691 {
2692 x_regno = REGNO (x);
2693 return (x != hard_frame_pointer_rtx
2694 && x_regno < A7_REG + 1
2695 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2696 }
2697
2698 if (GET_CODE (x) == SUBREG
2699 && GET_CODE (SUBREG_REG (x)) == REG
2700 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2701 {
2702 x_regno = subreg_regno (x);
2703 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2704 && x_regno < A7_REG + 1
2705 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2706 }
2707
2708 /* X does not match, so try its subexpressions. */
2709 fmt = GET_RTX_FORMAT (GET_CODE (x));
2710 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2711 {
2712 if (fmt[i] == 'e')
2713 {
2714 if (a7_overlap_mentioned_p (XEXP (x, i)))
2715 return 1;
2716 }
2717 else if (fmt[i] == 'E')
2718 {
2719 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2720 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2721 return 1;
2722 }
2723 }
2724
2725 return 0;
2726}
bbfbe351 2727
2728/* The literal pool stays with the function. */
2729
2730static void
2731xtensa_select_rtx_section (mode, x, align)
2732 enum machine_mode mode ATTRIBUTE_UNUSED;
2733 rtx x ATTRIBUTE_UNUSED;
2734 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2735{
2736 function_section (current_function_decl);
2737}
7811991d 2738
2739/* If we are referencing a function that is static, make the SYMBOL_REF
2740 special so that we can generate direct calls to it even with -fpic. */
2741
2742static void
2743xtensa_encode_section_info (decl, first)
2744 tree decl;
2745 int first ATTRIBUTE_UNUSED;
2746{
2747 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2748 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2749}
1f3233d1 2750
2751#include "gt-xtensa.h"