]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/xtensa/xtensa.c
re PR c++/3902 ([parser] ambiguous 8.2/7)
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
CommitLineData
03984308 1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
89f6025d 2 Copyright 2001,2002 Free Software Foundation, Inc.
03984308
BW
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
03984308
BW
26#include "rtl.h"
27#include "regs.h"
03984308
BW
28#include "hard-reg-set.h"
29#include "basic-block.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "insn-attr.h"
35#include "insn-codes.h"
36#include "recog.h"
37#include "output.h"
38#include "tree.h"
39#include "expr.h"
40#include "flags.h"
41#include "reload.h"
42#include "tm_p.h"
43#include "function.h"
44#include "toplev.h"
45#include "optabs.h"
b64a1b53 46#include "output.h"
03984308 47#include "libfuncs.h"
07232638 48#include "ggc.h"
03984308
BW
49#include "target.h"
50#include "target-def.h"
540eaea8 51#include "langhooks.h"
03984308
BW
52
53/* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
55 of EQ, NE, etc. */
56
57enum internal_test {
58 ITEST_EQ,
59 ITEST_NE,
60 ITEST_GT,
61 ITEST_GE,
62 ITEST_LT,
63 ITEST_LE,
64 ITEST_GTU,
65 ITEST_GEU,
66 ITEST_LTU,
67 ITEST_LEU,
68 ITEST_MAX
69 };
70
71/* Cached operands, and operator to compare for use in set/branch on
72 condition codes. */
73rtx branch_cmp[2];
74
75/* what type of branch to use */
76enum cmp_type branch_type;
77
78/* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
81
82/* Current frame size calculated by compute_frame_size. */
83unsigned xtensa_current_frame_size;
84
85/* Tables of ld/st opcode names for block moves */
86const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88#define LARGEST_MOVE_RATIO 15
89
90/* Define the structure for the machine field in struct function. */
e2500fed 91struct machine_function GTY(())
03984308
BW
92{
93 int accesses_prev_frame;
58db834b 94 bool incoming_a7_copied;
03984308
BW
95};
96
97/* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
99 treatment. */
100
101const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
102{
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
106 1
107};
108
109/* Map hard register number to register class */
110const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
111{
89f6025d
BW
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
03984308
BW
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 ACC_REG,
122};
123
124/* Map register constraint character to register class. */
125enum reg_class xtensa_char_to_class[256] =
126{
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191};
192
b64a1b53
RH
193static int b4const_or_zero PARAMS ((int));
194static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197static rtx gen_conditional_move PARAMS ((rtx));
198static rtx fixup_subreg_mem PARAMS ((rtx x));
199static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
07232638 200static struct machine_function * xtensa_init_machine_status PARAMS ((void));
b64a1b53 201static void printx PARAMS ((FILE *, signed int));
01abf342
BW
202static unsigned int xtensa_multibss_section_type_flags
203 PARAMS ((tree, const char *, int));
204static void xtensa_select_rtx_section
205 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
fb49053f 206static void xtensa_encode_section_info PARAMS ((tree, int));
3c50106f 207static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
b64a1b53
RH
208
209static rtx frame_size_const;
210static int current_function_arg_words;
211static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
212 REG_ALLOC_ORDER;
213\f
03984308
BW
214/* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
221
222#undef TARGET_ASM_FUNCTION_PROLOGUE
223#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
224
225/* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
229
230#undef TARGET_ASM_FUNCTION_EPILOGUE
231#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
232
233/* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
235
236#undef TARGET_ASM_ALIGNED_SI_OP
237#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
238
b64a1b53
RH
239#undef TARGET_ASM_SELECT_RTX_SECTION
240#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
fb49053f
RH
241#undef TARGET_ENCODE_SECTION_INFO
242#define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
03984308 243
3c50106f
RH
244#undef TARGET_RTX_COSTS
245#define TARGET_RTX_COSTS xtensa_rtx_costs
246
b64a1b53
RH
247struct gcc_target targetm = TARGET_INITIALIZER;
248\f
03984308
BW
249
250/*
251 * Functions to test Xtensa immediate operand validity.
252 */
253
/* Return nonzero if V is one of the immediates encodable in the
   "b4constu" field used by the unsigned branch instructions
   (BGEUI/BLTUI).  */

int
xtensa_b4constu (int v)
{
  switch (v)
    {
    case 2: case 3: case 4: case 5: case 6: case 7: case 8:
    case 10: case 12: case 16: case 32: case 64: case 128: case 256:
    case 32768: case 65536:
      return 1;
    default:
      return 0;
    }
}
280
/* Return nonzero if V is a multiple of 256 in the range of a signed
   8-bit value scaled by 256 (the ADDMI immediate): -32768..32512.  */

int
xtensa_simm8x256 (int v)
{
  if ((v & 255) != 0)
    return 0;
  return v >= -32768 && v <= 32512;
}
287
/* Return nonzero if V is a legal ADDI.N immediate: -1 or 1..15.  */

int
xtensa_ai4const (int v)
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
294
/* Return nonzero if V fits the MOVI.N 7-bit immediate range
   (-32..95).  */

int
xtensa_simm7 (int v)
{
  return (v >= -32) && (v <= 95);
}
301
/* Return nonzero if V is one of the immediates encodable in the
   "b4const" field used by the signed branch instructions
   (BEQI/BNEI/BGEI/BLTI).  */

int
xtensa_b4const (int v)
{
  switch (v)
    {
    case -1:
    case 1: case 2: case 3: case 4: case 5: case 6: case 7: case 8:
    case 10: case 12: case 16: case 32: case 64: case 128: case 256:
      return 1;
    default:
      return 0;
    }
}
328
/* Return nonzero if V fits in a signed 8-bit immediate
   (-128..127), e.g. for ADDI.  */

int
xtensa_simm8 (int v)
{
  return (v >= -128) && (v <= 127);
}
335
/* Return nonzero if V is in the 7..22 range used for the SEXT
   field-size operand.  */

int
xtensa_tp7 (int v)
{
  return (v >= 7) && (v <= 22);
}
342
/* Return nonzero if V is a word-aligned offset valid for the narrow
   L32I.N/S32I.N instructions: a multiple of 4 in 0..60.  */

int
xtensa_lsi4x4 (int v)
{
  if ((v & 3) != 0)
    return 0;
  return v >= 0 && v <= 60;
}
349
/* Return nonzero if V fits the 12-bit signed MOVI immediate
   (-2048..2047).  */

int
xtensa_simm12b (int v)
{
  return (v >= -2048) && (v <= 2047);
}
356
/* Return nonzero if V is an unsigned 8-bit load/store offset
   (0..255), used for byte accesses.  */

int
xtensa_uimm8 (int v)
{
  return (v >= 0) && (v <= 255);
}
363
/* Return nonzero if V is an unsigned 8-bit offset scaled by 2: an
   even value in 0..510, used for 16-bit accesses.  */

int
xtensa_uimm8x2 (int v)
{
  if ((v & 1) != 0)
    return 0;
  return v >= 0 && v <= 510;
}
370
/* Return nonzero if V is an unsigned 8-bit offset scaled by 4: a
   multiple of 4 in 0..1020, used for 32-bit accesses.  */

int
xtensa_uimm8x4 (int v)
{
  if ((v & 3) != 0)
    return 0;
  return v >= 0 && v <= 1020;
}
377
378
379/* This is just like the standard true_regnum() function except that it
380 works even when reg_renumber is not initialized. */
381
382int
383xt_true_regnum (x)
384 rtx x;
385{
386 if (GET_CODE (x) == REG)
387 {
388 if (reg_renumber
389 && REGNO (x) >= FIRST_PSEUDO_REGISTER
390 && reg_renumber[REGNO (x)] >= 0)
391 return reg_renumber[REGNO (x)];
392 return REGNO (x);
393 }
394 if (GET_CODE (x) == SUBREG)
395 {
396 int base = xt_true_regnum (SUBREG_REG (x));
397 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
398 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
399 GET_MODE (SUBREG_REG (x)),
400 SUBREG_BYTE (x), GET_MODE (x));
401 }
402 return -1;
403}
404
405
406int
407add_operand (op, mode)
408 rtx op;
409 enum machine_mode mode;
410{
411 if (GET_CODE (op) == CONST_INT)
412 return (xtensa_simm8 (INTVAL (op)) ||
413 xtensa_simm8x256 (INTVAL (op)));
414
415 return register_operand (op, mode);
416}
417
418
419int
420arith_operand (op, mode)
421 rtx op;
422 enum machine_mode mode;
423{
424 if (GET_CODE (op) == CONST_INT)
425 return xtensa_simm8 (INTVAL (op));
426
427 return register_operand (op, mode);
428}
429
430
431int
432nonimmed_operand (op, mode)
433 rtx op;
434 enum machine_mode mode;
435{
436 /* We cannot use the standard nonimmediate_operand() predicate because
437 it includes constant pool memory operands. */
438
439 if (memory_operand (op, mode))
440 return !constantpool_address_p (XEXP (op, 0));
441
442 return register_operand (op, mode);
443}
444
445
446int
447mem_operand (op, mode)
448 rtx op;
449 enum machine_mode mode;
450{
451 /* We cannot use the standard memory_operand() predicate because
452 it includes constant pool memory operands. */
453
454 if (memory_operand (op, mode))
455 return !constantpool_address_p (XEXP (op, 0));
456
457 return FALSE;
458}
459
460
461int
a8cacfd2 462xtensa_valid_move (mode, operands)
03984308 463 enum machine_mode mode;
a8cacfd2 464 rtx *operands;
03984308 465{
a8cacfd2
BW
466 /* Either the destination or source must be a register, and the
467 MAC16 accumulator doesn't count. */
468
469 if (register_operand (operands[0], mode))
470 {
471 int dst_regnum = xt_true_regnum (operands[0]);
472
473 /* The stack pointer can only be assigned with a MOVSP opcode. */
474 if (dst_regnum == STACK_POINTER_REGNUM)
475 return (mode == SImode
476 && register_operand (operands[1], mode)
477 && !ACC_REG_P (xt_true_regnum (operands[1])));
478
479 if (!ACC_REG_P (dst_regnum))
480 return true;
481 }
3437320b 482 if (register_operand (operands[1], mode))
a8cacfd2
BW
483 {
484 int src_regnum = xt_true_regnum (operands[1]);
485 if (!ACC_REG_P (src_regnum))
486 return true;
487 }
03984308
BW
488 return FALSE;
489}
490
491
492int
493mask_operand (op, mode)
494 rtx op;
495 enum machine_mode mode;
496{
497 if (GET_CODE (op) == CONST_INT)
498 return xtensa_mask_immediate (INTVAL (op));
499
500 return register_operand (op, mode);
501}
502
503
504int
505extui_fldsz_operand (op, mode)
506 rtx op;
507 enum machine_mode mode ATTRIBUTE_UNUSED;
508{
509 return ((GET_CODE (op) == CONST_INT)
510 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
511}
512
513
514int
515sext_operand (op, mode)
516 rtx op;
517 enum machine_mode mode;
518{
519 if (TARGET_SEXT)
520 return nonimmed_operand (op, mode);
521 return mem_operand (op, mode);
522}
523
524
525int
526sext_fldsz_operand (op, mode)
527 rtx op;
528 enum machine_mode mode ATTRIBUTE_UNUSED;
529{
530 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
531}
532
533
534int
535lsbitnum_operand (op, mode)
536 rtx op;
537 enum machine_mode mode ATTRIBUTE_UNUSED;
538{
539 if (GET_CODE (op) == CONST_INT)
540 {
541 return (BITS_BIG_ENDIAN
542 ? (INTVAL (op) == BITS_PER_WORD-1)
543 : (INTVAL (op) == 0));
544 }
545 return FALSE;
546}
547
548
/* Return nonzero if V is zero or a valid "b4const" branch
   immediate.  */

static int
b4const_or_zero (int v)
{
  return v == 0 || xtensa_b4const (v);
}
557
558
559int
560branch_operand (op, mode)
561 rtx op;
562 enum machine_mode mode;
563{
564 if (GET_CODE (op) == CONST_INT)
565 return b4const_or_zero (INTVAL (op));
566
567 return register_operand (op, mode);
568}
569
570
571int
572ubranch_operand (op, mode)
573 rtx op;
574 enum machine_mode mode;
575{
576 if (GET_CODE (op) == CONST_INT)
577 return xtensa_b4constu (INTVAL (op));
578
579 return register_operand (op, mode);
580}
581
582
583int
584call_insn_operand (op, mode)
585 rtx op;
586 enum machine_mode mode ATTRIBUTE_UNUSED;
587{
588 if ((GET_CODE (op) == REG)
589 && (op != arg_pointer_rtx)
590 && ((REGNO (op) < FRAME_POINTER_REGNUM)
591 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
592 return TRUE;
593
594 if (CONSTANT_ADDRESS_P (op))
595 {
596 /* Direct calls only allowed to static functions with PIC. */
597 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
598 && SYMBOL_REF_FLAG (op)));
599 }
600
601 return FALSE;
602}
603
604
605int
606move_operand (op, mode)
607 rtx op;
608 enum machine_mode mode;
609{
610 if (register_operand (op, mode))
611 return TRUE;
612
613 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
614 result in 0/1. */
615 if (GET_CODE (op) == CONSTANT_P_RTX)
616 return TRUE;
617
618 if (GET_CODE (op) == CONST_INT)
619 return xtensa_simm12b (INTVAL (op));
620
621 if (GET_CODE (op) == MEM)
622 return memory_address_p (mode, XEXP (op, 0));
623
624 return FALSE;
625}
626
627
628int
629smalloffset_mem_p (op)
630 rtx op;
631{
632 if (GET_CODE (op) == MEM)
633 {
634 rtx addr = XEXP (op, 0);
635 if (GET_CODE (addr) == REG)
636 return REG_OK_FOR_BASE_P (addr);
637 if (GET_CODE (addr) == PLUS)
638 {
639 rtx offset = XEXP (addr, 0);
640 if (GET_CODE (offset) != CONST_INT)
641 offset = XEXP (addr, 1);
642 if (GET_CODE (offset) != CONST_INT)
643 return FALSE;
644 return xtensa_lsi4x4 (INTVAL (offset));
645 }
646 }
647 return FALSE;
648}
649
650
651int
652smalloffset_double_mem_p (op)
653 rtx op;
654{
655 if (!smalloffset_mem_p (op))
656 return FALSE;
657 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
658}
659
660
661int
662constantpool_address_p (addr)
663 rtx addr;
664{
665 rtx sym = addr;
666
667 if (GET_CODE (addr) == CONST)
668 {
669 rtx offset;
670
671 /* only handle (PLUS (SYM, OFFSET)) form */
672 addr = XEXP (addr, 0);
673 if (GET_CODE (addr) != PLUS)
674 return FALSE;
675
676 /* make sure the address is word aligned */
677 offset = XEXP (addr, 1);
678 if ((GET_CODE (offset) != CONST_INT)
679 || ((INTVAL (offset) & 3) != 0))
680 return FALSE;
681
682 sym = XEXP (addr, 0);
683 }
684
685 if ((GET_CODE (sym) == SYMBOL_REF)
686 && CONSTANT_POOL_ADDRESS_P (sym))
687 return TRUE;
688 return FALSE;
689}
690
691
692int
693constantpool_mem_p (op)
694 rtx op;
695{
696 if (GET_CODE (op) == MEM)
697 return constantpool_address_p (XEXP (op, 0));
698 return FALSE;
699}
700
701
702int
703non_const_move_operand (op, mode)
704 rtx op;
705 enum machine_mode mode;
706{
707 if (register_operand (op, mode))
708 return 1;
709 if (GET_CODE (op) == SUBREG)
710 op = SUBREG_REG (op);
711 if (GET_CODE (op) == MEM)
712 return memory_address_p (mode, XEXP (op, 0));
713 return FALSE;
714}
715
716
717/* Accept the floating point constant 1 in the appropriate mode. */
718
719int
720const_float_1_operand (op, mode)
721 rtx op;
722 enum machine_mode mode;
723{
724 REAL_VALUE_TYPE d;
725 static REAL_VALUE_TYPE onedf;
726 static REAL_VALUE_TYPE onesf;
727 static int one_initialized;
728
729 if ((GET_CODE (op) != CONST_DOUBLE)
730 || (mode != GET_MODE (op))
731 || (mode != DFmode && mode != SFmode))
732 return FALSE;
733
734 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
735
736 if (! one_initialized)
737 {
738 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
739 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
740 one_initialized = TRUE;
741 }
742
743 if (mode == DFmode)
744 return REAL_VALUES_EQUAL (d, onedf);
745 else
746 return REAL_VALUES_EQUAL (d, onesf);
747}
748
749
750int
751fpmem_offset_operand (op, mode)
752 rtx op;
753 enum machine_mode mode ATTRIBUTE_UNUSED;
754{
755 if (GET_CODE (op) == CONST_INT)
756 return xtensa_mem_offset (INTVAL (op), SFmode);
757 return 0;
758}
759
760
761void
762xtensa_extend_reg (dst, src)
763 rtx dst;
764 rtx src;
765{
766 rtx temp = gen_reg_rtx (SImode);
767 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
768
769 /* generate paradoxical subregs as needed so that the modes match */
770 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
771 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
772
773 emit_insn (gen_ashlsi3 (temp, src, shift));
774 emit_insn (gen_ashrsi3 (dst, temp, shift));
775}
776
777
778void
779xtensa_load_constant (dst, src)
780 rtx dst;
781 rtx src;
782{
783 enum machine_mode mode = GET_MODE (dst);
784 src = force_const_mem (SImode, src);
785
786 /* PC-relative loads are always SImode so we have to add a SUBREG if that
787 is not the desired mode */
788
789 if (mode != SImode)
790 {
791 if (register_operand (dst, mode))
792 dst = simplify_gen_subreg (SImode, dst, mode, 0);
793 else
794 {
795 src = force_reg (SImode, src);
796 src = gen_lowpart_SUBREG (mode, src);
797 }
798 }
799
800 emit_move_insn (dst, src);
801}
802
803
804int
805branch_operator (x, mode)
806 rtx x;
807 enum machine_mode mode;
808{
809 if (GET_MODE (x) != mode)
810 return FALSE;
811
812 switch (GET_CODE (x))
813 {
814 case EQ:
815 case NE:
816 case LT:
817 case GE:
818 return TRUE;
819 default:
820 break;
821 }
822 return FALSE;
823}
824
825
826int
827ubranch_operator (x, mode)
828 rtx x;
829 enum machine_mode mode;
830{
831 if (GET_MODE (x) != mode)
832 return FALSE;
833
834 switch (GET_CODE (x))
835 {
836 case LTU:
837 case GEU:
838 return TRUE;
839 default:
840 break;
841 }
842 return FALSE;
843}
844
845
846int
847boolean_operator (x, mode)
848 rtx x;
849 enum machine_mode mode;
850{
851 if (GET_MODE (x) != mode)
852 return FALSE;
853
854 switch (GET_CODE (x))
855 {
856 case EQ:
857 case NE:
858 return TRUE;
859 default:
860 break;
861 }
862 return FALSE;
863}
864
865
/* Return nonzero if V is a valid EXTUI mask: a contiguous run of
   1 to 16 low-order one bits, i.e. (1 << k) - 1 for k in 1..16.  */

int
xtensa_mask_immediate (int v)
{
  int width;

  for (width = 1; width <= 16; width++)
    {
      if ((v & 1) == 0)
	return 0;
      v >>= 1;
      if (v == 0)
	return 1;
    }

  /* More than 16 one bits (or a negative value that never shifts
     down to zero).  */
  return 0;
}
884
885
886int
887xtensa_mem_offset (v, mode)
888 unsigned v;
889 enum machine_mode mode;
890{
891 switch (mode)
892 {
893 case BLKmode:
894 /* Handle the worst case for block moves. See xtensa_expand_block_move
895 where we emit an optimized block move operation if the block can be
896 moved in < "move_ratio" pieces. The worst case is when the block is
897 aligned but has a size of (3 mod 4) (does this happen?) so that the
898 last piece requires a byte load/store. */
899 return (xtensa_uimm8 (v) &&
900 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
901
902 case QImode:
903 return xtensa_uimm8 (v);
904
905 case HImode:
906 return xtensa_uimm8x2 (v);
907
908 case DFmode:
909 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
910
911 default:
912 break;
913 }
914
915 return xtensa_uimm8x4 (v);
916}
917
918
919/* Make normal rtx_code into something we can index from an array */
920
921static enum internal_test
922map_test_to_internal_test (test_code)
923 enum rtx_code test_code;
924{
925 enum internal_test test = ITEST_MAX;
926
927 switch (test_code)
928 {
929 default: break;
930 case EQ: test = ITEST_EQ; break;
931 case NE: test = ITEST_NE; break;
932 case GT: test = ITEST_GT; break;
933 case GE: test = ITEST_GE; break;
934 case LT: test = ITEST_LT; break;
935 case LE: test = ITEST_LE; break;
936 case GTU: test = ITEST_GTU; break;
937 case GEU: test = ITEST_GEU; break;
938 case LTU: test = ITEST_LTU; break;
939 case LEU: test = ITEST_LEU; break;
940 }
941
942 return test;
943}
944
945
946/* Generate the code to compare two integer values. The return value is
947 the comparison expression. */
948
949static rtx
950gen_int_relational (test_code, cmp0, cmp1, p_invert)
951 enum rtx_code test_code; /* relational test (EQ, etc) */
952 rtx cmp0; /* first operand to compare */
953 rtx cmp1; /* second operand to compare */
954 int *p_invert; /* whether branch needs to reverse its test */
955{
956 struct cmp_info {
957 enum rtx_code test_code; /* test code to use in insn */
958 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
959 int const_add; /* constant to add (convert LE -> LT) */
960 int reverse_regs; /* reverse registers in test */
961 int invert_const; /* != 0 if invert value if cmp1 is constant */
962 int invert_reg; /* != 0 if invert value if cmp1 is register */
963 int unsignedp; /* != 0 for unsigned comparisons. */
964 };
965
966 static struct cmp_info info[ (int)ITEST_MAX ] = {
967
968 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
969 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
970
971 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
972 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
973 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
974 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
975
976 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
977 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
978 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
979 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
980 };
981
982 enum internal_test test;
983 enum machine_mode mode;
984 struct cmp_info *p_info;
985
986 test = map_test_to_internal_test (test_code);
987 if (test == ITEST_MAX)
988 abort ();
989
990 p_info = &info[ (int)test ];
991
992 mode = GET_MODE (cmp0);
993 if (mode == VOIDmode)
994 mode = GET_MODE (cmp1);
995
996 /* Make sure we can handle any constants given to us. */
997 if (GET_CODE (cmp1) == CONST_INT)
998 {
999 HOST_WIDE_INT value = INTVAL (cmp1);
1000 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
1001
1002 /* if the immediate overflows or does not fit in the immediate field,
1003 spill it to a register */
1004
1005 if ((p_info->unsignedp ?
1006 (uvalue + p_info->const_add > uvalue) :
1007 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1008 {
1009 cmp1 = force_reg (mode, cmp1);
1010 }
1011 else if (!(p_info->const_range_p) (value + p_info->const_add))
1012 {
1013 cmp1 = force_reg (mode, cmp1);
1014 }
1015 }
1016 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1017 {
1018 cmp1 = force_reg (mode, cmp1);
1019 }
1020
1021 /* See if we need to invert the result. */
1022 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1023 ? p_info->invert_const
1024 : p_info->invert_reg);
1025
1026 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1027 Comparison between two registers, may involve switching operands. */
1028 if (GET_CODE (cmp1) == CONST_INT)
1029 {
1030 if (p_info->const_add != 0)
1031 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1032
1033 }
1034 else if (p_info->reverse_regs)
1035 {
1036 rtx temp = cmp0;
1037 cmp0 = cmp1;
1038 cmp1 = temp;
1039 }
1040
1041 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1042}
1043
1044
1045/* Generate the code to compare two float values. The return value is
1046 the comparison expression. */
1047
1048static rtx
1049gen_float_relational (test_code, cmp0, cmp1)
1050 enum rtx_code test_code; /* relational test (EQ, etc) */
1051 rtx cmp0; /* first operand to compare */
1052 rtx cmp1; /* second operand to compare */
1053{
1054 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1055 rtx brtmp;
1056 int reverse_regs, invert;
1057
1058 switch (test_code)
1059 {
1060 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1061 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1062 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1063 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1064 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1065 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1066 default:
1067 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1068 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1069 }
1070
1071 if (reverse_regs)
1072 {
1073 rtx temp = cmp0;
1074 cmp0 = cmp1;
1075 cmp1 = temp;
1076 }
1077
1078 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1079 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1080
1081 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1082}
1083
1084
1085void
1086xtensa_expand_conditional_branch (operands, test_code)
1087 rtx *operands;
1088 enum rtx_code test_code;
1089{
1090 enum cmp_type type = branch_type;
1091 rtx cmp0 = branch_cmp[0];
1092 rtx cmp1 = branch_cmp[1];
1093 rtx cmp;
1094 int invert;
1095 rtx label1, label2;
1096
1097 switch (type)
1098 {
1099 case CMP_DF:
1100 default:
1101 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1102
1103 case CMP_SI:
1104 invert = FALSE;
1105 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1106 break;
1107
1108 case CMP_SF:
1109 if (!TARGET_HARD_FLOAT)
1110 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1111 invert = FALSE;
1112 cmp = gen_float_relational (test_code, cmp0, cmp1);
1113 break;
1114 }
1115
1116 /* Generate the branch. */
1117
1118 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1119 label2 = pc_rtx;
1120
1121 if (invert)
1122 {
1123 label2 = label1;
1124 label1 = pc_rtx;
1125 }
1126
1127 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1128 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1129 label1,
1130 label2)));
1131}
1132
1133
1134static rtx
1135gen_conditional_move (cmp)
1136 rtx cmp;
1137{
1138 enum rtx_code code = GET_CODE (cmp);
1139 rtx op0 = branch_cmp[0];
1140 rtx op1 = branch_cmp[1];
1141
1142 if (branch_type == CMP_SI)
1143 {
1144 /* Jump optimization calls get_condition() which canonicalizes
1145 comparisons like (GE x <const>) to (GT x <const-1>).
1146 Transform those comparisons back to GE, since that is the
1147 comparison supported in Xtensa. We shouldn't have to
1148 transform <LE x const> comparisons, because neither
1149 xtensa_expand_conditional_branch() nor get_condition() will
1150 produce them. */
1151
1152 if ((code == GT) && (op1 == constm1_rtx))
1153 {
1154 code = GE;
1155 op1 = const0_rtx;
1156 }
1157 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1158
1159 if (boolean_operator (cmp, VOIDmode))
1160 {
1161 /* swap the operands to make const0 second */
1162 if (op0 == const0_rtx)
1163 {
1164 op0 = op1;
1165 op1 = const0_rtx;
1166 }
1167
1168 /* if not comparing against zero, emit a comparison (subtract) */
1169 if (op1 != const0_rtx)
1170 {
1171 op0 = expand_binop (SImode, sub_optab, op0, op1,
1172 0, 0, OPTAB_LIB_WIDEN);
1173 op1 = const0_rtx;
1174 }
1175 }
1176 else if (branch_operator (cmp, VOIDmode))
1177 {
1178 /* swap the operands to make const0 second */
1179 if (op0 == const0_rtx)
1180 {
1181 op0 = op1;
1182 op1 = const0_rtx;
1183
1184 switch (code)
1185 {
1186 case LT: code = GE; break;
1187 case GE: code = LT; break;
1188 default: abort ();
1189 }
1190 }
1191
1192 if (op1 != const0_rtx)
1193 return 0;
1194 }
1195 else
1196 return 0;
1197
1198 return gen_rtx (code, VOIDmode, op0, op1);
1199 }
1200
1201 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1202 return gen_float_relational (code, op0, op1);
1203
1204 return 0;
1205}
1206
1207
1208int
1209xtensa_expand_conditional_move (operands, isflt)
1210 rtx *operands;
1211 int isflt;
1212{
1213 rtx cmp;
1214 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1215
1216 if (!(cmp = gen_conditional_move (operands[1])))
1217 return 0;
1218
1219 if (isflt)
1220 gen_fn = (branch_type == CMP_SI
1221 ? gen_movsfcc_internal0
1222 : gen_movsfcc_internal1);
1223 else
1224 gen_fn = (branch_type == CMP_SI
1225 ? gen_movsicc_internal0
1226 : gen_movsicc_internal1);
1227
1228 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1229 operands[2], operands[3], cmp));
1230 return 1;
1231}
1232
1233
1234int
1235xtensa_expand_scc (operands)
1236 rtx *operands;
1237{
1238 rtx dest = operands[0];
1239 rtx cmp = operands[1];
1240 rtx one_tmp, zero_tmp;
1241 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1242
1243 if (!(cmp = gen_conditional_move (cmp)))
1244 return 0;
1245
1246 one_tmp = gen_reg_rtx (SImode);
1247 zero_tmp = gen_reg_rtx (SImode);
1248 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1249 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1250
1251 gen_fn = (branch_type == CMP_SI
1252 ? gen_movsicc_internal0
1253 : gen_movsicc_internal1);
1254 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1255 return 1;
1256}
1257
1258
1259/* Emit insns to move operands[1] into operands[0].
1260
1261 Return 1 if we have written out everything that needs to be done to
1262 do the move. Otherwise, return 0 and the caller will emit the move
1263 normally. */
1264
int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Constants that cannot be used directly (anything other than a
     signed 12-bit immediate) must be loaded from the constant pool.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      /* Before reload, force invalid operand combinations through a
	 register.  */
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);

      /* An incoming argument in a7 needs special treatment; see the
	 comment above xtensa_copy_incoming_a7.  */
      if (xtensa_copy_incoming_a7 (operands, mode))
	return 1;
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload.  So we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1298
1299static rtx
1300fixup_subreg_mem (x)
1301 rtx x;
1302{
1303 if (GET_CODE (x) == SUBREG
1304 && GET_CODE (SUBREG_REG (x)) == REG
1305 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1306 {
1307 rtx temp =
1308 gen_rtx_SUBREG (GET_MODE (x),
1309 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1310 SUBREG_BYTE (x));
1311 x = alter_subreg (&temp);
1312 }
1313 return x;
1314}
1315
1316
58db834b
BW
1317/* Check if this move is copying an incoming argument in a7. If so,
1318 emit the move, followed by the special "set_frame_ptr"
1319 unspec_volatile insn, at the very beginning of the function. This
1320 is necessary because the register allocator will ignore conflicts
1321 with a7 and may assign some other pseudo to a7. If that pseudo was
1322 assigned prior to this move, it would clobber the incoming argument
1323 in a7. By copying the argument out of a7 as the very first thing,
1324 and then immediately following that with an unspec_volatile to keep
1325 the scheduler away, we should avoid any problems. */
1326
bool
xtensa_copy_incoming_a7 (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Only the first copy out of a7 gets the special treatment; see the
     comment above the function.  */
  if (a7_overlap_mentioned_p (operands[1])
      && !cfun->machine->incoming_a7_copied)
    {
      rtx mov;

      /* Pick the move pattern matching MODE.  */
      switch (mode)
	{
	case DFmode:
	  mov = gen_movdf_internal (operands[0], operands[1]);
	  break;
	case SFmode:
	  mov = gen_movsf_internal (operands[0], operands[1]);
	  break;
	case DImode:
	  mov = gen_movdi_internal (operands[0], operands[1]);
	  break;
	case SImode:
	  mov = gen_movsi_internal (operands[0], operands[1]);
	  break;
	case HImode:
	  mov = gen_movhi_internal (operands[0], operands[1]);
	  break;
	case QImode:
	  mov = gen_movqi_internal (operands[0], operands[1]);
	  break;
	default:
	  abort ();
	}

      /* Insert the instructions before any other argument copies.
	 (The set_frame_ptr insn comes _after_ the move, so push it
	 out first.) */
      push_topmost_sequence ();
      emit_insn_after (gen_set_frame_ptr (), get_insns ());
      emit_insn_after (mov, get_insns ());
      pop_topmost_sequence ();

      /* Ideally the incoming argument in a7 would only be copied
	 once, since propagating a7 into the body of a function
	 will almost certainly lead to errors.  However, there is
	 at least one harmless case (in GCSE) where the original
	 copy from a7 is changed to copy into a new pseudo.  Thus,
	 we use a flag to only do this special treatment for the
	 first copy of a7.  */

      cfun->machine->incoming_a7_copied = true;

      return 1;
    }

  return 0;
}
1383
1384
03984308
BW
1385/* Try to expand a block move operation to an RTL block move instruction.
1386 If not optimizing or if the block size is not a constant or if the
1387 block is small, the expansion fails and GCC falls back to calling
1388 memcpy().
1389
1390 operands[0] is the destination
1391 operands[1] is the source
1392 operands[2] is the length
1393 operands[3] is the alignment */
1394
1395int
1396xtensa_expand_block_move (operands)
1397 rtx *operands;
1398{
1399 rtx dest = operands[0];
1400 rtx src = operands[1];
1401 int bytes = INTVAL (operands[2]);
1402 int align = XINT (operands[3], 0);
1403 int num_pieces, move_ratio;
1404
1405 /* If this is not a fixed size move, just call memcpy */
1406 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1407 return 0;
1408
1409 /* Anything to move? */
1410 if (bytes <= 0)
1411 return 1;
1412
1413 if (align > MOVE_MAX)
1414 align = MOVE_MAX;
1415
1416 /* decide whether to expand inline based on the optimization level */
1417 move_ratio = 4;
1418 if (optimize > 2)
1419 move_ratio = LARGEST_MOVE_RATIO;
1420 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1421 if (num_pieces >= move_ratio)
1422 return 0;
1423
07232638 1424 /* make sure the memory addresses are valid */
0ae02efa
BW
1425 operands[0] = validize_mem (dest);
1426 operands[1] = validize_mem (src);
03984308
BW
1427
1428 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1429 operands[2], operands[3]));
1430 return 1;
1431}
1432
1433
1434/* Emit a sequence of instructions to implement a block move, trying
1435 to hide load delay slots as much as possible. Load N values into
1436 temporary registers, store those N values, and repeat until the
1437 complete block has been moved. N=delay_slots+1 */
1438
/* One buffered load or store: the assembly template (opcode plus
   operand placeholders) and its two operands (register and memory).  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1443
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;
     rtx *tmpregs;
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  /* Buffer N loads before emitting the N matching stores, where
     N = delay_slots + 1, so the load delay slots are covered.  */
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest loadable/storable mode for the alignment.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1534
1535
/* Return the widest integer mode no larger than ITEM_SIZE bytes for
   which both a load and a store opcode are available (see
   xtensa_ld_opcodes/xtensa_st_opcodes).  Aborts if not even QImode
   qualifies.  */

static enum machine_mode
xtensa_find_mode_for_size (item_size)
     unsigned item_size;
{
  enum machine_mode mode, tmode;

  while (1)
    {
      mode = VOIDmode;

      /* find mode closest to but not bigger than item_size */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) <= item_size)
	  mode = tmode;
      if (mode == VOIDmode)
	abort ();

      item_size = GET_MODE_SIZE (mode);

      if (xtensa_ld_opcodes[(int) mode]
	  && xtensa_st_opcodes[(int) mode])
	break;

      /* cannot load & store this mode; try something smaller */
      item_size -= 1;
    }

  return mode;
}
1566
1567
/* Expand a nonlocal goto.  OPERANDS[1] is the handler address and
   OPERANDS[3] is the frame pointer of the containing function.  */

void
xtensa_expand_nonlocal_goto (operands)
     rtx *operands;
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  /* The handler expression may reference the frame through the
     virtual frame pointer; rewrite such references to use the real
     frame pointer of the containing function.  */
  goto_handler = replace_rtx (copy_rtx (goto_handler),
			      virtual_stack_vars_rtx,
			      containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
		     0, VOIDmode, 2,
		     containing_fp, Pmode,
		     goto_handler, Pmode);
}
1590
1591
e2500fed
GK
1592static struct machine_function *
1593xtensa_init_machine_status ()
03984308 1594{
e2500fed 1595 return ggc_alloc_cleared (sizeof (struct machine_function));
03984308
BW
1596}
1597
1598
/* Force a frame pointer for this function and call the libgcc helper
   that spills the register windows to the stack.  */

void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
  cfun->machine->accesses_prev_frame = 1;

  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1609
1610
1611/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1612 a comment showing where the end of the loop is. However, if there is a
1613 label or a branch at the end of the loop then we need to place a nop
1614 there. If the loop ends with a label we need the nop so that branches
1615 targetting that label will target the nop (and thus remain in the loop),
1616 instead of targetting the instruction after the loop (and thus exiting
1617 the loop). If the loop ends with a branch, we need the nop in case the
1618 branch is targetting a location inside the loop. When the branch
1619 executes it will cause the loop count to be decremented even if it is
1620 taken (because it is the last instruction in the loop), so we need to
1621 nop after the branch to prevent the loop count from being decremented
1622 when the branch is taken. */
1623
1624void
1625xtensa_emit_loop_end (insn, operands)
1626 rtx insn;
1627 rtx *operands;
1628{
1629 char done = 0;
1630
1631 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1632 {
1633 switch (GET_CODE (insn))
1634 {
1635 case NOTE:
1636 case BARRIER:
1637 break;
1638
1639 case CODE_LABEL:
1640 output_asm_insn ("nop.n", operands);
1641 done = 1;
1642 break;
1643
1644 default:
1645 {
1646 rtx body = PATTERN (insn);
1647
1648 if (GET_CODE (body) == JUMP_INSN)
1649 {
1650 output_asm_insn ("nop.n", operands);
1651 done = 1;
1652 }
1653 else if ((GET_CODE (body) != USE)
1654 && (GET_CODE (body) != CLOBBER))
1655 done = 1;
1656 }
1657 break;
1658 }
1659 }
1660
1661 output_asm_insn ("# loop end for %0", operands);
1662}
1663
1664
1665char *
1666xtensa_emit_call (callop, operands)
1667 int callop;
1668 rtx *operands;
1669{
b64a1b53 1670 static char result[64];
03984308
BW
1671 rtx tgt = operands[callop];
1672
1673 if (GET_CODE (tgt) == CONST_INT)
1d0ea52e 1674 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
03984308
BW
1675 else if (register_operand (tgt, VOIDmode))
1676 sprintf (result, "callx8\t%%%d", callop);
1677 else
1678 sprintf (result, "call8\t%%%d", callop);
1679
1680 return result;
1681}
1682
1683
1684/* Return the stabs register number to use for 'regno'. */
1685
1686int
1687xtensa_dbx_register_number (regno)
1688 int regno;
1689{
1690 int first = -1;
1691
1692 if (GP_REG_P (regno)) {
1693 regno -= GP_REG_FIRST;
1694 first = 0;
1695 }
1696 else if (BR_REG_P (regno)) {
1697 regno -= BR_REG_FIRST;
1698 first = 16;
1699 }
1700 else if (FP_REG_P (regno)) {
1701 regno -= FP_REG_FIRST;
1702 /* The current numbering convention is that TIE registers are
1703 numbered in libcc order beginning with 256. We can't guarantee
1704 that the FP registers will come first, so the following is just
1705 a guess. It seems like we should make a special case for FP
1706 registers and give them fixed numbers < 256. */
1707 first = 256;
1708 }
1709 else if (ACC_REG_P (regno))
1710 {
1711 first = 0;
1712 regno = -1;
1713 }
1714
1715 /* When optimizing, we sometimes get asked about pseudo-registers
1716 that don't represent hard registers. Return 0 for these. */
1717 if (first == -1)
1718 return 0;
1719
1720 return first + regno;
1721}
1722
1723
1724/* Argument support functions. */
1725
1726/* Initialize CUMULATIVE_ARGS for a function. */
1727
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  /* Start with no argument words consumed.  */
  cum->arg_words = 0;
}
1736
1737/* Advance the argument to the next argument position. */
1738
1739void
1740function_arg_advance (cum, mode, type)
1741 CUMULATIVE_ARGS *cum; /* current arg information */
1742 enum machine_mode mode; /* current arg mode */
1743 tree type; /* type of the argument or 0 if lib support */
1744{
1745 int words, max;
1746 int *arg_words;
1747
1748 arg_words = &cum->arg_words;
1749 max = MAX_ARGS_IN_REGISTERS;
1750
1751 words = (((mode != BLKmode)
1752 ? (int) GET_MODE_SIZE (mode)
1753 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1754
1755 if ((*arg_words + words > max) && (*arg_words < max))
1756 *arg_words = max;
1757
1758 *arg_words += words;
1759}
1760
1761
1762/* Return an RTL expression containing the register for the given mode,
1763 or 0 if the argument is to be passed on the stack. */
1764
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Size of the argument, rounded up to whole words.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Doubleword-aligned arguments start at an even word position.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* No register/stack splitting: anything that does not fit entirely
     in the remaining registers goes on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL. */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      /* One EXPR_LIST entry per word, at its byte offset.  */
      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
1820
1821
/* Validate and finalize the target options, and initialize the
   per-target lookup tables used by the rest of this file.  */

void
override_options ()
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; letters tied to an
     optional feature map to NO_REGS when that feature is disabled.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Values wider than a word must start at an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
    flag_pic = 1;
}
1884
1885
1886/* A C compound statement to output to stdio stream STREAM the
1887 assembler syntax for an instruction operand X. X is an RTL
1888 expression.
1889
1890 CODE is a value that can be used to specify one of several ways
1891 of printing the operand. It is used when identical operands
1892 must be printed differently depending on the context. CODE
1893 comes from the '%' specification that was used to request
1894 printing of the operand. If the specification was just '%DIGIT'
1895 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1896 is the ASCII code for LTR.
1897
1898 If X is a register, this macro should print the register's name.
1899 The names can be found in an array 'reg_names' whose type is
1900 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1901
1902 When the machine description has a specification '%PUNCT' (a '%'
1903 followed by a punctuation character), this macro is called with
1904 a null pointer for X and the punctuation character for CODE.
1905
1906 'a', 'c', 'l', and 'n' are reserved.
1907
1908 The Xtensa specific codes are:
1909
1910 'd' CONST_INT, print as signed decimal
1911 'x' CONST_INT, print as signed hexadecimal
1912 'K' CONST_INT, print number of bits in mask for EXTUI
1913 'R' CONST_INT, print (X & 0x1f)
1914 'L' CONST_INT, print ((32 - X) & 0x1f)
1915 'D' REG, print second register of double-word register operand
1916 'N' MEM, print address of next word following a memory operand
1917 'v' MEM, if memory reference is volatile, output a MEMW before it
1918*/
1919
/* Print VAL to FILE in a readable form: single digits in decimal,
   everything else in hexadecimal with an explicit sign for negative
   values.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: plain -val is undefined behavior
       (signed overflow) when VAL == INT_MIN, and %x expects an
       unsigned argument anyway.  */
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1933
1934
void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;			/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D' selects the second register of a double-word operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /* For a volatile memory reference, emit a MEMW before the
	 load or store.  */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	{
	  /* 'N' prints the address of the word following OP; only
	     double-word modes have a following word.  */
	  enum machine_mode mode;
	  switch (GET_MODE (op))
	    {
	    case DFmode: mode = SFmode; break;
	    case DImode: mode = SImode; break;
	    default: abort ();
	    }
	  op = adjust_address (op, mode, 4);
	}

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  {
	    /* 'K': number of low-order one bits, for the EXTUI mask.  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    /* The mask must be a solid run of 1..16 low-order ones.  */
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  /* 'L': print (32 - X) & 0x1f.  */
	  fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  /* 'R': print (X & 0x1f).  */
	  fprintf (file, "%ld", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  /* 'x': signed hexadecimal.  */
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  /* 'd' (and no letter): signed decimal.  */
	  fprintf (file, "%ld", INTVAL (op));
	  break;

	}
      break;

    default:
      output_addr_const (file, op);
    }
}
2026
2027
2028/* A C compound statement to output to stdio stream STREAM the
2029 assembler syntax for an instruction operand that is a memory
fb49053f 2030 reference whose address is ADDR. ADDR is an RTL expression. */
03984308
BW
2031
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      /* Bare base register: print it with a zero offset.  */
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	/* The base register may appear on either side of the PLUS.  */
	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      /* Constant (absolute) addresses.  */
      output_addr_const (file, addr);
      break;
    }
}
2088
2089
2090/* Emit either a label, .comm, or .lcomm directive. */
2091
void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;
     char *final_string;
     int size;
{
  /* INIT_STRING selects the directive and FINAL_STRING is a printf
     format that consumes SIZE.  */
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2104
2105
/* Emit a ".literal" pool entry for constant X of mode MODE, labeled
   .LC<LABELNO>.  Floating-point constants are written as their target
   bit patterns; integers as one or two words.  */

void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case DFmode:
	  /* Two words for a double.  */
	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Emit the two 32-bit halves separately.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2168
2169
2170/* Return the bytes needed to compute the frame pointer from the current
2171 stack pointer. */
2172
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))

long
compute_frame_size (size)
     int size;			/* # of var. bytes allocated */
{
  /* add space for the incoming static chain value */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  /* Total frame = locals + outgoing args + register-window save area,
     rounded up to the stack boundary.  The result is also cached in
     xtensa_current_frame_size for the prologue/epilogue code.  */
  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2190
2191
2192int
2193xtensa_frame_pointer_required ()
2194{
2195 /* The code to expand builtin_frame_addr and builtin_return_addr
2196 currently uses the hard_frame_pointer instead of frame_pointer.
2197 This seems wrong but maybe it's necessary for other architectures.
2198 This function is derived from the i386 code. */
2199
2200 if (cfun->machine->accesses_prev_frame)
2201 return 1;
2202
2203 return 0;
2204}
2205
2206
2207void
2208xtensa_reorg (first)
2209 rtx first;
2210{
2211 rtx insn, set_frame_ptr_insn = 0;
2212
2213 unsigned long tsize = compute_frame_size (get_frame_size ());
2214 if (tsize < (1 << (12+3)))
2215 frame_size_const = 0;
2216 else
2217 {
2218 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2219
2220 /* make sure the constant is used so it doesn't get eliminated
2221 from the constant pool */
2222 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2223 }
2224
2225 if (!frame_pointer_needed)
2226 return;
2227
2228 /* Search all instructions, looking for the insn that sets up the
2229 frame pointer. This search will fail if the function does not
2230 have an incoming argument in $a7, but in that case, we can just
2231 set up the frame pointer at the very beginning of the
2232 function. */
2233
2234 for (insn = first; insn; insn = NEXT_INSN (insn))
2235 {
2236 rtx pat;
2237
2238 if (!INSN_P (insn))
2239 continue;
2240
2241 pat = PATTERN (insn);
2242 if (GET_CODE (pat) == UNSPEC_VOLATILE
2243 && (XINT (pat, 1) == UNSPECV_SET_FP))
2244 {
2245 set_frame_ptr_insn = insn;
2246 break;
2247 }
2248 }
2249
2250 if (set_frame_ptr_insn)
2251 {
2252 /* for all instructions prior to set_frame_ptr_insn, replace
2253 hard_frame_pointer references with stack_pointer */
2254 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2255 {
2256 if (INSN_P (insn))
2257 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2258 hard_frame_pointer_rtx,
2259 stack_pointer_rtx);
2260 }
2261 }
2262 else
2263 {
2264 /* emit the frame pointer move immediately after the NOTE that starts
2265 the function */
2266 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2267 stack_pointer_rtx), first);
2268 }
2269}
2270
2271
2272/* Set up the stack and frame (if desired) for the function. */
2273
void
xtensa_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  /* Record which register addresses the frame for the debugger.  */
  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  if (tsize < (1 << (12+3)))
    {
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      /* The frame is too large to allocate with the entry instruction
	 directly; allocate a minimal 16-byte frame and then adjust sp
	 by the pool constant set up in xtensa_reorg.  */
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2302
2303
2304/* Do any necessary cleanup after a function to restore
2305 stack, frame, and regs. */
2306
void
xtensa_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rtx insn = get_last_insn ();
  /* If the last insn was a BARRIER, we don't have to write anything. */
  if (GET_CODE (insn) == NOTE)
    insn = prev_nonnote_insn (insn);
  if (insn == 0 || GET_CODE (insn) != BARRIER)
    fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");

  /* Reset the cached frame size for the next function.  */
  xtensa_current_frame_size = 0;
}
2321
2322
0c14a54d
BW
/* Return an rtx for the return address of the frame COUNT levels up,
   given that frame's frame pointer FRAME.  COUNT == -1 means the
   current function's own return address (in register 0).  */

rtx
xtensa_return_addr (count, frame)
     int count;
     rtx frame;
{
  rtx result, retaddr;

  if (count == -1)
    retaddr = gen_rtx_REG (Pmode, 0);
  else
    {
      /* Load the saved return address from four words below the
	 frame pointer of the requested frame.  */
      rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
      addr = memory_address (Pmode, addr);
      retaddr = gen_reg_rtx (Pmode);
      emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
    }

  /* The 2 most-significant bits of the return address on Xtensa hold
     the register window size.  To get the real return address, these
     bits must be replaced with the high bits from the current PC.  */

  result = gen_reg_rtx (Pmode);
  emit_insn (gen_fix_return_addr (result, retaddr));
  return result;
}
2348
2349
03984308
BW
2350/* Create the va_list data type.
2351 This structure is set up by __builtin_saveregs. The __va_reg
2352 field points to a stack-allocated region holding the contents of the
2353 incoming argument registers. The __va_ndx field is an index initialized
2354 to the position of the first unnamed (variable) argument. This same index
2355 is also used to address the arguments passed in memory. Thus, the
2356 __va_stk field is initialized to point to the position of the first
2357 argument in memory offset to account for the arguments passed in
2358 registers. E.G., if there are 6 argument registers, and each register is
2359 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2360 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2361 argument word N for N >= 6. */
2362
tree
xtensa_build_va_list ()
{
  tree f_stk, f_reg, f_ndx, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* The three fields described in the comment above: the overflow
     (stack) area, the saved-register area, and the argument index.  */
  f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
		      ptr_type_node);
  f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
		      ptr_type_node);
  f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
		      integer_type_node);

  DECL_FIELD_CONTEXT (f_stk) = record;
  DECL_FIELD_CONTEXT (f_reg) = record;
  DECL_FIELD_CONTEXT (f_ndx) = record;

  /* Name the record and chain the fields in declaration order.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_stk;
  TREE_CHAIN (f_stk) = f_reg;
  TREE_CHAIN (f_reg) = f_ndx;

  layout_type (record);
  return record;
}
2391
2392
2393/* Save the incoming argument registers on the stack. Returns the
2394 address of the saved registers. */
2395
2396rtx
2397xtensa_builtin_saveregs ()
2398{
2399 rtx gp_regs, dest;
2400 int arg_words = current_function_arg_words;
2401 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2402 int i;
2403
2404 if (gp_left == 0)
2405 return const0_rtx;
2406
2407 /* allocate the general-purpose register space */
2408 gp_regs = assign_stack_local
2409 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
540eaea8 2410 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
03984308
BW
2411
2412 /* Now store the incoming registers. */
2413 dest = change_address (gp_regs, SImode,
2414 plus_constant (XEXP (gp_regs, 0),
2415 arg_words * UNITS_PER_WORD));
2416
2417 /* Note: Don't use move_block_from_reg() here because the incoming
2418 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2419 Instead, call gen_raw_REG() directly so that we get a distinct
2420 instance of (REG:SI 7). */
2421 for (i = 0; i < gp_left; i++)
2422 {
2423 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2424 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2425 }
2426
2427 return XEXP (gp_regs, 0);
2428}
2429
2430
2431/* Implement `va_start' for varargs and stdarg. We look at the
2432 current function to fill in an initial va_list. */
2433
2434void
e5faf155 2435xtensa_va_start (valist, nextarg)
03984308
BW
2436 tree valist;
2437 rtx nextarg ATTRIBUTE_UNUSED;
2438{
2439 tree f_stk, stk;
2440 tree f_reg, reg;
2441 tree f_ndx, ndx;
2442 tree t, u;
2443 int arg_words;
2444
2445 arg_words = current_function_args_info.arg_words;
2446
2447 f_stk = TYPE_FIELDS (va_list_type_node);
2448 f_reg = TREE_CHAIN (f_stk);
2449 f_ndx = TREE_CHAIN (f_reg);
2450
2451 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2452 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2453 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2454
2455 /* Call __builtin_saveregs; save the result in __va_reg */
2456 current_function_arg_words = arg_words;
2457 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2458 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2459 TREE_SIDE_EFFECTS (t) = 1;
2460 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2461
2462 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2463 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2464 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2465 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2466 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2467 TREE_SIDE_EFFECTS (t) = 1;
2468 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2469
2470 /* Set the __va_ndx member. */
2471 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2472 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2473 TREE_SIDE_EFFECTS (t) = 1;
2474 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2475}
2476
2477
/* Implement `va_arg'.  VALIST is a va_list tree (see
   xtensa_build_va_list) and TYPE is the type of the argument being
   fetched.  Emits RTL that advances __va_ndx past the argument and
   returns a register rtx holding the address where the argument can
   be loaded from.  */

rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* Locate the three fields of the va_list record.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* va_size = sizeof (TYPE) rounded up to a whole number of words:
     ((type_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
		     fold (build (TRUNC_DIV_EXPR, sizetype,
				  fold (build (PLUS_EXPR, sizetype,
					       type_size,
					       size_int (UNITS_PER_WORD - 1))),
				  size_int (UNITS_PER_WORD))),
		     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  /* Snapshot the pre-increment index; it is needed below to decide
     whether the argument straddles the register-save boundary.  */
  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
	       make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
	 && !MUST_PASS_IN_STACK (type))
	__array = (AP).__va_reg;
  */

  array = gen_reg_rtx (Pmode);

  /* lab_over stays NULL_RTX when the type can never be in registers,
     in which case only the stack path below is emitted.  */
  lab_over = NULL_RTX;
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (MAX_ARGS_IN_REGISTERS
					* UNITS_PER_WORD),
			       GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
	emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  /* Bump __va_ndx past the whole register-save area so the argument
     is taken entirely from the stack.  */
  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  /* SIZE is the back-offset from the post-increment index: the padded
     word size by default, or the exact type size on big-endian for
     sub-word arguments.  */
  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
			       GE, const1_rtx, SImode, 0, lab_use_va_size);

      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
	emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
		     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2651
2652
a8cacfd2 2653enum reg_class
89f6025d 2654xtensa_preferred_reload_class (x, class, isoutput)
a8cacfd2
BW
2655 rtx x;
2656 enum reg_class class;
89f6025d 2657 int isoutput;
a8cacfd2 2658{
89f6025d 2659 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
a8cacfd2
BW
2660 return NO_REGS;
2661
89f6025d
BW
2662 /* Don't use the stack pointer or hard frame pointer for reloads!
2663 The hard frame pointer would normally be OK except that it may
2664 briefly hold an incoming argument in the prologue, and reload
2665 won't know that it is live because the hard frame pointer is
2666 treated specially. */
2667
2668 if (class == AR_REGS || class == GR_REGS)
2669 return RL_REGS;
a8cacfd2
BW
2670
2671 return class;
2672}
2673
2674
03984308
BW
2675enum reg_class
2676xtensa_secondary_reload_class (class, mode, x, isoutput)
2677 enum reg_class class;
2678 enum machine_mode mode ATTRIBUTE_UNUSED;
2679 rtx x;
2680 int isoutput;
2681{
2682 int regno;
2683
2684 if (GET_CODE (x) == SIGN_EXTEND)
2685 x = XEXP (x, 0);
2686 regno = xt_true_regnum (x);
2687
2688 if (!isoutput)
2689 {
2690 if (class == FP_REGS && constantpool_mem_p (x))
89f6025d 2691 return RL_REGS;
03984308
BW
2692 }
2693
2694 if (ACC_REG_P (regno))
89f6025d 2695 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
03984308 2696 if (class == ACC_REG)
89f6025d 2697 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
03984308
BW
2698
2699 return NO_REGS;
2700}
2701
2702
2703void
2704order_regs_for_local_alloc ()
2705{
2706 if (!leaf_function_p ())
2707 {
2708 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2709 FIRST_PSEUDO_REGISTER * sizeof (int));
2710 }
2711 else
2712 {
2713 int i, num_arg_regs;
2714 int nxt = 0;
2715
2716 /* use the AR registers in increasing order (skipping a0 and a1)
2717 but save the incoming argument registers for a last resort */
2718 num_arg_regs = current_function_args_info.arg_words;
2719 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2720 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2721 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2722 reg_alloc_order[nxt++] = i + num_arg_regs;
2723 for (i = 0; i < num_arg_regs; i++)
2724 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2725
2726 /* list the FP registers in order for now */
2727 for (i = 0; i < 16; i++)
2728 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2729
2730 /* GCC requires that we list *all* the registers.... */
2731 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2732 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2733 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2734 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2735
2736 /* list the coprocessor registers in order */
2737 for (i = 0; i < BR_REG_NUM; i++)
2738 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2739
2740 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2741 }
2742}
2743
2744
2745/* A customized version of reg_overlap_mentioned_p that only looks for
2746 references to a7 (as opposed to hard_frame_pointer_rtx). */
2747
2748int
2749a7_overlap_mentioned_p (x)
2750 rtx x;
2751{
2752 int i, j;
2753 unsigned int x_regno;
2754 const char *fmt;
2755
2756 if (GET_CODE (x) == REG)
2757 {
2758 x_regno = REGNO (x);
2759 return (x != hard_frame_pointer_rtx
2760 && x_regno < A7_REG + 1
2761 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2762 }
2763
2764 if (GET_CODE (x) == SUBREG
2765 && GET_CODE (SUBREG_REG (x)) == REG
2766 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2767 {
2768 x_regno = subreg_regno (x);
2769 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2770 && x_regno < A7_REG + 1
2771 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2772 }
2773
2774 /* X does not match, so try its subexpressions. */
2775 fmt = GET_RTX_FORMAT (GET_CODE (x));
2776 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2777 {
2778 if (fmt[i] == 'e')
2779 {
2780 if (a7_overlap_mentioned_p (XEXP (x, i)))
2781 return 1;
2782 }
2783 else if (fmt[i] == 'E')
2784 {
2785 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2786 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2787 return 1;
2788 }
2789 }
2790
2791 return 0;
2792}
b64a1b53 2793
01abf342
BW
2794
2795/* Some Xtensa targets support multiple bss sections. If the section
2796 name ends with ".bss", add SECTION_BSS to the flags. */
2797
2798static unsigned int
2799xtensa_multibss_section_type_flags (decl, name, reloc)
2800 tree decl;
2801 const char *name;
2802 int reloc;
2803{
2804 unsigned int flags = default_section_type_flags (decl, name, reloc);
2805 const char *suffix;
2806
2807 suffix = strrchr (name, '.');
2808 if (suffix && strcmp (suffix, ".bss") == 0)
2809 {
2810 if (!decl || (TREE_CODE (decl) == VAR_DECL
2811 && DECL_INITIAL (decl) == NULL_TREE))
2812 flags |= SECTION_BSS; /* @nobits */
2813 else
2814 warning ("only uninitialized variables can be placed in a "
2815 ".bss section");
2816 }
2817
2818 return flags;
2819}
2820
2821
b64a1b53
RH
/* The literal pool stays with the function.  */

static void
xtensa_select_rtx_section (mode, x, align)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx x ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  /* Unconditionally select the current function's section, so
     constant-pool entries are emitted alongside the function's code
     regardless of MODE, X, or ALIGN.  */
  function_section (current_function_decl);
}
fb49053f
RH
2832
2833/* If we are referencing a function that is static, make the SYMBOL_REF
2834 special so that we can generate direct calls to it even with -fpic. */
2835
2836static void
2837xtensa_encode_section_info (decl, first)
2838 tree decl;
2839 int first ATTRIBUTE_UNUSED;
2840{
2841 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2842 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2843}
e2500fed 2844
3c50106f
RH
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE is GET_CODE (X); OUTER_CODE is the code of the expression
   containing X, used to judge whether a CONST_INT fits an immediate
   field of the enclosing operation.  */

static bool
xtensa_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code;
     int *total;
{
  switch (code)
    {
    case CONST_INT:
      /* Cost depends on the context: constants that fit the enclosing
	 instruction's immediate field are cheap or free.  */
      switch (outer_code)
	{
	case SET:
	  if (xtensa_simm12b (INTVAL (x)))
	    {
	      *total = 4;
	      return true;
	    }
	  break;
	case PLUS:
	  if (xtensa_simm8 (INTVAL (x))
	      || xtensa_simm8x256 (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case AND:
	  if (xtensa_mask_immediate (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case COMPARE:
	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATE:
	case ROTATERT:
	  /* no way to tell if X is the 2nd operand so be conservative */
	default: break;
	}
      /* Constant doesn't fit the context's immediate field: cost of
	 materializing it in a register.  */
      if (xtensa_simm12b (INTVAL (x)))
	*total = 5;
      else
	*total = 6;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 5;
      return true;

    case CONST_DOUBLE:
      *total = 7;
      return true;

    case MEM:
      {
	/* Double cost when the address is not directly valid (needs
	   reloading into a register first).  */
	int num_words =
	  (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;

	if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
	  *total = COSTS_N_INSNS (num_words);
	else
	  *total = COSTS_N_INSNS (2*num_words);
	return true;
      }

    case FFS:
      /* Cheap with the NSA option; a library call otherwise.  */
      *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
      return true;

    case NOT:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
      return true;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode shifts are expanded out of line.  */
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (50);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ABS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (4);
	return true;
      }

    case PLUS:
    case MINUS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (1);
	return true;
      }

    case NEG:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
      return true;

    case MULT:
      {
	/* Cost varies with which multiply option is configured.  */
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_MUL32)
	  *total = COSTS_N_INSNS (4);
	else if (TARGET_MAC16)
	  *total = COSTS_N_INSNS (16);
	else if (TARGET_MUL16)
	  *total = COSTS_N_INSNS (12);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case DIV:
    case MOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  {
	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
	    return true;
	  }
	else if (xmode == DFmode)
	  {
	    *total = COSTS_N_INSNS (50);
	    return true;
	  }
      }
      /* fall through */

    case UDIV:
    case UMOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_DIV32)
	  *total = COSTS_N_INSNS (32);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
      else
	*total = COSTS_N_INSNS (50);
      return true;

    case SMIN:
    case UMIN:
    case SMAX:
    case UMAX:
      *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
      return true;

    case SIGN_EXTRACT:
    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
      return true;

    case ZERO_EXTRACT:
    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
3056
e2500fed 3057#include "gt-xtensa.h"