]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/xtensa/xtensa.c
d11036813454e0dbab14dd9e278519b4ed7aa572
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51
/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  ITEST_MAX doubles as the table size and as the
   "unrecognized comparison" sentinel (see map_test_to_internal_test
   and the info[] table in gen_int_relational below).  */

enum internal_test
{
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX
};

/* Cached operands, and operator to compare for use in set/branch on
   condition codes.  Filled in by the cmp* expanders and consumed later
   by xtensa_expand_conditional_branch / gen_conditional_move.  */
rtx branch_cmp[2];

/* What type of branch to use (CMP_SI, CMP_SF, ...) for the cached
   comparison above.  */
enum cmp_type branch_type;

/* Array giving truth value on whether or not a given hard register
   can support a given mode.  */
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Current frame size calculated by compute_frame_size.  */
unsigned xtensa_current_frame_size;

/* Tables of ld/st opcode names for block moves, indexed by mode.  */
const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Worst-case piece count for an inlined block move; see the BLKmode
   case of xtensa_mem_offset.  */
#define LARGEST_MOVE_RATIO 15

/* Define the structure for the machine field in struct function.  */
struct machine_function GTY(())
{
  /* Nonzero if the function accesses the previous stack frame.
     NOTE(review): semantics inferred from the name; the code that sets
     this field is outside this chunk -- confirm against its uses.  */
  int accesses_prev_frame;
  /* NOTE(review): a7-related incoming-argument bookkeeping; see
     xtensa_copy_incoming_a7 (called from xtensa_emit_move_sequence).  */
  bool need_a7_copy;
  bool vararg_a7;
  /* The insn that sets up the frame pointer, if any; set elsewhere.  */
  rtx set_frame_ptr_insn;
};
98
/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  All entries are 1: every hard register may appear in
   a leaf function.  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};

/* Map hard register number to register class.  a0-a15 are mostly
   RL_REGS with a1 (stack pointer) and a7 special-cased, followed by
   the MAC16/branch special registers, the 16 FP registers, and the
   MAC16 accumulator.  */
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
{
  RL_REGS,	SP_REG,		RL_REGS,	RL_REGS,
  RL_REGS,	RL_REGS,	RL_REGS,	GR_REGS,
  RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
  RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
  AR_REGS,	AR_REGS,	BR_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
  ACC_REG,
};
125
126 /* Map register constraint character to register class. */
127 enum reg_class xtensa_char_to_class[256] =
128 {
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
192 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 };
194
/* Forward declarations for the static helpers defined in this file.  */
static int b4const_or_zero (int);
static enum internal_test map_test_to_internal_test (enum rtx_code);
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
static rtx gen_conditional_move (rtx);
static rtx fixup_subreg_mem (rtx);
static enum machine_mode xtensa_find_mode_for_size (unsigned);
static struct machine_function * xtensa_init_machine_status (void);
static bool xtensa_return_in_msb (tree);
static void printx (FILE *, signed int);
static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
static rtx xtensa_builtin_saveregs (void);
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
							int) ATTRIBUTE_UNUSED;
static void xtensa_select_rtx_section (enum machine_mode, rtx,
				       unsigned HOST_WIDE_INT);
static bool xtensa_rtx_costs (rtx, int, int, int *);
static tree xtensa_build_builtin_va_list (void);
static bool xtensa_return_in_memory (tree, tree);

/* Register allocation order for non-leaf functions, taken from the
   REG_ALLOC_ORDER target macro.  */
static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
  REG_ALLOC_ORDER;
217 \f
218
/* This macro generates the assembly code for function exit,
   on machines that need it.  If FUNCTION_EPILOGUE is not defined
   then individual return instructions are generated for each
   return statement.  Args are same as for FUNCTION_PROLOGUE.  */

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue

/* These hooks specify assembly directives for creating certain kinds
   of integer object.  */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION  xtensa_select_rtx_section

/* Cost hooks: instruction costs are computed by xtensa_rtx_costs;
   all addresses have the same (zero) cost.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xtensa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list

/* Promote small integer args, return values, and prototypes to word
   mode, the standard convention for windowed-register targets.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB xtensa_return_in_msb

/* Initialize the GCC target structure from the hooks above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
263 \f
264
/*
 * Predicates for Xtensa immediate operand encodings.  Each one returns
 * nonzero when V fits the corresponding instruction immediate field.
 */

/* Values encodable in the unsigned branch-immediate (b4constu) field.  */
int
xtensa_b4constu (int v)
{
  static const int encodable[] =
    { 32768, 65536, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256 };
  int i;

  for (i = 0; i < (int) (sizeof encodable / sizeof encodable[0]); i++)
    if (v == encodable[i])
      return 1;
  return 0;
}

/* Signed 16-bit value that is a multiple of 256 (ADDMI immediate).  */
int
xtensa_simm8x256 (int v)
{
  if (v < -32768 || v > 32512)
    return 0;
  return (v & 255) == 0;
}

/* ADDI.N immediate: -1 or 1..15.  */
int
xtensa_ai4const (int v)
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}

/* MOVI.N immediate: -32..95.  */
int
xtensa_simm7 (int v)
{
  return v >= -32 && v <= 95;
}

/* Values encodable in the signed branch-immediate (b4const) field.  */
int
xtensa_b4const (int v)
{
  static const int encodable[] =
    { -1, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256 };
  int i;

  for (i = 0; i < (int) (sizeof encodable / sizeof encodable[0]); i++)
    if (v == encodable[i])
      return 1;
  return 0;
}

/* Signed 8-bit immediate (ADDI).  */
int
xtensa_simm8 (int v)
{
  return v >= -128 && v <= 127;
}

/* SEXT field width encoding: 7..22.  */
int
xtensa_tp7 (int v)
{
  return v >= 7 && v <= 22;
}

/* L32I.N/S32I.N offset: multiple of 4 in 0..60.  */
int
xtensa_lsi4x4 (int v)
{
  if (v & 3)
    return 0;
  return v >= 0 && v <= 60;
}

/* MOVI immediate: signed 12-bit.  */
int
xtensa_simm12b (int v)
{
  return v >= -2048 && v <= 2047;
}

/* L8UI/S8I offset: unsigned 8-bit.  */
int
xtensa_uimm8 (int v)
{
  return v >= 0 && v <= 255;
}

/* L16UI/S16I offset: even, unsigned 8-bit scaled by 2.  */
int
xtensa_uimm8x2 (int v)
{
  if (v & 1)
    return 0;
  return v >= 0 && v <= 510;
}

/* L32I/S32I offset: multiple of 4, unsigned 8-bit scaled by 4.  */
int
xtensa_uimm8x4 (int v)
{
  if (v & 3)
    return 0;
  return v >= 0 && v <= 1020;
}
380
381
382 /* This is just like the standard true_regnum() function except that it
383 works even when reg_renumber is not initialized. */
384
385 int
386 xt_true_regnum (rtx x)
387 {
388 if (GET_CODE (x) == REG)
389 {
390 if (reg_renumber
391 && REGNO (x) >= FIRST_PSEUDO_REGISTER
392 && reg_renumber[REGNO (x)] >= 0)
393 return reg_renumber[REGNO (x)];
394 return REGNO (x);
395 }
396 if (GET_CODE (x) == SUBREG)
397 {
398 int base = xt_true_regnum (SUBREG_REG (x));
399 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
400 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
401 GET_MODE (SUBREG_REG (x)),
402 SUBREG_BYTE (x), GET_MODE (x));
403 }
404 return -1;
405 }
406
407
408 int
409 add_operand (rtx op, enum machine_mode mode)
410 {
411 if (GET_CODE (op) == CONST_INT)
412 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
413
414 return register_operand (op, mode);
415 }
416
417
418 int
419 arith_operand (rtx op, enum machine_mode mode)
420 {
421 if (GET_CODE (op) == CONST_INT)
422 return xtensa_simm8 (INTVAL (op));
423
424 return register_operand (op, mode);
425 }
426
427
428 int
429 nonimmed_operand (rtx op, enum machine_mode mode)
430 {
431 /* We cannot use the standard nonimmediate_operand() predicate because
432 it includes constant pool memory operands. */
433
434 if (memory_operand (op, mode))
435 return !constantpool_address_p (XEXP (op, 0));
436
437 return register_operand (op, mode);
438 }
439
440
441 int
442 mem_operand (rtx op, enum machine_mode mode)
443 {
444 /* We cannot use the standard memory_operand() predicate because
445 it includes constant pool memory operands. */
446
447 if (memory_operand (op, mode))
448 return !constantpool_address_p (XEXP (op, 0));
449
450 return FALSE;
451 }
452
453
454 int
455 xtensa_valid_move (enum machine_mode mode, rtx *operands)
456 {
457 /* Either the destination or source must be a register, and the
458 MAC16 accumulator doesn't count. */
459
460 if (register_operand (operands[0], mode))
461 {
462 int dst_regnum = xt_true_regnum (operands[0]);
463
464 /* The stack pointer can only be assigned with a MOVSP opcode. */
465 if (dst_regnum == STACK_POINTER_REGNUM)
466 return (mode == SImode
467 && register_operand (operands[1], mode)
468 && !ACC_REG_P (xt_true_regnum (operands[1])));
469
470 if (!ACC_REG_P (dst_regnum))
471 return true;
472 }
473 if (register_operand (operands[1], mode))
474 {
475 int src_regnum = xt_true_regnum (operands[1]);
476 if (!ACC_REG_P (src_regnum))
477 return true;
478 }
479 return FALSE;
480 }
481
482
483 int
484 mask_operand (rtx op, enum machine_mode mode)
485 {
486 if (GET_CODE (op) == CONST_INT)
487 return xtensa_mask_immediate (INTVAL (op));
488
489 return register_operand (op, mode);
490 }
491
492
493 int
494 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
495 {
496 return ((GET_CODE (op) == CONST_INT)
497 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
498 }
499
500
501 int
502 sext_operand (rtx op, enum machine_mode mode)
503 {
504 if (TARGET_SEXT)
505 return nonimmed_operand (op, mode);
506 return mem_operand (op, mode);
507 }
508
509
510 int
511 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
512 {
513 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
514 }
515
516
517 int
518 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
519 {
520 if (GET_CODE (op) == CONST_INT)
521 {
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
525 }
526 return FALSE;
527 }
528
529
530 static int
531 b4const_or_zero (int v)
532 {
533 if (v == 0)
534 return TRUE;
535 return xtensa_b4const (v);
536 }
537
538
539 int
540 branch_operand (rtx op, enum machine_mode mode)
541 {
542 if (GET_CODE (op) == CONST_INT)
543 return b4const_or_zero (INTVAL (op));
544
545 return register_operand (op, mode);
546 }
547
548
549 int
550 ubranch_operand (rtx op, enum machine_mode mode)
551 {
552 if (GET_CODE (op) == CONST_INT)
553 return xtensa_b4constu (INTVAL (op));
554
555 return register_operand (op, mode);
556 }
557
558
/* Return nonzero if OP is a valid address for a call: a non-virtual
   register (excluding the argument pointer), or a constant address.
   With -fpic, direct calls are restricted to local symbols known to
   land in the same output section, since a cross-section call may be
   out of CALL's PC-relative range.  */
int
call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Accept hard registers and allocated pseudos, but not the
     virtual/frame registers that only exist before reload.  */
  if ((GET_CODE (op) == REG)
      && (op != arg_pointer_rtx)
      && ((REGNO (op) < FRAME_POINTER_REGNUM)
	  || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
    return TRUE;

  if (CONSTANT_ADDRESS_P (op))
    {
      /* Direct calls only allowed to static functions with PIC.  */
      if (flag_pic)
	{
	  tree callee, callee_sec, caller_sec;

	  /* The symbol must bind locally and not be overridable.  */
	  if (GET_CODE (op) != SYMBOL_REF
	      || !SYMBOL_REF_LOCAL_P (op) || SYMBOL_REF_EXTERNAL_P (op))
	    return FALSE;

	  /* Don't attempt a direct call if the callee is known to be in
	     a different section, since there's a good chance it will be
	     out of range.  */

	  if (flag_function_sections
	      || DECL_ONE_ONLY (current_function_decl))
	    return FALSE;
	  caller_sec = DECL_SECTION_NAME (current_function_decl);
	  callee = SYMBOL_REF_DECL (op);
	  if (callee)
	    {
	      if (DECL_ONE_ONLY (callee))
		return FALSE;
	      /* Both must have no section attribute, or both must name
		 the same section.  */
	      callee_sec = DECL_SECTION_NAME (callee);
	      if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
		  || (caller_sec != NULL_TREE
		      && strcmp (TREE_STRING_POINTER (caller_sec),
				 TREE_STRING_POINTER (callee_sec)) != 0))
		return FALSE;
	    }
	  else if (caller_sec != NULL_TREE)
	    return FALSE;
	}
      return TRUE;
    }

  return FALSE;
}
607
608
/* Return nonzero if OP is a valid source operand for a move in MODE:
   any register or memory, constants when the CONST16 option can
   materialize them, or small integers loadable with MOVI.  */
int
move_operand (rtx op, enum machine_mode mode)
{
  if (register_operand (op, mode)
      || memory_operand (op, mode))
    return TRUE;

  switch (mode)
    {
    case DFmode:
    case SFmode:
      /* FP constants are only loadable via CONST16 pairs.  */
      return TARGET_CONST16 && CONSTANT_P (op);

    case DImode:
    case SImode:
      if (TARGET_CONST16)
	return CONSTANT_P (op);
      /* Fall through.  */

    case HImode:
    case QImode:
      /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
	 result in 0/1.  */
      if (GET_CODE (op) == CONSTANT_P_RTX)
	return TRUE;

      /* Small integers fit in a single MOVI.  */
      if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
	return TRUE;
      break;

    default:
      break;
    }

  return FALSE;
}
645
646
647 int
648 smalloffset_mem_p (rtx op)
649 {
650 if (GET_CODE (op) == MEM)
651 {
652 rtx addr = XEXP (op, 0);
653 if (GET_CODE (addr) == REG)
654 return REG_OK_FOR_BASE_P (addr);
655 if (GET_CODE (addr) == PLUS)
656 {
657 rtx offset = XEXP (addr, 0);
658 if (GET_CODE (offset) != CONST_INT)
659 offset = XEXP (addr, 1);
660 if (GET_CODE (offset) != CONST_INT)
661 return FALSE;
662 return xtensa_lsi4x4 (INTVAL (offset));
663 }
664 }
665 return FALSE;
666 }
667
668
669 int
670 constantpool_address_p (rtx addr)
671 {
672 rtx sym = addr;
673
674 if (GET_CODE (addr) == CONST)
675 {
676 rtx offset;
677
678 /* Only handle (PLUS (SYM, OFFSET)) form. */
679 addr = XEXP (addr, 0);
680 if (GET_CODE (addr) != PLUS)
681 return FALSE;
682
683 /* Make sure the address is word aligned. */
684 offset = XEXP (addr, 1);
685 if ((GET_CODE (offset) != CONST_INT)
686 || ((INTVAL (offset) & 3) != 0))
687 return FALSE;
688
689 sym = XEXP (addr, 0);
690 }
691
692 if ((GET_CODE (sym) == SYMBOL_REF)
693 && CONSTANT_POOL_ADDRESS_P (sym))
694 return TRUE;
695 return FALSE;
696 }
697
698
699 int
700 constantpool_mem_p (rtx op)
701 {
702 if (GET_CODE (op) == MEM)
703 return constantpool_address_p (XEXP (op, 0));
704 return FALSE;
705 }
706
707
708 /* Accept the floating point constant 1 in the appropriate mode. */
709
710 int
711 const_float_1_operand (rtx op, enum machine_mode mode)
712 {
713 REAL_VALUE_TYPE d;
714 static REAL_VALUE_TYPE onedf;
715 static REAL_VALUE_TYPE onesf;
716 static int one_initialized;
717
718 if ((GET_CODE (op) != CONST_DOUBLE)
719 || (mode != GET_MODE (op))
720 || (mode != DFmode && mode != SFmode))
721 return FALSE;
722
723 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
724
725 if (! one_initialized)
726 {
727 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
728 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
729 one_initialized = TRUE;
730 }
731
732 if (mode == DFmode)
733 return REAL_VALUES_EQUAL (d, onedf);
734 else
735 return REAL_VALUES_EQUAL (d, onesf);
736 }
737
738
739 int
740 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
741 {
742 if (GET_CODE (op) == CONST_INT)
743 return xtensa_mem_offset (INTVAL (op), SFmode);
744 return 0;
745 }
746
747
748 void
749 xtensa_extend_reg (rtx dst, rtx src)
750 {
751 rtx temp = gen_reg_rtx (SImode);
752 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
753
754 /* Generate paradoxical subregs as needed so that the modes match. */
755 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
756 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
757
758 emit_insn (gen_ashlsi3 (temp, src, shift));
759 emit_insn (gen_ashrsi3 (dst, temp, shift));
760 }
761
762
763 int
764 branch_operator (rtx x, enum machine_mode mode)
765 {
766 if (GET_MODE (x) != mode)
767 return FALSE;
768
769 switch (GET_CODE (x))
770 {
771 case EQ:
772 case NE:
773 case LT:
774 case GE:
775 return TRUE;
776 default:
777 break;
778 }
779 return FALSE;
780 }
781
782
783 int
784 ubranch_operator (rtx x, enum machine_mode mode)
785 {
786 if (GET_MODE (x) != mode)
787 return FALSE;
788
789 switch (GET_CODE (x))
790 {
791 case LTU:
792 case GEU:
793 return TRUE;
794 default:
795 break;
796 }
797 return FALSE;
798 }
799
800
801 int
802 boolean_operator (rtx x, enum machine_mode mode)
803 {
804 if (GET_MODE (x) != mode)
805 return FALSE;
806
807 switch (GET_CODE (x))
808 {
809 case EQ:
810 case NE:
811 return TRUE;
812 default:
813 break;
814 }
815 return FALSE;
816 }
817
818
/* Return nonzero if V is a valid immediate for the EXTUI mask field:
   a contiguous run of between 1 and 16 low-order 1 bits, i.e.
   V == (1 << n) - 1 for some n in 1..16.

   Note: the original code used "#define MAX_MASK_SIZE 16" inside the
   function body without a matching #undef, leaking the macro into the
   rest of the translation unit; a block-scope constant avoids that.  */
int
xtensa_mask_immediate (int v)
{
  const int max_mask_size = 16;	/* widest field EXTUI can extract */
  int mask_size;

  for (mask_size = 1; mask_size <= max_mask_size; mask_size++)
    {
      /* Every bit of the mask must be set up to its full width ...  */
      if ((v & 1) == 0)
	return 0;
      v = v >> 1;
      /* ... and no bits may remain beyond it.  */
      if (v == 0)
	return 1;
    }

  return 0;
}
836
837
838 int
839 xtensa_mem_offset (unsigned v, enum machine_mode mode)
840 {
841 switch (mode)
842 {
843 case BLKmode:
844 /* Handle the worst case for block moves. See xtensa_expand_block_move
845 where we emit an optimized block move operation if the block can be
846 moved in < "move_ratio" pieces. The worst case is when the block is
847 aligned but has a size of (3 mod 4) (does this happen?) so that the
848 last piece requires a byte load/store. */
849 return (xtensa_uimm8 (v)
850 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
851
852 case QImode:
853 return xtensa_uimm8 (v);
854
855 case HImode:
856 return xtensa_uimm8x2 (v);
857
858 case DFmode:
859 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
860
861 default:
862 break;
863 }
864
865 return xtensa_uimm8x4 (v);
866 }
867
868
869 /* Make normal rtx_code into something we can index from an array. */
870
871 static enum internal_test
872 map_test_to_internal_test (enum rtx_code test_code)
873 {
874 enum internal_test test = ITEST_MAX;
875
876 switch (test_code)
877 {
878 default: break;
879 case EQ: test = ITEST_EQ; break;
880 case NE: test = ITEST_NE; break;
881 case GT: test = ITEST_GT; break;
882 case GE: test = ITEST_GE; break;
883 case LT: test = ITEST_LT; break;
884 case LE: test = ITEST_LE; break;
885 case GTU: test = ITEST_GTU; break;
886 case GEU: test = ITEST_GEU; break;
887 case LTU: test = ITEST_LTU; break;
888 case LEU: test = ITEST_LEU; break;
889 }
890
891 return test;
892 }
893
894
/* Generate the code to compare two integer values.  The return value is
   the comparison expression.  Since the hardware only implements EQ/NE/
   LT/GE (and LTU/GEU), other tests are synthesized by adding 1 to a
   constant operand, swapping register operands, and/or asking the caller
   (via *P_INVERT) to invert the branch sense.  */

static rtx
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		    rtx cmp0, /* first operand to compare */
		    rtx cmp1, /* second operand to compare */
		    int *p_invert /* whether branch needs to reverse test */)
{
  struct cmp_info
  {
    enum rtx_code test_code;	/* test code to use in insn */
    int (*const_range_p) (int);	/* predicate function to check range */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  /* One row per internal_test value, in ITEST_* order.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
    { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */

    { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
    { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
    { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
    { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */

    { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
    { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
    { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
    { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  if (test == ITEST_MAX)
    abort ();

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      /* The first test detects wrap-around when const_add is applied
	 (signed or unsigned, depending on the comparison).  */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  Note: this must be decided
     after any force_reg above, since constant and register operands
     invert differently.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
}
992
993
/* Generate the code to compare two float values.  The return value is
   the comparison expression.  The FP unit only provides "set boolean"
   compares (OEQ/OLT/OLE), so NE is synthesized by inverting EQ, and
   GT/GE by swapping operands of LT/LE.  The result is a branch
   condition on the boolean register written by the compare.  */

static rtx
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		      rtx cmp0, /* first operand to compare */
		      rtx cmp1 /* second operand to compare */)
{
  rtx (*gen_fn) (rtx, rtx, rtx);
  rtx brtmp;
  int reverse_regs, invert;

  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  /* Emit the compare into the FP boolean register, then return a
     branch condition testing that register against zero.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}
1031
1032
/* Expand a conditional branch to OPERANDS[0] using TEST_CODE and the
   comparison operands cached in branch_cmp[]/branch_type by the cmp*
   expanders.  DF comparisons (and SF without hardware float) are not
   supported and abort via fatal_insn.  */
void
xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
{
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (type)
    {
    case CMP_DF:
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));

    case CMP_SI:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case CMP_SF:
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch.  */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  label2 = pc_rtx;

  /* An inverted test is expressed by swapping the taken/fall-through
     arms of the IF_THEN_ELSE rather than negating the condition.  */
  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
						     label1,
						     label2)));
}
1078
1079
/* Build the condition rtx for a conditional move from CMP and the
   operands cached in branch_cmp[]/branch_type.  May emit a subtract
   when comparing two nonzero values.  Returns 0 if the comparison
   cannot be expressed as a conditional-move condition, in which case
   the caller falls back to another expansion.  */
static rtx
gen_conditional_move (rtx cmp)
{
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];

  if (branch_type == CMP_SI)
    {
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* If not comparing against zero, emit a comparison (subtract).  */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* Swap the operands to make const0 second.  */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      /* Swapping LT/GE operands flips the comparison sense.  */
	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: abort ();
		}
	    }

	  /* LT/GE conditional moves only exist against zero.  */
	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);

  return 0;
}
1151
1152
1153 int
1154 xtensa_expand_conditional_move (rtx *operands, int isflt)
1155 {
1156 rtx cmp;
1157 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1158
1159 if (!(cmp = gen_conditional_move (operands[1])))
1160 return 0;
1161
1162 if (isflt)
1163 gen_fn = (branch_type == CMP_SI
1164 ? gen_movsfcc_internal0
1165 : gen_movsfcc_internal1);
1166 else
1167 gen_fn = (branch_type == CMP_SI
1168 ? gen_movsicc_internal0
1169 : gen_movsicc_internal1);
1170
1171 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1172 operands[2], operands[3], cmp));
1173 return 1;
1174 }
1175
1176
1177 int
1178 xtensa_expand_scc (rtx *operands)
1179 {
1180 rtx dest = operands[0];
1181 rtx cmp = operands[1];
1182 rtx one_tmp, zero_tmp;
1183 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1184
1185 if (!(cmp = gen_conditional_move (cmp)))
1186 return 0;
1187
1188 one_tmp = gen_reg_rtx (SImode);
1189 zero_tmp = gen_reg_rtx (SImode);
1190 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1191 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1192
1193 gen_fn = (branch_type == CMP_SI
1194 ? gen_movsicc_internal0
1195 : gen_movsicc_internal1);
1196 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1197 return 1;
1198 }
1199
1200
/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE is
   for the output, i.e., the input operands are twice as big as MODE.  */

void
xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
{
  /* Split the source first: operands[1] is consumed here and then
     overwritten by the destination split below.  */
  switch (GET_CODE (operands[1]))
    {
    case REG:
      operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
      break;

    case MEM:
      operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
      operands[2] = adjust_address (operands[1], mode, 0);
      break;

    case CONST_INT:
    case CONST_DOUBLE:
      split_double (operands[1], &operands[2], &operands[3]);
      break;

    default:
      abort ();
    }

  switch (GET_CODE (operands[0]))
    {
    case REG:
      /* operands[1] must be written before operands[0] is overwritten,
	 since both are derived from the original operands[0].  */
      operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
      break;

    case MEM:
      operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
      operands[0] = adjust_address (operands[0], mode, 0);
      break;

    default:
      abort ();
    }
}
1244
1245
/* Emit insns to move operands[1] into operands[0].
   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  /* Constants other than small immediates (those accepted by
     xtensa_simm12b) cannot be moved directly: without CONST16 they
     must come from the constant pool, and with CONST16 only the movsi
     pattern can synthesize them.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      if (!TARGET_CONST16)
	operands[1] = force_const_mem (SImode, operands[1]);

      /* PC-relative loads are always SImode, and CONST16 is only
	 supported in the movsi pattern, so add a SUBREG for any other
	 (smaller) mode.  */

      if (mode != SImode)
	{
	  if (register_operand (operands[0], mode))
	    {
	      /* Access the register destination as SImode and emit the
		 whole move here; nothing is left for the caller.  */
	      operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
	      emit_move_insn (operands[0], operands[1]);
	      return 1;
	    }
	  else
	    {
	      /* Non-register destination: load the constant into an
		 SImode register and let the caller move its low part.  */
	      operands[1] = force_reg (SImode, operands[1]);
	      operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
	    }
	}
    }

  /* Before reload, legitimize the move by forcing the source into a
     register when the operand pair is not directly movable.  */
  if (!(reload_in_progress | reload_completed)
      && !xtensa_valid_move (mode, operands))
    operands[1] = force_reg (mode, operands[1]);

  /* If the source is an incoming argument in a7, copy it out of a7
     first (see xtensa_copy_incoming_a7 below).  */
  operands[1] = xtensa_copy_incoming_a7 (operands[1]);

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload, so we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1298
1299
1300 static rtx
1301 fixup_subreg_mem (rtx x)
1302 {
1303 if (GET_CODE (x) == SUBREG
1304 && GET_CODE (SUBREG_REG (x)) == REG
1305 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1306 {
1307 rtx temp =
1308 gen_rtx_SUBREG (GET_MODE (x),
1309 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1310 SUBREG_BYTE (x));
1311 x = alter_subreg (&temp);
1312 }
1313 return x;
1314 }
1315
1316
/* Check if an incoming argument in a7 is expected to be used soon and
   if OPND is a register or register pair that includes a7.  If so,
   create a new pseudo and copy a7 into that pseudo at the very
   beginning of the function, followed by the special "set_frame_ptr"
   unspec_volatile insn.  The return value is either the original
   operand, if it is not a7, or the new pseudo containing a copy of
   the incoming argument.  This is necessary because the register
   allocator will ignore conflicts with a7 and may either assign some
   other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
   the incoming argument in a7.  By copying the argument out of a7 as
   the very first thing, and then immediately following that with an
   unspec_volatile to keep the scheduler away, we should avoid any
   problems.  Putting the set_frame_ptr insn at the beginning, with
   only the a7 copy before it, also makes it easier for the prologue
   expander to initialize the frame pointer after the a7 copy and to
   fix up the a7 copy to use the stack pointer instead of the frame
   pointer.  */

rtx
xtensa_copy_incoming_a7 (rtx opnd)
{
  rtx entry_insns = 0;
  rtx reg, tmp;
  enum machine_mode mode;

  /* need_a7_copy is set by function_arg when an incoming argument
     occupies a7; nothing to do otherwise.  */
  if (!cfun->machine->need_a7_copy)
    return opnd;

  /* This function should never be called again once a7 has been copied.  */
  if (cfun->machine->set_frame_ptr_insn)
    abort ();

  mode = GET_MODE (opnd);

  /* The operand using a7 may come in a later instruction, so just return
     the original operand if it doesn't use a7.  */
  reg = opnd;
  if (GET_CODE (reg) == SUBREG)
    {
      /* Only a whole-register subreg is expected here.  */
      if (SUBREG_BYTE (reg) != 0)
	abort ();
      reg = SUBREG_REG (reg);
    }
  if (GET_CODE (reg) != REG
      || REGNO (reg) > A7_REG
      || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
    return opnd;

  /* 1-word args will always be in a7; 2-word args in a6/a7.  */
  if (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 != A7_REG)
    abort ();

  cfun->machine->need_a7_copy = false;

  /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
     create the REG for a7 so that hard_frame_pointer_rtx is not used.  */

  push_to_sequence (entry_insns);
  tmp = gen_reg_rtx (mode);

  switch (mode)
    {
    case DFmode:
    case DImode:
      /* Two-word value in a6/a7: copy each word separately.  Only the
	 a7 half needs gen_raw_REG (see comment above).  */
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
				     gen_rtx_REG (SImode, A7_REG - 1)));
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
				     gen_raw_REG (SImode, A7_REG)));
      break;
    case SFmode:
      emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case SImode:
      emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case HImode:
      emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case QImode:
      emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    default:
      abort ();
    }

  /* Record the set_frame_ptr insn; it also serves as the "already
     copied" marker checked at the top of this function.  */
  cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
  entry_insns = get_insns ();
  end_sequence ();

  if (cfun->machine->vararg_a7)
    {
      /* This is called from within builtin_savereg, so we're already
	 inside a start_sequence that will be placed at the start of
	 the function.  */
      emit_insn (entry_insns);
    }
  else
    {
      /* Put entry_insns after the NOTE that starts the function.  If
	 this is inside a start_sequence, make the outer-level insn
	 chain current, so the code is placed at the start of the
	 function.  */
      push_topmost_sequence ();
      emit_insn_after (entry_insns, get_insns ());
      pop_topmost_sequence ();
    }

  return tmp;
}
1426
1427
1428 /* Try to expand a block move operation to an RTL block move instruction.
1429 If not optimizing or if the block size is not a constant or if the
1430 block is small, the expansion fails and GCC falls back to calling
1431 memcpy().
1432
1433 operands[0] is the destination
1434 operands[1] is the source
1435 operands[2] is the length
1436 operands[3] is the alignment */
1437
1438 int
1439 xtensa_expand_block_move (rtx *operands)
1440 {
1441 rtx dest = operands[0];
1442 rtx src = operands[1];
1443 int bytes = INTVAL (operands[2]);
1444 int align = XINT (operands[3], 0);
1445 int num_pieces, move_ratio;
1446
1447 /* If this is not a fixed size move, just call memcpy. */
1448 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1449 return 0;
1450
1451 /* Anything to move? */
1452 if (bytes <= 0)
1453 return 1;
1454
1455 if (align > MOVE_MAX)
1456 align = MOVE_MAX;
1457
1458 /* Decide whether to expand inline based on the optimization level. */
1459 move_ratio = 4;
1460 if (optimize > 2)
1461 move_ratio = LARGEST_MOVE_RATIO;
1462 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1463 if (num_pieces >= move_ratio)
1464 return 0;
1465
1466 /* Make sure the memory addresses are valid. */
1467 operands[0] = validize_mem (dest);
1468 operands[1] = validize_mem (src);
1469
1470 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1471 operands[2], operands[3]));
1472 return 1;
1473 }
1474
1475
/* Emit a sequence of instructions to implement a block move, trying
   to hide load delay slots as much as possible.  Load N values into
   temporary registers, store those N values, and repeat until the
   complete block has been moved.  N=delay_slots+1.  */

/* One buffered load or store: the assembler template plus its two
   operands, recorded so that a chunk of loads can be emitted before
   the matching stores.  */
struct meminsnbuf
{
  char template[30];		/* asm template, e.g. "l32i\t%0, %1" */
  rtx operands[2];		/* [0] = temp register, [1] = memory ref */
};
1486
/* Output assembly for a block move (called at final-output time for
   the movstrsi_internal pattern).  OPERANDS are as described for
   xtensa_expand_block_move; TMPREGS supplies DELAY_SLOTS+1 scratch
   registers used to batch loads ahead of stores.  */

void
xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;		/* byte offset moved so far */
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  /* Move one aligned item at a time, chunk_size items per batch.  */
  item_size = align;
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest mode (and its load/store opcodes) that fits the
     alignment and has opcodes available.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      /* Build one chunk of up to chunk_size load/store pairs.  */
      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      /* Ran out of data mid-chunk; shrink the final batch.  */
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* Find a smaller item_size which we can load & store.  */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* Record the load instruction opcode and operands.  */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* Record the store instruction opcode and operands.  */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* Now output the loads followed by the stores.  */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1574
1575
1576 static enum machine_mode
1577 xtensa_find_mode_for_size (unsigned item_size)
1578 {
1579 enum machine_mode mode, tmode;
1580
1581 while (1)
1582 {
1583 mode = VOIDmode;
1584
1585 /* Find mode closest to but not bigger than item_size. */
1586 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1587 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1588 if (GET_MODE_SIZE (tmode) <= item_size)
1589 mode = tmode;
1590 if (mode == VOIDmode)
1591 abort ();
1592
1593 item_size = GET_MODE_SIZE (mode);
1594
1595 if (xtensa_ld_opcodes[(int) mode]
1596 && xtensa_st_opcodes[(int) mode])
1597 break;
1598
1599 /* Cannot load & store this mode; try something smaller. */
1600 item_size -= 1;
1601 }
1602
1603 return mode;
1604 }
1605
1606
1607 void
1608 xtensa_expand_nonlocal_goto (rtx *operands)
1609 {
1610 rtx goto_handler = operands[1];
1611 rtx containing_fp = operands[3];
1612
1613 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1614 is too big to generate in-line. */
1615
1616 if (GET_CODE (containing_fp) != REG)
1617 containing_fp = force_reg (Pmode, containing_fp);
1618
1619 goto_handler = replace_rtx (copy_rtx (goto_handler),
1620 virtual_stack_vars_rtx,
1621 containing_fp);
1622
1623 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1624 0, VOIDmode, 2,
1625 containing_fp, Pmode,
1626 goto_handler, Pmode);
1627 }
1628
1629
1630 static struct machine_function *
1631 xtensa_init_machine_status (void)
1632 {
1633 return ggc_alloc_cleared (sizeof (struct machine_function));
1634 }
1635
1636
1637 void
1638 xtensa_setup_frame_addresses (void)
1639 {
1640 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1641 cfun->machine->accesses_prev_frame = 1;
1642
1643 emit_library_call
1644 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1645 0, VOIDmode, 0);
1646 }
1647
1648
1649 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1650 a comment showing where the end of the loop is. However, if there is a
1651 label or a branch at the end of the loop then we need to place a nop
1652 there. If the loop ends with a label we need the nop so that branches
1653 targeting that label will target the nop (and thus remain in the loop),
1654 instead of targeting the instruction after the loop (and thus exiting
1655 the loop). If the loop ends with a branch, we need the nop in case the
1656 branch is targeting a location inside the loop. When the branch
1657 executes it will cause the loop count to be decremented even if it is
1658 taken (because it is the last instruction in the loop), so we need to
1659 nop after the branch to prevent the loop count from being decremented
1660 when the branch is taken. */
1661
1662 void
1663 xtensa_emit_loop_end (rtx insn, rtx *operands)
1664 {
1665 char done = 0;
1666
1667 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1668 {
1669 switch (GET_CODE (insn))
1670 {
1671 case NOTE:
1672 case BARRIER:
1673 break;
1674
1675 case CODE_LABEL:
1676 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1677 done = 1;
1678 break;
1679
1680 default:
1681 {
1682 rtx body = PATTERN (insn);
1683
1684 if (GET_CODE (body) == JUMP_INSN)
1685 {
1686 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1687 done = 1;
1688 }
1689 else if ((GET_CODE (body) != USE)
1690 && (GET_CODE (body) != CLOBBER))
1691 done = 1;
1692 }
1693 break;
1694 }
1695 }
1696
1697 output_asm_insn ("# loop end for %0", operands);
1698 }
1699
1700
1701 char *
1702 xtensa_emit_call (int callop, rtx *operands)
1703 {
1704 static char result[64];
1705 rtx tgt = operands[callop];
1706
1707 if (GET_CODE (tgt) == CONST_INT)
1708 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1709 else if (register_operand (tgt, VOIDmode))
1710 sprintf (result, "callx8\t%%%d", callop);
1711 else
1712 sprintf (result, "call8\t%%%d", callop);
1713
1714 return result;
1715 }
1716
1717
1718 /* Return the debugger register number to use for 'regno'. */
1719
1720 int
1721 xtensa_dbx_register_number (int regno)
1722 {
1723 int first = -1;
1724
1725 if (GP_REG_P (regno))
1726 {
1727 regno -= GP_REG_FIRST;
1728 first = 0;
1729 }
1730 else if (BR_REG_P (regno))
1731 {
1732 regno -= BR_REG_FIRST;
1733 first = 16;
1734 }
1735 else if (FP_REG_P (regno))
1736 {
1737 regno -= FP_REG_FIRST;
1738 first = 48;
1739 }
1740 else if (ACC_REG_P (regno))
1741 {
1742 first = 0x200; /* Start of Xtensa special registers. */
1743 regno = 16; /* ACCLO is special register 16. */
1744 }
1745
1746 /* When optimizing, we sometimes get asked about pseudo-registers
1747 that don't represent hard registers. Return 0 for these. */
1748 if (first == -1)
1749 return 0;
1750
1751 return first + regno;
1752 }
1753
1754
1755 /* Argument support functions. */
1756
1757 /* Initialize CUMULATIVE_ARGS for a function. */
1758
1759 void
1760 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1761 {
1762 cum->arg_words = 0;
1763 cum->incoming = incoming;
1764 }
1765
1766
1767 /* Advance the argument to the next argument position. */
1768
1769 void
1770 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1771 {
1772 int words, max;
1773 int *arg_words;
1774
1775 arg_words = &cum->arg_words;
1776 max = MAX_ARGS_IN_REGISTERS;
1777
1778 words = (((mode != BLKmode)
1779 ? (int) GET_MODE_SIZE (mode)
1780 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1781
1782 if ((*arg_words + words > max) && (*arg_words < max))
1783 *arg_words = max;
1784
1785 *arg_words += words;
1786 }
1787
1788
/* Return an RTL expression containing the register for the given mode,
   or 0 if the argument is to be passed on the stack.  INCOMING_P is nonzero
   if this is an incoming argument to the current function.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int incoming_p)
{
  int regbase, words, max;
  int *arg_words;
  int regno;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Size of the argument in whole words.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    {
      /* Round up to the type's word alignment.  NOTE: this mutates
	 cum->arg_words in place, not just the local view.  */
      int align = TYPE_ALIGN (type) / BITS_PER_WORD;
      *arg_words = (*arg_words + align - 1) & -align;
    }

  /* Arguments that do not fit entirely in the registers go on the
     stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;

  /* If an incoming argument occupies a7, arrange for the prologue to
     copy it out of a7 (see xtensa_copy_incoming_a7).  */
  if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
    cfun->machine->need_a7_copy = true;

  return gen_rtx_REG (mode, regno);
}
1825
1826
1827 static bool
1828 xtensa_return_in_msb (tree valtype)
1829 {
1830 return (TARGET_BIG_ENDIAN
1831 && AGGREGATE_TYPE_P (valtype)
1832 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1833 }
1834
1835
/* Validate the command-line options and initialize per-configuration
   tables: block-move opcodes, constraint-letter register classes, and
   the hard-register/mode validity matrix.  */

void
override_options (void)
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* Set up the tables of ld/st opcode names for block moves.  */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; letters tied to
     optional features get NO_REGS when the feature is disabled.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
  xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode.  */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    /* MAC16 accumulator: integer modes up to a word.  */
	    temp = (TARGET_MAC16
		    && (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start on an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  PIC is only supported when using L32R
     instructions, and some targets need to always use PIC.  */
  if (flag_pic && TARGET_CONST16)
    error ("-f%s is not supported with CONST16 instructions",
	   (flag_pic > 1 ? "PIC" : "pic"));
  else if (XTENSA_ALWAYS_PIC)
    {
      if (TARGET_CONST16)
	error ("PIC is required but not supported with CONST16 instructions");
      flag_pic = 1;
    }
  /* There's no need for -fPIC (as opposed to -fpic) on Xtensa.  */
  if (flag_pic > 1)
    flag_pic = 1;
}
1909
1910
1911 /* A C compound statement to output to stdio stream STREAM the
1912 assembler syntax for an instruction operand X. X is an RTL
1913 expression.
1914
1915 CODE is a value that can be used to specify one of several ways
1916 of printing the operand. It is used when identical operands
1917 must be printed differently depending on the context. CODE
1918 comes from the '%' specification that was used to request
1919 printing of the operand. If the specification was just '%DIGIT'
1920 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1921 is the ASCII code for LTR.
1922
1923 If X is a register, this macro should print the register's name.
1924 The names can be found in an array 'reg_names' whose type is
1925 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1926
1927 When the machine description has a specification '%PUNCT' (a '%'
1928 followed by a punctuation character), this macro is called with
1929 a null pointer for X and the punctuation character for CODE.
1930
1931 'a', 'c', 'l', and 'n' are reserved.
1932
1933 The Xtensa specific codes are:
1934
1935 'd' CONST_INT, print as signed decimal
1936 'x' CONST_INT, print as signed hexadecimal
1937 'K' CONST_INT, print number of bits in mask for EXTUI
1938 'R' CONST_INT, print (X & 0x1f)
1939 'L' CONST_INT, print ((32 - X) & 0x1f)
1940 'D' REG, print second register of double-word register operand
1941 'N' MEM, print address of next word following a memory operand
1942 'v' MEM, if memory reference is volatile, output a MEMW before it
1943 't' any constant, add "@h" suffix for top 16 bits
1944 'b' any constant, add "@l" suffix for bottom 16 bits
1945 */
1946
/* Print VAL to FILE as a "nice" hexadecimal: values of small magnitude
   in decimal, negative values as -0x..., everything else as 0x....  */

static void
printx (FILE *file, signed int val)
{
  /* Print a hexadecimal value in a nice way.  */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: plain -val is signed-overflow
       undefined behavior when val == INT_MIN.  */
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1958
1959
/* Print operand X to FILE, modified by LETTER as documented in the
   specification comment above.  LETTER == 0 prints the operand in its
   default form.  */

void
print_operand (FILE *file, rtx x, int letter)
{
  if (!x)
    error ("PRINT_OPERAND null pointer");

  switch (letter)
    {
    case 'D':
      /* Second register of a double-word register operand.  */
      if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
	fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
      else
	output_operand_lossage ("invalid %%D value");
      break;

    case 'v':
      if (GET_CODE (x) == MEM)
	{
	  /* For a volatile memory reference, emit a MEMW before the
	     load or store.  */
	  if (MEM_VOLATILE_P (x))
	    fprintf (file, "memw\n\t");
	}
      else
	output_operand_lossage ("invalid %%v value");
      break;

    case 'N':
      /* Address of the word following a DImode/DFmode memory operand.  */
      if (GET_CODE (x) == MEM
	  && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
	{
	  x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
	  output_address (XEXP (x, 0));
	}
      else
	output_operand_lossage ("invalid %%N value");
      break;

    case 'K':
      /* Width of a low-order mask for EXTUI: the value must consist of
	 1 to 16 consecutive low-order one bits and nothing else.  */
      if (GET_CODE (x) == CONST_INT)
	{
	  int num_bits = 0;
	  unsigned val = INTVAL (x);
	  while (val & 1)
	    {
	      num_bits += 1;
	      val = val >> 1;
	    }
	  if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	    fatal_insn ("invalid mask", x);

	  fprintf (file, "%d", num_bits);
	}
      else
	output_operand_lossage ("invalid %%K value");
      break;

    case 'L':
      /* (32 - X) modulo 32.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
      else
	output_operand_lossage ("invalid %%L value");
      break;

    case 'R':
      /* X modulo 32.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (file, "%ld", INTVAL (x) & 0x1f);
      else
	output_operand_lossage ("invalid %%R value");
      break;

    case 'x':
      /* Signed hexadecimal.  */
      if (GET_CODE (x) == CONST_INT)
	printx (file, INTVAL (x));
      else
	output_operand_lossage ("invalid %%x value");
      break;

    case 'd':
      /* Signed decimal.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (file, "%ld", INTVAL (x));
      else
	output_operand_lossage ("invalid %%d value");
      break;

    case 't':
    case 'b':
      /* Constant with an "@h" (top 16 bits) or "@l" (bottom 16 bits)
	 suffix, as used by the CONST16 instruction.  */
      if (GET_CODE (x) == CONST_INT)
	{
	  printx (file, INTVAL (x));
	  fputs (letter == 't' ? "@h" : "@l", file);
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	{
	  REAL_VALUE_TYPE r;
	  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	  if (GET_MODE (x) == SFmode)
	    {
	      long l;
	      REAL_VALUE_TO_TARGET_SINGLE (r, l);
	      fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
	    }
	  else
	    output_operand_lossage ("invalid %%t/%%b value");
	}
      else if (GET_CODE (x) == CONST)
	{
	  /* X must be a symbolic constant on ELF.  Write an expression
	     suitable for 'const16' that sets the high or low 16 bits.  */
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%t/%%b value");
	  print_operand (file, XEXP (XEXP (x, 0), 0), 0);
	  fputs (letter == 't' ? "@h" : "@l", file);
	  /* There must be a non-alphanumeric character between 'h' or 'l'
	     and the number.  The '-' is added by print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      else
	{
	  output_addr_const (file, x);
	  fputs (letter == 't' ? "@h" : "@l", file);
	}
      break;

    default:
      /* No (or unrecognized) modifier: print the operand's natural
	 form.  */
      if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
	fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
      else if (GET_CODE (x) == MEM)
	output_address (XEXP (x, 0));
      else if (GET_CODE (x) == CONST_INT)
	fprintf (file, "%ld", INTVAL (x));
      else
	output_addr_const (file, x);
    }
}
2100
2101
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.  */

void
print_operand_address (FILE *file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      /* Plain register: base register with zero offset.  */
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      /* Base register plus constant offset, in either operand order.  */
      {
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      /* Absolute/symbolic address.  */
      output_addr_const (file, addr);
      break;
    }
}
2160
2161
/* Emit a ".literal" directive for constant X of mode MODE, labeled
   .LC<LABELNO>.  Handles single/double floats and 4- or 8-byte
   integer constants; anything else aborts.  */

void
xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Convert to the target's floating-point representation and
	 emit the raw word(s) in hex.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Emit the two words of an 8-byte constant separately.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2220
2221
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))

long
compute_frame_size (int size)
{
  /* Add space for the incoming static chain value.  */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  /* Total frame = locals + outgoing argument area + register-window
     save area, rounded to the stack boundary.  The result is cached in
     xtensa_current_frame_size (cleared by xtensa_function_epilogue).  */
  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2241
2242
2243 int
2244 xtensa_frame_pointer_required (void)
2245 {
2246 /* The code to expand builtin_frame_addr and builtin_return_addr
2247 currently uses the hard_frame_pointer instead of frame_pointer.
2248 This seems wrong but maybe it's necessary for other architectures.
2249 This function is derived from the i386 code. */
2250
2251 if (cfun->machine->accesses_prev_frame)
2252 return 1;
2253
2254 return 0;
2255 }
2256
2257
/* Expand the prologue into RTL: emit the windowed ENTRY instruction
   and, when required, set up the frame pointer.  */

void
xtensa_expand_prologue (void)
{
  HOST_WIDE_INT total_size;
  rtx size_rtx;

  total_size = compute_frame_size (get_frame_size ());
  size_rtx = GEN_INT (total_size);

  /* Frames smaller than 1 << 15 bytes can be allocated by the entry
     insn alone; larger frames allocate the minimum with entry and then
     adjust the stack pointer explicitly.  */
  if (total_size < (1 << (12+3)))
    emit_insn (gen_entry (size_rtx, size_rtx));
  else
    {
      /* Use a8 as a temporary since a0-a7 may be live.  */
      rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
      emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
      emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
      emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
      emit_move_insn (stack_pointer_rtx, tmp_reg);
    }

  if (frame_pointer_needed)
    {
      /* If xtensa_copy_incoming_a7 already planted a set_frame_ptr
	 insn at the function start, the frame pointer is established
	 there; otherwise emit a plain sp-to-fp copy here.  */
      if (cfun->machine->set_frame_ptr_insn)
	{
	  rtx first, insn;

	  push_topmost_sequence ();
	  first = get_insns ();
	  pop_topmost_sequence ();

	  /* For all instructions prior to set_frame_ptr_insn, replace
	     hard_frame_pointer references with stack_pointer.  */
	  for (insn = first;
	       insn != cfun->machine->set_frame_ptr_insn;
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
					      hard_frame_pointer_rtx,
					      stack_pointer_rtx);
	    }
	}
      else
	emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
    }
}
2305
2306
2307 /* Clear variables at function end. */
2308
2309 void
2310 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2311 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2312 {
2313 xtensa_current_frame_size = 0;
2314 }
2315
2316
2317 rtx
2318 xtensa_return_addr (int count, rtx frame)
2319 {
2320 rtx result, retaddr;
2321
2322 if (count == -1)
2323 retaddr = gen_rtx_REG (Pmode, A0_REG);
2324 else
2325 {
2326 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2327 addr = memory_address (Pmode, addr);
2328 retaddr = gen_reg_rtx (Pmode);
2329 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2330 }
2331
2332 /* The 2 most-significant bits of the return address on Xtensa hold
2333 the register window size. To get the real return address, these
2334 bits must be replaced with the high bits from the current PC. */
2335
2336 result = gen_reg_rtx (Pmode);
2337 emit_insn (gen_fix_return_addr (result, retaddr));
2338 return result;
2339 }
2340
2341
2342 /* Create the va_list data type.
2343
2344 This structure is set up by __builtin_saveregs. The __va_reg field
2345 points to a stack-allocated region holding the contents of the
2346 incoming argument registers. The __va_ndx field is an index
2347 initialized to the position of the first unnamed (variable)
2348 argument. This same index is also used to address the arguments
2349 passed in memory. Thus, the __va_stk field is initialized to point
2350 to the position of the first argument in memory offset to account
2351 for the arguments passed in registers and to account for the size
2352 of the argument registers not being 16-byte aligned. E.G., there
2353 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2354 for the first stack argument to have the maximal alignment of 16
2355 bytes, so we offset the __va_stk address by 32 bytes so that
2356 __va_stk[32] references the first argument on the stack. */
2357
2358 static tree
2359 xtensa_build_builtin_va_list (void)
2360 {
2361 tree f_stk, f_reg, f_ndx, record, type_decl;
2362
2363 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2364 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2365
2366 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2367 ptr_type_node);
2368 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2369 ptr_type_node);
2370 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2371 integer_type_node);
2372
2373 DECL_FIELD_CONTEXT (f_stk) = record;
2374 DECL_FIELD_CONTEXT (f_reg) = record;
2375 DECL_FIELD_CONTEXT (f_ndx) = record;
2376
2377 TREE_CHAIN (record) = type_decl;
2378 TYPE_NAME (record) = type_decl;
2379 TYPE_FIELDS (record) = f_stk;
2380 TREE_CHAIN (f_stk) = f_reg;
2381 TREE_CHAIN (f_reg) = f_ndx;
2382
2383 layout_type (record);
2384 return record;
2385 }
2386
2387
2388 /* Save the incoming argument registers on the stack. Returns the
2389 address of the saved registers. */
2390
2391 static rtx
2392 xtensa_builtin_saveregs (void)
2393 {
2394 rtx gp_regs, dest;
2395 int arg_words = current_function_args_info.arg_words;
2396 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2397
2398 if (gp_left <= 0)
2399 return const0_rtx;
2400
2401 /* Allocate the general-purpose register space. */
2402 gp_regs = assign_stack_local
2403 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2404 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2405
2406 /* Now store the incoming registers. */
2407 dest = change_address (gp_regs, SImode,
2408 plus_constant (XEXP (gp_regs, 0),
2409 arg_words * UNITS_PER_WORD));
2410 cfun->machine->need_a7_copy = true;
2411 cfun->machine->vararg_a7 = true;
2412 move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2413
2414 return XEXP (gp_regs, 0);
2415 }
2416
2417
2418 /* Implement `va_start' for varargs and stdarg. We look at the
2419 current function to fill in an initial va_list. */
2420
2421 void
2422 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2423 {
2424 tree f_stk, stk;
2425 tree f_reg, reg;
2426 tree f_ndx, ndx;
2427 tree t, u;
2428 int arg_words;
2429
2430 arg_words = current_function_args_info.arg_words;
2431
2432 f_stk = TYPE_FIELDS (va_list_type_node);
2433 f_reg = TREE_CHAIN (f_stk);
2434 f_ndx = TREE_CHAIN (f_reg);
2435
2436 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2437 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2438 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2439
2440 /* Call __builtin_saveregs; save the result in __va_reg */
2441 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2442 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2443 TREE_SIDE_EFFECTS (t) = 1;
2444 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2445
2446 /* Set the __va_stk member to ($arg_ptr - 32). */
2447 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2448 u = fold (build (PLUS_EXPR, ptr_type_node, u, build_int_2 (-32, -1)));
2449 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2450 TREE_SIDE_EFFECTS (t) = 1;
2451 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2452
2453 /* Set the __va_ndx member. If the first variable argument is on
2454 the stack, adjust __va_ndx by 2 words to account for the extra
2455 alignment offset for __va_stk. */
2456 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2457 arg_words += 2;
2458 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2459 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2460 TREE_SIDE_EFFECTS (t) = 1;
2461 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2462 }
2463
2464
/* Implement `va_arg'.  Emits RTL that fetches the next variadic
   argument of TYPE from the va_list VALIST and returns an rtx holding
   its address.  The generated code mirrors the layout set up by
   xtensa_va_start: arguments with __va_ndx at or below
   MAX_ARGS_IN_REGISTERS * 4 come from the register-save area
   (__va_reg); everything else comes from the stack area (__va_stk,
   which is biased by 32 bytes).  */

rtx
xtensa_va_arg (tree valist, tree type)
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* Handle complex values as separate real and imaginary parts.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      rtx real_part, imag_part, concat_val, local_copy;

      /* Recurse once per component; each call advances __va_ndx.  */
      real_part = xtensa_va_arg (valist, TREE_TYPE (type));
      imag_part = xtensa_va_arg (valist, TREE_TYPE (type));

      /* Make a copy of the value in case the parts are not contiguous.  */
      real_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), real_part);
      imag_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), imag_part);
      concat_val = gen_rtx_CONCAT (TYPE_MODE (type), real_part, imag_part);

      local_copy = assign_temp (type, 0, 1, 0);
      emit_move_insn (local_copy, concat_val);

      return XEXP (local_copy, 0);
    }

  /* Build COMPONENT_REFs for the three va_list fields (same layout as
     created by xtensa_build_builtin_va_list).  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* va_size = size of TYPE rounded up to a whole number of words.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
		     fold (build (TRUNC_DIV_EXPR, sizetype,
				  fold (build (PLUS_EXPR, sizetype,
					       type_size,
					       size_int (UNITS_PER_WORD - 1))),
				  size_int (UNITS_PER_WORD))),
		     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);


  /* First align __va_ndx if necessary for this arg:

     if (__alignof__ (TYPE) > 4 )
       (AP).__va_ndx = (((AP).__va_ndx + __alignof__ (TYPE) - 1)
			& -__alignof__ (TYPE)); */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      int align = TYPE_ALIGN (type) / BITS_PER_UNIT;
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 (align - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-align, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE); */

  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
	       make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
	 && !MUST_PASS_IN_STACK (type))
	__array = (AP).__va_reg; */

  array = gen_reg_rtx (Pmode);

  lab_over = NULL_RTX;
  /* Types that must be passed in memory never use the register area,
     so the register-vs-stack test is skipped entirely for them.  */
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (MAX_ARGS_IN_REGISTERS
					* UNITS_PER_WORD),
			       GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
	emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = 32 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       } */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GT, const1_rtx, SImode, 0, lab_false2);

  /* NOTE(review): the PLUS is built in sizetype while the MODIFY uses
     integer_type_node; presumably harmless since both are word-sized
     here, but worth confirming against other ports.  */
  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
	       build_int_2 (32, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.  */

  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      /* For sub-word values on big-endian, back up by the actual type
	 size rather than the rounded word size.  */
      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
			       GE, const1_rtx, SImode, 0, lab_use_va_size);

      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
	emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
		     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2653
2654
2655 enum reg_class
2656 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2657 {
2658 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2659 return NO_REGS;
2660
2661 /* Don't use the stack pointer or hard frame pointer for reloads!
2662 The hard frame pointer would normally be OK except that it may
2663 briefly hold an incoming argument in the prologue, and reload
2664 won't know that it is live because the hard frame pointer is
2665 treated specially. */
2666
2667 if (class == AR_REGS || class == GR_REGS)
2668 return RL_REGS;
2669
2670 return class;
2671 }
2672
2673
2674 enum reg_class
2675 xtensa_secondary_reload_class (enum reg_class class,
2676 enum machine_mode mode ATTRIBUTE_UNUSED,
2677 rtx x, int isoutput)
2678 {
2679 int regno;
2680
2681 if (GET_CODE (x) == SIGN_EXTEND)
2682 x = XEXP (x, 0);
2683 regno = xt_true_regnum (x);
2684
2685 if (!isoutput)
2686 {
2687 if (class == FP_REGS && constantpool_mem_p (x))
2688 return RL_REGS;
2689 }
2690
2691 if (ACC_REG_P (regno))
2692 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2693 if (class == ACC_REG)
2694 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2695
2696 return NO_REGS;
2697 }
2698
2699
2700 void
2701 order_regs_for_local_alloc (void)
2702 {
2703 if (!leaf_function_p ())
2704 {
2705 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2706 FIRST_PSEUDO_REGISTER * sizeof (int));
2707 }
2708 else
2709 {
2710 int i, num_arg_regs;
2711 int nxt = 0;
2712
2713 /* Use the AR registers in increasing order (skipping a0 and a1)
2714 but save the incoming argument registers for a last resort. */
2715 num_arg_regs = current_function_args_info.arg_words;
2716 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2717 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2718 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2719 reg_alloc_order[nxt++] = i + num_arg_regs;
2720 for (i = 0; i < num_arg_regs; i++)
2721 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2722
2723 /* List the coprocessor registers in order. */
2724 for (i = 0; i < BR_REG_NUM; i++)
2725 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2726
2727 /* List the FP registers in order for now. */
2728 for (i = 0; i < 16; i++)
2729 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2730
2731 /* GCC requires that we list *all* the registers.... */
2732 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2733 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2734 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2735 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2736
2737 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2738 }
2739 }
2740
2741
2742 /* Some Xtensa targets support multiple bss sections. If the section
2743 name ends with ".bss", add SECTION_BSS to the flags. */
2744
2745 static unsigned int
2746 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2747 {
2748 unsigned int flags = default_section_type_flags (decl, name, reloc);
2749 const char *suffix;
2750
2751 suffix = strrchr (name, '.');
2752 if (suffix && strcmp (suffix, ".bss") == 0)
2753 {
2754 if (!decl || (TREE_CODE (decl) == VAR_DECL
2755 && DECL_INITIAL (decl) == NULL_TREE))
2756 flags |= SECTION_BSS; /* @nobits */
2757 else
2758 warning ("only uninitialized variables can be placed in a "
2759 ".bss section");
2760 }
2761
2762 return flags;
2763 }
2764
2765
/* The literal pool stays with the function.  */

static void
xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
			   rtx x ATTRIBUTE_UNUSED,
			   unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  /* Always emit constant-pool entries into the current function's own
     section, ignoring mode/alignment, so the literal pool stays within
     reach of the instructions that load from it.  */
  function_section (current_function_decl);
}
2775
2776
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.

   Costs are expressed on the usual COSTS_N_INSNS scale except for a
   few constant cases that use small raw values to bias instruction
   selection; large values (e.g. 50) steer the optimizers away from
   operations that would expand to library calls.  */

static bool
xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST_INT:
      /* The cost of an integer constant depends on the context in
	 which it appears (OUTER_CODE): many Xtensa instructions have
	 immediate forms that make small constants free.  */
      switch (outer_code)
	{
	case SET:
	  /* movi handles 12-bit signed immediates.  */
	  if (xtensa_simm12b (INTVAL (x)))
	    {
	      *total = 4;
	      return true;
	    }
	  break;
	case PLUS:
	  /* addi/addmi immediate ranges.  */
	  if (xtensa_simm8 (INTVAL (x))
	      || xtensa_simm8x256 (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case AND:
	  /* extui can implement AND with a contiguous mask.  */
	  if (xtensa_mask_immediate (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case COMPARE:
	  /* Branch instructions compare against 0 or b4const values.  */
	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
	    {
	      *total = 0;
	      return true;
	    }
	  break;
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATE:
	case ROTATERT:
	  /* No way to tell if X is the 2nd operand so be conservative.  */
	default: break;
	}
      /* Fallback costs when the context gives no immediate form.  */
      if (xtensa_simm12b (INTVAL (x)))
	*total = 5;
      else if (TARGET_CONST16)
	*total = COSTS_N_INSNS (2);
      else
	*total = 6;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Symbolic constants: const16 pair or a literal-pool load.  */
      if (TARGET_CONST16)
	*total = COSTS_N_INSNS (2);
      else
	*total = 5;
      return true;

    case CONST_DOUBLE:
      /* Double-word constants take twice the const16 sequence.  */
      if (TARGET_CONST16)
	*total = COSTS_N_INSNS (4);
      else
	*total = 7;
      return true;

    case MEM:
      {
	/* One load/store per word; double the cost if the address is
	   not directly encodable and must be reloaded.  */
	int num_words =
	  (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;

	if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
	  *total = COSTS_N_INSNS (num_words);
	else
	  *total = COSTS_N_INSNS (2*num_words);
	return true;
      }

    case FFS:
      /* Cheap only with the NSA (normalization shift amount) option.  */
      *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
      return true;

    case NOT:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
      return true;

    case AND:
    case IOR:
    case XOR:
      /* DImode logical ops are done as two word operations.  */
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (2);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode shifts become library calls.  */
      if (GET_MODE (x) == DImode)
	*total = COSTS_N_INSNS (50);
      else
	*total = COSTS_N_INSNS (1);
      return true;

    case ABS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (4);
	return true;
      }

    case PLUS:
    case MINUS:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else
	  *total = COSTS_N_INSNS (1);
	return true;
      }

    case NEG:
      *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
      return true;

    case MULT:
      {
	/* Multiply cost depends on which multiplier option (if any)
	   the target provides; otherwise it is a library call.  */
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
	else if (xmode == DFmode || xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_MUL32)
	  *total = COSTS_N_INSNS (4);
	else if (TARGET_MAC16)
	  *total = COSTS_N_INSNS (16);
	else if (TARGET_MUL16)
	  *total = COSTS_N_INSNS (12);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case DIV:
    case MOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == SFmode)
	  {
	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
	    return true;
	  }
	else if (xmode == DFmode)
	  {
	    *total = COSTS_N_INSNS (50);
	    return true;
	  }
      }
      /* Fall through.  */

    case UDIV:
    case UMOD:
      {
	enum machine_mode xmode = GET_MODE (x);
	if (xmode == DImode)
	  *total = COSTS_N_INSNS (50);
	else if (TARGET_DIV32)
	  *total = COSTS_N_INSNS (32);
	else
	  *total = COSTS_N_INSNS (50);
	return true;
      }

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
      else
	*total = COSTS_N_INSNS (50);
      return true;

    case SMIN:
    case UMIN:
    case SMAX:
    case UMAX:
      *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
      return true;

    case SIGN_EXTRACT:
    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
      return true;

    case ZERO_EXTRACT:
    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      /* Let the caller recurse into subexpressions.  */
      return false;
    }
}
2993
2994 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2995
2996 static bool
2997 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2998 {
2999 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3000 > 4 * UNITS_PER_WORD);
3001 }
3002
3003 #include "gt-xtensa.h"