]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/xtensa/xtensa.c
Merge from pch-branch up to tag pch-commit-20020603.
[thirdparty/gcc.git] / gcc / config / xtensa / xtensa.c
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "machmode.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "optabs.h"
45 #include "output.h"
46 #include "libfuncs.h"
47 #include "target.h"
48 #include "target-def.h"
49
50 /* Enumeration for all of the relational tests, so that we can build
51 arrays indexed by the test type, and not worry about the order
52 of EQ, NE, etc. */
53
54 enum internal_test {
55 ITEST_EQ,
56 ITEST_NE,
57 ITEST_GT,
58 ITEST_GE,
59 ITEST_LT,
60 ITEST_LE,
61 ITEST_GTU,
62 ITEST_GEU,
63 ITEST_LTU,
64 ITEST_LEU,
65 ITEST_MAX
66 };
67
68 /* Cached operands, and operator to compare for use in set/branch on
69 condition codes. */
70 rtx branch_cmp[2];
71
72 /* what type of branch to use */
73 enum cmp_type branch_type;
74
75 /* Array giving truth value on whether or not a given hard register
76 can support a given mode. */
77 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
78
79 /* Current frame size calculated by compute_frame_size. */
80 unsigned xtensa_current_frame_size;
81
82 /* Tables of ld/st opcode names for block moves */
83 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
84 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
85 #define LARGEST_MOVE_RATIO 15
86
87 /* Define the structure for the machine field in struct function. */
88 struct machine_function GTY(())
89 {
90 int accesses_prev_frame;
91 };
92
93 /* Vector, indexed by hard register number, which contains 1 for a
94 register that is allowable in a candidate for leaf function
95 treatment. */
96
97 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
98 {
99 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
100 1, 1, 1,
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1
103 };
104
105 /* Map hard register number to register class */
106 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107 {
108 GR_REGS, SP_REG, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 AR_REGS, AR_REGS, BR_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 ACC_REG,
118 };
119
120 /* Map register constraint character to register class. */
121 enum reg_class xtensa_char_to_class[256] =
122 {
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 };
188
189 static int b4const_or_zero PARAMS ((int));
190 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
191 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
192 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
193 static rtx gen_conditional_move PARAMS ((rtx));
194 static rtx fixup_subreg_mem PARAMS ((rtx x));
195 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
196 static struct machine_status * xtensa_init_machine_status PARAMS ((void));
197 static void printx PARAMS ((FILE *, signed int));
198 static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
199 unsigned HOST_WIDE_INT));
200 static void xtensa_encode_section_info PARAMS ((tree, int));
201
202 static rtx frame_size_const;
203 static int current_function_arg_words;
204 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
205 REG_ALLOC_ORDER;
206 \f
207 /* This macro generates the assembly code for function entry.
208 FILE is a stdio stream to output the code to.
209 SIZE is an int: how many units of temporary storage to allocate.
210 Refer to the array 'regs_ever_live' to determine which registers
211 to save; 'regs_ever_live[I]' is nonzero if register number I
212 is ever used in the function. This macro is responsible for
213 knowing which registers should not be saved even if used. */
214
215 #undef TARGET_ASM_FUNCTION_PROLOGUE
216 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
217
218 /* This macro generates the assembly code for function exit,
219 on machines that need it. If FUNCTION_EPILOGUE is not defined
220 then individual return instructions are generated for each
221 return statement. Args are same as for FUNCTION_PROLOGUE. */
222
223 #undef TARGET_ASM_FUNCTION_EPILOGUE
224 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
225
226 /* These hooks specify assembly directives for creating certain kinds
227 of integer object. */
228
229 #undef TARGET_ASM_ALIGNED_SI_OP
230 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
231
232 #undef TARGET_ASM_SELECT_RTX_SECTION
233 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
234 #undef TARGET_ENCODE_SECTION_INFO
235 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
236
237 struct gcc_target targetm = TARGET_INITIALIZER;
238 \f
239
240 /*
241 * Functions to test Xtensa immediate operand validity.
242 */
243
/* Return 1 if V is in the "b4constu" set: the unsigned constants
   encodable in a 4-bit branch-immediate field.  */

int
xtensa_b4constu (v)
     int v;
{
  /* The set is 2..8, 10, 12, the powers of two 16..256, plus the two
     large values 32768 and 65536.  */
  if (v == 32768 || v == 65536)
    return 1;
  if (v >= 2 && v <= 8)
    return 1;
  if (v == 10 || v == 12 || v == 16)
    return 1;
  return (v == 32 || v == 64 || v == 128 || v == 256);
}
270
/* Return 1 if V is a multiple of 256 representable as a signed 8-bit
   value shifted left by 8 (range -32768..32512).  */

int
xtensa_simm8x256 (v)
     int v;
{
  if (v & 255)
    return 0;
  return v >= -32768 && v <= 32512;
}
277
/* Return 1 if V is a valid ADDI.N-style 4-bit immediate: -1 or
   1..15.  */

int
xtensa_ai4const (v)
     int v;
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
284
/* Return 1 if V fits the signed 7-bit immediate range -32..95.  */

int
xtensa_simm7 (v)
     int v;
{
  return ! (v < -32 || v > 95);
}
291
/* Return 1 if V is in the "b4const" set: the signed constants
   encodable in a 4-bit branch-immediate field.  */

int
xtensa_b4const (v)
     int v;
{
  /* The set is -1, 1..8, 10, 12, and the powers of two 16..256.  */
  if (v == -1)
    return 1;
  if (v >= 1 && v <= 8)
    return 1;
  if (v == 10 || v == 12 || v == 16)
    return 1;
  return (v == 32 || v == 64 || v == 128 || v == 256);
}
318
/* Return 1 if V fits in a signed 8-bit immediate (-128..127).  */

int
xtensa_simm8 (v)
     int v;
{
  return ! (v < -128 || v > 127);
}
325
/* Return 1 if V is in the "tp7" range 7..22.  */

int
xtensa_tp7 (v)
     int v;
{
  if (v < 7)
    return 0;
  return v <= 22;
}
332
/* Return 1 if V is a valid 4-bit scaled-by-4 offset: a multiple of 4
   in 0..60.  */

int
xtensa_lsi4x4 (v)
     int v;
{
  if (v & 3)
    return 0;
  return v >= 0 && v <= 60;
}
339
/* Return 1 if V fits in a signed 12-bit immediate (-2048..2047).  */

int
xtensa_simm12b (v)
     int v;
{
  return ! (v < -2048 || v > 2047);
}
346
/* Return 1 if V fits in an unsigned 8-bit immediate (0..255).  */

int
xtensa_uimm8 (v)
     int v;
{
  return ! (v < 0 || v > 255);
}
353
/* Return 1 if V is an even value in 0..510 (unsigned 8-bit scaled
   by 2).  */

int
xtensa_uimm8x2 (v)
     int v;
{
  if (v & 1)
    return 0;
  return v >= 0 && v <= 510;
}
360
/* Return 1 if V is a multiple of 4 in 0..1020 (unsigned 8-bit scaled
   by 4).  */

int
xtensa_uimm8x4 (v)
     int v;
{
  if (v & 3)
    return 0;
  return v >= 0 && v <= 1020;
}
367
368
369 /* This is just like the standard true_regnum() function except that it
370 works even when reg_renumber is not initialized. */
371
int
xt_true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      /* For a renumbered pseudo, return the hard register reload
         assigned to it; otherwise return the register number as-is.  */
      if (reg_renumber
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      /* Only apply the subreg byte offset when the inner reg resolves
         to a hard register.  */
      int base = xt_true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
					   GET_MODE (SUBREG_REG (x)),
					   SUBREG_BYTE (x), GET_MODE (x));
    }
  /* Not a REG or resolvable SUBREG.  */
  return -1;
}
394
395
/* Nonzero if OP is valid as an add operand: a register, or a constant
   that fits a signed 8-bit immediate or a signed 8-bit multiple of
   256 (see xtensa_simm8 / xtensa_simm8x256).  */

int
add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return (xtensa_simm8 (INTVAL (op)) ||
	    xtensa_simm8x256 (INTVAL (op)));

  return register_operand (op, mode);
}
407
408
/* Nonzero if OP is a register or a constant fitting a signed 8-bit
   immediate.  */

int
arith_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return xtensa_simm8 (INTVAL (op));

  return register_operand (op, mode);
}
419
420
/* Nonzero if OP is a register or a memory operand whose address is
   not in the constant pool.  */

int
nonimmed_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* We cannot use the standard nonimmediate_operand() predicate because
     it includes constant pool memory operands.  */

  if (memory_operand (op, mode))
    return !constantpool_address_p (XEXP (op, 0));

  return register_operand (op, mode);
}
434
435
/* Nonzero if OP is a memory operand outside the constant pool.  */

int
mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* We cannot use the standard memory_operand() predicate because
     it includes constant pool memory operands.  */

  if (memory_operand (op, mode))
    return !constantpool_address_p (XEXP (op, 0));

  return FALSE;
}
449
450
451 int
452 xtensa_valid_move (mode, operands)
453 enum machine_mode mode;
454 rtx *operands;
455 {
456 /* Either the destination or source must be a register, and the
457 MAC16 accumulator doesn't count. */
458
459 if (register_operand (operands[0], mode))
460 {
461 int dst_regnum = xt_true_regnum (operands[0]);
462
463 /* The stack pointer can only be assigned with a MOVSP opcode. */
464 if (dst_regnum == STACK_POINTER_REGNUM)
465 return (mode == SImode
466 && register_operand (operands[1], mode)
467 && !ACC_REG_P (xt_true_regnum (operands[1])));
468
469 if (!ACC_REG_P (dst_regnum))
470 return true;
471 }
472 if (register_operand (operands[1], mode))
473 {
474 int src_regnum = xt_true_regnum (operands[1]);
475 if (!ACC_REG_P (src_regnum))
476 return true;
477 }
478 return FALSE;
479 }
480
481
/* Nonzero if OP is a register or a constant usable as an immediate
   bit mask (a block of low-order one bits; see
   xtensa_mask_immediate).  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return xtensa_mask_immediate (INTVAL (op));

  return register_operand (op, mode);
}
492
493
/* Nonzero if OP is a constant field width W such that (1 << W) - 1 is
   a valid immediate mask.
   NOTE(review): assumes INTVAL (op) is smaller than the width of int;
   larger shift counts are undefined -- confirm callers guarantee this.  */

int
extui_fldsz_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return ((GET_CODE (op) == CONST_INT)
	  && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
}
502
503
/* Operand predicate for sign-extension sources: with the SEXT option
   any register or non-constant-pool memory operand is accepted;
   without it only memory is accepted.  */

int
sext_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_SEXT)
    return nonimmed_operand (op, mode);
  return mem_operand (op, mode);
}
513
514
/* Nonzero if OP is a constant sign-extension field size, i.e.,
   OP - 1 falls in the range accepted by xtensa_tp7 (7..22).  */

int
sext_fldsz_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
}
522
523
/* Nonzero if OP is the constant bit number of the least significant
   bit of a field: BITS_PER_WORD-1 under big-endian bit numbering,
   0 otherwise.  */

int
lsbitnum_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      return (BITS_BIG_ENDIAN
	      ? (INTVAL (op) == BITS_PER_WORD-1)
	      : (INTVAL (op) == 0));
    }
  return FALSE;
}
537
538
539 static int
540 b4const_or_zero (v)
541 int v;
542 {
543 if (v == 0)
544 return TRUE;
545 return xtensa_b4const (v);
546 }
547
548
/* Nonzero if OP is a register or a constant valid as a signed
   conditional-branch immediate (zero or b4const).  */

int
branch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return b4const_or_zero (INTVAL (op));

  return register_operand (op, mode);
}
559
560
/* Nonzero if OP is a register or a constant valid as an unsigned
   conditional-branch immediate (b4constu).  */

int
ubranch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return xtensa_b4constu (INTVAL (op));

  return register_operand (op, mode);
}
571
572
/* Nonzero if OP is a valid call target: a register (excluding the
   frame pointer through the last virtual register, and the argument
   pointer) or a constant address.  */

int
call_insn_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Accept hard registers and pseudos, but reject the arg pointer and
     the frame/virtual register range.  */
  if ((GET_CODE (op) == REG)
      && (op != arg_pointer_rtx)
      && ((REGNO (op) < FRAME_POINTER_REGNUM)
	  || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
    return TRUE;

  if (CONSTANT_ADDRESS_P (op))
    {
      /* Direct calls only allowed to static functions with PIC.  */
      return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
			    && SYMBOL_REF_FLAG (op)));
    }

  return FALSE;
}
593
594
/* Nonzero if OP is a valid move source/destination: a register, a
   CONSTANT_P_RTX, a constant fitting a signed 12-bit immediate, or a
   MEM with a valid address.  */

int
move_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (register_operand (op, mode))
    return TRUE;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return TRUE;

  if (GET_CODE (op) == CONST_INT)
    return xtensa_simm12b (INTVAL (op));

  if (GET_CODE (op) == MEM)
    return memory_address_p (mode, XEXP (op, 0));

  return FALSE;
}
616
617
/* Nonzero if OP is a MEM addressed by a base register alone, or by
   base plus a constant offset that is a multiple of 4 in 0..60.  */

int
smalloffset_mem_p (op)
     rtx op;
{
  if (GET_CODE (op) == MEM)
    {
      rtx addr = XEXP (op, 0);
      if (GET_CODE (addr) == REG)
	return REG_OK_FOR_BASE_P (addr);
      if (GET_CODE (addr) == PLUS)
	{
	  /* The constant may be either operand of the PLUS.  */
	  rtx offset = XEXP (addr, 0);
	  if (GET_CODE (offset) != CONST_INT)
	    offset = XEXP (addr, 1);
	  if (GET_CODE (offset) != CONST_INT)
	    return FALSE;
	  return xtensa_lsi4x4 (INTVAL (offset));
	}
    }
  return FALSE;
}
639
640
/* Nonzero if OP and the word 4 bytes beyond it both qualify as
   small-offset memory references.  */

int
smalloffset_double_mem_p (op)
     rtx op;
{
  if (!smalloffset_mem_p (op))
    return FALSE;
  return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
}
649
650
/* Nonzero if ADDR references the constant pool: either a constant
   pool SYMBOL_REF directly, or (CONST (PLUS sym offset)) with a
   word-aligned constant offset.  */

int
constantpool_address_p (addr)
     rtx addr;
{
  rtx sym = addr;

  if (GET_CODE (addr) == CONST)
    {
      rtx offset;

      /* only handle (PLUS (SYM, OFFSET)) form */
      addr = XEXP (addr, 0);
      if (GET_CODE (addr) != PLUS)
	return FALSE;

      /* make sure the address is word aligned */
      offset = XEXP (addr, 1);
      if ((GET_CODE (offset) != CONST_INT)
	  || ((INTVAL (offset) & 3) != 0))
	return FALSE;

      sym = XEXP (addr, 0);
    }

  if ((GET_CODE (sym) == SYMBOL_REF)
      && CONSTANT_POOL_ADDRESS_P (sym))
    return TRUE;
  return FALSE;
}
680
681
/* Nonzero if OP is a MEM whose address is in the constant pool.  */

int
constantpool_mem_p (op)
     rtx op;
{
  if (GET_CODE (op) == MEM)
    return constantpool_address_p (XEXP (op, 0));
  return FALSE;
}
690
691
/* Nonzero if OP is a register or a (possibly subreg'd) MEM with a
   valid address -- i.e., movable without materializing a constant.  */

int
non_const_move_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (register_operand (op, mode))
    return 1;
  /* Look through a SUBREG to check the underlying MEM.  */
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) == MEM)
    return memory_address_p (mode, XEXP (op, 0));
  return FALSE;
}
705
706
707 /* Accept the floating point constant 1 in the appropriate mode. */
708
int
const_float_1_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE d;
  static REAL_VALUE_TYPE onedf;
  static REAL_VALUE_TYPE onesf;
  static int one_initialized;

  /* Only CONST_DOUBLEs of matching SFmode/DFmode qualify.  */
  if ((GET_CODE (op) != CONST_DOUBLE)
      || (mode != GET_MODE (op))
      || (mode != DFmode && mode != SFmode))
    return FALSE;

  REAL_VALUE_FROM_CONST_DOUBLE (d, op);

  /* Lazily construct 1.0 in both supported modes on first use.  */
  if (! one_initialized)
    {
      onedf = REAL_VALUE_ATOF ("1.0", DFmode);
      onesf = REAL_VALUE_ATOF ("1.0", SFmode);
      one_initialized = TRUE;
    }

  if (mode == DFmode)
    return REAL_VALUES_EQUAL (d, onedf);
  else
    return REAL_VALUES_EQUAL (d, onesf);
}
738
739
/* Nonzero if OP is a constant offset valid for an SFmode memory
   access.  */

int
fpmem_offset_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    return xtensa_mem_offset (INTVAL (op), SFmode);
  return 0;
}
749
750
/* Emit a sign extension of SRC into DST by shifting left and then
   arithmetic-right by the number of bits SRC's mode is narrower than
   a word.  */

void
xtensa_extend_reg (dst, src)
     rtx dst;
     rtx src;
{
  rtx temp = gen_reg_rtx (SImode);
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));

  /* generate paradoxical subregs as needed so that the modes match */
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);

  emit_insn (gen_ashlsi3 (temp, src, shift));
  emit_insn (gen_ashrsi3 (dst, temp, shift));
}
766
767
/* Load constant SRC into DST by placing it in the constant pool and
   emitting an SImode load, wrapping in subregs as needed when DST's
   mode is not SImode.  */

void
xtensa_load_constant (dst, src)
     rtx dst;
     rtx src;
{
  enum machine_mode mode = GET_MODE (dst);
  src = force_const_mem (SImode, src);

  /* PC-relative loads are always SImode so we have to add a SUBREG if that
     is not the desired mode */

  if (mode != SImode)
    {
      if (register_operand (dst, mode))
	dst = simplify_gen_subreg (SImode, dst, mode, 0);
      else
	{
	  /* Non-register destination: load into a register first and
	     move its low part.  */
	  src = force_reg (SImode, src);
	  src = gen_lowpart_SUBREG (mode, src);
	}
    }

  emit_move_insn (dst, src);
}
792
793
/* Nonzero if X is a comparison operator directly supported by the
   signed branch patterns: EQ, NE, LT, or GE in MODE.  */

int
branch_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;

  switch (GET_CODE (x))
    {
    case EQ:
    case NE:
    case LT:
    case GE:
      return TRUE;
    default:
      break;
    }
  return FALSE;
}
814
815
/* Nonzero if X is a comparison operator supported by the unsigned
   branch patterns: LTU or GEU in MODE.  */

int
ubranch_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;

  switch (GET_CODE (x))
    {
    case LTU:
    case GEU:
      return TRUE;
    default:
      break;
    }
  return FALSE;
}
834
835
/* Nonzero if X is an equality comparison operator (EQ or NE) in
   MODE.  */

int
boolean_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;

  switch (GET_CODE (x))
    {
    case EQ:
    case NE:
      return TRUE;
    default:
      break;
    }
  return FALSE;
}
854
855
/* Return 1 if V is a valid immediate bit mask: a block of between 1
   and MAX_MASK_SIZE consecutive low-order one bits (1, 3, 7, ...,
   0xffff).  */

int
xtensa_mask_immediate (v)
     int v;
{
#define MAX_MASK_SIZE 16
  int width;

  for (width = 1; width <= MAX_MASK_SIZE; width++)
    if (v == (int) ((1U << width) - 1))
      return 1;

  return 0;
}
874
875
/* Nonzero if constant offset V is valid for a memory access of MODE,
   using the unsigned-immediate range appropriate to the access
   width (see the xtensa_uimm8* predicates).  */

int
xtensa_mem_offset (v, mode)
     unsigned v;
     enum machine_mode mode;
{
  switch (mode)
    {
    case BLKmode:
      /* Handle the worst case for block moves.  See xtensa_expand_block_move
	 where we emit an optimized block move operation if the block can be
	 moved in < "move_ratio" pieces.  The worst case is when the block is
	 aligned but has a size of (3 mod 4) (does this happen?) so that the
	 last piece requires a byte load/store. */
      return (xtensa_uimm8 (v) &&
	      xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));

    case QImode:
      return xtensa_uimm8 (v);

    case HImode:
      return xtensa_uimm8x2 (v);

    case DFmode:
      /* A DFmode access is split into two word accesses.  */
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));

    default:
      break;
    }

  /* All other modes use word-sized (4-byte aligned) offsets.  */
  return xtensa_uimm8x4 (v);
}
907
908
909 /* Make normal rtx_code into something we can index from an array */
910
static enum internal_test
map_test_to_internal_test (test_code)
     enum rtx_code test_code;
{
  /* Defaults to ITEST_MAX for any rtx_code that has no mapping.  */
  enum internal_test test = ITEST_MAX;

  switch (test_code)
    {
    default:			break;
    case EQ:  test = ITEST_EQ;  break;
    case NE:  test = ITEST_NE;  break;
    case GT:  test = ITEST_GT;  break;
    case GE:  test = ITEST_GE;  break;
    case LT:  test = ITEST_LT;  break;
    case LE:  test = ITEST_LE;  break;
    case GTU: test = ITEST_GTU; break;
    case GEU: test = ITEST_GEU; break;
    case LTU: test = ITEST_LTU; break;
    case LEU: test = ITEST_LEU; break;
    }

  return test;
}
934
935
936 /* Generate the code to compare two integer values. The return value is
937 the comparison expression. */
938
static rtx
gen_int_relational (test_code, cmp0, cmp1, p_invert)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
     int *p_invert;		/* whether branch needs to reverse its test */
{
  struct cmp_info {
    enum rtx_code test_code;	/* test code to use in insn */
    int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  /* Table mapping each internal test to the branch-supported codes
     (EQ/NE/LT/GE/LTU/GEU), with operand-swap and inversion flags.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* EQ  */
    { NE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* NE  */

    { LT,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* GT  */
    { GE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* GE  */
    { LT,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* LT  */
    { GE,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* LE  */

    { LTU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* GTU */
    { GEU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* GEU */
    { LTU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* LTU */
    { GEU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  if (test == ITEST_MAX)
    abort ();

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */

      /* NOTE(review): for signed VALUE near the type limits this check
	 relies on wraparound of value + const_add, which is undefined
	 for signed arithmetic -- consider an overflow-safe formulation.  */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	{
	  cmp1 = force_reg (mode, cmp1);
	}
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);

  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
}
1033
1034
1035 /* Generate the code to compare two float values. The return value is
1036 the comparison expression. */
1037
static rtx
gen_float_relational (test_code, cmp0, cmp1)
     enum rtx_code test_code;	/* relational test (EQ, etc) */
     rtx cmp0;			/* first operand to compare */
     rtx cmp1;			/* second operand to compare */
{
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
  rtx brtmp;
  int reverse_regs, invert;

  /* NE is implemented as an inverted EQ; GT/GE by swapping the
     operands of LT/LE.  */
  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  /* Emit the compare into the FP condition-code register, then return
     a test of that register against zero.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}
1073
1074
/* Expand a conditional branch to operands[0] using TEST_CODE and the
   comparison operands previously stashed in branch_cmp/branch_type.
   DFmode comparisons are not supported.  */

void
xtensa_expand_conditional_branch (operands, test_code)
     rtx *operands;
     enum rtx_code test_code;
{
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (type)
    {
    case CMP_DF:
    default:
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));

    case CMP_SI:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case CMP_SF:
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch.  */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  label2 = pc_rtx;

  /* An inverted test branches on the fall-through arm instead.  */
  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
						     label1,
						     label2)));
}
1122
1123
/* Convert the condition CMP (with operands in branch_cmp/branch_type)
   into a comparison rtx usable by the conditional-move patterns.
   Returns 0 if the condition cannot be handled.  */

static rtx
gen_conditional_move (cmp)
     rtx cmp;
{
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];

  if (branch_type == CMP_SI)
    {
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
	 produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
	{
	  code = GE;
	  op1 = const0_rtx;
	}
      cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;
	    }

	  /* if not comparing against zero, emit a comparison (subtract) */
	  if (op1 != const0_rtx)
	    {
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
	      op1 = const0_rtx;
	    }
	}
      else if (branch_operator (cmp, VOIDmode))
	{
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	    {
	      op0 = op1;
	      op1 = const0_rtx;

	      /* Swapping the operands reverses the sense of LT/GE.  */
	      switch (code)
		{
		case LT: code = GE; break;
		case GE: code = LT; break;
		default: abort ();
		}
	    }

	  /* LT/GE patterns only compare against zero.  */
	  if (op1 != const0_rtx)
	    return 0;
	}
      else
	return 0;

      return gen_rtx (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);

  return 0;
}
1196
1197
/* Expand a conditional move: operands[0] receives operands[2] if the
   comparison in operands[1] holds, else operands[3].  ISFLT selects
   the SFmode patterns.  Returns 1 on success, 0 if the condition
   cannot be handled.  */

int
xtensa_expand_conditional_move (operands, isflt)
    rtx *operands;
    int isflt;
{
  rtx cmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (operands[1])))
    return 0;

  /* Pick the pattern variant matching the comparison type.  */
  if (isflt)
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsfcc_internal0
	      : gen_movsfcc_internal1);
  else
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsicc_internal0
	      : gen_movsicc_internal1);

  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
		     operands[2], operands[3], cmp));
  return 1;
}
1222
1223
/* Expand a "store condition code" operation: set operands[0] to the
   target's true value or zero via a conditional move, according to
   the comparison in operands[1].  Returns 1 on success, 0 if the
   condition cannot be handled.  */

int
xtensa_expand_scc (operands)
     rtx *operands;
{
  rtx dest = operands[0];
  rtx cmp = operands[1];
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));

  if (!(cmp = gen_conditional_move (cmp)))
    return 0;

  /* Materialize the two possible results in temporaries.  */
  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));

  gen_fn = (branch_type == CMP_SI
	    ? gen_movsicc_internal0
	    : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
  return 1;
}
1247
1248
1249 /* Emit insns to move operands[1] into operands[0].
1250
1251 Return 1 if we have written out everything that needs to be done to
1252 do the move. Otherwise, return 0 and the caller will emit the move
1253 normally. */
1254
int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* Constants that cannot be loaded as immediates go through the
     constant pool.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);

      /* Check if this move is copying an incoming argument in a7.  If
	 so, emit the move, followed by the special "set_frame_ptr"
	 unspec_volatile insn, at the very beginning of the function.
	 This is necessary because the register allocator will ignore
	 conflicts with a7 and may assign some other pseudo to a7.  If
	 that pseudo was assigned prior to this move, it would clobber
	 the incoming argument in a7.  By copying the argument out of
	 a7 as the very first thing, and then immediately following
	 that with an unspec_volatile to keep the scheduler away, we
	 should avoid any problems.  */

      if (a7_overlap_mentioned_p (operands[1]))
	{
	  rtx mov;
	  switch (mode)
	    {
	    case SImode:
	      mov = gen_movsi_internal (operands[0], operands[1]);
	      break;
	    case HImode:
	      mov = gen_movhi_internal (operands[0], operands[1]);
	      break;
	    case QImode:
	      mov = gen_movqi_internal (operands[0], operands[1]);
	      break;
	    default:
	      abort ();
	    }

	  /* Insert the instructions before any other argument copies.
	     (The set_frame_ptr insn comes _after_ the move, so push it
	     out first.)  */
	  push_topmost_sequence ();
	  emit_insn_after (gen_set_frame_ptr (), get_insns ());
	  emit_insn_after (mov, get_insns ());
	  pop_topmost_sequence ();

	  return 1;
	}
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload.  So we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1325
1326 static rtx
1327 fixup_subreg_mem (x)
1328 rtx x;
1329 {
1330 if (GET_CODE (x) == SUBREG
1331 && GET_CODE (SUBREG_REG (x)) == REG
1332 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1333 {
1334 rtx temp =
1335 gen_rtx_SUBREG (GET_MODE (x),
1336 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1337 SUBREG_BYTE (x));
1338 x = alter_subreg (&temp);
1339 }
1340 return x;
1341 }
1342
1343
1344 /* Try to expand a block move operation to an RTL block move instruction.
1345 If not optimizing or if the block size is not a constant or if the
1346 block is small, the expansion fails and GCC falls back to calling
1347 memcpy().
1348
1349 operands[0] is the destination
1350 operands[1] is the source
1351 operands[2] is the length
1352 operands[3] is the alignment */
1353
int
xtensa_expand_block_move (operands)
     rtx *operands;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  int num_pieces, move_ratio;

  /* If this is not a fixed size move, just call memcpy */
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
    return 0;

  /* Anything to move?  Returning 1 with nothing emitted tells the
     caller the (empty) move is fully handled.  */
  if (bytes <= 0)
    return 1;

  if (align > MOVE_MAX)
    align = MOVE_MAX;

  /* decide whether to expand inline based on the optimization level */
  move_ratio = 4;
  if (optimize > 2)
    move_ratio = LARGEST_MOVE_RATIO;
  /* Estimate of load/store pairs needed; not exact, since the tail is
     not necessarily moved one byte at a time.  */
  num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
  if (num_pieces >= move_ratio)
    return 0;

  /* make sure the memory addresses are valid */
  operands[0] = validize_mem (dest);
  operands[1] = validize_mem (src);

  emit_insn (gen_movstrsi_internal (operands[0], operands[1],
				    operands[2], operands[3]));
  return 1;
}
1391
1392
1393 /* Emit a sequence of instructions to implement a block move, trying
1394 to hide load delay slots as much as possible. Load N values into
1395 temporary registers, store those N values, and repeat until the
1396 complete block has been moved. N=delay_slots+1 */
1397
/* One buffered load or store: the assembler template plus its two
   operands (destination/source register and memory reference).
   NOTE(review): "template" is a reserved word in C++; harmless here
   since this file is compiled as C.  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1402
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;
     rtx *tmpregs;	/* scratch registers, at least delay_slots+1 of them */
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  /* Each chunk is N loads followed by N stores, N = delay_slots + 1,
     so every load's delay slots are filled by the following loads.  */
  chunk_size = delay_slots + 1;

  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest item we can both load and store.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      /* Final, partial chunk.  */
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1493
1494
1495 static enum machine_mode
1496 xtensa_find_mode_for_size (item_size)
1497 unsigned item_size;
1498 {
1499 enum machine_mode mode, tmode;
1500
1501 while (1)
1502 {
1503 mode = VOIDmode;
1504
1505 /* find mode closest to but not bigger than item_size */
1506 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1507 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1508 if (GET_MODE_SIZE (tmode) <= item_size)
1509 mode = tmode;
1510 if (mode == VOIDmode)
1511 abort ();
1512
1513 item_size = GET_MODE_SIZE (mode);
1514
1515 if (xtensa_ld_opcodes[(int) mode]
1516 && xtensa_st_opcodes[(int) mode])
1517 break;
1518
1519 /* cannot load & store this mode; try something smaller */
1520 item_size -= 1;
1521 }
1522
1523 return mode;
1524 }
1525
1526
/* Expand a nonlocal goto.  OPERANDS[1] is the handler address and
   OPERANDS[3] is the frame pointer of the containing function.  */

void
xtensa_expand_nonlocal_goto (operands)
     rtx *operands;
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  /* The handler address may refer to the containing function's frame;
     rewrite any virtual_stack_vars_rtx there to that frame pointer.  */
  goto_handler = replace_rtx (copy_rtx (goto_handler),
			      virtual_stack_vars_rtx,
			      containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
		     0, VOIDmode, 2,
		     containing_fp, Pmode,
		     goto_handler, Pmode);
}
1549
1550
/* Allocate a zero-initialized per-function machine_function record;
   installed as the init_machine_status hook by override_options.  */
static struct machine_function *
xtensa_init_machine_status ()
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}
1556
1557
/* Record that this function inspects an outer stack frame, and call
   the libgcc helper "__xtensa_libgcc_window_spill" (presumably spills
   live register windows to the stack — confirm against libgcc).  */
void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
  cfun->machine->accesses_prev_frame = 1;

  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1568
1569
/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
   a comment showing where the end of the loop is.  However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there.  If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop).  If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop.  When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need a
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken.  */
1582
1583 void
1584 xtensa_emit_loop_end (insn, operands)
1585 rtx insn;
1586 rtx *operands;
1587 {
1588 char done = 0;
1589
1590 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1591 {
1592 switch (GET_CODE (insn))
1593 {
1594 case NOTE:
1595 case BARRIER:
1596 break;
1597
1598 case CODE_LABEL:
1599 output_asm_insn ("nop.n", operands);
1600 done = 1;
1601 break;
1602
1603 default:
1604 {
1605 rtx body = PATTERN (insn);
1606
1607 if (GET_CODE (body) == JUMP_INSN)
1608 {
1609 output_asm_insn ("nop.n", operands);
1610 done = 1;
1611 }
1612 else if ((GET_CODE (body) != USE)
1613 && (GET_CODE (body) != CLOBBER))
1614 done = 1;
1615 }
1616 break;
1617 }
1618 }
1619
1620 output_asm_insn ("# loop end for %0", operands);
1621 }
1622
1623
1624 char *
1625 xtensa_emit_call (callop, operands)
1626 int callop;
1627 rtx *operands;
1628 {
1629 static char result[64];
1630 rtx tgt = operands[callop];
1631
1632 if (GET_CODE (tgt) == CONST_INT)
1633 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1634 else if (register_operand (tgt, VOIDmode))
1635 sprintf (result, "callx8\t%%%d", callop);
1636 else
1637 sprintf (result, "call8\t%%%d", callop);
1638
1639 return result;
1640 }
1641
1642
1643 /* Return the stabs register number to use for 'regno'. */
1644
1645 int
1646 xtensa_dbx_register_number (regno)
1647 int regno;
1648 {
1649 int first = -1;
1650
1651 if (GP_REG_P (regno)) {
1652 regno -= GP_REG_FIRST;
1653 first = 0;
1654 }
1655 else if (BR_REG_P (regno)) {
1656 regno -= BR_REG_FIRST;
1657 first = 16;
1658 }
1659 else if (FP_REG_P (regno)) {
1660 regno -= FP_REG_FIRST;
1661 /* The current numbering convention is that TIE registers are
1662 numbered in libcc order beginning with 256. We can't guarantee
1663 that the FP registers will come first, so the following is just
1664 a guess. It seems like we should make a special case for FP
1665 registers and give them fixed numbers < 256. */
1666 first = 256;
1667 }
1668 else if (ACC_REG_P (regno))
1669 {
1670 first = 0;
1671 regno = -1;
1672 }
1673
1674 /* When optimizing, we sometimes get asked about pseudo-registers
1675 that don't represent hard registers. Return 0 for these. */
1676 if (first == -1)
1677 return 0;
1678
1679 return first + regno;
1680 }
1681
1682
1683 /* Argument support functions. */
1684
1685 /* Initialize CUMULATIVE_ARGS for a function. */
1686
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  /* Arguments are tracked by word index; start at word 0.  */
  cum->arg_words = 0;
}
1695
1696 /* Advance the argument to the next argument position. */
1697
1698 void
1699 function_arg_advance (cum, mode, type)
1700 CUMULATIVE_ARGS *cum; /* current arg information */
1701 enum machine_mode mode; /* current arg mode */
1702 tree type; /* type of the argument or 0 if lib support */
1703 {
1704 int words, max;
1705 int *arg_words;
1706
1707 arg_words = &cum->arg_words;
1708 max = MAX_ARGS_IN_REGISTERS;
1709
1710 words = (((mode != BLKmode)
1711 ? (int) GET_MODE_SIZE (mode)
1712 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1713
1714 if ((*arg_words + words > max) && (*arg_words < max))
1715 *arg_words = max;
1716
1717 *arg_words += words;
1718 }
1719
1720
1721 /* Return an RTL expression containing the register for the given mode,
1722 or 0 if the argument is to be passed on the stack. */
1723
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Size of the argument in words, rounded up.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Double-word-aligned arguments start at an even word index.  Note
     that this updates *arg_words even when the argument ends up being
     passed on the stack.  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* Argument does not fit entirely in registers: pass on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL. */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
1779
1780
/* Validate target options and initialize target-dependent tables:
   opcode names for block moves, constraint-letter register classes,
   the hard-regno/mode table, the machine_function hook, and PIC.  */

void
override_options ()
{
  int regno;
  enum machine_mode mode;

  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; letters tied to an
     optional ISA feature map to NO_REGS when the feature is absent.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start in an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (XTENSA_ALWAYS_PIC)
    {
      if (flag_pic)
	warning ("-f%s ignored (all code is position independent)",
		 (flag_pic > 1 ? "PIC" : "pic"));
      flag_pic = 1;
    }
  if (flag_pic > 1)
    flag_pic = 1;
}
1850
1851
1852 /* A C compound statement to output to stdio stream STREAM the
1853 assembler syntax for an instruction operand X. X is an RTL
1854 expression.
1855
1856 CODE is a value that can be used to specify one of several ways
1857 of printing the operand. It is used when identical operands
1858 must be printed differently depending on the context. CODE
1859 comes from the '%' specification that was used to request
1860 printing of the operand. If the specification was just '%DIGIT'
1861 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1862 is the ASCII code for LTR.
1863
1864 If X is a register, this macro should print the register's name.
1865 The names can be found in an array 'reg_names' whose type is
1866 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1867
1868 When the machine description has a specification '%PUNCT' (a '%'
1869 followed by a punctuation character), this macro is called with
1870 a null pointer for X and the punctuation character for CODE.
1871
1872 'a', 'c', 'l', and 'n' are reserved.
1873
1874 The Xtensa specific codes are:
1875
1876 'd' CONST_INT, print as signed decimal
1877 'x' CONST_INT, print as signed hexadecimal
1878 'K' CONST_INT, print number of bits in mask for EXTUI
1879 'R' CONST_INT, print (X & 0x1f)
1880 'L' CONST_INT, print ((32 - X) & 0x1f)
1881 'D' REG, print second register of double-word register operand
1882 'N' MEM, print address of next word following a memory operand
1883 'v' MEM, if memory reference is volatile, output a MEMW before it
1884 */
1885
/* Print VAL to FILE in a readable form: small magnitudes in decimal,
   everything else as hex with an explicit minus sign for negatives.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    /* Negate in unsigned arithmetic: plain -val would overflow (and is
       undefined behavior) when val == INT_MIN.  */
    fprintf (file, "-0x%x", -(unsigned int) val);
  else
    fprintf (file, "0x%x", val);
}
1899
1900
/* Print operand OP to FILE.  LETTER is the '%' modifier letter, or 0
   for a plain '%DIGIT' reference; see the operand-code table in the
   comment preceding this function.  */

void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;			/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D': second register of a double-word register operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /* For a volatile memory reference, emit a MEMW before the
	 load or store.  */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	{
	  /* 'N': address of the word following the memory operand.  */
	  enum machine_mode mode;
	  switch (GET_MODE (op))
	    {
	    case DFmode: mode = SFmode; break;
	    case DImode: mode = SImode; break;
	    default: abort ();
	    }
	  op = adjust_address (op, mode, 4);
	}

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      /* NOTE(review): INTVAL is a HOST_WIDE_INT; the "%d" formats
	 below assume it fits in int — verify on hosts where
	 HOST_WIDE_INT is wider than int.  */
      switch (letter)
	{
	case 'K':
	  {
	    /* Number of low-order one bits, for the EXTUI mask.  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    /* The value must be a contiguous low mask of 1-16 bits.  */
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  fprintf (file, "%d", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  fprintf (file, "%d", INTVAL (op));
	  break;

	}
      break;

    default:
      output_addr_const (file, op);
    }
}
1992
1993
1994 /* A C compound statement to output to stdio stream STREAM the
1995 assembler syntax for an instruction operand that is a memory
1996 reference whose address is ADDR. ADDR is an RTL expression. */
1997
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  switch (GET_CODE (addr))
    {
    default:
      fatal_insn ("invalid address", addr);
      break;

    case REG:
      /* Bare register: print with an explicit zero offset.  */
      fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
      break;

    case PLUS:
      {
	rtx reg = (rtx)0;
	rtx offset = (rtx)0;
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	/* The register may appear on either side of the PLUS.  */
	if (GET_CODE (arg0) == REG)
	  {
	    reg = arg0;
	    offset = arg1;
	  }
	else if (GET_CODE (arg1) == REG)
	  {
	    reg = arg1;
	    offset = arg0;
	  }
	else
	  fatal_insn ("no register in address", addr);

	if (CONSTANT_P (offset))
	  {
	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
	    output_addr_const (file, offset);
	  }
	else
	  fatal_insn ("address offset not a constant", addr);
      }
      break;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
      output_addr_const (file, addr);
      break;
    }
}
2054
2055
2056 /* Emit either a label, .comm, or .lcomm directive. */
2057
void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;		/* text emitted before the name */
     char *final_string;	/* printf format applied to SIZE, after name */
     int size;
{
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2070
2071
/* Output a ".literal" directive for constant X of mode MODE, labeled
   .LC<LABELNO>.  Handles float/double and 4- or 8-byte integers.  */

void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  REAL_VALUE_TYPE r;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Convert to the target's representation and emit as hex.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
	  fprintf (file, "0x%08lx\n", value_long[0]);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx\n",
		   value_long[0], value_long[1]);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Emit the two words of a DImode constant separately.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2134
2135
2136 /* Return the bytes needed to compute the frame pointer from the current
2137 stack pointer. */
2138
2139 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2140 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2141
/* Compute the total frame size for the current function: SIZE bytes
   of locals plus the static chain slot, the outgoing argument area,
   and the register-window save area, rounded to the stack alignment.
   The result is also cached in xtensa_current_frame_size.  */

long
compute_frame_size (size)
     int size;			/* # of var. bytes allocated */
{
  /* add space for the incoming static chain value */
  if (current_function_needs_context)
    size += (1 * UNITS_PER_WORD);

  xtensa_current_frame_size =
    XTENSA_STACK_ALIGN (size
			+ current_function_outgoing_args_size
			+ (WINDOW_SIZE * UNITS_PER_WORD));
  return xtensa_current_frame_size;
}
2156
2157
2158 int
2159 xtensa_frame_pointer_required ()
2160 {
2161 /* The code to expand builtin_frame_addr and builtin_return_addr
2162 currently uses the hard_frame_pointer instead of frame_pointer.
2163 This seems wrong but maybe it's necessary for other architectures.
2164 This function is derived from the i386 code. */
2165
2166 if (cfun->machine->accesses_prev_frame)
2167 return 1;
2168
2169 return 0;
2170 }
2171
2172
2173 void
2174 xtensa_reorg (first)
2175 rtx first;
2176 {
2177 rtx insn, set_frame_ptr_insn = 0;
2178
2179 unsigned long tsize = compute_frame_size (get_frame_size ());
2180 if (tsize < (1 << (12+3)))
2181 frame_size_const = 0;
2182 else
2183 {
2184 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2185
2186 /* make sure the constant is used so it doesn't get eliminated
2187 from the constant pool */
2188 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2189 }
2190
2191 if (!frame_pointer_needed)
2192 return;
2193
2194 /* Search all instructions, looking for the insn that sets up the
2195 frame pointer. This search will fail if the function does not
2196 have an incoming argument in $a7, but in that case, we can just
2197 set up the frame pointer at the very beginning of the
2198 function. */
2199
2200 for (insn = first; insn; insn = NEXT_INSN (insn))
2201 {
2202 rtx pat;
2203
2204 if (!INSN_P (insn))
2205 continue;
2206
2207 pat = PATTERN (insn);
2208 if (GET_CODE (pat) == UNSPEC_VOLATILE
2209 && (XINT (pat, 1) == UNSPECV_SET_FP))
2210 {
2211 set_frame_ptr_insn = insn;
2212 break;
2213 }
2214 }
2215
2216 if (set_frame_ptr_insn)
2217 {
2218 /* for all instructions prior to set_frame_ptr_insn, replace
2219 hard_frame_pointer references with stack_pointer */
2220 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2221 {
2222 if (INSN_P (insn))
2223 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2224 hard_frame_pointer_rtx,
2225 stack_pointer_rtx);
2226 }
2227 }
2228 else
2229 {
2230 /* emit the frame pointer move immediately after the NOTE that starts
2231 the function */
2232 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2233 stack_pointer_rtx), first);
2234 }
2235 }
2236
2237
2238 /* Set up the stack and frame (if desired) for the function. */
2239
void
xtensa_function_prologue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  /* Frames below 2^15 bytes are allocated entirely by the "entry"
     instruction; larger frames allocate the remainder explicitly with
     the constant prepared by xtensa_reorg.  */
  if (tsize < (1 << (12+3)))
    {
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2268
2269
2270 /* Do any necessary cleanup after a function to restore
2271 stack, frame, and regs. */
2272
void
xtensa_function_epilogue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  rtx insn = get_last_insn ();
  /* If the last insn was a BARRIER, we don't have to write anything;
     skip trailing notes to find it.  */
  if (GET_CODE (insn) == NOTE)
    insn = prev_nonnote_insn (insn);
  if (insn == 0 || GET_CODE (insn) != BARRIER)
    fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");

  /* The frame size cached by compute_frame_size is per-function;
     reset it for the next function.  */
  xtensa_current_frame_size = 0;
}
2287
2288
2289 /* Create the va_list data type.
2290 This structure is set up by __builtin_saveregs. The __va_reg
2291 field points to a stack-allocated region holding the contents of the
2292 incoming argument registers. The __va_ndx field is an index initialized
2293 to the position of the first unnamed (variable) argument. This same index
2294 is also used to address the arguments passed in memory. Thus, the
2295 __va_stk field is initialized to point to the position of the first
2296 argument in memory offset to account for the arguments passed in
2297 registers. E.G., if there are 6 argument registers, and each register is
2298 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2299 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2300 argument word N for N >= 6. */
2301
tree
xtensa_build_va_list (void)
{
  tree f_stk, f_reg, f_ndx, record;

  record = make_node (RECORD_TYPE);

  /* The three fields; their semantics are described in the comment
     preceding this function.  */
  f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
		      ptr_type_node);
  f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
		      ptr_type_node);
  f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
		      integer_type_node);

  DECL_FIELD_CONTEXT (f_stk) = record;
  DECL_FIELD_CONTEXT (f_reg) = record;
  DECL_FIELD_CONTEXT (f_ndx) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TYPE_FIELDS (record) = f_stk;
  TREE_CHAIN (f_stk) = f_reg;
  TREE_CHAIN (f_reg) = f_ndx;

  layout_type (record);
  return record;
}
2327
2328
2329 /* Save the incoming argument registers on the stack. Returns the
2330 address of the saved registers. */
2331
rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;	/* words used by named args */
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  /* All argument registers are already consumed by named arguments.  */
  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  MEM_IN_STRUCT_P (gp_regs) = 1;
  RTX_UNCHANGING_P (gp_regs) = 1;
  RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;

  /* Now store the incoming registers, starting at the slot for the
     first unnamed argument.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7).  */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  return XEXP (gp_regs, 0);
}
2367
2368
2369 /* Implement `va_start' for varargs and stdarg. We look at the
2370 current function to fill in an initial va_list. */
2371
void
xtensa_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  arg_words = current_function_args_info.arg_words;

  /* Build COMPONENT_REFs for the three va_list fields (declared in
     xtensa_build_va_list).  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member: byte index of the first unnamed arg.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2415
2416
/* Implement `va_arg'.  Emit code that fetches the next anonymous
   argument of type TYPE from the va_list VALIST and return an rtx
   holding the argument's address.  The argument is found either in
   the register-save area (__va_reg) or in the caller's overflow area
   (__va_stk); __va_ndx is the byte index of the next argument and is
   advanced past this one as a side effect.  */

rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree, type_size;
  rtx array, orig_ndx, r, addr, size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* The three va_list fields in declaration order:
     __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));

  /* __va_size (TYPE): the argument's size in bytes rounded up to a
     whole number of words.  */
  va_size = gen_reg_rtx (SImode);
  tmp = fold (build (MULT_EXPR, sizetype,
		     fold (build (TRUNC_DIV_EXPR, sizetype,
				  fold (build (PLUS_EXPR, sizetype,
					       type_size,
					       size_int (UNITS_PER_WORD - 1))),
				  size_int (UNITS_PER_WORD))),
		     size_int (UNITS_PER_WORD)));
  r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
  if (r != va_size)
    emit_move_insn (va_size, r);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);

     The pre-increment value (orig_ndx) is needed below to decide
     whether this argument straddles the register-save boundary.  */

  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx,
	       make_tree (intSI_type_node, va_size));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
	 && !MUST_PASS_IN_STACK (type))
	__array = (AP).__va_reg;
  */

  array = gen_reg_rtx (Pmode);

  /* lab_over remains NULL_RTX when the register branch is never
     emitted, i.e. for types that must always be passed on the
     stack.  */
  lab_over = NULL_RTX;
  if (!MUST_PASS_IN_STACK (VOIDmode, type))
    {
      lab_false = gen_label_rtx ();
      lab_over = gen_label_rtx ();

      emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (MAX_ARGS_IN_REGISTERS
					* UNITS_PER_WORD),
			       GT, const1_rtx, SImode, 0, lab_false);

      r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
      if (r != array)
	emit_move_insn (array, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
      emit_label (lab_false);
    }

  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  if (lab_over != NULL_RTX)
    emit_label (lab_over);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
				? sizeof (TYPE)
				: __va_size (TYPE))

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  size = gen_reg_rtx (SImode);
  emit_move_insn (size, va_size);

  if (BYTES_BIG_ENDIAN)
    {
      rtx lab_use_va_size = gen_label_rtx ();

      /* For sub-word arguments on a big-endian target, back up by the
	 actual type size rather than the word-rounded size.  */
      emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
					    EXPAND_NORMAL),
			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
			       GE, const1_rtx, SImode, 0, lab_use_va_size);

      r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
      if (r != size)
	emit_move_insn (size, r);

      emit_label (lab_use_va_size);
    }

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
		     make_tree (intSI_type_node, size));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2590
2591
2592 enum reg_class
2593 xtensa_preferred_reload_class (x, class)
2594 rtx x;
2595 enum reg_class class;
2596 {
2597 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2598 return NO_REGS;
2599
2600 /* Don't use sp for reloads! */
2601 if (class == AR_REGS)
2602 return GR_REGS;
2603
2604 return class;
2605 }
2606
2607
2608 enum reg_class
2609 xtensa_secondary_reload_class (class, mode, x, isoutput)
2610 enum reg_class class;
2611 enum machine_mode mode ATTRIBUTE_UNUSED;
2612 rtx x;
2613 int isoutput;
2614 {
2615 int regno;
2616
2617 if (GET_CODE (x) == SIGN_EXTEND)
2618 x = XEXP (x, 0);
2619 regno = xt_true_regnum (x);
2620
2621 if (!isoutput)
2622 {
2623 if (class == FP_REGS && constantpool_mem_p (x))
2624 return GR_REGS;
2625 }
2626
2627 if (ACC_REG_P (regno))
2628 return (class == GR_REGS ? NO_REGS : GR_REGS);
2629 if (class == ACC_REG)
2630 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2631
2632 return NO_REGS;
2633 }
2634
2635
2636 void
2637 order_regs_for_local_alloc ()
2638 {
2639 if (!leaf_function_p ())
2640 {
2641 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2642 FIRST_PSEUDO_REGISTER * sizeof (int));
2643 }
2644 else
2645 {
2646 int i, num_arg_regs;
2647 int nxt = 0;
2648
2649 /* use the AR registers in increasing order (skipping a0 and a1)
2650 but save the incoming argument registers for a last resort */
2651 num_arg_regs = current_function_args_info.arg_words;
2652 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2653 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2654 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2655 reg_alloc_order[nxt++] = i + num_arg_regs;
2656 for (i = 0; i < num_arg_regs; i++)
2657 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2658
2659 /* list the FP registers in order for now */
2660 for (i = 0; i < 16; i++)
2661 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2662
2663 /* GCC requires that we list *all* the registers.... */
2664 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2665 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2666 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2667 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2668
2669 /* list the coprocessor registers in order */
2670 for (i = 0; i < BR_REG_NUM; i++)
2671 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2672
2673 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2674 }
2675 }
2676
2677
2678 /* A customized version of reg_overlap_mentioned_p that only looks for
2679 references to a7 (as opposed to hard_frame_pointer_rtx). */
2680
2681 int
2682 a7_overlap_mentioned_p (x)
2683 rtx x;
2684 {
2685 int i, j;
2686 unsigned int x_regno;
2687 const char *fmt;
2688
2689 if (GET_CODE (x) == REG)
2690 {
2691 x_regno = REGNO (x);
2692 return (x != hard_frame_pointer_rtx
2693 && x_regno < A7_REG + 1
2694 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2695 }
2696
2697 if (GET_CODE (x) == SUBREG
2698 && GET_CODE (SUBREG_REG (x)) == REG
2699 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2700 {
2701 x_regno = subreg_regno (x);
2702 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2703 && x_regno < A7_REG + 1
2704 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2705 }
2706
2707 /* X does not match, so try its subexpressions. */
2708 fmt = GET_RTX_FORMAT (GET_CODE (x));
2709 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2710 {
2711 if (fmt[i] == 'e')
2712 {
2713 if (a7_overlap_mentioned_p (XEXP (x, i)))
2714 return 1;
2715 }
2716 else if (fmt[i] == 'E')
2717 {
2718 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2719 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2720 return 1;
2721 }
2722 }
2723
2724 return 0;
2725 }
2726
/* The literal pool stays with the function.  */

static void
xtensa_select_rtx_section (mode, x, align)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx x ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  /* Emit constant-pool entries into the same section as the current
     function, regardless of mode or alignment.  NOTE(review): this
     presumably keeps literals within reach of PC-relative loads --
     confirm against the port's instruction-range constraints.  */
  function_section (current_function_decl);
}
2737
2738 /* If we are referencing a function that is static, make the SYMBOL_REF
2739 special so that we can generate direct calls to it even with -fpic. */
2740
2741 static void
2742 xtensa_encode_section_info (decl, first)
2743 tree decl;
2744 int first ATTRIBUTE_UNUSED;
2745 {
2746 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2747 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2748 }
2749
2750 #include "gt-xtensa.h"