1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
35 #include "diagnostic-core.h"
40 #include "double-int.h"
47 #include "fold-const.h"
48 #include "stringpool.h"
49 #include "stor-layout.h"
53 #include "insn-codes.h"
59 #include "target-def.h"
61 #include "langhooks.h"
62 #include "hash-table.h"
65 #include "dominance.h"
71 #include "cfgcleanup.h"
72 #include "basic-block.h"
73 #include "tree-ssa-alias.h"
74 #include "internal-fn.h"
75 #include "gimple-fold.h"
77 #include "gimple-expr.h"
85 static rtx
emit_addhi3_postreload (rtx
, rtx
, rtx
);
86 static void xstormy16_asm_out_constructor (rtx
, int);
87 static void xstormy16_asm_out_destructor (rtx
, int);
88 static void xstormy16_asm_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
91 static void xstormy16_init_builtins (void);
92 static rtx
xstormy16_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
93 static bool xstormy16_rtx_costs (rtx
, int, int, int, int *, bool);
94 static int xstormy16_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
95 static bool xstormy16_return_in_memory (const_tree
, const_tree
);
97 static GTY(()) section
*bss100_section
;
99 /* Compute a (partial) cost for rtx X. Return true if the complete
100 cost has been computed, and false if subexpressions should be
101 scanned. In either case, *TOTAL contains the cost result. */
104 xstormy16_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
105 int opno ATTRIBUTE_UNUSED
, int *total
,
106 bool speed ATTRIBUTE_UNUSED
)
111 if (INTVAL (x
) < 16 && INTVAL (x
) >= 0)
112 *total
= COSTS_N_INSNS (1) / 2;
113 else if (INTVAL (x
) < 256 && INTVAL (x
) >= 0)
114 *total
= COSTS_N_INSNS (1);
116 *total
= COSTS_N_INSNS (2);
123 *total
= COSTS_N_INSNS (2);
127 *total
= COSTS_N_INSNS (35 + 6);
130 *total
= COSTS_N_INSNS (51 - 6);
139 xstormy16_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
140 addr_space_t as ATTRIBUTE_UNUSED
,
141 bool speed ATTRIBUTE_UNUSED
)
143 return (CONST_INT_P (x
) ? 2
144 : GET_CODE (x
) == PLUS
? 7
148 /* Worker function for TARGET_MEMORY_MOVE_COST. */
151 xstormy16_memory_move_cost (machine_mode mode
, reg_class_t rclass
,
154 return (5 + memory_move_secondary_cost (mode
, rclass
, in
));
157 /* Branches are handled as follows:
159 1. HImode compare-and-branches. The machine supports these
160 natively, so the appropriate pattern is emitted directly.
162 2. SImode EQ and NE. These are emitted as pairs of HImode
163 compare-and-branches.
165 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
166 of a SImode subtract followed by a branch (not a compare-and-branch),
172 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
179 /* Emit a branch of kind CODE to location LOC. */
182 xstormy16_emit_cbranch (enum rtx_code code
, rtx op0
, rtx op1
, rtx loc
)
184 rtx condition_rtx
, loc_ref
, branch
, cy_clobber
;
188 mode
= GET_MODE (op0
);
189 gcc_assert (mode
== HImode
|| mode
== SImode
);
192 && (code
== GT
|| code
== LE
|| code
== GTU
|| code
== LEU
))
194 int unsigned_p
= (code
== GTU
|| code
== LEU
);
195 int gt_p
= (code
== GT
|| code
== GTU
);
199 lab
= gen_label_rtx ();
200 xstormy16_emit_cbranch (unsigned_p
? LTU
: LT
, op0
, op1
, gt_p
? lab
: loc
);
201 /* This should be generated as a comparison against the temporary
202 created by the previous insn, but reload can't handle that. */
203 xstormy16_emit_cbranch (gt_p
? NE
: EQ
, op0
, op1
, loc
);
208 else if (mode
== SImode
209 && (code
== NE
|| code
== EQ
)
210 && op1
!= const0_rtx
)
212 rtx op0_word
, op1_word
;
214 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
218 lab
= gen_label_rtx ();
220 for (i
= 0; i
< num_words
- 1; i
++)
222 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
224 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
226 xstormy16_emit_cbranch (NE
, op0_word
, op1_word
, code
== EQ
? lab
: loc
);
228 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
230 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
232 xstormy16_emit_cbranch (code
, op0_word
, op1_word
, loc
);
239 /* We can't allow reload to try to generate any reload after a branch,
240 so when some register must match we must make the temporary ourselves. */
244 tmp
= gen_reg_rtx (mode
);
245 emit_move_insn (tmp
, op0
);
249 condition_rtx
= gen_rtx_fmt_ee (code
, mode
, op0
, op1
);
250 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
251 branch
= gen_rtx_SET (VOIDmode
, pc_rtx
,
252 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
255 cy_clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
258 vec
= gen_rtvec (2, branch
, cy_clobber
);
259 else if (code
== NE
|| code
== EQ
)
260 vec
= gen_rtvec (2, branch
, gen_rtx_CLOBBER (VOIDmode
, op0
));
265 sub
= gen_rtx_SET (VOIDmode
, op0
, gen_rtx_MINUS (SImode
, op0
, op1
));
267 sub
= gen_rtx_CLOBBER (SImode
, op0
);
269 vec
= gen_rtvec (3, branch
, sub
, cy_clobber
);
272 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, vec
));
275 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
276 the arithmetic operation. Most of the work is done by
277 xstormy16_expand_arith. */
280 xstormy16_split_cbranch (machine_mode mode
, rtx label
, rtx comparison
,
283 rtx op0
= XEXP (comparison
, 0);
284 rtx op1
= XEXP (comparison
, 1);
285 rtx_insn
*seq
, *last_insn
;
289 xstormy16_expand_arith (mode
, COMPARE
, dest
, op0
, op1
);
293 gcc_assert (INSN_P (seq
));
296 while (NEXT_INSN (last_insn
) != NULL_RTX
)
297 last_insn
= NEXT_INSN (last_insn
);
299 compare
= SET_SRC (XVECEXP (PATTERN (last_insn
), 0, 0));
300 PUT_CODE (XEXP (compare
, 0), GET_CODE (comparison
));
301 XEXP (compare
, 1) = gen_rtx_LABEL_REF (VOIDmode
, label
);
306 /* Return the string to output a conditional branch to LABEL, which is
307 the operand number of the label.
309 OP is the conditional expression, or NULL for branch-always.
311 REVERSED is nonzero if we should reverse the sense of the comparison.
316 xstormy16_output_cbranch_hi (rtx op
, const char *label
, int reversed
,
319 static char string
[64];
320 int need_longbranch
= (op
!= NULL_RTX
321 ? get_attr_length (insn
) == 8
322 : get_attr_length (insn
) == 4);
323 int really_reversed
= reversed
^ need_longbranch
;
326 const char *operands
;
335 sprintf (string
, "%s %s", ccode
, label
);
339 code
= GET_CODE (op
);
341 if (! REG_P (XEXP (op
, 0)))
343 code
= swap_condition (code
);
349 /* Work out which way this really branches. */
351 code
= reverse_condition (code
);
355 case EQ
: ccode
= "z"; break;
356 case NE
: ccode
= "nz"; break;
357 case GE
: ccode
= "ge"; break;
358 case LT
: ccode
= "lt"; break;
359 case GT
: ccode
= "gt"; break;
360 case LE
: ccode
= "le"; break;
361 case GEU
: ccode
= "nc"; break;
362 case LTU
: ccode
= "c"; break;
363 case GTU
: ccode
= "hi"; break;
364 case LEU
: ccode
= "ls"; break;
371 templ
= "b%s %s,.+8 | jmpf %s";
374 sprintf (string
, templ
, ccode
, operands
, label
);
379 /* Return the string to output a conditional branch to LABEL, which is
380 the operand number of the label, but suitable for the tail of a
383 OP is the conditional expression (OP is never NULL_RTX).
385 REVERSED is nonzero if we should reverse the sense of the comparison.
390 xstormy16_output_cbranch_si (rtx op
, const char *label
, int reversed
,
393 static char string
[64];
394 int need_longbranch
= get_attr_length (insn
) >= 8;
395 int really_reversed
= reversed
^ need_longbranch
;
401 code
= GET_CODE (op
);
403 /* Work out which way this really branches. */
405 code
= reverse_condition (code
);
409 case EQ
: ccode
= "z"; break;
410 case NE
: ccode
= "nz"; break;
411 case GE
: ccode
= "ge"; break;
412 case LT
: ccode
= "lt"; break;
413 case GEU
: ccode
= "nc"; break;
414 case LTU
: ccode
= "c"; break;
416 /* The missing codes above should never be generated. */
427 gcc_assert (REG_P (XEXP (op
, 0)));
429 regnum
= REGNO (XEXP (op
, 0));
430 sprintf (prevop
, "or %s,%s", reg_names
[regnum
], reg_names
[regnum
+1]);
434 case GE
: case LT
: case GEU
: case LTU
:
435 strcpy (prevop
, "sbc %2,%3");
443 templ
= "%s | b%s .+6 | jmpf %s";
445 templ
= "%s | b%s %s";
446 sprintf (string
, templ
, prevop
, ccode
, label
);
451 /* Many machines have some registers that cannot be copied directly to or from
452 memory or even from other types of registers. An example is the `MQ'
453 register, which on most machines, can only be copied to or from general
454 registers, but not memory. Some machines allow copying all registers to and
455 from memory, but require a scratch register for stores to some memory
456 locations (e.g., those with symbolic address on the RT, and those with
457 certain symbolic address on the SPARC when compiling PIC). In some cases,
458 both an intermediate and a scratch register are required.
460 You should define these macros to indicate to the reload phase that it may
461 need to allocate at least one register for a reload in addition to the
462 register to contain the data. Specifically, if copying X to a register
463 RCLASS in MODE requires an intermediate register, you should define
464 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
465 whose registers can be used as intermediate registers or scratch registers.
467 If copying a register RCLASS in MODE to X requires an intermediate or scratch
468 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
469 largest register class required. If the requirements for input and output
470 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
471 instead of defining both macros identically.
473 The values returned by these macros are often `GENERAL_REGS'. Return
474 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
475 to or from a register of RCLASS in MODE without requiring a scratch register.
476 Do not define this macro if it would always return `NO_REGS'.
478 If a scratch register is required (either with or without an intermediate
479 register), you should define patterns for `reload_inM' or `reload_outM', as
480 required.. These patterns, which will normally be implemented with a
481 `define_expand', should be similar to the `movM' patterns, except that
482 operand 2 is the scratch register.
484 Define constraints for the reload register and scratch register that contain
485 a single register class. If the original reload register (whose class is
486 RCLASS) can meet the constraint given in the pattern, the value returned by
487 these macros is used for the class of the scratch register. Otherwise, two
488 additional reload registers are required. Their classes are obtained from
489 the constraints in the insn pattern.
491 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
492 either be in a hard register or in memory. Use `true_regnum' to find out;
493 it will return -1 if the pseudo is in memory and the hard register number if
496 These macros should not be used in the case where a particular class of
497 registers can only be copied to memory and not to another class of
498 registers. In that case, secondary reload registers are not needed and
499 would not be helpful. Instead, a stack location must be used to perform the
500 copy and the `movM' pattern should use memory as an intermediate storage.
501 This case often occurs between floating-point and general registers. */
504 xstormy16_secondary_reload_class (enum reg_class rclass
,
505 machine_mode mode ATTRIBUTE_UNUSED
,
508 /* This chip has the interesting property that only the first eight
509 registers can be moved to/from memory. */
511 || ((GET_CODE (x
) == SUBREG
|| REG_P (x
))
512 && (true_regnum (x
) == -1
513 || true_regnum (x
) >= FIRST_PSEUDO_REGISTER
)))
514 && ! reg_class_subset_p (rclass
, EIGHT_REGS
))
520 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
521 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
524 xstormy16_preferred_reload_class (rtx x
, reg_class_t rclass
)
526 if (rclass
== GENERAL_REGS
&& MEM_P (x
))
532 /* Predicate for symbols and addresses that reflect special 8-bit
536 xstormy16_below100_symbol (rtx x
,
537 machine_mode mode ATTRIBUTE_UNUSED
)
539 if (GET_CODE (x
) == CONST
)
541 if (GET_CODE (x
) == PLUS
&& CONST_INT_P (XEXP (x
, 1)))
544 if (GET_CODE (x
) == SYMBOL_REF
)
545 return (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_XSTORMY16_BELOW100
) != 0;
549 HOST_WIDE_INT i
= INTVAL (x
);
551 if ((i
>= 0x0000 && i
<= 0x00ff)
552 || (i
>= 0x7f00 && i
<= 0x7fff))
558 /* Likewise, but only for non-volatile MEMs, for patterns where the
559 MEM will get split into smaller sized accesses. */
562 xstormy16_splittable_below100_operand (rtx x
, machine_mode mode
)
564 if (MEM_P (x
) && MEM_VOLATILE_P (x
))
566 return xstormy16_below100_operand (x
, mode
);
569 /* Expand an 8-bit IOR. This either detects the one case we can
570 actually do, or uses a 16-bit IOR. */
573 xstormy16_expand_iorqi3 (rtx
*operands
)
575 rtx in
, out
, outsub
, val
;
581 if (xstormy16_onebit_set_operand (val
, QImode
))
583 if (!xstormy16_below100_or_register (in
, QImode
))
584 in
= copy_to_mode_reg (QImode
, in
);
585 if (!xstormy16_below100_or_register (out
, QImode
))
586 out
= gen_reg_rtx (QImode
);
587 emit_insn (gen_iorqi3_internal (out
, in
, val
));
588 if (out
!= operands
[0])
589 emit_move_insn (operands
[0], out
);
594 in
= copy_to_mode_reg (QImode
, in
);
596 if (! REG_P (val
) && ! CONST_INT_P (val
))
597 val
= copy_to_mode_reg (QImode
, val
);
600 out
= gen_reg_rtx (QImode
);
602 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
603 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
605 if (! CONST_INT_P (val
))
606 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
608 emit_insn (gen_iorhi3 (outsub
, in
, val
));
610 if (out
!= operands
[0])
611 emit_move_insn (operands
[0], out
);
614 /* Expand an 8-bit AND. This either detects the one case we can
615 actually do, or uses a 16-bit AND. */
618 xstormy16_expand_andqi3 (rtx
*operands
)
620 rtx in
, out
, outsub
, val
;
626 if (xstormy16_onebit_clr_operand (val
, QImode
))
628 if (!xstormy16_below100_or_register (in
, QImode
))
629 in
= copy_to_mode_reg (QImode
, in
);
630 if (!xstormy16_below100_or_register (out
, QImode
))
631 out
= gen_reg_rtx (QImode
);
632 emit_insn (gen_andqi3_internal (out
, in
, val
));
633 if (out
!= operands
[0])
634 emit_move_insn (operands
[0], out
);
639 in
= copy_to_mode_reg (QImode
, in
);
641 if (! REG_P (val
) && ! CONST_INT_P (val
))
642 val
= copy_to_mode_reg (QImode
, val
);
645 out
= gen_reg_rtx (QImode
);
647 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
648 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
650 if (! CONST_INT_P (val
))
651 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
653 emit_insn (gen_andhi3 (outsub
, in
, val
));
655 if (out
!= operands
[0])
656 emit_move_insn (operands
[0], out
);
/* True if X (plus OFFSET) is a constant small enough for a signed
   12-bit displacement field.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X (plus OFFSET) is a constant that may be used directly as
   an absolute address: non-negative, below 0x8000, and within one of
   the two directly addressable windows.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
670 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
673 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0))
676 if (GET_CODE (x
) == PLUS
677 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0))
680 /* PR 31232: Do not allow INT+INT as an address. */
685 if ((GET_CODE (x
) == PRE_MODIFY
&& CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
686 || GET_CODE (x
) == POST_INC
687 || GET_CODE (x
) == PRE_DEC
)
691 && REGNO_OK_FOR_BASE_P (REGNO (x
))
692 && (! strict
|| REGNO (x
) < FIRST_PSEUDO_REGISTER
))
695 if (xstormy16_below100_symbol (x
, mode
))
701 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
703 On this chip, this is true if the address is valid with an offset
704 of 0 but not of 6, because in that case it cannot be used as an
705 address for DImode or DFmode, or if the address is a post-increment
706 or pre-decrement address. */
709 xstormy16_mode_dependent_address_p (const_rtx x
,
710 addr_space_t as ATTRIBUTE_UNUSED
)
712 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0)
713 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x
, 6))
716 if (GET_CODE (x
) == PLUS
717 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0)
718 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 6))
721 /* Auto-increment addresses are now treated generically in recog.c. */
726 short_memory_operand (rtx x
, machine_mode mode
)
728 if (! memory_operand (x
, mode
))
730 return (GET_CODE (XEXP (x
, 0)) != PLUS
);
733 /* Splitter for the 'move' patterns, for modes not directly implemented
734 by hardware. Emit insns to copy a value of mode MODE from SRC to
737 This function is only called when reload_completed. */
740 xstormy16_split_move (machine_mode mode
, rtx dest
, rtx src
)
742 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
743 int direction
, end
, i
;
744 int src_modifies
= 0;
745 int dest_modifies
= 0;
746 int src_volatile
= 0;
747 int dest_volatile
= 0;
749 rtx auto_inc_reg_rtx
= NULL_RTX
;
751 /* Check initial conditions. */
752 gcc_assert (reload_completed
753 && mode
!= QImode
&& mode
!= HImode
754 && nonimmediate_operand (dest
, mode
)
755 && general_operand (src
, mode
));
757 /* This case is not supported below, and shouldn't be generated. */
758 gcc_assert (! MEM_P (dest
) || ! MEM_P (src
));
760 /* This case is very very bad after reload, so trap it now. */
761 gcc_assert (GET_CODE (dest
) != SUBREG
&& GET_CODE (src
) != SUBREG
);
763 /* The general idea is to copy by words, offsetting the source and
764 destination. Normally the least-significant word will be copied
765 first, but for pre-dec operations it's better to copy the
766 most-significant word first. Only one operand can be a pre-dec
769 It's also possible that the copy overlaps so that the direction
775 mem_operand
= XEXP (dest
, 0);
776 dest_modifies
= side_effects_p (mem_operand
);
777 if (auto_inc_p (mem_operand
))
778 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
779 dest_volatile
= MEM_VOLATILE_P (dest
);
782 dest
= copy_rtx (dest
);
783 MEM_VOLATILE_P (dest
) = 0;
786 else if (MEM_P (src
))
788 mem_operand
= XEXP (src
, 0);
789 src_modifies
= side_effects_p (mem_operand
);
790 if (auto_inc_p (mem_operand
))
791 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
792 src_volatile
= MEM_VOLATILE_P (src
);
795 src
= copy_rtx (src
);
796 MEM_VOLATILE_P (src
) = 0;
800 mem_operand
= NULL_RTX
;
802 if (mem_operand
== NULL_RTX
)
806 && reg_overlap_mentioned_p (dest
, src
)
807 && REGNO (dest
) > REGNO (src
))
810 else if (GET_CODE (mem_operand
) == PRE_DEC
811 || (GET_CODE (mem_operand
) == PLUS
812 && GET_CODE (XEXP (mem_operand
, 0)) == PRE_DEC
))
814 else if (MEM_P (src
) && reg_overlap_mentioned_p (dest
, src
))
818 gcc_assert (REG_P (dest
));
819 regno
= REGNO (dest
);
821 gcc_assert (refers_to_regno_p (regno
, regno
+ num_words
,
824 if (refers_to_regno_p (regno
, mem_operand
))
826 else if (refers_to_regno_p (regno
+ num_words
- 1, regno
+ num_words
,
830 /* This means something like
831 (set (reg:DI r0) (mem:DI (reg:HI r1)))
832 which we'd need to support by doing the set of the second word
837 end
= direction
< 0 ? -1 : num_words
;
838 for (i
= direction
< 0 ? num_words
- 1 : 0; i
!= end
; i
+= direction
)
840 rtx w_src
, w_dest
, insn
;
843 w_src
= gen_rtx_MEM (word_mode
, mem_operand
);
845 w_src
= simplify_gen_subreg (word_mode
, src
, mode
, i
* UNITS_PER_WORD
);
847 MEM_VOLATILE_P (w_src
) = 1;
849 w_dest
= gen_rtx_MEM (word_mode
, mem_operand
);
851 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
,
854 MEM_VOLATILE_P (w_dest
) = 1;
856 /* The simplify_subreg calls must always be able to simplify. */
857 gcc_assert (GET_CODE (w_src
) != SUBREG
858 && GET_CODE (w_dest
) != SUBREG
);
860 insn
= emit_insn (gen_rtx_SET (VOIDmode
, w_dest
, w_src
));
861 if (auto_inc_reg_rtx
)
862 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
,
868 /* Expander for the 'move' patterns. Emit insns to copy a value of
869 mode MODE from SRC to DEST. */
872 xstormy16_expand_move (machine_mode mode
, rtx dest
, rtx src
)
874 if (MEM_P (dest
) && (GET_CODE (XEXP (dest
, 0)) == PRE_MODIFY
))
876 rtx pmv
= XEXP (dest
, 0);
877 rtx dest_reg
= XEXP (pmv
, 0);
878 rtx dest_mod
= XEXP (pmv
, 1);
879 rtx set
= gen_rtx_SET (Pmode
, dest_reg
, dest_mod
);
880 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
882 dest
= gen_rtx_MEM (mode
, dest_reg
);
883 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
885 else if (MEM_P (src
) && (GET_CODE (XEXP (src
, 0)) == PRE_MODIFY
))
887 rtx pmv
= XEXP (src
, 0);
888 rtx src_reg
= XEXP (pmv
, 0);
889 rtx src_mod
= XEXP (pmv
, 1);
890 rtx set
= gen_rtx_SET (Pmode
, src_reg
, src_mod
);
891 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
893 src
= gen_rtx_MEM (mode
, src_reg
);
894 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
897 /* There are only limited immediate-to-memory move instructions. */
898 if (! reload_in_progress
899 && ! reload_completed
901 && (! CONST_INT_P (XEXP (dest
, 0))
902 || ! xstormy16_legitimate_address_p (mode
, XEXP (dest
, 0), 0))
903 && ! xstormy16_below100_operand (dest
, mode
)
905 && GET_CODE (src
) != SUBREG
)
906 src
= copy_to_mode_reg (mode
, src
);
908 /* Don't emit something we would immediately split. */
910 && mode
!= HImode
&& mode
!= QImode
)
912 xstormy16_split_move (mode
, dest
, src
);
916 emit_insn (gen_rtx_SET (VOIDmode
, dest
, src
));
/* The stack is laid out as follows:

SP->
FP->	Local variables
	Register save area (up to 4 words)
	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->	Return address (two words)
	9th procedure parameter word
	10th procedure parameter word
	...
	last procedure parameter word

  The frame pointer location is tuned to make it most likely that all
  parameters and local variables can be accessed using a load-indexed
  instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
960 /* Compute the stack layout. */
962 struct xstormy16_stack_layout
963 xstormy16_compute_stack_layout (void)
965 struct xstormy16_stack_layout layout
;
967 const int ifun
= xstormy16_interrupt_function_p ();
969 layout
.locals_size
= get_frame_size ();
971 layout
.register_save_size
= 0;
972 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
973 if (REG_NEEDS_SAVE (regno
, ifun
))
974 layout
.register_save_size
+= UNITS_PER_WORD
;
977 layout
.stdarg_save_size
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
979 layout
.stdarg_save_size
= 0;
981 layout
.frame_size
= (layout
.locals_size
982 + layout
.register_save_size
983 + layout
.stdarg_save_size
);
985 if (crtl
->args
.size
<= 2048 && crtl
->args
.size
!= -1)
987 if (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
988 + crtl
->args
.size
<= 2048)
989 layout
.fp_minus_ap
= layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
;
991 layout
.fp_minus_ap
= 2048 - crtl
->args
.size
;
994 layout
.fp_minus_ap
= (layout
.stdarg_save_size
995 + layout
.register_save_size
996 - INCOMING_FRAME_SP_OFFSET
);
997 layout
.sp_minus_fp
= (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
998 - layout
.fp_minus_ap
);
999 layout
.first_local_minus_ap
= layout
.sp_minus_fp
- layout
.locals_size
;
1003 /* Worker function for TARGET_CAN_ELIMINATE. */
1006 xstormy16_can_eliminate (const int from
, const int to
)
1008 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
1009 ? ! frame_pointer_needed
1013 /* Determine how all the special registers get eliminated. */
1016 xstormy16_initial_elimination_offset (int from
, int to
)
1018 struct xstormy16_stack_layout layout
;
1021 layout
= xstormy16_compute_stack_layout ();
1023 if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
1024 result
= layout
.sp_minus_fp
- layout
.locals_size
;
1025 else if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1026 result
= - layout
.locals_size
;
1027 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
1028 result
= - layout
.fp_minus_ap
;
1029 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1030 result
= - (layout
.sp_minus_fp
+ layout
.fp_minus_ap
);
1038 emit_addhi3_postreload (rtx dest
, rtx src0
, rtx src1
)
1040 rtx set
, clobber
, insn
;
1042 set
= gen_rtx_SET (VOIDmode
, dest
, gen_rtx_PLUS (HImode
, src0
, src1
));
1043 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1044 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
1048 /* Called after register allocation to add any instructions needed for
1049 the prologue. Using a prologue insn is favored compared to putting
1050 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1051 since it allows the scheduler to intermix instructions with the
1052 saves of the caller saved registers. In some cases, it might be
1053 necessary to emit a barrier instruction as the last insn to prevent
1056 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1057 so that the debug info generation code can handle them properly. */
1060 xstormy16_expand_prologue (void)
1062 struct xstormy16_stack_layout layout
;
1066 const int ifun
= xstormy16_interrupt_function_p ();
1068 mem_push_rtx
= gen_rtx_POST_INC (Pmode
, stack_pointer_rtx
);
1069 mem_push_rtx
= gen_rtx_MEM (HImode
, mem_push_rtx
);
1071 layout
= xstormy16_compute_stack_layout ();
1073 if (layout
.locals_size
>= 32768)
1074 error ("local variable memory requirements exceed capacity");
1076 if (flag_stack_usage_info
)
1077 current_function_static_stack_size
= layout
.frame_size
;
1079 /* Save the argument registers if necessary. */
1080 if (layout
.stdarg_save_size
)
1081 for (regno
= FIRST_ARGUMENT_REGISTER
;
1082 regno
< FIRST_ARGUMENT_REGISTER
+ NUM_ARGUMENT_REGISTERS
;
1086 rtx reg
= gen_rtx_REG (HImode
, regno
);
1088 insn
= emit_move_insn (mem_push_rtx
, reg
);
1089 RTX_FRAME_RELATED_P (insn
) = 1;
1091 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1093 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (VOIDmode
,
1094 gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1096 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (Pmode
, stack_pointer_rtx
,
1097 plus_constant (Pmode
,
1099 GET_MODE_SIZE (Pmode
)));
1100 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1101 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1102 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1105 /* Push each of the registers to save. */
1106 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1107 if (REG_NEEDS_SAVE (regno
, ifun
))
1110 rtx reg
= gen_rtx_REG (HImode
, regno
);
1112 insn
= emit_move_insn (mem_push_rtx
, reg
);
1113 RTX_FRAME_RELATED_P (insn
) = 1;
1115 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1117 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (VOIDmode
,
1118 gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1120 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (Pmode
, stack_pointer_rtx
,
1121 plus_constant (Pmode
,
1123 GET_MODE_SIZE (Pmode
)));
1124 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1125 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1126 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1129 /* It's just possible that the SP here might be what we need for
1131 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1133 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1134 RTX_FRAME_RELATED_P (insn
) = 1;
1137 /* Allocate space for local variables. */
1138 if (layout
.locals_size
)
1140 insn
= emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1141 GEN_INT (layout
.locals_size
));
1142 RTX_FRAME_RELATED_P (insn
) = 1;
1145 /* Set up the frame pointer, if required. */
1146 if (frame_pointer_needed
&& layout
.sp_minus_fp
!= layout
.locals_size
)
1148 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1149 RTX_FRAME_RELATED_P (insn
) = 1;
1151 if (layout
.sp_minus_fp
)
1153 insn
= emit_addhi3_postreload (hard_frame_pointer_rtx
,
1154 hard_frame_pointer_rtx
,
1155 GEN_INT (- layout
.sp_minus_fp
));
1156 RTX_FRAME_RELATED_P (insn
) = 1;
1161 /* Do we need an epilogue at all? */
1164 direct_return (void)
1166 return (reload_completed
1167 && xstormy16_compute_stack_layout ().frame_size
== 0
1168 && ! xstormy16_interrupt_function_p ());
1171 /* Called after register allocation to add any instructions needed for
1172 the epilogue. Using an epilogue insn is favored compared to putting
1173 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1174 since it allows the scheduler to intermix instructions with the
1175 saves of the caller saved registers. In some cases, it might be
1176 necessary to emit a barrier instruction as the last insn to prevent
1180 xstormy16_expand_epilogue (void)
1182 struct xstormy16_stack_layout layout
;
1185 const int ifun
= xstormy16_interrupt_function_p ();
1187 mem_pop_rtx
= gen_rtx_PRE_DEC (Pmode
, stack_pointer_rtx
);
1188 mem_pop_rtx
= gen_rtx_MEM (HImode
, mem_pop_rtx
);
1190 layout
= xstormy16_compute_stack_layout ();
1192 /* Pop the stack for the locals. */
1193 if (layout
.locals_size
)
1195 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1196 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1198 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1199 GEN_INT (- layout
.locals_size
));
1202 /* Restore any call-saved registers. */
1203 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1204 if (REG_NEEDS_SAVE (regno
, ifun
))
1205 emit_move_insn (gen_rtx_REG (HImode
, regno
), mem_pop_rtx
);
1207 /* Pop the stack for the stdarg save area. */
1208 if (layout
.stdarg_save_size
)
1209 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1210 GEN_INT (- layout
.stdarg_save_size
));
1214 emit_jump_insn (gen_return_internal_interrupt ());
1216 emit_jump_insn (gen_return_internal ());
1220 xstormy16_epilogue_uses (int regno
)
1222 if (reload_completed
&& call_used_regs
[regno
])
1224 const int ifun
= xstormy16_interrupt_function_p ();
1225 return REG_NEEDS_SAVE (regno
, ifun
);
/* Worker for FUNCTION_PROFILER.  Profiling is not implemented for
   this target; report that to the user.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1236 /* Update CUM to advance past an argument in the argument list. The
1237 values MODE, TYPE and NAMED describe that argument. Once this is
1238 done, the variable CUM is suitable for analyzing the *following*
1239 argument with `TARGET_FUNCTION_ARG', etc.
1241 This function need not do anything if the argument in question was
1242 passed on the stack. The compiler knows how to track the amount of
1243 stack space used for arguments without any special help. However,
1244 it makes life easier for xstormy16_build_va_list if it does update
1248 xstormy16_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1249 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1251 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1253 /* If an argument would otherwise be passed partially in registers,
1254 and partially on the stack, the whole of it is passed on the
1256 if (*cum
< NUM_ARGUMENT_REGISTERS
1257 && *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1258 *cum
= NUM_ARGUMENT_REGISTERS
;
1260 *cum
+= XSTORMY16_WORD_SIZE (type
, mode
);
1264 xstormy16_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1265 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1267 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1269 if (mode
== VOIDmode
)
1271 if (targetm
.calls
.must_pass_in_stack (mode
, type
)
1272 || *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1274 return gen_rtx_REG (mode
, *cum
+ FIRST_ARGUMENT_REGISTER
);
1277 /* Build the va_list type.
1279 For this chip, va_list is a record containing a counter and a pointer.
1280 The counter is of type 'int' and indicates how many bytes
1281 have been used to date. The pointer indicates the stack position
1282 for arguments that have not been passed in registers.
1283 To keep the layout nice, the pointer is first in the structure. */
1286 xstormy16_build_builtin_va_list (void)
1288 tree f_1
, f_2
, record
, type_decl
;
1290 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
1291 type_decl
= build_decl (BUILTINS_LOCATION
,
1292 TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
1294 f_1
= build_decl (BUILTINS_LOCATION
,
1295 FIELD_DECL
, get_identifier ("base"),
1297 f_2
= build_decl (BUILTINS_LOCATION
,
1298 FIELD_DECL
, get_identifier ("count"),
1299 unsigned_type_node
);
1301 DECL_FIELD_CONTEXT (f_1
) = record
;
1302 DECL_FIELD_CONTEXT (f_2
) = record
;
1304 TYPE_STUB_DECL (record
) = type_decl
;
1305 TYPE_NAME (record
) = type_decl
;
1306 TYPE_FIELDS (record
) = f_1
;
1307 DECL_CHAIN (f_1
) = f_2
;
1309 layout_type (record
);
1314 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1315 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1316 variable to initialize. NEXTARG is the machine independent notion of the
1317 'next' argument after the variable arguments. */
1320 xstormy16_expand_builtin_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
1322 tree f_base
, f_count
;
1326 if (xstormy16_interrupt_function_p ())
1327 error ("cannot use va_start in interrupt function");
1329 f_base
= TYPE_FIELDS (va_list_type_node
);
1330 f_count
= DECL_CHAIN (f_base
);
1332 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1333 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1336 t
= make_tree (TREE_TYPE (base
), virtual_incoming_args_rtx
);
1337 u
= build_int_cst (NULL_TREE
, - INCOMING_FRAME_SP_OFFSET
);
1338 u
= fold_convert (TREE_TYPE (count
), u
);
1339 t
= fold_build_pointer_plus (t
, u
);
1340 t
= build2 (MODIFY_EXPR
, TREE_TYPE (base
), base
, t
);
1341 TREE_SIDE_EFFECTS (t
) = 1;
1342 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1344 t
= build2 (MODIFY_EXPR
, TREE_TYPE (count
), count
,
1345 build_int_cst (NULL_TREE
,
1346 crtl
->args
.info
* UNITS_PER_WORD
));
1347 TREE_SIDE_EFFECTS (t
) = 1;
1348 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1351 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1352 of type va_list as a tree, TYPE is the type passed to va_arg.
1353 Note: This algorithm is documented in stormy-abi. */
1356 xstormy16_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
1357 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
1359 tree f_base
, f_count
;
1361 tree count_tmp
, addr
, t
;
1362 tree lab_gotaddr
, lab_fromstack
;
1363 int size
, size_of_reg_args
, must_stack
;
1366 f_base
= TYPE_FIELDS (va_list_type_node
);
1367 f_count
= DECL_CHAIN (f_base
);
1369 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1370 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1373 must_stack
= targetm
.calls
.must_pass_in_stack (TYPE_MODE (type
), type
);
1374 size_tree
= round_up (size_in_bytes (type
), UNITS_PER_WORD
);
1375 gimplify_expr (&size_tree
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
1377 size_of_reg_args
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
1379 count_tmp
= get_initialized_tmp_var (count
, pre_p
, NULL
);
1380 lab_gotaddr
= create_artificial_label (UNKNOWN_LOCATION
);
1381 lab_fromstack
= create_artificial_label (UNKNOWN_LOCATION
);
1382 addr
= create_tmp_var (ptr_type_node
);
1388 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1389 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1390 r
= fold_convert (TREE_TYPE (count
), size_int (size_of_reg_args
));
1391 t
= build2 (GT_EXPR
, boolean_type_node
, t
, r
);
1392 t
= build3 (COND_EXPR
, void_type_node
, t
,
1393 build1 (GOTO_EXPR
, void_type_node
, lab_fromstack
),
1395 gimplify_and_add (t
, pre_p
);
1397 t
= fold_build_pointer_plus (base
, count_tmp
);
1398 gimplify_assign (addr
, t
, pre_p
);
1400 t
= build1 (GOTO_EXPR
, void_type_node
, lab_gotaddr
);
1401 gimplify_and_add (t
, pre_p
);
1403 t
= build1 (LABEL_EXPR
, void_type_node
, lab_fromstack
);
1404 gimplify_and_add (t
, pre_p
);
1407 /* Arguments larger than a word might need to skip over some
1408 registers, since arguments are either passed entirely in
1409 registers or entirely on the stack. */
1410 size
= PUSH_ROUNDING (int_size_in_bytes (type
));
1411 if (size
> 2 || size
< 0 || must_stack
)
1415 r
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
);
1416 u
= build2 (MODIFY_EXPR
, TREE_TYPE (count_tmp
), count_tmp
, r
);
1418 t
= fold_convert (TREE_TYPE (count
), r
);
1419 t
= build2 (GE_EXPR
, boolean_type_node
, count_tmp
, t
);
1420 t
= build3 (COND_EXPR
, void_type_node
, t
, NULL_TREE
, u
);
1421 gimplify_and_add (t
, pre_p
);
1424 t
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
1425 + INCOMING_FRAME_SP_OFFSET
);
1426 t
= fold_convert (TREE_TYPE (count
), t
);
1427 t
= build2 (MINUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1428 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), t
,
1429 fold_convert (TREE_TYPE (count
), size_tree
));
1430 t
= fold_convert (TREE_TYPE (t
), fold (t
));
1431 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1432 t
= fold_build_pointer_plus (base
, t
);
1433 gimplify_assign (addr
, t
, pre_p
);
1435 t
= build1 (LABEL_EXPR
, void_type_node
, lab_gotaddr
);
1436 gimplify_and_add (t
, pre_p
);
1438 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1439 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1440 gimplify_assign (count
, t
, pre_p
);
1442 addr
= fold_convert (build_pointer_type (type
), addr
);
1443 return build_va_arg_indirect_ref (addr
);
1446 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1449 xstormy16_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
1451 rtx temp
= gen_reg_rtx (HImode
);
1452 rtx reg_fnaddr
= gen_reg_rtx (HImode
);
1453 rtx reg_addr
, reg_addr_mem
;
1455 reg_addr
= copy_to_reg (XEXP (m_tramp
, 0));
1456 reg_addr_mem
= adjust_automodify_address (m_tramp
, HImode
, reg_addr
, 0);
1458 emit_move_insn (temp
, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM
));
1459 emit_move_insn (reg_addr_mem
, temp
);
1460 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1461 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1463 emit_move_insn (temp
, static_chain
);
1464 emit_move_insn (reg_addr_mem
, temp
);
1465 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1466 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1468 emit_move_insn (reg_fnaddr
, XEXP (DECL_RTL (fndecl
), 0));
1469 emit_move_insn (temp
, reg_fnaddr
);
1470 emit_insn (gen_andhi3 (temp
, temp
, GEN_INT (0xFF)));
1471 emit_insn (gen_iorhi3 (temp
, temp
, GEN_INT (0x0200)));
1472 emit_move_insn (reg_addr_mem
, temp
);
1473 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1474 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1476 emit_insn (gen_lshrhi3 (reg_fnaddr
, reg_fnaddr
, GEN_INT (8)));
1477 emit_move_insn (reg_addr_mem
, reg_fnaddr
);
1480 /* Worker function for TARGET_FUNCTION_VALUE. */
1483 xstormy16_function_value (const_tree valtype
,
1484 const_tree func ATTRIBUTE_UNUSED
,
1485 bool outgoing ATTRIBUTE_UNUSED
)
1488 mode
= TYPE_MODE (valtype
);
1489 PROMOTE_MODE (mode
, 0, valtype
);
1490 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1493 /* Worker function for TARGET_LIBCALL_VALUE. */
1496 xstormy16_libcall_value (machine_mode mode
,
1497 const_rtx fun ATTRIBUTE_UNUSED
)
1499 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1502 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1505 xstormy16_function_value_regno_p (const unsigned int regno
)
1507 return (regno
== RETURN_VALUE_REGNUM
);
1510 /* A C compound statement that outputs the assembler code for a thunk function,
1511 used to implement C++ virtual function calls with multiple inheritance. The
1512 thunk acts as a wrapper around a virtual function, adjusting the implicit
1513 object parameter before handing control off to the real function.
1515 First, emit code to add the integer DELTA to the location that contains the
1516 incoming first argument. Assume that this argument contains a pointer, and
1517 is the one used to pass the `this' pointer in C++. This is the incoming
1518 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1519 addition must preserve the values of all other incoming arguments.
1521 After the addition, emit code to jump to FUNCTION, which is a
1522 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1523 the return address. Hence returning from FUNCTION will return to whoever
1524 called the current `thunk'.
1526 The effect must be as if @var{function} had been called directly
1527 with the adjusted first argument. This macro is responsible for
1528 emitting all of the code for a thunk function;
1529 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1532 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1533 extracted from it.) It might possibly be useful on some targets, but
1537 xstormy16_asm_output_mi_thunk (FILE *file
,
1538 tree thunk_fndecl ATTRIBUTE_UNUSED
,
1539 HOST_WIDE_INT delta
,
1540 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1543 int regnum
= FIRST_ARGUMENT_REGISTER
;
1545 /* There might be a hidden first argument for a returned structure. */
1546 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
1549 fprintf (file
, "\tadd %s,#0x%x\n", reg_names
[regnum
], (int) delta
& 0xFFFF);
1550 fputs ("\tjmpf ", file
);
1551 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1555 /* The purpose of this function is to override the default behavior of
1556 BSS objects. Normally, they go into .bss or .sbss via ".common"
1557 directives, but we need to override that and put them in
1558 .bss_below100. We can't just use a section override (like we do
1559 for .data_below100), because that makes them initialized rather
1560 than uninitialized. */
1563 xstormy16_asm_output_aligned_common (FILE *stream
,
1570 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
1575 && GET_CODE (symbol
= XEXP (mem
, 0)) == SYMBOL_REF
1576 && SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_XSTORMY16_BELOW100
)
1581 switch_to_section (bss100_section
);
1589 name2
= default_strip_name_encoding (name
);
1591 fprintf (stream
, "\t.globl\t%s\n", name2
);
1593 fprintf (stream
, "\t.p2align %d\n", p2align
);
1594 fprintf (stream
, "\t.type\t%s, @object\n", name2
);
1595 fprintf (stream
, "\t.size\t%s, %d\n", name2
, size
);
1596 fprintf (stream
, "%s:\n\t.space\t%d\n", name2
, size
);
1602 fprintf (stream
, "\t.local\t");
1603 assemble_name (stream
, name
);
1604 fprintf (stream
, "\n");
1606 fprintf (stream
, "\t.comm\t");
1607 assemble_name (stream
, name
);
1608 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
1611 /* Implement TARGET_ASM_INIT_SECTIONS. */
1614 xstormy16_asm_init_sections (void)
1617 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
1618 output_section_asm_op
,
1619 "\t.section \".bss_below100\",\"aw\",@nobits");
1622 /* Mark symbols with the "below100" attribute so that we can use the
1623 special addressing modes for them. */
1626 xstormy16_encode_section_info (tree decl
, rtx r
, int first
)
1628 default_encode_section_info (decl
, r
, first
);
1630 if (TREE_CODE (decl
) == VAR_DECL
1631 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl
))
1632 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl
))))
1634 rtx symbol
= XEXP (r
, 0);
1636 gcc_assert (GET_CODE (symbol
) == SYMBOL_REF
);
1637 SYMBOL_REF_FLAGS (symbol
) |= SYMBOL_FLAG_XSTORMY16_BELOW100
;
1641 #undef TARGET_ASM_CONSTRUCTOR
1642 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1643 #undef TARGET_ASM_DESTRUCTOR
1644 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1646 /* Output constructors and destructors. Just like
1647 default_named_section_asm_out_* but don't set the sections writable. */
1650 xstormy16_asm_out_destructor (rtx symbol
, int priority
)
1652 const char *section
= ".dtors";
1655 /* ??? This only works reliably with the GNU linker. */
1656 if (priority
!= DEFAULT_INIT_PRIORITY
)
1658 sprintf (buf
, ".dtors.%.5u",
1659 /* Invert the numbering so the linker puts us in the proper
1660 order; constructors are run from right to left, and the
1661 linker sorts in increasing order. */
1662 MAX_INIT_PRIORITY
- priority
);
1666 switch_to_section (get_section (section
, 0, NULL
));
1667 assemble_align (POINTER_SIZE
);
1668 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1672 xstormy16_asm_out_constructor (rtx symbol
, int priority
)
1674 const char *section
= ".ctors";
1677 /* ??? This only works reliably with the GNU linker. */
1678 if (priority
!= DEFAULT_INIT_PRIORITY
)
1680 sprintf (buf
, ".ctors.%.5u",
1681 /* Invert the numbering so the linker puts us in the proper
1682 order; constructors are run from right to left, and the
1683 linker sorts in increasing order. */
1684 MAX_INIT_PRIORITY
- priority
);
1688 switch_to_section (get_section (section
, 0, NULL
));
1689 assemble_align (POINTER_SIZE
);
1690 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1693 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1695 Print a memory address as an operand to reference that memory location. */
1698 xstormy16_print_operand_address (FILE *file
, rtx address
)
1700 HOST_WIDE_INT offset
;
1701 int pre_dec
, post_inc
;
1703 /* There are a few easy cases. */
1704 if (CONST_INT_P (address
))
1706 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (address
) & 0xFFFF);
1710 if (CONSTANT_P (address
) || LABEL_P (address
))
1712 output_addr_const (file
, address
);
1716 /* Otherwise, it's hopefully something of the form
1717 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1718 if (GET_CODE (address
) == PLUS
)
1720 gcc_assert (CONST_INT_P (XEXP (address
, 1)));
1721 offset
= INTVAL (XEXP (address
, 1));
1722 address
= XEXP (address
, 0);
1727 pre_dec
= (GET_CODE (address
) == PRE_DEC
);
1728 post_inc
= (GET_CODE (address
) == POST_INC
);
1729 if (pre_dec
|| post_inc
)
1730 address
= XEXP (address
, 0);
1732 gcc_assert (REG_P (address
));
1737 fputs (reg_names
[REGNO (address
)], file
);
1741 fprintf (file
, "," HOST_WIDE_INT_PRINT_DEC
, offset
);
1745 /* Worker function for TARGET_PRINT_OPERAND.
1747 Print an operand to an assembler instruction. */
1750 xstormy16_print_operand (FILE *file
, rtx x
, int code
)
1755 /* There is either one bit set, or one bit clear, in X.
1756 Print it preceded by '#'. */
1758 static int bits_set
[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1759 HOST_WIDE_INT xx
= 1;
1762 if (CONST_INT_P (x
))
1765 output_operand_lossage ("'B' operand is not constant");
1767 /* GCC sign-extends masks with the MSB set, so we have to
1768 detect all the cases that differ only in sign extension
1769 beyond the bits we care about. Normally, the predicates
1770 and constraints ensure that we have the right values. This
1771 works correctly for valid masks. */
1772 if (bits_set
[xx
& 7] <= 1)
1774 /* Remove sign extension bits. */
1775 if ((~xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1777 else if ((~xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1779 l
= exact_log2 (xx
);
1783 /* Add sign extension bits. */
1784 if ((xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1785 xx
|= ~(HOST_WIDE_INT
)0xff;
1786 else if ((xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1787 xx
|= ~(HOST_WIDE_INT
)0xffff;
1788 l
= exact_log2 (~xx
);
1792 output_operand_lossage ("'B' operand has multiple bits set");
1794 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, l
);
1799 /* Print the symbol without a surrounding @fptr(). */
1800 if (GET_CODE (x
) == SYMBOL_REF
)
1801 assemble_name (file
, XSTR (x
, 0));
1802 else if (LABEL_P (x
))
1803 output_asm_label (x
);
1805 xstormy16_print_operand_address (file
, x
);
1810 /* Print the immediate operand less one, preceded by '#'.
1811 For 'O', negate it first. */
1813 HOST_WIDE_INT xx
= 0;
1815 if (CONST_INT_P (x
))
1818 output_operand_lossage ("'o' operand is not constant");
1823 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, xx
- 1);
1828 /* Print the shift mask for bp/bn. */
1830 HOST_WIDE_INT xx
= 1;
1833 if (CONST_INT_P (x
))
1836 output_operand_lossage ("'B' operand is not constant");
1840 fputs (IMMEDIATE_PREFIX
, file
);
1841 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, l
);
1846 /* Handled below. */
1850 output_operand_lossage ("xstormy16_print_operand: unknown code");
1854 switch (GET_CODE (x
))
1857 fputs (reg_names
[REGNO (x
)], file
);
1861 xstormy16_print_operand_address (file
, XEXP (x
, 0));
1865 /* Some kind of constant or label; an immediate operand,
1866 so prefix it with '#' for the assembler. */
1867 fputs (IMMEDIATE_PREFIX
, file
);
1868 output_addr_const (file
, x
);
1875 /* Expander for the `casesi' pattern.
1876 INDEX is the index of the switch statement.
1877 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1878 to the first table entry.
1879 RANGE is the number of table entries.
1880 TABLE is an ADDR_VEC that is the jump table.
1881 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1882 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1885 xstormy16_expand_casesi (rtx index
, rtx lower_bound
, rtx range
,
1886 rtx table
, rtx default_label
)
1888 HOST_WIDE_INT range_i
= INTVAL (range
);
1891 /* This code uses 'br', so it can deal only with tables of size up to
1893 if (range_i
>= 8192)
1894 sorry ("switch statement of size %lu entries too large",
1895 (unsigned long) range_i
);
1897 index
= expand_binop (SImode
, sub_optab
, index
, lower_bound
, NULL_RTX
, 0,
1899 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, SImode
, 1,
1901 int_index
= gen_lowpart_common (HImode
, index
);
1902 emit_insn (gen_ashlhi3 (int_index
, int_index
, const2_rtx
));
1903 emit_jump_insn (gen_tablejump_pcrel (int_index
, table
));
1906 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1907 instructions, without label or alignment or any other special
1908 constructs. We know that the previous instruction will be the
1909 `tablejump_pcrel' output above.
1911 TODO: it might be nice to output 'br' instructions if they could
1915 xstormy16_output_addr_vec (FILE *file
, rtx label ATTRIBUTE_UNUSED
, rtx table
)
1919 switch_to_section (current_function_section ());
1921 vlen
= XVECLEN (table
, 0);
1922 for (idx
= 0; idx
< vlen
; idx
++)
1924 fputs ("\tjmpf ", file
);
1925 output_asm_label (XEXP (XVECEXP (table
, 0, idx
), 0));
1930 /* Expander for the `call' patterns.
1931 RETVAL is the RTL for the return register or NULL for void functions.
1932 DEST is the function to call, expressed as a MEM.
1933 COUNTER is ignored. */
1936 xstormy16_expand_call (rtx retval
, rtx dest
, rtx counter
)
1941 gcc_assert (MEM_P (dest
));
1942 dest
= XEXP (dest
, 0);
1944 if (! CONSTANT_P (dest
) && ! REG_P (dest
))
1945 dest
= force_reg (Pmode
, dest
);
1950 mode
= GET_MODE (retval
);
1952 call
= gen_rtx_CALL (mode
, gen_rtx_MEM (FUNCTION_MODE
, dest
),
1955 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
1957 if (! CONSTANT_P (dest
))
1959 temp
= gen_reg_rtx (HImode
);
1960 emit_move_insn (temp
, const0_rtx
);
1965 call
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, call
,
1966 gen_rtx_USE (VOIDmode
, temp
)));
1967 emit_call_insn (call
);
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).

   NOTE(review): this fragment is incomplete — several structural lines
   (braces, loop control, the COMPARE branch condition operands, and the
   final "emit something" statement) were lost in extraction.  The code
   below is preserved verbatim; gaps are flagged inline.  The overall
   shape is: split the multiword operation into word-sized (HImode)
   pieces, chaining the carry between words via CARRY_REGNUM.  */

xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  /* NOTE(review): guarded statement — presumably only for NEG, which is
     implemented as 0 - src1; confirm against the full source.  */
  emit_move_insn (src0, const0_rtx);
  for (i = 0; i < num_words; i++)
      rtx w_src0, w_src1, w_dest;
      /* Word-sized views of the operands for word I.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
      /* PLUS: adding a constant 0 word can be elided/simplified
	 (condition head missing from fragment).  */
	  && CONST_INT_P (w_src1)
	  && INTVAL (w_src1) == 0)
	    /* addchi4 starts the carry chain; addchi5 continues it.  */
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
      /* COMPARE on the last word: build a subtract-with-borrow fused
	 with a conditional branch, clobbering the carry.  */
      if (code == COMPARE && i == num_words - 1)
	  rtx branch, sub, clobber, sub_1;
	  sub_1 = gen_rtx_MINUS (HImode, w_src0,
				 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	  sub = gen_rtx_SET (VOIDmode, w_dest,
			     gen_rtx_MINUS (HImode, sub_1, w_src1));
	  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	  branch = gen_rtx_SET (VOIDmode, pc_rtx,
				gen_rtx_IF_THEN_ELSE (VOIDmode,
	  /* NOTE(review): IF_THEN_ELSE condition/label operands missing
	     from fragment.  */
	  insn = gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec (3, branch, sub, clobber));
      /* MINUS: same constant-zero special case as PLUS;
	 subchi4 starts the borrow chain, subchi5 continues it.  */
	  && CONST_INT_P (w_src1)
	  && INTVAL (w_src1) == 0)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
      /* Logical ops: AND with all-ones / IOR-XOR with zero are no-ops
	 for that word.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
      /* NOT: plain one's complement of the word.  */
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
  /* If we emit nothing, try_split () will think we failed.  So emit
     something that does nothing and can be optimized away.  */
2076 /* The shift operations are split at output time for constant values;
2077 variable-width shifts get handed off to a library routine.
2079 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2080 SIZE_R will be a CONST_INT, X will be a hard register. */
2083 xstormy16_output_shift (machine_mode mode
, enum rtx_code code
,
2084 rtx x
, rtx size_r
, rtx temp
)
2087 const char *r0
, *r1
, *rt
;
2090 gcc_assert (CONST_INT_P (size_r
)
2094 size
= INTVAL (size_r
) & (GET_MODE_BITSIZE (mode
) - 1);
2099 r0
= reg_names
[REGNO (x
)];
2100 r1
= reg_names
[REGNO (x
) + 1];
2102 /* For shifts of size 1, we can use the rotate instructions. */
2108 sprintf (r
, "shl %s,#1 | rlc %s,#1", r0
, r1
);
2111 sprintf (r
, "asr %s,#1 | rrc %s,#1", r1
, r0
);
2114 sprintf (r
, "shr %s,#1 | rrc %s,#1", r1
, r0
);
2122 /* For large shifts, there are easy special cases. */
2128 sprintf (r
, "mov %s,%s | mov %s,#0", r1
, r0
, r0
);
2131 sprintf (r
, "mov %s,%s | asr %s,#15", r0
, r1
, r1
);
2134 sprintf (r
, "mov %s,%s | mov %s,#0", r0
, r1
, r1
);
2146 sprintf (r
, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2147 r1
, r0
, r0
, r1
, (int) size
- 16);
2150 sprintf (r
, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2151 r0
, r1
, r1
, r0
, (int) size
- 16);
2154 sprintf (r
, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2155 r0
, r1
, r1
, r0
, (int) size
- 16);
2163 /* For the rest, we have to do more work. In particular, we
2164 need a temporary. */
2165 rt
= reg_names
[REGNO (temp
)];
2170 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2171 rt
, r0
, r0
, (int) size
, r1
, (int) size
, rt
, (int) (16 - size
),
2176 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2177 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2182 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2183 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2192 /* Attribute handling. */
2194 /* Return nonzero if the function is an interrupt function. */
2197 xstormy16_interrupt_function_p (void)
2201 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2202 any functions are declared, which is demonstrably wrong, but
2203 it is worked around here. FIXME. */
2207 attributes
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
2208 return lookup_attribute ("interrupt", attributes
) != NULL_TREE
;
2211 #undef TARGET_ATTRIBUTE_TABLE
2212 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2214 static tree xstormy16_handle_interrupt_attribute
2215 (tree
*, tree
, tree
, int, bool *);
2216 static tree xstormy16_handle_below100_attribute
2217 (tree
*, tree
, tree
, int, bool *);
2219 static const struct attribute_spec xstormy16_attribute_table
[] =
2221 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2222 affects_type_identity. */
2223 { "interrupt", 0, 0, false, true, true,
2224 xstormy16_handle_interrupt_attribute
, false },
2225 { "BELOW100", 0, 0, false, false, false,
2226 xstormy16_handle_below100_attribute
, false },
2227 { "below100", 0, 0, false, false, false,
2228 xstormy16_handle_below100_attribute
, false },
2229 { NULL
, 0, 0, false, false, false, NULL
, false }
2232 /* Handle an "interrupt" attribute;
2233 arguments as in struct attribute_spec.handler. */
2236 xstormy16_handle_interrupt_attribute (tree
*node
, tree name
,
2237 tree args ATTRIBUTE_UNUSED
,
2238 int flags ATTRIBUTE_UNUSED
,
2241 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
2243 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2245 *no_add_attrs
= true;
2251 /* Handle an "below" attribute;
2252 arguments as in struct attribute_spec.handler. */
2255 xstormy16_handle_below100_attribute (tree
*node
,
2256 tree name ATTRIBUTE_UNUSED
,
2257 tree args ATTRIBUTE_UNUSED
,
2258 int flags ATTRIBUTE_UNUSED
,
2261 if (TREE_CODE (*node
) != VAR_DECL
2262 && TREE_CODE (*node
) != POINTER_TYPE
2263 && TREE_CODE (*node
) != TYPE_DECL
)
2265 warning (OPT_Wattributes
,
2266 "%<__BELOW100__%> attribute only applies to variables");
2267 *no_add_attrs
= true;
2269 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
2271 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
2273 warning (OPT_Wattributes
, "__BELOW100__ attribute not allowed "
2274 "with auto storage class");
2275 *no_add_attrs
= true;
2282 #undef TARGET_INIT_BUILTINS
2283 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2284 #undef TARGET_EXPAND_BUILTIN
2285 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2291 const char * arg_ops
; /* 0..9, t for temp register, r for return value. */
2292 const char * arg_types
; /* s=short,l=long, upper case for unsigned. */
2296 { "__sdivlh", CODE_FOR_sdivlh
, "rt01", "sls" },
2297 { "__smodlh", CODE_FOR_sdivlh
, "tr01", "sls" },
2298 { "__udivlh", CODE_FOR_udivlh
, "rt01", "SLS" },
2299 { "__umodlh", CODE_FOR_udivlh
, "tr01", "SLS" },
2300 { NULL
, 0, NULL
, NULL
}
2304 xstormy16_init_builtins (void)
2306 tree args
[2], ret_type
, arg
= NULL_TREE
, ftype
;
2309 ret_type
= void_type_node
;
2311 for (i
= 0; s16builtins
[i
].name
; i
++)
2313 n_args
= strlen (s16builtins
[i
].arg_types
) - 1;
2315 gcc_assert (n_args
<= (int) ARRAY_SIZE (args
));
2317 for (a
= n_args
- 1; a
>= 0; a
--)
2318 args
[a
] = NULL_TREE
;
2320 for (a
= n_args
; a
>= 0; a
--)
2322 switch (s16builtins
[i
].arg_types
[a
])
2324 case 's': arg
= short_integer_type_node
; break;
2325 case 'S': arg
= short_unsigned_type_node
; break;
2326 case 'l': arg
= long_integer_type_node
; break;
2327 case 'L': arg
= long_unsigned_type_node
; break;
2328 default: gcc_unreachable ();
2335 ftype
= build_function_type_list (ret_type
, args
[0], args
[1], NULL_TREE
);
2336 add_builtin_function (s16builtins
[i
].name
, ftype
,
2337 i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2342 xstormy16_expand_builtin (tree exp
, rtx target
,
2343 rtx subtarget ATTRIBUTE_UNUSED
,
2344 machine_mode mode ATTRIBUTE_UNUSED
,
2345 int ignore ATTRIBUTE_UNUSED
)
2347 rtx op
[10], args
[10], pat
, copyto
[10], retval
= 0;
2348 tree fndecl
, argtree
;
2351 fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
2352 argtree
= TREE_OPERAND (exp
, 1);
2353 i
= DECL_FUNCTION_CODE (fndecl
);
2354 code
= s16builtins
[i
].md_code
;
2356 for (a
= 0; a
< 10 && argtree
; a
++)
2358 args
[a
] = expand_normal (TREE_VALUE (argtree
));
2359 argtree
= TREE_CHAIN (argtree
);
2362 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2364 char ao
= s16builtins
[i
].arg_ops
[o
];
2365 char c
= insn_data
[code
].operand
[o
].constraint
[0];
2370 omode
= (machine_mode
) insn_data
[code
].operand
[o
].mode
;
2372 op
[o
] = target
? target
: gen_reg_rtx (omode
);
2374 op
[o
] = gen_reg_rtx (omode
);
2376 op
[o
] = args
[(int) hex_value (ao
)];
2378 if (! (*insn_data
[code
].operand
[o
].predicate
) (op
[o
], GET_MODE (op
[o
])))
2380 if (c
== '+' || c
== '=')
2383 op
[o
] = gen_reg_rtx (omode
);
2386 op
[o
] = copy_to_mode_reg (omode
, op
[o
]);
2393 pat
= GEN_FCN (code
) (op
[0], op
[1], op
[2], op
[3], op
[4],
2394 op
[5], op
[6], op
[7], op
[8], op
[9]);
2397 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2400 emit_move_insn (copyto
[o
], op
[o
]);
2401 if (op
[o
] == retval
)
2408 /* Look for combinations of insns that can be converted to BN or BP
2409 opcodes. This is, unfortunately, too complex to do with MD
2413 combine_bnp (rtx_insn
*insn
)
2415 int insn_code
, regno
, need_extend
;
2417 rtx cond
, reg
, qireg
, mem
;
2418 rtx_insn
*and_insn
, *load
;
2419 machine_mode load_mode
= QImode
;
2420 machine_mode and_mode
= QImode
;
2421 rtx_insn
*shift
= NULL
;
2423 insn_code
= recog_memoized (insn
);
2424 if (insn_code
!= CODE_FOR_cbranchhi
2425 && insn_code
!= CODE_FOR_cbranchhi_neg
)
2428 cond
= XVECEXP (PATTERN (insn
), 0, 0); /* set */
2429 cond
= XEXP (cond
, 1); /* if */
2430 cond
= XEXP (cond
, 0); /* cond */
2431 switch (GET_CODE (cond
))
2445 reg
= XEXP (cond
, 0);
2448 regno
= REGNO (reg
);
2449 if (XEXP (cond
, 1) != const0_rtx
)
2451 if (! find_regno_note (insn
, REG_DEAD
, regno
))
2453 qireg
= gen_rtx_REG (QImode
, regno
);
2457 /* LT and GE conditionals should have a sign extend before
2459 for (and_insn
= prev_real_insn (insn
);
2460 and_insn
!= NULL_RTX
;
2461 and_insn
= prev_real_insn (and_insn
))
2463 int and_code
= recog_memoized (and_insn
);
2465 if (and_code
== CODE_FOR_extendqihi2
2466 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2467 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), qireg
))
2470 if (and_code
== CODE_FOR_movhi_internal
2471 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
))
2473 /* This is for testing bit 15. */
2478 if (reg_mentioned_p (reg
, and_insn
))
2481 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2487 /* EQ and NE conditionals have an AND before them. */
2488 for (and_insn
= prev_real_insn (insn
);
2489 and_insn
!= NULL_RTX
;
2490 and_insn
= prev_real_insn (and_insn
))
2492 if (recog_memoized (and_insn
) == CODE_FOR_andhi3
2493 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2494 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), reg
))
2497 if (reg_mentioned_p (reg
, and_insn
))
2500 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2506 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2507 followed by an AND like this:
2509 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2510 (clobber (reg:BI carry))]
2512 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2514 Attempt to detect this here. */
2515 for (shift
= prev_real_insn (and_insn
); shift
;
2516 shift
= prev_real_insn (shift
))
2518 if (recog_memoized (shift
) == CODE_FOR_lshrhi3
2519 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift
), 0, 0)), reg
)
2520 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 0), reg
))
2523 if (reg_mentioned_p (reg
, shift
)
2524 || (! NOTE_P (shift
) && ! NONJUMP_INSN_P (shift
)))
2533 if (and_insn
== NULL_RTX
)
2536 for (load
= shift
? prev_real_insn (shift
) : prev_real_insn (and_insn
);
2538 load
= prev_real_insn (load
))
2540 int load_code
= recog_memoized (load
);
2542 if (load_code
== CODE_FOR_movhi_internal
2543 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2544 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), HImode
)
2545 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load
))))
2551 if (load_code
== CODE_FOR_movqi_internal
2552 && rtx_equal_p (SET_DEST (PATTERN (load
)), qireg
)
2553 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), QImode
))
2559 if (load_code
== CODE_FOR_zero_extendqihi2
2560 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2561 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load
)), 0), QImode
))
2568 if (reg_mentioned_p (reg
, load
))
2571 if (! NOTE_P (load
) && ! NONJUMP_INSN_P (load
))
2577 mem
= SET_SRC (PATTERN (load
));
2581 mask
= (load_mode
== HImode
) ? 0x8000 : 0x80;
2583 /* If the mem includes a zero-extend operation and we are
2584 going to generate a sign-extend operation then move the
2585 mem inside the zero-extend. */
2586 if (GET_CODE (mem
) == ZERO_EXTEND
)
2587 mem
= XEXP (mem
, 0);
2591 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn
)), 1),
2595 mask
= (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn
)), 1));
2598 mask
<<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 1));
2601 if (load_mode
== HImode
)
2603 rtx addr
= XEXP (mem
, 0);
2605 if (! (mask
& 0xff))
2607 addr
= plus_constant (Pmode
, addr
, 1);
2610 mem
= gen_rtx_MEM (QImode
, addr
);
2614 XEXP (cond
, 0) = gen_rtx_SIGN_EXTEND (HImode
, mem
);
2616 XEXP (cond
, 0) = gen_rtx_AND (and_mode
, mem
, GEN_INT (mask
));
2618 INSN_CODE (insn
) = -1;
2621 if (and_insn
!= insn
)
2622 delete_insn (and_insn
);
2624 if (shift
!= NULL_RTX
)
2625 delete_insn (shift
);
2629 xstormy16_reorg (void)
2633 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2635 if (! JUMP_P (insn
))
2641 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2644 xstormy16_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2646 const HOST_WIDE_INT size
= int_size_in_bytes (type
);
2647 return (size
== -1 || size
> UNITS_PER_WORD
* NUM_ARGUMENT_REGISTERS
);
2650 #undef TARGET_ASM_ALIGNED_HI_OP
2651 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2652 #undef TARGET_ASM_ALIGNED_SI_OP
2653 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2654 #undef TARGET_ENCODE_SECTION_INFO
2655 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2657 /* Select_section doesn't handle .bss_below100. */
2658 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2659 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2661 #undef TARGET_ASM_OUTPUT_MI_THUNK
2662 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2663 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2664 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2666 #undef TARGET_PRINT_OPERAND
2667 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2668 #undef TARGET_PRINT_OPERAND_ADDRESS
2669 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2671 #undef TARGET_MEMORY_MOVE_COST
2672 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2673 #undef TARGET_RTX_COSTS
2674 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2675 #undef TARGET_ADDRESS_COST
2676 #define TARGET_ADDRESS_COST xstormy16_address_cost
2678 #undef TARGET_BUILD_BUILTIN_VA_LIST
2679 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2680 #undef TARGET_EXPAND_BUILTIN_VA_START
2681 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2682 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2683 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2685 #undef TARGET_PROMOTE_FUNCTION_MODE
2686 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2687 #undef TARGET_PROMOTE_PROTOTYPES
2688 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2690 #undef TARGET_FUNCTION_ARG
2691 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2692 #undef TARGET_FUNCTION_ARG_ADVANCE
2693 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2695 #undef TARGET_RETURN_IN_MEMORY
2696 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2697 #undef TARGET_FUNCTION_VALUE
2698 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2699 #undef TARGET_LIBCALL_VALUE
2700 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2701 #undef TARGET_FUNCTION_VALUE_REGNO_P
2702 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2704 #undef TARGET_MACHINE_DEPENDENT_REORG
2705 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2707 #undef TARGET_PREFERRED_RELOAD_CLASS
2708 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2709 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2710 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2712 #undef TARGET_LEGITIMATE_ADDRESS_P
2713 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2714 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2715 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2717 #undef TARGET_CAN_ELIMINATE
2718 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2720 #undef TARGET_TRAMPOLINE_INIT
2721 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2723 struct gcc_target targetm
= TARGET_INITIALIZER
;
2725 #include "gt-stormy16.h"