1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2016 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "stringpool.h"
35 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
43 #include "langhooks.h"
49 /* This file should be included last. */
50 #include "target-def.h"
52 static rtx
emit_addhi3_postreload (rtx
, rtx
, rtx
);
53 static void xstormy16_asm_out_constructor (rtx
, int);
54 static void xstormy16_asm_out_destructor (rtx
, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
58 static void xstormy16_init_builtins (void);
59 static rtx
xstormy16_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
60 static int xstormy16_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
61 static bool xstormy16_return_in_memory (const_tree
, const_tree
);
63 static GTY(()) section
*bss100_section
;
65 /* Compute a (partial) cost for rtx X. Return true if the complete
66 cost has been computed, and false if subexpressions should be
67 scanned. In either case, *TOTAL contains the cost result. */
70 xstormy16_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
71 int outer_code ATTRIBUTE_UNUSED
,
72 int opno ATTRIBUTE_UNUSED
, int *total
,
73 bool speed ATTRIBUTE_UNUSED
)
75 int code
= GET_CODE (x
);
80 if (INTVAL (x
) < 16 && INTVAL (x
) >= 0)
81 *total
= COSTS_N_INSNS (1) / 2;
82 else if (INTVAL (x
) < 256 && INTVAL (x
) >= 0)
83 *total
= COSTS_N_INSNS (1);
85 *total
= COSTS_N_INSNS (2);
92 *total
= COSTS_N_INSNS (2);
96 *total
= COSTS_N_INSNS (35 + 6);
99 *total
= COSTS_N_INSNS (51 - 6);
108 xstormy16_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
109 addr_space_t as ATTRIBUTE_UNUSED
,
110 bool speed ATTRIBUTE_UNUSED
)
112 return (CONST_INT_P (x
) ? 2
113 : GET_CODE (x
) == PLUS
? 7
117 /* Worker function for TARGET_MEMORY_MOVE_COST. */
120 xstormy16_memory_move_cost (machine_mode mode
, reg_class_t rclass
,
123 return (5 + memory_move_secondary_cost (mode
, rclass
, in
));
126 /* Branches are handled as follows:
128 1. HImode compare-and-branches. The machine supports these
129 natively, so the appropriate pattern is emitted directly.
131 2. SImode EQ and NE. These are emitted as pairs of HImode
132 compare-and-branches.
134 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
135 of a SImode subtract followed by a branch (not a compare-and-branch),
141 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
148 /* Emit a branch of kind CODE to location LOC. */
151 xstormy16_emit_cbranch (enum rtx_code code
, rtx op0
, rtx op1
, rtx loc
)
153 rtx condition_rtx
, loc_ref
, branch
, cy_clobber
;
157 mode
= GET_MODE (op0
);
158 gcc_assert (mode
== HImode
|| mode
== SImode
);
161 && (code
== GT
|| code
== LE
|| code
== GTU
|| code
== LEU
))
163 int unsigned_p
= (code
== GTU
|| code
== LEU
);
164 int gt_p
= (code
== GT
|| code
== GTU
);
168 lab
= gen_label_rtx ();
169 xstormy16_emit_cbranch (unsigned_p
? LTU
: LT
, op0
, op1
, gt_p
? lab
: loc
);
170 /* This should be generated as a comparison against the temporary
171 created by the previous insn, but reload can't handle that. */
172 xstormy16_emit_cbranch (gt_p
? NE
: EQ
, op0
, op1
, loc
);
177 else if (mode
== SImode
178 && (code
== NE
|| code
== EQ
)
179 && op1
!= const0_rtx
)
181 rtx op0_word
, op1_word
;
183 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
187 lab
= gen_label_rtx ();
189 for (i
= 0; i
< num_words
- 1; i
++)
191 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
193 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
195 xstormy16_emit_cbranch (NE
, op0_word
, op1_word
, code
== EQ
? lab
: loc
);
197 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
199 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
201 xstormy16_emit_cbranch (code
, op0_word
, op1_word
, loc
);
208 /* We can't allow reload to try to generate any reload after a branch,
209 so when some register must match we must make the temporary ourselves. */
213 tmp
= gen_reg_rtx (mode
);
214 emit_move_insn (tmp
, op0
);
218 condition_rtx
= gen_rtx_fmt_ee (code
, mode
, op0
, op1
);
219 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
220 branch
= gen_rtx_SET (pc_rtx
,
221 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
224 cy_clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
227 vec
= gen_rtvec (2, branch
, cy_clobber
);
228 else if (code
== NE
|| code
== EQ
)
229 vec
= gen_rtvec (2, branch
, gen_rtx_CLOBBER (VOIDmode
, op0
));
234 sub
= gen_rtx_SET (op0
, gen_rtx_MINUS (SImode
, op0
, op1
));
236 sub
= gen_rtx_CLOBBER (SImode
, op0
);
238 vec
= gen_rtvec (3, branch
, sub
, cy_clobber
);
241 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, vec
));
244 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
245 the arithmetic operation. Most of the work is done by
246 xstormy16_expand_arith. */
249 xstormy16_split_cbranch (machine_mode mode
, rtx label
, rtx comparison
,
252 rtx op0
= XEXP (comparison
, 0);
253 rtx op1
= XEXP (comparison
, 1);
254 rtx_insn
*seq
, *last_insn
;
258 xstormy16_expand_arith (mode
, COMPARE
, dest
, op0
, op1
);
262 gcc_assert (INSN_P (seq
));
265 while (NEXT_INSN (last_insn
) != NULL_RTX
)
266 last_insn
= NEXT_INSN (last_insn
);
268 compare
= SET_SRC (XVECEXP (PATTERN (last_insn
), 0, 0));
269 PUT_CODE (XEXP (compare
, 0), GET_CODE (comparison
));
270 XEXP (compare
, 1) = gen_rtx_LABEL_REF (VOIDmode
, label
);
275 /* Return the string to output a conditional branch to LABEL, which is
276 the operand number of the label.
278 OP is the conditional expression, or NULL for branch-always.
280 REVERSED is nonzero if we should reverse the sense of the comparison.
285 xstormy16_output_cbranch_hi (rtx op
, const char *label
, int reversed
,
288 static char string
[64];
289 int need_longbranch
= (op
!= NULL_RTX
290 ? get_attr_length (insn
) == 8
291 : get_attr_length (insn
) == 4);
292 int really_reversed
= reversed
^ need_longbranch
;
295 const char *operands
;
304 sprintf (string
, "%s %s", ccode
, label
);
308 code
= GET_CODE (op
);
310 if (! REG_P (XEXP (op
, 0)))
312 code
= swap_condition (code
);
318 /* Work out which way this really branches. */
320 code
= reverse_condition (code
);
324 case EQ
: ccode
= "z"; break;
325 case NE
: ccode
= "nz"; break;
326 case GE
: ccode
= "ge"; break;
327 case LT
: ccode
= "lt"; break;
328 case GT
: ccode
= "gt"; break;
329 case LE
: ccode
= "le"; break;
330 case GEU
: ccode
= "nc"; break;
331 case LTU
: ccode
= "c"; break;
332 case GTU
: ccode
= "hi"; break;
333 case LEU
: ccode
= "ls"; break;
340 templ
= "b%s %s,.+8 | jmpf %s";
343 sprintf (string
, templ
, ccode
, operands
, label
);
348 /* Return the string to output a conditional branch to LABEL, which is
349 the operand number of the label, but suitable for the tail of a
352 OP is the conditional expression (OP is never NULL_RTX).
354 REVERSED is nonzero if we should reverse the sense of the comparison.
359 xstormy16_output_cbranch_si (rtx op
, const char *label
, int reversed
,
362 static char string
[64];
363 int need_longbranch
= get_attr_length (insn
) >= 8;
364 int really_reversed
= reversed
^ need_longbranch
;
370 code
= GET_CODE (op
);
372 /* Work out which way this really branches. */
374 code
= reverse_condition (code
);
378 case EQ
: ccode
= "z"; break;
379 case NE
: ccode
= "nz"; break;
380 case GE
: ccode
= "ge"; break;
381 case LT
: ccode
= "lt"; break;
382 case GEU
: ccode
= "nc"; break;
383 case LTU
: ccode
= "c"; break;
385 /* The missing codes above should never be generated. */
396 gcc_assert (REG_P (XEXP (op
, 0)));
398 regnum
= REGNO (XEXP (op
, 0));
399 sprintf (prevop
, "or %s,%s", reg_names
[regnum
], reg_names
[regnum
+1]);
403 case GE
: case LT
: case GEU
: case LTU
:
404 strcpy (prevop
, "sbc %2,%3");
412 templ
= "%s | b%s .+6 | jmpf %s";
414 templ
= "%s | b%s %s";
415 sprintf (string
, templ
, prevop
, ccode
, label
);
420 /* Many machines have some registers that cannot be copied directly to or from
421 memory or even from other types of registers. An example is the `MQ'
422 register, which on most machines, can only be copied to or from general
423 registers, but not memory. Some machines allow copying all registers to and
424 from memory, but require a scratch register for stores to some memory
425 locations (e.g., those with symbolic address on the RT, and those with
426 certain symbolic address on the SPARC when compiling PIC). In some cases,
427 both an intermediate and a scratch register are required.
429 You should define these macros to indicate to the reload phase that it may
430 need to allocate at least one register for a reload in addition to the
431 register to contain the data. Specifically, if copying X to a register
432 RCLASS in MODE requires an intermediate register, you should define
433 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
434 whose registers can be used as intermediate registers or scratch registers.
436 If copying a register RCLASS in MODE to X requires an intermediate or scratch
437 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
438 largest register class required. If the requirements for input and output
439 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
440 instead of defining both macros identically.
442 The values returned by these macros are often `GENERAL_REGS'. Return
443 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
444 to or from a register of RCLASS in MODE without requiring a scratch register.
445 Do not define this macro if it would always return `NO_REGS'.
447 If a scratch register is required (either with or without an intermediate
448 register), you should define patterns for `reload_inM' or `reload_outM', as
449 required.. These patterns, which will normally be implemented with a
450 `define_expand', should be similar to the `movM' patterns, except that
451 operand 2 is the scratch register.
453 Define constraints for the reload register and scratch register that contain
454 a single register class. If the original reload register (whose class is
455 RCLASS) can meet the constraint given in the pattern, the value returned by
456 these macros is used for the class of the scratch register. Otherwise, two
457 additional reload registers are required. Their classes are obtained from
458 the constraints in the insn pattern.
460 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
461 either be in a hard register or in memory. Use `true_regnum' to find out;
462 it will return -1 if the pseudo is in memory and the hard register number if
465 These macros should not be used in the case where a particular class of
466 registers can only be copied to memory and not to another class of
467 registers. In that case, secondary reload registers are not needed and
468 would not be helpful. Instead, a stack location must be used to perform the
469 copy and the `movM' pattern should use memory as an intermediate storage.
470 This case often occurs between floating-point and general registers. */
473 xstormy16_secondary_reload_class (enum reg_class rclass
,
474 machine_mode mode ATTRIBUTE_UNUSED
,
477 /* This chip has the interesting property that only the first eight
478 registers can be moved to/from memory. */
480 || ((GET_CODE (x
) == SUBREG
|| REG_P (x
))
481 && (true_regnum (x
) == -1
482 || true_regnum (x
) >= FIRST_PSEUDO_REGISTER
)))
483 && ! reg_class_subset_p (rclass
, EIGHT_REGS
))
489 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
490 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
493 xstormy16_preferred_reload_class (rtx x
, reg_class_t rclass
)
495 if (rclass
== GENERAL_REGS
&& MEM_P (x
))
501 /* Predicate for symbols and addresses that reflect special 8-bit
505 xstormy16_below100_symbol (rtx x
,
506 machine_mode mode ATTRIBUTE_UNUSED
)
508 if (GET_CODE (x
) == CONST
)
510 if (GET_CODE (x
) == PLUS
&& CONST_INT_P (XEXP (x
, 1)))
513 if (GET_CODE (x
) == SYMBOL_REF
)
514 return (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_XSTORMY16_BELOW100
) != 0;
518 HOST_WIDE_INT i
= INTVAL (x
);
520 if ((i
>= 0x0000 && i
<= 0x00ff)
521 || (i
>= 0x7f00 && i
<= 0x7fff))
527 /* Likewise, but only for non-volatile MEMs, for patterns where the
528 MEM will get split into smaller sized accesses. */
531 xstormy16_splittable_below100_operand (rtx x
, machine_mode mode
)
533 if (MEM_P (x
) && MEM_VOLATILE_P (x
))
535 return xstormy16_below100_operand (x
, mode
);
538 /* Expand an 8-bit IOR. This either detects the one case we can
539 actually do, or uses a 16-bit IOR. */
542 xstormy16_expand_iorqi3 (rtx
*operands
)
544 rtx in
, out
, outsub
, val
;
550 if (xstormy16_onebit_set_operand (val
, QImode
))
552 if (!xstormy16_below100_or_register (in
, QImode
))
553 in
= copy_to_mode_reg (QImode
, in
);
554 if (!xstormy16_below100_or_register (out
, QImode
))
555 out
= gen_reg_rtx (QImode
);
556 emit_insn (gen_iorqi3_internal (out
, in
, val
));
557 if (out
!= operands
[0])
558 emit_move_insn (operands
[0], out
);
563 in
= copy_to_mode_reg (QImode
, in
);
565 if (! REG_P (val
) && ! CONST_INT_P (val
))
566 val
= copy_to_mode_reg (QImode
, val
);
569 out
= gen_reg_rtx (QImode
);
571 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
572 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
574 if (! CONST_INT_P (val
))
575 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
577 emit_insn (gen_iorhi3 (outsub
, in
, val
));
579 if (out
!= operands
[0])
580 emit_move_insn (operands
[0], out
);
583 /* Expand an 8-bit AND. This either detects the one case we can
584 actually do, or uses a 16-bit AND. */
587 xstormy16_expand_andqi3 (rtx
*operands
)
589 rtx in
, out
, outsub
, val
;
595 if (xstormy16_onebit_clr_operand (val
, QImode
))
597 if (!xstormy16_below100_or_register (in
, QImode
))
598 in
= copy_to_mode_reg (QImode
, in
);
599 if (!xstormy16_below100_or_register (out
, QImode
))
600 out
= gen_reg_rtx (QImode
);
601 emit_insn (gen_andqi3_internal (out
, in
, val
));
602 if (out
!= operands
[0])
603 emit_move_insn (operands
[0], out
);
608 in
= copy_to_mode_reg (QImode
, in
);
610 if (! REG_P (val
) && ! CONST_INT_P (val
))
611 val
= copy_to_mode_reg (QImode
, val
);
614 out
= gen_reg_rtx (QImode
);
616 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
617 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
619 if (! CONST_INT_P (val
))
620 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
622 emit_insn (gen_andhi3 (outsub
, in
, val
));
624 if (out
!= operands
[0])
625 emit_move_insn (operands
[0], out
);
/* True if X is a CONST_INT that, plus OFFSET, fits in the signed 12-bit
   displacement range (-2048 .. 2047).  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X is a CONST_INT usable as an absolute address once OFFSET is
   added: non-negative, below 0x8000, and within the directly-addressable
   low (< 0x100) or high (>= 0x7F00) regions.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
639 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
642 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0))
645 if (GET_CODE (x
) == PLUS
646 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0))
649 /* PR 31232: Do not allow INT+INT as an address. */
654 if ((GET_CODE (x
) == PRE_MODIFY
&& CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
655 || GET_CODE (x
) == POST_INC
656 || GET_CODE (x
) == PRE_DEC
)
660 && REGNO_OK_FOR_BASE_P (REGNO (x
))
661 && (! strict
|| REGNO (x
) < FIRST_PSEUDO_REGISTER
))
664 if (xstormy16_below100_symbol (x
, mode
))
670 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
672 On this chip, this is true if the address is valid with an offset
673 of 0 but not of 6, because in that case it cannot be used as an
674 address for DImode or DFmode, or if the address is a post-increment
675 or pre-decrement address. */
678 xstormy16_mode_dependent_address_p (const_rtx x
,
679 addr_space_t as ATTRIBUTE_UNUSED
)
681 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0)
682 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x
, 6))
685 if (GET_CODE (x
) == PLUS
686 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0)
687 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 6))
690 /* Auto-increment addresses are now treated generically in recog.c. */
695 short_memory_operand (rtx x
, machine_mode mode
)
697 if (! memory_operand (x
, mode
))
699 return (GET_CODE (XEXP (x
, 0)) != PLUS
);
702 /* Splitter for the 'move' patterns, for modes not directly implemented
703 by hardware. Emit insns to copy a value of mode MODE from SRC to
706 This function is only called when reload_completed. */
709 xstormy16_split_move (machine_mode mode
, rtx dest
, rtx src
)
711 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
712 int direction
, end
, i
;
713 int src_modifies
= 0;
714 int dest_modifies
= 0;
715 int src_volatile
= 0;
716 int dest_volatile
= 0;
718 rtx auto_inc_reg_rtx
= NULL_RTX
;
720 /* Check initial conditions. */
721 gcc_assert (reload_completed
722 && mode
!= QImode
&& mode
!= HImode
723 && nonimmediate_operand (dest
, mode
)
724 && general_operand (src
, mode
));
726 /* This case is not supported below, and shouldn't be generated. */
727 gcc_assert (! MEM_P (dest
) || ! MEM_P (src
));
729 /* This case is very very bad after reload, so trap it now. */
730 gcc_assert (GET_CODE (dest
) != SUBREG
&& GET_CODE (src
) != SUBREG
);
732 /* The general idea is to copy by words, offsetting the source and
733 destination. Normally the least-significant word will be copied
734 first, but for pre-dec operations it's better to copy the
735 most-significant word first. Only one operand can be a pre-dec
738 It's also possible that the copy overlaps so that the direction
744 mem_operand
= XEXP (dest
, 0);
745 dest_modifies
= side_effects_p (mem_operand
);
746 if (auto_inc_p (mem_operand
))
747 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
748 dest_volatile
= MEM_VOLATILE_P (dest
);
751 dest
= copy_rtx (dest
);
752 MEM_VOLATILE_P (dest
) = 0;
755 else if (MEM_P (src
))
757 mem_operand
= XEXP (src
, 0);
758 src_modifies
= side_effects_p (mem_operand
);
759 if (auto_inc_p (mem_operand
))
760 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
761 src_volatile
= MEM_VOLATILE_P (src
);
764 src
= copy_rtx (src
);
765 MEM_VOLATILE_P (src
) = 0;
769 mem_operand
= NULL_RTX
;
771 if (mem_operand
== NULL_RTX
)
775 && reg_overlap_mentioned_p (dest
, src
)
776 && REGNO (dest
) > REGNO (src
))
779 else if (GET_CODE (mem_operand
) == PRE_DEC
780 || (GET_CODE (mem_operand
) == PLUS
781 && GET_CODE (XEXP (mem_operand
, 0)) == PRE_DEC
))
783 else if (MEM_P (src
) && reg_overlap_mentioned_p (dest
, src
))
787 gcc_assert (REG_P (dest
));
788 regno
= REGNO (dest
);
790 gcc_assert (refers_to_regno_p (regno
, regno
+ num_words
,
793 if (refers_to_regno_p (regno
, mem_operand
))
795 else if (refers_to_regno_p (regno
+ num_words
- 1, regno
+ num_words
,
799 /* This means something like
800 (set (reg:DI r0) (mem:DI (reg:HI r1)))
801 which we'd need to support by doing the set of the second word
806 end
= direction
< 0 ? -1 : num_words
;
807 for (i
= direction
< 0 ? num_words
- 1 : 0; i
!= end
; i
+= direction
)
809 rtx w_src
, w_dest
, insn
;
812 w_src
= gen_rtx_MEM (word_mode
, mem_operand
);
814 w_src
= simplify_gen_subreg (word_mode
, src
, mode
, i
* UNITS_PER_WORD
);
816 MEM_VOLATILE_P (w_src
) = 1;
818 w_dest
= gen_rtx_MEM (word_mode
, mem_operand
);
820 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
,
823 MEM_VOLATILE_P (w_dest
) = 1;
825 /* The simplify_subreg calls must always be able to simplify. */
826 gcc_assert (GET_CODE (w_src
) != SUBREG
827 && GET_CODE (w_dest
) != SUBREG
);
829 insn
= emit_insn (gen_rtx_SET (w_dest
, w_src
));
830 if (auto_inc_reg_rtx
)
831 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
,
837 /* Expander for the 'move' patterns. Emit insns to copy a value of
838 mode MODE from SRC to DEST. */
841 xstormy16_expand_move (machine_mode mode
, rtx dest
, rtx src
)
843 if (MEM_P (dest
) && (GET_CODE (XEXP (dest
, 0)) == PRE_MODIFY
))
845 rtx pmv
= XEXP (dest
, 0);
846 rtx dest_reg
= XEXP (pmv
, 0);
847 rtx dest_mod
= XEXP (pmv
, 1);
848 rtx set
= gen_rtx_SET (dest_reg
, dest_mod
);
849 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
851 dest
= gen_rtx_MEM (mode
, dest_reg
);
852 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
854 else if (MEM_P (src
) && (GET_CODE (XEXP (src
, 0)) == PRE_MODIFY
))
856 rtx pmv
= XEXP (src
, 0);
857 rtx src_reg
= XEXP (pmv
, 0);
858 rtx src_mod
= XEXP (pmv
, 1);
859 rtx set
= gen_rtx_SET (src_reg
, src_mod
);
860 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
862 src
= gen_rtx_MEM (mode
, src_reg
);
863 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
866 /* There are only limited immediate-to-memory move instructions. */
867 if (! reload_in_progress
868 && ! reload_completed
870 && (! CONST_INT_P (XEXP (dest
, 0))
871 || ! xstormy16_legitimate_address_p (mode
, XEXP (dest
, 0), 0))
872 && ! xstormy16_below100_operand (dest
, mode
)
874 && GET_CODE (src
) != SUBREG
)
875 src
= copy_to_mode_reg (mode
, src
);
877 /* Don't emit something we would immediately split. */
879 && mode
!= HImode
&& mode
!= QImode
)
881 xstormy16_split_move (mode
, dest
, src
);
885 emit_insn (gen_rtx_SET (dest
, src
));
/* Stack frames.

   The stack is laid out as follows:

SP->
FP->	Local variables
	Register save area (up to 4 words)
	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->	Return address (two words)
	9th procedure parameter word
	10th procedure parameter word
	...
	last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
929 /* Compute the stack layout. */
931 struct xstormy16_stack_layout
932 xstormy16_compute_stack_layout (void)
934 struct xstormy16_stack_layout layout
;
936 const int ifun
= xstormy16_interrupt_function_p ();
938 layout
.locals_size
= get_frame_size ();
940 layout
.register_save_size
= 0;
941 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
942 if (REG_NEEDS_SAVE (regno
, ifun
))
943 layout
.register_save_size
+= UNITS_PER_WORD
;
946 layout
.stdarg_save_size
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
948 layout
.stdarg_save_size
= 0;
950 layout
.frame_size
= (layout
.locals_size
951 + layout
.register_save_size
952 + layout
.stdarg_save_size
);
954 if (crtl
->args
.size
<= 2048 && crtl
->args
.size
!= -1)
956 if (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
957 + crtl
->args
.size
<= 2048)
958 layout
.fp_minus_ap
= layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
;
960 layout
.fp_minus_ap
= 2048 - crtl
->args
.size
;
963 layout
.fp_minus_ap
= (layout
.stdarg_save_size
964 + layout
.register_save_size
965 - INCOMING_FRAME_SP_OFFSET
);
966 layout
.sp_minus_fp
= (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
967 - layout
.fp_minus_ap
);
968 layout
.first_local_minus_ap
= layout
.sp_minus_fp
- layout
.locals_size
;
972 /* Worker function for TARGET_CAN_ELIMINATE. */
975 xstormy16_can_eliminate (const int from
, const int to
)
977 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
978 ? ! frame_pointer_needed
982 /* Determine how all the special registers get eliminated. */
985 xstormy16_initial_elimination_offset (int from
, int to
)
987 struct xstormy16_stack_layout layout
;
990 layout
= xstormy16_compute_stack_layout ();
992 if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
993 result
= layout
.sp_minus_fp
- layout
.locals_size
;
994 else if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
995 result
= - layout
.locals_size
;
996 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
997 result
= - layout
.fp_minus_ap
;
998 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
999 result
= - (layout
.sp_minus_fp
+ layout
.fp_minus_ap
);
1007 emit_addhi3_postreload (rtx dest
, rtx src0
, rtx src1
)
1009 rtx set
, clobber
, insn
;
1011 set
= gen_rtx_SET (dest
, gen_rtx_PLUS (HImode
, src0
, src1
));
1012 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1013 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
1017 /* Called after register allocation to add any instructions needed for
1018 the prologue. Using a prologue insn is favored compared to putting
1019 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1020 since it allows the scheduler to intermix instructions with the
1021 saves of the caller saved registers. In some cases, it might be
1022 necessary to emit a barrier instruction as the last insn to prevent
1025 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1026 so that the debug info generation code can handle them properly. */
1029 xstormy16_expand_prologue (void)
1031 struct xstormy16_stack_layout layout
;
1035 const int ifun
= xstormy16_interrupt_function_p ();
1037 mem_push_rtx
= gen_rtx_POST_INC (Pmode
, stack_pointer_rtx
);
1038 mem_push_rtx
= gen_rtx_MEM (HImode
, mem_push_rtx
);
1040 layout
= xstormy16_compute_stack_layout ();
1042 if (layout
.locals_size
>= 32768)
1043 error ("local variable memory requirements exceed capacity");
1045 if (flag_stack_usage_info
)
1046 current_function_static_stack_size
= layout
.frame_size
;
1048 /* Save the argument registers if necessary. */
1049 if (layout
.stdarg_save_size
)
1050 for (regno
= FIRST_ARGUMENT_REGISTER
;
1051 regno
< FIRST_ARGUMENT_REGISTER
+ NUM_ARGUMENT_REGISTERS
;
1055 rtx reg
= gen_rtx_REG (HImode
, regno
);
1057 insn
= emit_move_insn (mem_push_rtx
, reg
);
1058 RTX_FRAME_RELATED_P (insn
) = 1;
1060 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1062 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1064 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (stack_pointer_rtx
,
1065 plus_constant (Pmode
,
1067 GET_MODE_SIZE (Pmode
)));
1068 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1069 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1070 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1073 /* Push each of the registers to save. */
1074 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1075 if (REG_NEEDS_SAVE (regno
, ifun
))
1078 rtx reg
= gen_rtx_REG (HImode
, regno
);
1080 insn
= emit_move_insn (mem_push_rtx
, reg
);
1081 RTX_FRAME_RELATED_P (insn
) = 1;
1083 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1085 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1087 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (stack_pointer_rtx
,
1088 plus_constant (Pmode
,
1090 GET_MODE_SIZE (Pmode
)));
1091 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1092 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1093 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1096 /* It's just possible that the SP here might be what we need for
1098 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1100 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1101 RTX_FRAME_RELATED_P (insn
) = 1;
1104 /* Allocate space for local variables. */
1105 if (layout
.locals_size
)
1107 insn
= emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1108 GEN_INT (layout
.locals_size
));
1109 RTX_FRAME_RELATED_P (insn
) = 1;
1112 /* Set up the frame pointer, if required. */
1113 if (frame_pointer_needed
&& layout
.sp_minus_fp
!= layout
.locals_size
)
1115 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1116 RTX_FRAME_RELATED_P (insn
) = 1;
1118 if (layout
.sp_minus_fp
)
1120 insn
= emit_addhi3_postreload (hard_frame_pointer_rtx
,
1121 hard_frame_pointer_rtx
,
1122 GEN_INT (- layout
.sp_minus_fp
));
1123 RTX_FRAME_RELATED_P (insn
) = 1;
1128 /* Do we need an epilogue at all? */
1131 direct_return (void)
1133 return (reload_completed
1134 && xstormy16_compute_stack_layout ().frame_size
== 0
1135 && ! xstormy16_interrupt_function_p ());
1138 /* Called after register allocation to add any instructions needed for
1139 the epilogue. Using an epilogue insn is favored compared to putting
1140 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1141 since it allows the scheduler to intermix instructions with the
1142 saves of the caller saved registers. In some cases, it might be
1143 necessary to emit a barrier instruction as the last insn to prevent
1147 xstormy16_expand_epilogue (void)
1149 struct xstormy16_stack_layout layout
;
1152 const int ifun
= xstormy16_interrupt_function_p ();
1154 mem_pop_rtx
= gen_rtx_PRE_DEC (Pmode
, stack_pointer_rtx
);
1155 mem_pop_rtx
= gen_rtx_MEM (HImode
, mem_pop_rtx
);
1157 layout
= xstormy16_compute_stack_layout ();
1159 /* Pop the stack for the locals. */
1160 if (layout
.locals_size
)
1162 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1163 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1165 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1166 GEN_INT (- layout
.locals_size
));
1169 /* Restore any call-saved registers. */
1170 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1171 if (REG_NEEDS_SAVE (regno
, ifun
))
1172 emit_move_insn (gen_rtx_REG (HImode
, regno
), mem_pop_rtx
);
1174 /* Pop the stack for the stdarg save area. */
1175 if (layout
.stdarg_save_size
)
1176 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1177 GEN_INT (- layout
.stdarg_save_size
));
1181 emit_jump_insn (gen_return_internal_interrupt ());
1183 emit_jump_insn (gen_return_internal ());
1187 xstormy16_epilogue_uses (int regno
)
1189 if (reload_completed
&& call_used_regs
[regno
])
1191 const int ifun
= xstormy16_interrupt_function_p ();
1192 return REG_NEEDS_SAVE (regno
, ifun
);
/* Profiling is not implemented for this target.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1203 /* Update CUM to advance past an argument in the argument list. The
1204 values MODE, TYPE and NAMED describe that argument. Once this is
1205 done, the variable CUM is suitable for analyzing the *following*
1206 argument with `TARGET_FUNCTION_ARG', etc.
1208 This function need not do anything if the argument in question was
1209 passed on the stack. The compiler knows how to track the amount of
1210 stack space used for arguments without any special help. However,
1211 it makes life easier for xstormy16_build_va_list if it does update
1215 xstormy16_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1216 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1218 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1220 /* If an argument would otherwise be passed partially in registers,
1221 and partially on the stack, the whole of it is passed on the
1223 if (*cum
< NUM_ARGUMENT_REGISTERS
1224 && *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1225 *cum
= NUM_ARGUMENT_REGISTERS
;
1227 *cum
+= XSTORMY16_WORD_SIZE (type
, mode
);
1231 xstormy16_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1232 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1234 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1236 if (mode
== VOIDmode
)
1238 if (targetm
.calls
.must_pass_in_stack (mode
, type
)
1239 || *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1241 return gen_rtx_REG (mode
, *cum
+ FIRST_ARGUMENT_REGISTER
);
1244 /* Build the va_list type.
1246 For this chip, va_list is a record containing a counter and a pointer.
1247 The counter is of type 'int' and indicates how many bytes
1248 have been used to date. The pointer indicates the stack position
1249 for arguments that have not been passed in registers.
1250 To keep the layout nice, the pointer is first in the structure. */
1253 xstormy16_build_builtin_va_list (void)
1255 tree f_1
, f_2
, record
, type_decl
;
1257 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
1258 type_decl
= build_decl (BUILTINS_LOCATION
,
1259 TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
1261 f_1
= build_decl (BUILTINS_LOCATION
,
1262 FIELD_DECL
, get_identifier ("base"),
1264 f_2
= build_decl (BUILTINS_LOCATION
,
1265 FIELD_DECL
, get_identifier ("count"),
1266 unsigned_type_node
);
1268 DECL_FIELD_CONTEXT (f_1
) = record
;
1269 DECL_FIELD_CONTEXT (f_2
) = record
;
1271 TYPE_STUB_DECL (record
) = type_decl
;
1272 TYPE_NAME (record
) = type_decl
;
1273 TYPE_FIELDS (record
) = f_1
;
1274 DECL_CHAIN (f_1
) = f_2
;
1276 layout_type (record
);
1281 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1282 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1283 variable to initialize. NEXTARG is the machine independent notion of the
1284 'next' argument after the variable arguments. */
1287 xstormy16_expand_builtin_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
1289 tree f_base
, f_count
;
1293 if (xstormy16_interrupt_function_p ())
1294 error ("cannot use va_start in interrupt function");
1296 f_base
= TYPE_FIELDS (va_list_type_node
);
1297 f_count
= DECL_CHAIN (f_base
);
1299 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1300 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1303 t
= make_tree (TREE_TYPE (base
), virtual_incoming_args_rtx
);
1304 u
= build_int_cst (NULL_TREE
, - INCOMING_FRAME_SP_OFFSET
);
1305 u
= fold_convert (TREE_TYPE (count
), u
);
1306 t
= fold_build_pointer_plus (t
, u
);
1307 t
= build2 (MODIFY_EXPR
, TREE_TYPE (base
), base
, t
);
1308 TREE_SIDE_EFFECTS (t
) = 1;
1309 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1311 t
= build2 (MODIFY_EXPR
, TREE_TYPE (count
), count
,
1312 build_int_cst (NULL_TREE
,
1313 crtl
->args
.info
* UNITS_PER_WORD
));
1314 TREE_SIDE_EFFECTS (t
) = 1;
1315 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1318 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1319 of type va_list as a tree, TYPE is the type passed to va_arg.
1320 Note: This algorithm is documented in stormy-abi. */
1323 xstormy16_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
1324 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
1326 tree f_base
, f_count
;
1328 tree count_tmp
, addr
, t
;
1329 tree lab_gotaddr
, lab_fromstack
;
1330 int size
, size_of_reg_args
, must_stack
;
1333 f_base
= TYPE_FIELDS (va_list_type_node
);
1334 f_count
= DECL_CHAIN (f_base
);
1336 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1337 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1340 must_stack
= targetm
.calls
.must_pass_in_stack (TYPE_MODE (type
), type
);
1341 size_tree
= round_up (size_in_bytes (type
), UNITS_PER_WORD
);
1342 gimplify_expr (&size_tree
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
1344 size_of_reg_args
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
1346 count_tmp
= get_initialized_tmp_var (count
, pre_p
, NULL
);
1347 lab_gotaddr
= create_artificial_label (UNKNOWN_LOCATION
);
1348 lab_fromstack
= create_artificial_label (UNKNOWN_LOCATION
);
1349 addr
= create_tmp_var (ptr_type_node
);
1355 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1356 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1357 r
= fold_convert (TREE_TYPE (count
), size_int (size_of_reg_args
));
1358 t
= build2 (GT_EXPR
, boolean_type_node
, t
, r
);
1359 t
= build3 (COND_EXPR
, void_type_node
, t
,
1360 build1 (GOTO_EXPR
, void_type_node
, lab_fromstack
),
1362 gimplify_and_add (t
, pre_p
);
1364 t
= fold_build_pointer_plus (base
, count_tmp
);
1365 gimplify_assign (addr
, t
, pre_p
);
1367 t
= build1 (GOTO_EXPR
, void_type_node
, lab_gotaddr
);
1368 gimplify_and_add (t
, pre_p
);
1370 t
= build1 (LABEL_EXPR
, void_type_node
, lab_fromstack
);
1371 gimplify_and_add (t
, pre_p
);
1374 /* Arguments larger than a word might need to skip over some
1375 registers, since arguments are either passed entirely in
1376 registers or entirely on the stack. */
1377 size
= PUSH_ROUNDING (int_size_in_bytes (type
));
1378 if (size
> 2 || size
< 0 || must_stack
)
1382 r
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
);
1383 u
= build2 (MODIFY_EXPR
, TREE_TYPE (count_tmp
), count_tmp
, r
);
1385 t
= fold_convert (TREE_TYPE (count
), r
);
1386 t
= build2 (GE_EXPR
, boolean_type_node
, count_tmp
, t
);
1387 t
= build3 (COND_EXPR
, void_type_node
, t
, NULL_TREE
, u
);
1388 gimplify_and_add (t
, pre_p
);
1391 t
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
1392 + INCOMING_FRAME_SP_OFFSET
);
1393 t
= fold_convert (TREE_TYPE (count
), t
);
1394 t
= build2 (MINUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1395 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), t
,
1396 fold_convert (TREE_TYPE (count
), size_tree
));
1397 t
= fold_convert (TREE_TYPE (t
), fold (t
));
1398 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1399 t
= fold_build_pointer_plus (base
, t
);
1400 gimplify_assign (addr
, t
, pre_p
);
1402 t
= build1 (LABEL_EXPR
, void_type_node
, lab_gotaddr
);
1403 gimplify_and_add (t
, pre_p
);
1405 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1406 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1407 gimplify_assign (count
, t
, pre_p
);
1409 addr
= fold_convert (build_pointer_type (type
), addr
);
1410 return build_va_arg_indirect_ref (addr
);
1413 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1416 xstormy16_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
1418 rtx temp
= gen_reg_rtx (HImode
);
1419 rtx reg_fnaddr
= gen_reg_rtx (HImode
);
1420 rtx reg_addr
, reg_addr_mem
;
1422 reg_addr
= copy_to_reg (XEXP (m_tramp
, 0));
1423 reg_addr_mem
= adjust_automodify_address (m_tramp
, HImode
, reg_addr
, 0);
1425 emit_move_insn (temp
, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM
));
1426 emit_move_insn (reg_addr_mem
, temp
);
1427 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1428 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1430 emit_move_insn (temp
, static_chain
);
1431 emit_move_insn (reg_addr_mem
, temp
);
1432 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1433 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1435 emit_move_insn (reg_fnaddr
, XEXP (DECL_RTL (fndecl
), 0));
1436 emit_move_insn (temp
, reg_fnaddr
);
1437 emit_insn (gen_andhi3 (temp
, temp
, GEN_INT (0xFF)));
1438 emit_insn (gen_iorhi3 (temp
, temp
, GEN_INT (0x0200)));
1439 emit_move_insn (reg_addr_mem
, temp
);
1440 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1441 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1443 emit_insn (gen_lshrhi3 (reg_fnaddr
, reg_fnaddr
, GEN_INT (8)));
1444 emit_move_insn (reg_addr_mem
, reg_fnaddr
);
1447 /* Worker function for TARGET_FUNCTION_VALUE. */
1450 xstormy16_function_value (const_tree valtype
,
1451 const_tree func ATTRIBUTE_UNUSED
,
1452 bool outgoing ATTRIBUTE_UNUSED
)
1455 mode
= TYPE_MODE (valtype
);
1456 PROMOTE_MODE (mode
, 0, valtype
);
1457 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1460 /* Worker function for TARGET_LIBCALL_VALUE. */
1463 xstormy16_libcall_value (machine_mode mode
,
1464 const_rtx fun ATTRIBUTE_UNUSED
)
1466 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1469 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1472 xstormy16_function_value_regno_p (const unsigned int regno
)
1474 return (regno
== RETURN_VALUE_REGNUM
);
1477 /* A C compound statement that outputs the assembler code for a thunk function,
1478 used to implement C++ virtual function calls with multiple inheritance. The
1479 thunk acts as a wrapper around a virtual function, adjusting the implicit
1480 object parameter before handing control off to the real function.
1482 First, emit code to add the integer DELTA to the location that contains the
1483 incoming first argument. Assume that this argument contains a pointer, and
1484 is the one used to pass the `this' pointer in C++. This is the incoming
1485 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1486 addition must preserve the values of all other incoming arguments.
1488 After the addition, emit code to jump to FUNCTION, which is a
1489 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1490 the return address. Hence returning from FUNCTION will return to whoever
1491 called the current `thunk'.
1493 The effect must be as if @var{function} had been called directly
1494 with the adjusted first argument. This macro is responsible for
1495 emitting all of the code for a thunk function;
1496 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1499 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1500 extracted from it.) It might possibly be useful on some targets, but
1504 xstormy16_asm_output_mi_thunk (FILE *file
,
1505 tree thunk_fndecl ATTRIBUTE_UNUSED
,
1506 HOST_WIDE_INT delta
,
1507 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1510 int regnum
= FIRST_ARGUMENT_REGISTER
;
1512 /* There might be a hidden first argument for a returned structure. */
1513 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
1516 fprintf (file
, "\tadd %s,#0x%x\n", reg_names
[regnum
], (int) delta
& 0xFFFF);
1517 fputs ("\tjmpf ", file
);
1518 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1522 /* The purpose of this function is to override the default behavior of
1523 BSS objects. Normally, they go into .bss or .sbss via ".common"
1524 directives, but we need to override that and put them in
1525 .bss_below100. We can't just use a section override (like we do
1526 for .data_below100), because that makes them initialized rather
1527 than uninitialized. */
1530 xstormy16_asm_output_aligned_common (FILE *stream
,
1537 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
1542 && GET_CODE (symbol
= XEXP (mem
, 0)) == SYMBOL_REF
1543 && SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_XSTORMY16_BELOW100
)
1548 switch_to_section (bss100_section
);
1556 name2
= default_strip_name_encoding (name
);
1558 fprintf (stream
, "\t.globl\t%s\n", name2
);
1560 fprintf (stream
, "\t.p2align %d\n", p2align
);
1561 fprintf (stream
, "\t.type\t%s, @object\n", name2
);
1562 fprintf (stream
, "\t.size\t%s, %d\n", name2
, size
);
1563 fprintf (stream
, "%s:\n\t.space\t%d\n", name2
, size
);
1569 fprintf (stream
, "\t.local\t");
1570 assemble_name (stream
, name
);
1571 fprintf (stream
, "\n");
1573 fprintf (stream
, "\t.comm\t");
1574 assemble_name (stream
, name
);
1575 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
1578 /* Implement TARGET_ASM_INIT_SECTIONS. */
1581 xstormy16_asm_init_sections (void)
1584 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
1585 output_section_asm_op
,
1586 "\t.section \".bss_below100\",\"aw\",@nobits");
1589 /* Mark symbols with the "below100" attribute so that we can use the
1590 special addressing modes for them. */
1593 xstormy16_encode_section_info (tree decl
, rtx r
, int first
)
1595 default_encode_section_info (decl
, r
, first
);
1597 if (TREE_CODE (decl
) == VAR_DECL
1598 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl
))
1599 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl
))))
1601 rtx symbol
= XEXP (r
, 0);
1603 gcc_assert (GET_CODE (symbol
) == SYMBOL_REF
);
1604 SYMBOL_REF_FLAGS (symbol
) |= SYMBOL_FLAG_XSTORMY16_BELOW100
;
1608 #undef TARGET_ASM_CONSTRUCTOR
1609 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1610 #undef TARGET_ASM_DESTRUCTOR
1611 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1613 /* Output constructors and destructors. Just like
1614 default_named_section_asm_out_* but don't set the sections writable. */
1617 xstormy16_asm_out_destructor (rtx symbol
, int priority
)
1619 const char *section
= ".dtors";
1622 /* ??? This only works reliably with the GNU linker. */
1623 if (priority
!= DEFAULT_INIT_PRIORITY
)
1625 sprintf (buf
, ".dtors.%.5u",
1626 /* Invert the numbering so the linker puts us in the proper
1627 order; constructors are run from right to left, and the
1628 linker sorts in increasing order. */
1629 MAX_INIT_PRIORITY
- priority
);
1633 switch_to_section (get_section (section
, 0, NULL
));
1634 assemble_align (POINTER_SIZE
);
1635 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1639 xstormy16_asm_out_constructor (rtx symbol
, int priority
)
1641 const char *section
= ".ctors";
1644 /* ??? This only works reliably with the GNU linker. */
1645 if (priority
!= DEFAULT_INIT_PRIORITY
)
1647 sprintf (buf
, ".ctors.%.5u",
1648 /* Invert the numbering so the linker puts us in the proper
1649 order; constructors are run from right to left, and the
1650 linker sorts in increasing order. */
1651 MAX_INIT_PRIORITY
- priority
);
1655 switch_to_section (get_section (section
, 0, NULL
));
1656 assemble_align (POINTER_SIZE
);
1657 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1660 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1662 Print a memory address as an operand to reference that memory location. */
1665 xstormy16_print_operand_address (FILE *file
, machine_mode
/*mode*/,
1668 HOST_WIDE_INT offset
;
1669 int pre_dec
, post_inc
;
1671 /* There are a few easy cases. */
1672 if (CONST_INT_P (address
))
1674 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (address
) & 0xFFFF);
1678 if (CONSTANT_P (address
) || LABEL_P (address
))
1680 output_addr_const (file
, address
);
1684 /* Otherwise, it's hopefully something of the form
1685 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1686 if (GET_CODE (address
) == PLUS
)
1688 gcc_assert (CONST_INT_P (XEXP (address
, 1)));
1689 offset
= INTVAL (XEXP (address
, 1));
1690 address
= XEXP (address
, 0);
1695 pre_dec
= (GET_CODE (address
) == PRE_DEC
);
1696 post_inc
= (GET_CODE (address
) == POST_INC
);
1697 if (pre_dec
|| post_inc
)
1698 address
= XEXP (address
, 0);
1700 gcc_assert (REG_P (address
));
1705 fputs (reg_names
[REGNO (address
)], file
);
1709 fprintf (file
, "," HOST_WIDE_INT_PRINT_DEC
, offset
);
1713 /* Worker function for TARGET_PRINT_OPERAND.
1715 Print an operand to an assembler instruction. */
1718 xstormy16_print_operand (FILE *file
, rtx x
, int code
)
1723 /* There is either one bit set, or one bit clear, in X.
1724 Print it preceded by '#'. */
1726 static int bits_set
[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1727 HOST_WIDE_INT xx
= 1;
1730 if (CONST_INT_P (x
))
1733 output_operand_lossage ("'B' operand is not constant");
1735 /* GCC sign-extends masks with the MSB set, so we have to
1736 detect all the cases that differ only in sign extension
1737 beyond the bits we care about. Normally, the predicates
1738 and constraints ensure that we have the right values. This
1739 works correctly for valid masks. */
1740 if (bits_set
[xx
& 7] <= 1)
1742 /* Remove sign extension bits. */
1743 if ((~xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1745 else if ((~xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1747 l
= exact_log2 (xx
);
1751 /* Add sign extension bits. */
1752 if ((xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1753 xx
|= ~(HOST_WIDE_INT
)0xff;
1754 else if ((xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1755 xx
|= ~(HOST_WIDE_INT
)0xffff;
1756 l
= exact_log2 (~xx
);
1760 output_operand_lossage ("'B' operand has multiple bits set");
1762 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, l
);
1767 /* Print the symbol without a surrounding @fptr(). */
1768 if (GET_CODE (x
) == SYMBOL_REF
)
1769 assemble_name (file
, XSTR (x
, 0));
1770 else if (LABEL_P (x
))
1771 output_asm_label (x
);
1773 xstormy16_print_operand_address (file
, VOIDmode
, x
);
1778 /* Print the immediate operand less one, preceded by '#'.
1779 For 'O', negate it first. */
1781 HOST_WIDE_INT xx
= 0;
1783 if (CONST_INT_P (x
))
1786 output_operand_lossage ("'o' operand is not constant");
1791 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, xx
- 1);
1796 /* Print the shift mask for bp/bn. */
1798 HOST_WIDE_INT xx
= 1;
1801 if (CONST_INT_P (x
))
1804 output_operand_lossage ("'B' operand is not constant");
1808 fputs (IMMEDIATE_PREFIX
, file
);
1809 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, l
);
1814 /* Handled below. */
1818 output_operand_lossage ("xstormy16_print_operand: unknown code");
1822 switch (GET_CODE (x
))
1825 fputs (reg_names
[REGNO (x
)], file
);
1829 xstormy16_print_operand_address (file
, GET_MODE (x
), XEXP (x
, 0));
1833 /* Some kind of constant or label; an immediate operand,
1834 so prefix it with '#' for the assembler. */
1835 fputs (IMMEDIATE_PREFIX
, file
);
1836 output_addr_const (file
, x
);
1843 /* Expander for the `casesi' pattern.
1844 INDEX is the index of the switch statement.
1845 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1846 to the first table entry.
1847 RANGE is the number of table entries.
1848 TABLE is an ADDR_VEC that is the jump table.
1849 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1850 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1853 xstormy16_expand_casesi (rtx index
, rtx lower_bound
, rtx range
,
1854 rtx table
, rtx default_label
)
1856 HOST_WIDE_INT range_i
= INTVAL (range
);
1859 /* This code uses 'br', so it can deal only with tables of size up to
1861 if (range_i
>= 8192)
1862 sorry ("switch statement of size %lu entries too large",
1863 (unsigned long) range_i
);
1865 index
= expand_binop (SImode
, sub_optab
, index
, lower_bound
, NULL_RTX
, 0,
1867 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, SImode
, 1,
1869 int_index
= gen_lowpart_common (HImode
, index
);
1870 emit_insn (gen_ashlhi3 (int_index
, int_index
, const2_rtx
));
1871 emit_jump_insn (gen_tablejump_pcrel (int_index
, table
));
1874 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1875 instructions, without label or alignment or any other special
1876 constructs. We know that the previous instruction will be the
1877 `tablejump_pcrel' output above.
1879 TODO: it might be nice to output 'br' instructions if they could
1883 xstormy16_output_addr_vec (FILE *file
, rtx label ATTRIBUTE_UNUSED
, rtx table
)
1887 switch_to_section (current_function_section ());
1889 vlen
= XVECLEN (table
, 0);
1890 for (idx
= 0; idx
< vlen
; idx
++)
1892 fputs ("\tjmpf ", file
);
1893 output_asm_label (XEXP (XVECEXP (table
, 0, idx
), 0));
1898 /* Expander for the `call' patterns.
1899 RETVAL is the RTL for the return register or NULL for void functions.
1900 DEST is the function to call, expressed as a MEM.
1901 COUNTER is ignored. */
1904 xstormy16_expand_call (rtx retval
, rtx dest
, rtx counter
)
1909 gcc_assert (MEM_P (dest
));
1910 dest
= XEXP (dest
, 0);
1912 if (! CONSTANT_P (dest
) && ! REG_P (dest
))
1913 dest
= force_reg (Pmode
, dest
);
1918 mode
= GET_MODE (retval
);
1920 call
= gen_rtx_CALL (mode
, gen_rtx_MEM (FUNCTION_MODE
, dest
),
1923 call
= gen_rtx_SET (retval
, call
);
1925 if (! CONSTANT_P (dest
))
1927 temp
= gen_reg_rtx (HImode
);
1928 emit_move_insn (temp
, const0_rtx
);
1933 call
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, call
,
1934 gen_rtx_USE (VOIDmode
, temp
)));
1935 emit_call_insn (call
);
1938 /* Expanders for multiword computational operations. */
1940 /* Expander for arithmetic operations; emit insns to compute
1942 (set DEST (CODE:MODE SRC0 SRC1))
1944 When CODE is COMPARE, a branch template is generated
1945 (this saves duplicating code in xstormy16_split_cbranch). */
1948 xstormy16_expand_arith (machine_mode mode
, enum rtx_code code
,
1949 rtx dest
, rtx src0
, rtx src1
)
1951 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
1956 emit_move_insn (src0
, const0_rtx
);
1958 for (i
= 0; i
< num_words
; i
++)
1960 rtx w_src0
, w_src1
, w_dest
;
1963 w_src0
= simplify_gen_subreg (word_mode
, src0
, mode
,
1964 i
* UNITS_PER_WORD
);
1965 w_src1
= simplify_gen_subreg (word_mode
, src1
, mode
, i
* UNITS_PER_WORD
);
1966 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
, i
* UNITS_PER_WORD
);
1972 && CONST_INT_P (w_src1
)
1973 && INTVAL (w_src1
) == 0)
1977 insn
= gen_addchi4 (w_dest
, w_src0
, w_src1
);
1979 insn
= gen_addchi5 (w_dest
, w_src0
, w_src1
);
1985 if (code
== COMPARE
&& i
== num_words
- 1)
1987 rtx branch
, sub
, clobber
, sub_1
;
1989 sub_1
= gen_rtx_MINUS (HImode
, w_src0
,
1990 gen_rtx_ZERO_EXTEND (HImode
, gen_rtx_REG (BImode
, CARRY_REGNUM
)));
1991 sub
= gen_rtx_SET (w_dest
,
1992 gen_rtx_MINUS (HImode
, sub_1
, w_src1
));
1993 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1994 branch
= gen_rtx_SET (pc_rtx
,
1995 gen_rtx_IF_THEN_ELSE (VOIDmode
,
2001 insn
= gen_rtx_PARALLEL (VOIDmode
,
2002 gen_rtvec (3, branch
, sub
, clobber
));
2006 && CONST_INT_P (w_src1
)
2007 && INTVAL (w_src1
) == 0)
2010 insn
= gen_subchi4 (w_dest
, w_src0
, w_src1
);
2012 insn
= gen_subchi5 (w_dest
, w_src0
, w_src1
);
2018 if (CONST_INT_P (w_src1
)
2019 && INTVAL (w_src1
) == -(code
== AND
))
2022 insn
= gen_rtx_SET (w_dest
, gen_rtx_fmt_ee (code
, mode
,
2027 insn
= gen_rtx_SET (w_dest
, gen_rtx_NOT (mode
, w_src0
));
2038 /* If we emit nothing, try_split() will think we failed. So emit
2039 something that does nothing and can be optimized away. */
2044 /* The shift operations are split at output time for constant values;
2045 variable-width shifts get handed off to a library routine.
2047 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2048 SIZE_R will be a CONST_INT, X will be a hard register. */
2051 xstormy16_output_shift (machine_mode mode
, enum rtx_code code
,
2052 rtx x
, rtx size_r
, rtx temp
)
2055 const char *r0
, *r1
, *rt
;
2058 gcc_assert (CONST_INT_P (size_r
)
2062 size
= INTVAL (size_r
) & (GET_MODE_BITSIZE (mode
) - 1);
2067 r0
= reg_names
[REGNO (x
)];
2068 r1
= reg_names
[REGNO (x
) + 1];
2070 /* For shifts of size 1, we can use the rotate instructions. */
2076 sprintf (r
, "shl %s,#1 | rlc %s,#1", r0
, r1
);
2079 sprintf (r
, "asr %s,#1 | rrc %s,#1", r1
, r0
);
2082 sprintf (r
, "shr %s,#1 | rrc %s,#1", r1
, r0
);
2090 /* For large shifts, there are easy special cases. */
2096 sprintf (r
, "mov %s,%s | mov %s,#0", r1
, r0
, r0
);
2099 sprintf (r
, "mov %s,%s | asr %s,#15", r0
, r1
, r1
);
2102 sprintf (r
, "mov %s,%s | mov %s,#0", r0
, r1
, r1
);
2114 sprintf (r
, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2115 r1
, r0
, r0
, r1
, (int) size
- 16);
2118 sprintf (r
, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2119 r0
, r1
, r1
, r0
, (int) size
- 16);
2122 sprintf (r
, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2123 r0
, r1
, r1
, r0
, (int) size
- 16);
2131 /* For the rest, we have to do more work. In particular, we
2132 need a temporary. */
2133 rt
= reg_names
[REGNO (temp
)];
2138 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2139 rt
, r0
, r0
, (int) size
, r1
, (int) size
, rt
, (int) (16 - size
),
2144 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2145 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2150 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2151 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2160 /* Attribute handling. */
2162 /* Return nonzero if the function is an interrupt function. */
2165 xstormy16_interrupt_function_p (void)
2169 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2170 any functions are declared, which is demonstrably wrong, but
2171 it is worked around here. FIXME. */
2175 attributes
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
2176 return lookup_attribute ("interrupt", attributes
) != NULL_TREE
;
2179 #undef TARGET_ATTRIBUTE_TABLE
2180 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2182 static tree xstormy16_handle_interrupt_attribute
2183 (tree
*, tree
, tree
, int, bool *);
2184 static tree xstormy16_handle_below100_attribute
2185 (tree
*, tree
, tree
, int, bool *);
2187 static const struct attribute_spec xstormy16_attribute_table
[] =
2189 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2190 affects_type_identity. */
2191 { "interrupt", 0, 0, false, true, true,
2192 xstormy16_handle_interrupt_attribute
, false },
2193 { "BELOW100", 0, 0, false, false, false,
2194 xstormy16_handle_below100_attribute
, false },
2195 { "below100", 0, 0, false, false, false,
2196 xstormy16_handle_below100_attribute
, false },
2197 { NULL
, 0, 0, false, false, false, NULL
, false }
2200 /* Handle an "interrupt" attribute;
2201 arguments as in struct attribute_spec.handler. */
2204 xstormy16_handle_interrupt_attribute (tree
*node
, tree name
,
2205 tree args ATTRIBUTE_UNUSED
,
2206 int flags ATTRIBUTE_UNUSED
,
2209 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
2211 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2213 *no_add_attrs
= true;
2219 /* Handle an "below" attribute;
2220 arguments as in struct attribute_spec.handler. */
2223 xstormy16_handle_below100_attribute (tree
*node
,
2224 tree name ATTRIBUTE_UNUSED
,
2225 tree args ATTRIBUTE_UNUSED
,
2226 int flags ATTRIBUTE_UNUSED
,
2229 if (TREE_CODE (*node
) != VAR_DECL
2230 && TREE_CODE (*node
) != POINTER_TYPE
2231 && TREE_CODE (*node
) != TYPE_DECL
)
2233 warning (OPT_Wattributes
,
2234 "%<__BELOW100__%> attribute only applies to variables");
2235 *no_add_attrs
= true;
2237 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
2239 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
2241 warning (OPT_Wattributes
, "__BELOW100__ attribute not allowed "
2242 "with auto storage class");
2243 *no_add_attrs
= true;
2250 #undef TARGET_INIT_BUILTINS
2251 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2252 #undef TARGET_EXPAND_BUILTIN
2253 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2259 const char * arg_ops
; /* 0..9, t for temp register, r for return value. */
2260 const char * arg_types
; /* s=short,l=long, upper case for unsigned. */
2264 { "__sdivlh", CODE_FOR_sdivlh
, "rt01", "sls" },
2265 { "__smodlh", CODE_FOR_sdivlh
, "tr01", "sls" },
2266 { "__udivlh", CODE_FOR_udivlh
, "rt01", "SLS" },
2267 { "__umodlh", CODE_FOR_udivlh
, "tr01", "SLS" },
2268 { NULL
, 0, NULL
, NULL
}
2272 xstormy16_init_builtins (void)
2274 tree args
[2], ret_type
, arg
= NULL_TREE
, ftype
;
2277 ret_type
= void_type_node
;
2279 for (i
= 0; s16builtins
[i
].name
; i
++)
2281 n_args
= strlen (s16builtins
[i
].arg_types
) - 1;
2283 gcc_assert (n_args
<= (int) ARRAY_SIZE (args
));
2285 for (a
= n_args
- 1; a
>= 0; a
--)
2286 args
[a
] = NULL_TREE
;
2288 for (a
= n_args
; a
>= 0; a
--)
2290 switch (s16builtins
[i
].arg_types
[a
])
2292 case 's': arg
= short_integer_type_node
; break;
2293 case 'S': arg
= short_unsigned_type_node
; break;
2294 case 'l': arg
= long_integer_type_node
; break;
2295 case 'L': arg
= long_unsigned_type_node
; break;
2296 default: gcc_unreachable ();
2303 ftype
= build_function_type_list (ret_type
, args
[0], args
[1], NULL_TREE
);
2304 add_builtin_function (s16builtins
[i
].name
, ftype
,
2305 i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2310 xstormy16_expand_builtin (tree exp
, rtx target
,
2311 rtx subtarget ATTRIBUTE_UNUSED
,
2312 machine_mode mode ATTRIBUTE_UNUSED
,
2313 int ignore ATTRIBUTE_UNUSED
)
2315 rtx op
[10], args
[10], pat
, copyto
[10], retval
= 0;
2316 tree fndecl
, argtree
;
2319 fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
2320 argtree
= TREE_OPERAND (exp
, 1);
2321 i
= DECL_FUNCTION_CODE (fndecl
);
2322 code
= s16builtins
[i
].md_code
;
2324 for (a
= 0; a
< 10 && argtree
; a
++)
2326 args
[a
] = expand_normal (TREE_VALUE (argtree
));
2327 argtree
= TREE_CHAIN (argtree
);
2330 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2332 char ao
= s16builtins
[i
].arg_ops
[o
];
2333 char c
= insn_data
[code
].operand
[o
].constraint
[0];
2338 omode
= (machine_mode
) insn_data
[code
].operand
[o
].mode
;
2340 op
[o
] = target
? target
: gen_reg_rtx (omode
);
2342 op
[o
] = gen_reg_rtx (omode
);
2344 op
[o
] = args
[(int) hex_value (ao
)];
2346 if (! (*insn_data
[code
].operand
[o
].predicate
) (op
[o
], GET_MODE (op
[o
])))
2348 if (c
== '+' || c
== '=')
2351 op
[o
] = gen_reg_rtx (omode
);
2354 op
[o
] = copy_to_mode_reg (omode
, op
[o
]);
2361 pat
= GEN_FCN (code
) (op
[0], op
[1], op
[2], op
[3], op
[4],
2362 op
[5], op
[6], op
[7], op
[8], op
[9]);
2365 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2368 emit_move_insn (copyto
[o
], op
[o
]);
2369 if (op
[o
] == retval
)
2376 /* Look for combinations of insns that can be converted to BN or BP
2377 opcodes. This is, unfortunately, too complex to do with MD
2381 combine_bnp (rtx_insn
*insn
)
2383 int insn_code
, regno
, need_extend
;
2385 rtx cond
, reg
, qireg
, mem
;
2386 rtx_insn
*and_insn
, *load
;
2387 machine_mode load_mode
= QImode
;
2388 machine_mode and_mode
= QImode
;
2389 rtx_insn
*shift
= NULL
;
2391 insn_code
= recog_memoized (insn
);
2392 if (insn_code
!= CODE_FOR_cbranchhi
2393 && insn_code
!= CODE_FOR_cbranchhi_neg
)
2396 cond
= XVECEXP (PATTERN (insn
), 0, 0); /* set */
2397 cond
= XEXP (cond
, 1); /* if */
2398 cond
= XEXP (cond
, 0); /* cond */
2399 switch (GET_CODE (cond
))
2413 reg
= XEXP (cond
, 0);
2416 regno
= REGNO (reg
);
2417 if (XEXP (cond
, 1) != const0_rtx
)
2419 if (! find_regno_note (insn
, REG_DEAD
, regno
))
2421 qireg
= gen_rtx_REG (QImode
, regno
);
2425 /* LT and GE conditionals should have a sign extend before
2427 for (and_insn
= prev_real_insn (insn
);
2428 and_insn
!= NULL_RTX
;
2429 and_insn
= prev_real_insn (and_insn
))
2431 int and_code
= recog_memoized (and_insn
);
2433 if (and_code
== CODE_FOR_extendqihi2
2434 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2435 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), qireg
))
2438 if (and_code
== CODE_FOR_movhi_internal
2439 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
))
2441 /* This is for testing bit 15. */
2446 if (reg_mentioned_p (reg
, and_insn
))
2449 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2455 /* EQ and NE conditionals have an AND before them. */
2456 for (and_insn
= prev_real_insn (insn
);
2457 and_insn
!= NULL_RTX
;
2458 and_insn
= prev_real_insn (and_insn
))
2460 if (recog_memoized (and_insn
) == CODE_FOR_andhi3
2461 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2462 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), reg
))
2465 if (reg_mentioned_p (reg
, and_insn
))
2468 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2474 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2475 followed by an AND like this:
2477 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2478 (clobber (reg:BI carry))]
2480 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2482 Attempt to detect this here. */
2483 for (shift
= prev_real_insn (and_insn
); shift
;
2484 shift
= prev_real_insn (shift
))
2486 if (recog_memoized (shift
) == CODE_FOR_lshrhi3
2487 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift
), 0, 0)), reg
)
2488 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 0), reg
))
2491 if (reg_mentioned_p (reg
, shift
)
2492 || (! NOTE_P (shift
) && ! NONJUMP_INSN_P (shift
)))
2501 if (and_insn
== NULL_RTX
)
2504 for (load
= shift
? prev_real_insn (shift
) : prev_real_insn (and_insn
);
2506 load
= prev_real_insn (load
))
2508 int load_code
= recog_memoized (load
);
2510 if (load_code
== CODE_FOR_movhi_internal
2511 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2512 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), HImode
)
2513 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load
))))
2519 if (load_code
== CODE_FOR_movqi_internal
2520 && rtx_equal_p (SET_DEST (PATTERN (load
)), qireg
)
2521 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), QImode
))
2527 if (load_code
== CODE_FOR_zero_extendqihi2
2528 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2529 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load
)), 0), QImode
))
2536 if (reg_mentioned_p (reg
, load
))
2539 if (! NOTE_P (load
) && ! NONJUMP_INSN_P (load
))
2545 mem
= SET_SRC (PATTERN (load
));
2549 mask
= (load_mode
== HImode
) ? 0x8000 : 0x80;
2551 /* If the mem includes a zero-extend operation and we are
2552 going to generate a sign-extend operation then move the
2553 mem inside the zero-extend. */
2554 if (GET_CODE (mem
) == ZERO_EXTEND
)
2555 mem
= XEXP (mem
, 0);
2559 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn
)), 1),
2563 mask
= (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn
)), 1));
2566 mask
<<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 1));
2569 if (load_mode
== HImode
)
2571 rtx addr
= XEXP (mem
, 0);
2573 if (! (mask
& 0xff))
2575 addr
= plus_constant (Pmode
, addr
, 1);
2578 mem
= gen_rtx_MEM (QImode
, addr
);
2582 XEXP (cond
, 0) = gen_rtx_SIGN_EXTEND (HImode
, mem
);
2584 XEXP (cond
, 0) = gen_rtx_AND (and_mode
, mem
, GEN_INT (mask
));
2586 INSN_CODE (insn
) = -1;
2589 if (and_insn
!= insn
)
2590 delete_insn (and_insn
);
2592 if (shift
!= NULL_RTX
)
2593 delete_insn (shift
);
2597 xstormy16_reorg (void)
2601 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2603 if (! JUMP_P (insn
))
2609 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2612 xstormy16_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2614 const HOST_WIDE_INT size
= int_size_in_bytes (type
);
2615 return (size
== -1 || size
> UNITS_PER_WORD
* NUM_ARGUMENT_REGISTERS
);
/* Initialize the GCC target structure: override the hooks this backend
   implements, then define TARGETM from the assembled table.  */

/* Assembler directives for aligned 16-bit and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* Thunk output for C++ multiple inheritance.  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Operand printing in assembly output.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* Cost model hooks.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs support.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Argument and return-value promotion.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

/* Function argument passing.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

/* Function return values.  */
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Machine-dependent reorg pass (runs xstormy16_reorg late in compilation).  */
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload register-class preferences.  */
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

/* Addressing-mode legitimacy.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

/* Frame-pointer elimination.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

/* Trampoline initialization for nested functions.  */
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype for this file.  */
#include "gt-stormy16.h"