1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "stringpool.h"
27 #include "stor-layout.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
46 #include "diagnostic-core.h"
50 #include "target-def.h"
56 #define streq(a,b) (strcmp (a, b) == 0)
59 static void v850_print_operand_address (FILE *, rtx
);
61 /* Names of the various data areas used on the v850. */
62 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
63 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
65 /* Track the current data area set by the data area pragma (which
66 can be nested). Tested by check_default_data_area. */
67 data_area_stack_element
* data_area_stack
= NULL
;
69 /* True if we don't need to check any more if the current
70 function is an interrupt handler. */
71 static int v850_interrupt_cache_p
= FALSE
;
73 rtx v850_compare_op0
, v850_compare_op1
;
75 /* Whether current function is an interrupt handler. */
76 static int v850_interrupt_p
= FALSE
;
78 static GTY(()) section
* rosdata_section
;
79 static GTY(()) section
* rozdata_section
;
80 static GTY(()) section
* tdata_section
;
81 static GTY(()) section
* zdata_section
;
82 static GTY(()) section
* zbss_section
;
84 /* We use this to wrap all emitted insns in the prologue. */
88 if (GET_CODE (x
) != CLOBBER
)
89 RTX_FRAME_RELATED_P (x
) = 1;
93 /* Mark all the subexpressions of the PARALLEL rtx PAR as
94 frame-related. Return PAR.
96 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
97 PARALLEL rtx other than the first if they do not have the
98 FRAME_RELATED flag set on them. */
101 v850_all_frame_related (rtx par
)
103 int len
= XVECLEN (par
, 0);
106 gcc_assert (GET_CODE (par
) == PARALLEL
);
107 for (i
= 0; i
< len
; i
++)
108 F (XVECEXP (par
, 0, i
));
113 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
114 Specify whether to pass the argument by reference. */
/* Implements the TARGET_PASS_BY_REFERENCE hook: decide whether an argument
   of MODE/TYPE is passed by reference rather than by value.
   NOTE(review): this listing is truncated — the function's braces, the
   TARGET_GCC_ABI test and the final size comparison (original lines
   120-133) are missing; only the visible fragments are annotated.  */
117 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
118 enum machine_mode mode
, const_tree type
,
119 bool named ATTRIBUTE_UNUSED
)
121 unsigned HOST_WIDE_INT size
;
/* When a TYPE is available, its layout gives the argument size...  */
127 size
= int_size_in_bytes (type
);
/* ...otherwise fall back to the size of MODE (presumably the else arm of
   a 'if (type)' test on a missing line — TODO confirm).  */
129 size
= GET_MODE_SIZE (mode
);
134 /* Return an RTX to represent where an argument with mode MODE
135 and type TYPE will be passed to a function. If the result
136 is NULL_RTX, the argument will be pushed. */
139 v850_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
140 const_tree type
, bool named
)
142 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
143 rtx result
= NULL_RTX
;
150 size
= int_size_in_bytes (type
);
152 size
= GET_MODE_SIZE (mode
);
154 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
158 /* Once we have stopped using argument registers, do not start up again. */
159 cum
->nbytes
= 4 * UNITS_PER_WORD
;
164 align
= UNITS_PER_WORD
;
165 else if (size
<= UNITS_PER_WORD
&& type
)
166 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
170 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
172 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
175 if (type
== NULL_TREE
176 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
179 switch (cum
->nbytes
/ UNITS_PER_WORD
)
182 result
= gen_rtx_REG (mode
, 6);
185 result
= gen_rtx_REG (mode
, 7);
188 result
= gen_rtx_REG (mode
, 8);
191 result
= gen_rtx_REG (mode
, 9);
200 /* Return the number of bytes which must be put into registers
201 for values which are part in registers and part in memory. */
203 v850_arg_partial_bytes (cumulative_args_t cum_v
, enum machine_mode mode
,
204 tree type
, bool named
)
206 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
213 size
= int_size_in_bytes (type
);
215 size
= GET_MODE_SIZE (mode
);
221 align
= UNITS_PER_WORD
;
223 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
227 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
229 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
232 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
235 if (type
== NULL_TREE
236 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
239 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
242 /* Update the data in CUM to advance over an argument
243 of mode MODE and data type TYPE.
244 (TYPE is null for libcalls where that information may not be available.) */
247 v850_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
248 const_tree type
, bool named ATTRIBUTE_UNUSED
)
250 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
253 cum
->nbytes
+= (((mode
!= BLKmode
254 ? GET_MODE_SIZE (mode
)
255 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
258 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
259 ? GET_MODE_SIZE (Pmode
)
261 ? GET_MODE_SIZE (mode
)
262 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
266 /* Return the high and low words of a CONST_DOUBLE */
269 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
271 if (GET_CODE (x
) == CONST_DOUBLE
)
276 switch (GET_MODE (x
))
279 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
280 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
281 *p_high
= t
[1]; /* since v850 is little endian */
282 *p_low
= t
[0]; /* high is second word */
286 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
287 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
293 *p_high
= CONST_DOUBLE_HIGH (x
);
294 *p_low
= CONST_DOUBLE_LOW (x
);
302 fatal_insn ("const_double_split got a bad insn:", x
);
306 /* Return the cost of the rtx R with code CODE. */
/* Cost of materialising the integer constant VALUE; ZERO_COST is the cost
   to report for constants that fit the cheapest constraint class.
   NOTE(review): the return statements between the visible conditions
   (original lines 310-317) are missing from this listing, so the actual
   cost values cannot be stated here.  */
309 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
/* CONST_OK_FOR_I/J/K are the v850 constant constraints, checked from the
   cheapest encoding to the most expensive.  */
311 if (CONST_OK_FOR_I (value
))
313 else if (CONST_OK_FOR_J (value
))
315 else if (CONST_OK_FOR_K (value
))
/* Cost of the constant rtx R with outer code C.
   NOTE(review): the switch skeleton (original lines 323-331, 335) is
   missing from this listing; only the visible case bodies are annotated.  */
322 const_costs (rtx r
, enum rtx_code c
)
324 HOST_WIDE_INT high
, low
;
/* CONST_INT case: delegate to const_costs_int with zero_cost 0.  */
329 return const_costs_int (INTVAL (r
), 0);
/* CONST_DOUBLE case: split into the two target words first.  */
332 const_double_split (r
, &high
, &low
);
/* A single-precision float needs only one word materialised...  */
333 if (GET_MODE (r
) == SFmode
)
334 return const_costs_int (high
, 1);
/* ...anything wider costs both halves.  */
336 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
352 v850_rtx_costs (rtx x
,
354 int outer_code ATTRIBUTE_UNUSED
,
355 int opno ATTRIBUTE_UNUSED
,
356 int * total
, bool speed
)
358 enum rtx_code code
= (enum rtx_code
) codearg
;
367 *total
= COSTS_N_INSNS (const_costs (x
, code
));
374 if (TARGET_V850E
&& !speed
)
382 && ( GET_MODE (x
) == SImode
383 || GET_MODE (x
) == HImode
384 || GET_MODE (x
) == QImode
))
386 if (GET_CODE (XEXP (x
, 1)) == REG
)
388 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
390 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
392 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
401 if (outer_code
== COMPARE
)
410 /* Print operand X using operand code CODE to assembly language output file
414 v850_print_operand (FILE * file
, rtx x
, int code
)
416 HOST_WIDE_INT high
, low
;
421 /* We use 'c' operands with symbols for .vtinherit. */
422 if (GET_CODE (x
) == SYMBOL_REF
)
424 output_addr_const(file
, x
);
431 switch ((code
== 'B' || code
== 'C')
432 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
435 if (code
== 'c' || code
== 'C')
436 fprintf (file
, "nz");
438 fprintf (file
, "ne");
441 if (code
== 'c' || code
== 'C')
447 fprintf (file
, "ge");
450 fprintf (file
, "gt");
453 fprintf (file
, "le");
456 fprintf (file
, "lt");
459 fprintf (file
, "nl");
465 fprintf (file
, "nh");
474 case 'F': /* High word of CONST_DOUBLE. */
475 switch (GET_CODE (x
))
478 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
482 const_double_split (x
, &high
, &low
);
483 fprintf (file
, "%ld", (long) high
);
490 case 'G': /* Low word of CONST_DOUBLE. */
491 switch (GET_CODE (x
))
494 fprintf (file
, "%ld", (long) INTVAL (x
));
498 const_double_split (x
, &high
, &low
);
499 fprintf (file
, "%ld", (long) low
);
507 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
510 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
513 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
515 if (GET_CODE (x
) == CONST
)
516 x
= XEXP (XEXP (x
, 0), 0);
518 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
520 if (SYMBOL_REF_ZDA_P (x
))
521 fprintf (file
, "zdaoff");
522 else if (SYMBOL_REF_SDA_P (x
))
523 fprintf (file
, "sdaoff");
524 else if (SYMBOL_REF_TDA_P (x
))
525 fprintf (file
, "tdaoff");
530 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
531 output_addr_const (file
, x
);
534 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
536 if (GET_CODE (x
) == CONST
)
537 x
= XEXP (XEXP (x
, 0), 0);
539 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
541 if (SYMBOL_REF_ZDA_P (x
))
542 fprintf (file
, "r0");
543 else if (SYMBOL_REF_SDA_P (x
))
544 fprintf (file
, "gp");
545 else if (SYMBOL_REF_TDA_P (x
))
546 fprintf (file
, "ep");
550 case 'R': /* 2nd word of a double. */
551 switch (GET_CODE (x
))
554 fprintf (file
, reg_names
[REGNO (x
) + 1]);
557 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
558 v850_print_operand_address (file
, x
);
559 if (GET_CODE (x
) == CONST_INT
)
560 fprintf (file
, "[r0]");
565 unsigned HOST_WIDE_INT v
= INTVAL (x
);
567 /* Trickery to avoid problems with shifting
568 32-bits at a time on a 32-bit host. */
571 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
576 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
586 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
587 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
594 /* Like an 'S' operand above, but for unsigned loads only. */
595 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
600 case 'W': /* Print the instruction suffix. */
601 switch (GET_MODE (x
))
606 case QImode
: fputs (".b", file
); break;
607 case HImode
: fputs (".h", file
); break;
608 case SImode
: fputs (".w", file
); break;
609 case SFmode
: fputs (".w", file
); break;
612 case '.': /* Register r0. */
613 fputs (reg_names
[0], file
);
615 case 'z': /* Reg or zero. */
617 fputs (reg_names
[REGNO (x
)], file
);
618 else if ((GET_MODE(x
) == SImode
619 || GET_MODE(x
) == DFmode
620 || GET_MODE(x
) == SFmode
)
621 && x
== CONST0_RTX(GET_MODE(x
)))
622 fputs (reg_names
[0], file
);
625 gcc_assert (x
== const0_rtx
);
626 fputs (reg_names
[0], file
);
630 switch (GET_CODE (x
))
633 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
634 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
637 output_address (XEXP (x
, 0));
641 fputs (reg_names
[REGNO (x
)], file
);
644 fputs (reg_names
[subreg_regno (x
)], file
);
647 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
655 v850_print_operand_address (file
, x
);
666 /* Output assembly language output for the address ADDR to FILE. */
669 v850_print_operand_address (FILE * file
, rtx addr
)
671 switch (GET_CODE (addr
))
674 fprintf (file
, "0[");
675 v850_print_operand (file
, addr
, 0);
679 if (GET_CODE (XEXP (addr
, 0)) == REG
)
682 fprintf (file
, "lo(");
683 v850_print_operand (file
, XEXP (addr
, 1), 0);
684 fprintf (file
, ")[");
685 v850_print_operand (file
, XEXP (addr
, 0), 0);
690 if (GET_CODE (XEXP (addr
, 0)) == REG
691 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
694 v850_print_operand (file
, XEXP (addr
, 1), 0);
696 v850_print_operand (file
, XEXP (addr
, 0), 0);
701 v850_print_operand (file
, XEXP (addr
, 0), 0);
703 v850_print_operand (file
, XEXP (addr
, 1), 0);
708 const char *off_name
= NULL
;
709 const char *reg_name
= NULL
;
711 if (SYMBOL_REF_ZDA_P (addr
))
716 else if (SYMBOL_REF_SDA_P (addr
))
721 else if (SYMBOL_REF_TDA_P (addr
))
728 fprintf (file
, "%s(", off_name
);
729 output_addr_const (file
, addr
);
731 fprintf (file
, ")[%s]", reg_name
);
735 if (special_symbolref_operand (addr
, VOIDmode
))
737 rtx x
= XEXP (XEXP (addr
, 0), 0);
738 const char *off_name
;
739 const char *reg_name
;
741 if (SYMBOL_REF_ZDA_P (x
))
746 else if (SYMBOL_REF_SDA_P (x
))
751 else if (SYMBOL_REF_TDA_P (x
))
759 fprintf (file
, "%s(", off_name
);
760 output_addr_const (file
, addr
);
761 fprintf (file
, ")[%s]", reg_name
);
764 output_addr_const (file
, addr
);
767 output_addr_const (file
, addr
);
773 v850_print_operand_punct_valid_p (unsigned char code
)
778 /* When assemble_integer is used to emit the offsets for a switch
779 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
780 output_addr_const will normally barf at this, but it is OK to omit
781 the truncate and just emit the difference of the two labels. The
782 .hword directive will automatically handle the truncation for us.
784 Returns true if rtx was handled, false otherwise. */
/* Implements TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA: emit switch-table offset
   expressions of the form (TRUNCATE (MINUS (LABEL_REF) (LABEL_REF))) that
   output_addr_const cannot handle itself.  Returns true when X was handled.
   NOTE(review): several lines (original 788, 790-793, 801, 805-807,
   809-810) are missing from this listing, including the statement that
   strips the TRUNCATE and the early returns.  */
787 v850_output_addr_const_extra (FILE * file
, rtx x
)
/* Only TRUNCATE expressions are ours to handle.  */
789 if (GET_CODE (x
) != TRUNCATE
)
794 /* We must also handle the case where the switch table was passed a
795 constant value and so has been collapsed. In this case the first
796 label will have been deleted. In such a case it is OK to emit
797 nothing, since the table will not be used.
798 (cf gcc.c-torture/compile/990801-1.c). */
799 if (GET_CODE (x
) == MINUS
800 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
802 rtx_code_label
*label
803 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
/* A deleted label means the collapsed-table case above: emit nothing.  */
804 if (label
&& label
->deleted ())
/* Otherwise emit the (stripped) difference of the two labels.  */
808 output_addr_const (file
, x
);
812 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
816 output_move_single (rtx
* operands
)
818 rtx dst
= operands
[0];
819 rtx src
= operands
[1];
826 else if (GET_CODE (src
) == CONST_INT
)
828 HOST_WIDE_INT value
= INTVAL (src
);
830 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
833 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
834 return "movea %1,%.,%0";
836 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
837 return "movhi hi0(%1),%.,%0";
839 /* A random constant. */
840 else if (TARGET_V850E_UP
)
843 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
846 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
848 HOST_WIDE_INT high
, low
;
850 const_double_split (src
, &high
, &low
);
852 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
855 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
856 return "movea %F1,%.,%0";
858 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
859 return "movhi hi0(%F1),%.,%0";
861 /* A random constant. */
862 else if (TARGET_V850E_UP
)
866 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
869 else if (GET_CODE (src
) == MEM
)
870 return "%S1ld%W1 %1,%0";
872 else if (special_symbolref_operand (src
, VOIDmode
))
873 return "movea %O1(%P1),%Q1,%0";
875 else if (GET_CODE (src
) == LABEL_REF
876 || GET_CODE (src
) == SYMBOL_REF
877 || GET_CODE (src
) == CONST
)
880 return "mov hilo(%1),%0";
882 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
885 else if (GET_CODE (src
) == HIGH
)
886 return "movhi hi(%1),%.,%0";
888 else if (GET_CODE (src
) == LO_SUM
)
890 operands
[2] = XEXP (src
, 0);
891 operands
[3] = XEXP (src
, 1);
892 return "movea lo(%3),%2,%0";
896 else if (GET_CODE (dst
) == MEM
)
899 return "%S0st%W0 %1,%0";
901 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
902 return "%S0st%W0 %.,%0";
904 else if (GET_CODE (src
) == CONST_DOUBLE
905 && CONST0_RTX (GET_MODE (dst
)) == src
)
906 return "%S0st%W0 %.,%0";
909 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
/* Select the condition-code mode used for a comparison of OP0 against OP1
   under comparison code COND.  Floating-point comparisons each get a
   dedicated CC_FPU_* mode; the integer fallback (original lines 933-934)
   is missing from this truncated listing.
   NOTE(review): the switch-on-COND skeleton between the returns is also
   missing; the returns below presumably correspond to LE/GE/LT/GT/EQ/NE
   in that order — TODO confirm against the full source.  */
914 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
916 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
921 return CC_FPU_LEmode
;
923 return CC_FPU_GEmode
;
925 return CC_FPU_LTmode
;
927 return CC_FPU_GTmode
;
929 return CC_FPU_EQmode
;
931 return CC_FPU_NEmode
;
940 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
942 if (GET_MODE (op0
) == DFmode
)
947 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
950 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
953 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
956 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
959 /* Note: There is no NE comparison operator. So we
960 perform an EQ comparison and invert the branch.
961 See v850_float_nz_comparison for how this is done. */
963 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
969 else if (GET_MODE (v850_compare_op0
) == SFmode
)
974 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
977 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
980 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
983 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
986 /* Note: There is no NE comparison operator. So we
987 perform an EQ comparison and invert the branch.
988 See v850_float_nz_comparison for how this is done. */
990 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
999 return v850_select_cc_mode (cond
, op0
, op1
);
/* Emit a comparison of OP0 against OP1 and return an rtx expressing the
   result (COND applied to the condition-code register vs const0_rtx),
   suitable for use in a conditional branch pattern.
   NOTE(review): braces, the 'else' introducing the FP path and the
   declaration of cc_reg (original lines 1004, 1006, 1009-1012) are
   missing from this truncated listing.  */
1003 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
/* Integer path: a plain compare sets the ordinary CC register.  */
1005 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
1007 emit_insn (gen_cmpsi_insn (op0
, op1
))
;
1008 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
/* Floating-point path: emit the FPU compare (which picks the CC_FPU_*
   mode) and copy the FPU condition flags into the general CC register.  */
1013 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1014 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1015 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1017 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1021 /* Return maximum offset supported for a short EP memory reference of mode
1022 MODE and signedness UNSIGNEDP. */
/* Return the maximum byte offset usable in a short EP-relative (sld/sst)
   memory reference of MODE with signedness UNSIGNEDP.
   NOTE(review): the switch-on-mode skeleton, the default case and the
   return statement (original lines 1026-1031, 1035, 1037, 1039-1041,
   1045, 1047, 1049-1062) are missing from this truncated listing; the
   three groups below presumably correspond to QImode, HImode and
   SImode/SFmode — TODO confirm against the full source.  */
1025 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
/* Byte-sized access: 4-bit offset on small-sld parts, 7 bits otherwise.  */
1032 if (TARGET_SMALL_SLD
)
1033 max_offset
= (1 << 4);
1034 else if ((TARGET_V850E_UP
)
1036 max_offset
= (1 << 4);
1038 max_offset
= (1 << 7);
/* Half-word access: one extra bit of range in every variant.  */
1042 if (TARGET_SMALL_SLD
)
1043 max_offset
= (1 << 5);
1044 else if ((TARGET_V850E_UP
)
1046 max_offset
= (1 << 5);
1048 max_offset
= (1 << 8);
/* Word access: 8-bit offset.  */
1053 max_offset
= (1 << 8);
1063 /* Return true if OP is a valid short EP memory reference */
1066 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1072 /* If we are not using the EP register on a per-function basis
1073 then do not allow this optimization at all. This is to
1074 prevent the use of the SLD/SST instructions which cannot be
1075 guaranteed to work properly due to a hardware bug. */
1079 if (GET_CODE (op
) != MEM
)
1082 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1084 mask
= GET_MODE_SIZE (mode
) - 1;
1086 addr
= XEXP (op
, 0);
1087 if (GET_CODE (addr
) == CONST
)
1088 addr
= XEXP (addr
, 0);
1090 switch (GET_CODE (addr
))
1096 return SYMBOL_REF_TDA_P (addr
);
1099 return REGNO (addr
) == EP_REGNUM
;
1102 op0
= XEXP (addr
, 0);
1103 op1
= XEXP (addr
, 1);
1104 if (GET_CODE (op1
) == CONST_INT
1105 && INTVAL (op1
) < max_offset
1106 && INTVAL (op1
) >= 0
1107 && (INTVAL (op1
) & mask
) == 0)
1109 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1112 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1121 /* Substitute memory references involving a pointer, to use the ep pointer,
1122 taking care to save and preserve the ep. */
1125 substitute_ep_register (rtx_insn
*first_insn
,
1126 rtx_insn
*last_insn
,
1132 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1137 df_set_regs_ever_live (1, true);
1138 *p_r1
= gen_rtx_REG (Pmode
, 1);
1139 *p_ep
= gen_rtx_REG (Pmode
, 30);
1144 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1145 2 * (uses
- 3), uses
, reg_names
[regno
],
1146 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1147 INSN_UID (first_insn
), INSN_UID (last_insn
));
1149 if (NOTE_P (first_insn
))
1150 first_insn
= next_nonnote_insn (first_insn
);
1152 last_insn
= next_nonnote_insn (last_insn
);
1153 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1155 if (NONJUMP_INSN_P (insn
))
1157 rtx pattern
= single_set (insn
);
1159 /* Replace the memory references. */
1163 /* Memory operands are signed by default. */
1164 int unsignedp
= FALSE
;
1166 if (GET_CODE (SET_DEST (pattern
)) == MEM
1167 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1170 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1171 p_mem
= &SET_DEST (pattern
);
1173 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1174 p_mem
= &SET_SRC (pattern
);
1176 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1177 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1178 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1180 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1181 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1183 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1191 rtx addr
= XEXP (*p_mem
, 0);
1193 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1194 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1196 else if (GET_CODE (addr
) == PLUS
1197 && GET_CODE (XEXP (addr
, 0)) == REG
1198 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1199 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1200 && ((INTVAL (XEXP (addr
, 1)))
1201 < ep_memory_offset (GET_MODE (*p_mem
),
1203 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1204 *p_mem
= change_address (*p_mem
, VOIDmode
,
1205 gen_rtx_PLUS (Pmode
,
1213 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1214 insn
= prev_nonnote_insn (first_insn
);
1215 if (insn
&& NONJUMP_INSN_P (insn
)
1216 && GET_CODE (PATTERN (insn
)) == SET
1217 && SET_DEST (PATTERN (insn
)) == *p_ep
1218 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1221 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1223 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1224 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1228 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1229 the -mep mode to copy heavily used pointers to ep to use the implicit
1238 rtx_insn
*first_insn
;
1239 rtx_insn
*last_insn
;
1241 regs
[FIRST_PSEUDO_REGISTER
];
1250 /* If not ep mode, just return now. */
1254 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1257 regs
[i
].first_insn
= NULL
;
1258 regs
[i
].last_insn
= NULL
;
1261 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1263 switch (GET_CODE (insn
))
1265 /* End of basic block */
1272 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1274 if (max_uses
< regs
[i
].uses
)
1276 max_uses
= regs
[i
].uses
;
1282 substitute_ep_register (regs
[max_regno
].first_insn
,
1283 regs
[max_regno
].last_insn
,
1284 max_uses
, max_regno
, &r1
, &ep
);
1288 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1291 regs
[i
].first_insn
= NULL
;
1292 regs
[i
].last_insn
= NULL
;
1300 pattern
= single_set (insn
);
1302 /* See if there are any memory references we can shorten. */
1305 rtx src
= SET_SRC (pattern
);
1306 rtx dest
= SET_DEST (pattern
);
1308 /* Memory operands are signed by default. */
1309 int unsignedp
= FALSE
;
1311 /* We might have (SUBREG (MEM)) here, so just get rid of the
1312 subregs to make this code simpler. */
1313 if (GET_CODE (dest
) == SUBREG
1314 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1315 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1316 alter_subreg (&dest
, false);
1317 if (GET_CODE (src
) == SUBREG
1318 && (GET_CODE (SUBREG_REG (src
)) == MEM
1319 || GET_CODE (SUBREG_REG (src
)) == REG
))
1320 alter_subreg (&src
, false);
1322 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1325 else if (GET_CODE (dest
) == MEM
)
1328 else if (GET_CODE (src
) == MEM
)
1331 else if (GET_CODE (src
) == SIGN_EXTEND
1332 && GET_CODE (XEXP (src
, 0)) == MEM
)
1333 mem
= XEXP (src
, 0);
1335 else if (GET_CODE (src
) == ZERO_EXTEND
1336 && GET_CODE (XEXP (src
, 0)) == MEM
)
1338 mem
= XEXP (src
, 0);
1344 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1347 else if (!use_ep
&& mem
1348 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1350 rtx addr
= XEXP (mem
, 0);
1354 if (GET_CODE (addr
) == REG
)
1357 regno
= REGNO (addr
);
1360 else if (GET_CODE (addr
) == PLUS
1361 && GET_CODE (XEXP (addr
, 0)) == REG
1362 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1363 && ((INTVAL (XEXP (addr
, 1)))
1364 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1365 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1368 regno
= REGNO (XEXP (addr
, 0));
1377 regs
[regno
].last_insn
= insn
;
1378 if (!regs
[regno
].first_insn
)
1379 regs
[regno
].first_insn
= insn
;
1383 /* Loading up a register in the basic block zaps any savings
1385 if (GET_CODE (dest
) == REG
)
1387 enum machine_mode mode
= GET_MODE (dest
);
1391 regno
= REGNO (dest
);
1392 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1396 /* See if we can use the pointer before this
1401 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1403 if (max_uses
< regs
[i
].uses
)
1405 max_uses
= regs
[i
].uses
;
1411 && max_regno
>= regno
1412 && max_regno
< endregno
)
1414 substitute_ep_register (regs
[max_regno
].first_insn
,
1415 regs
[max_regno
].last_insn
,
1416 max_uses
, max_regno
, &r1
,
1419 /* Since we made a substitution, zap all remembered
1421 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1424 regs
[i
].first_insn
= NULL
;
1425 regs
[i
].last_insn
= NULL
;
1430 for (i
= regno
; i
< endregno
; i
++)
1433 regs
[i
].first_insn
= NULL
;
1434 regs
[i
].last_insn
= NULL
;
1442 /* # of registers saved by the interrupt handler. */
1443 #define INTERRUPT_FIXED_NUM 5
1445 /* # of bytes for registers saved by the interrupt handler. */
1446 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1448 /* # of words saved for other registers. */
1449 #define INTERRUPT_ALL_SAVE_NUM \
1450 (30 - INTERRUPT_FIXED_NUM)
1452 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1455 compute_register_save_size (long * p_reg_saved
)
1459 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1460 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1463 /* Count space for the register saves. */
1464 if (interrupt_handler
)
1466 for (i
= 0; i
<= 31; i
++)
1470 if (df_regs_ever_live_p (i
) || call_p
)
1473 reg_saved
|= 1L << i
;
1477 /* We don't save/restore r0 or the stack pointer */
1479 case STACK_POINTER_REGNUM
:
1482 /* For registers with fixed use, we save them, set them to the
1483 appropriate value, and then restore them.
1484 These registers are handled specially, so don't list them
1485 on the list of registers to save in the prologue. */
1486 case 1: /* temp used to hold ep */
1488 case 10: /* temp used to call interrupt save/restore */
1489 case 11: /* temp used to call interrupt save/restore (long call) */
1490 case EP_REGNUM
: /* ep */
1497 /* Find the first register that needs to be saved. */
1498 for (i
= 0; i
<= 31; i
++)
1499 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1500 || i
== LINK_POINTER_REGNUM
))
1503 /* If it is possible that an out-of-line helper function might be
1504 used to generate the prologue for the current function, then we
1505 need to cover the possibility that such a helper function will
1506 be used, despite the fact that there might be gaps in the list of
1507 registers that need to be saved. To detect this we note that the
1508 helper functions always push at least register r29 (provided
1509 that the function is not an interrupt handler). */
1511 if (TARGET_PROLOG_FUNCTION
1512 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1517 reg_saved
|= 1L << i
;
1522 /* Helper functions save all registers between the starting
1523 register and the last register, regardless of whether they
1524 are actually used by the function or not. */
1525 for (; i
<= 29; i
++)
1528 reg_saved
|= 1L << i
;
1531 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1534 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1539 for (; i
<= 31; i
++)
1540 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1541 || i
== LINK_POINTER_REGNUM
))
1544 reg_saved
|= 1L << i
;
1550 *p_reg_saved
= reg_saved
;
1555 /* Typical stack layout should looks like this after the function's prologue:
1560 | | arguments saved | Increasing
1561 | | on the stack | addresses
1562 PARENT arg pointer -> | | /
1563 -------------------------- ---- -------------------
1564 | | - space for argument split between regs & stack
1566 CHILD | | \ <-- (return address here)
1571 frame pointer -> | | \ ___
1578 | | arguments | | Decreasing
1579 (hard) frame pointer | | / | | addresses
1580 and stack pointer -> | | / _|_ |
1581 -------------------------- ---- ------------------ V */
/* Total stack frame size: the local-variable SIZE plus the register-save
   area (also reported through *P_REG_SAVED as a bitmask) plus outgoing
   argument space.
   NOTE(review): the function's opening lines (original 1585-1586),
   including the first term of this sum, are missing from this truncated
   listing.  */
1584 compute_frame_size (int size
, long * p_reg_saved
)
1587 + compute_register_save_size (p_reg_saved
)
1588 + crtl
->outgoing_args_size
);
/* Decide whether saving NUM_SAVE registers with FRAME_SIZE bytes of frame
   is cheaper done via the out-of-line prologue/epilogue helper functions
   than with inline saves.  Returns nonzero when the helpers win.
   All lengths below are code-size estimates in bytes.
   NOTE(review): the braces and a few connective lines (original 1593,
   1598, 1601, 1603-1605, 1608-1609, 1613, 1615, 1618) are missing from
   this truncated listing.  */
1592 use_prolog_function (int num_save
, int frame_size
)
/* Stack the helper allocates itself (4 bytes per saved register)...  */
1594 int alloc_stack
= (4 * num_save
);
/* ...and what remains for the caller to allocate around the helper.  */
1595 int unalloc_stack
= frame_size
- alloc_stack
;
1596 int save_func_len
, restore_func_len
;
1597 int save_normal_len
, restore_normal_len
;
/* Cost of invoking the helpers: a 2-byte callt, or 4-byte jarl, or the
   long-call sequence.  */
1599 if (! TARGET_DISABLE_CALLT
)
1600 save_func_len
= restore_func_len
= 2;
1602 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
/* Extra add needed when stack is left over beyond the helper's share;
   a 5-bit immediate fits the short 2-byte add, otherwise 4 bytes.  */
1606 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1607 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1610 /* See if we would have used ep to save the stack. */
1611 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1612 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1614 save_normal_len
= restore_normal_len
= 4 * num_save
;
/* Inline variants also need their own stack adjustment instructions.  */
1616 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1617 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1619 /* Don't bother checking if we don't actually save any space.
1620 This happens for instance if one register is saved and additional
1621 stack space is allocated. */
1622 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
/* Adjust the stack pointer by AMOUNT bytes (negative allocates).
   IN_PROLOGUE presumably controls frame-related annotation of the emitted
   insns — TODO confirm; the lines that would use it (original 1627-1632,
   1634, 1636, 1638, 1640-1644, 1646-1650) are missing from this
   truncated listing.  */
1626 increment_stack (signed int amount
, bool in_prologue
)
1633 inc
= GEN_INT (amount
);
/* An adjustment too large for a 16-bit immediate must be loaded into a
   scratch register (r12) first.  */
1635 if (! CONST_OK_FOR_K (amount
))
1637 rtx reg
= gen_rtx_REG (Pmode
, 12);
1639 inc
= emit_move_insn (reg
, inc
);
/* sp = sp + inc.  */
1645 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1651 expand_prologue (void)
1654 unsigned int size
= get_frame_size ();
1655 unsigned int actual_fsize
;
1656 unsigned int init_stack_alloc
= 0;
1659 unsigned int num_save
;
1661 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1664 actual_fsize
= compute_frame_size (size
, ®_saved
);
1666 if (flag_stack_usage_info
)
1667 current_function_static_stack_size
= actual_fsize
;
1669 /* Save/setup global registers for interrupt functions right now. */
1670 if (interrupt_handler
)
1672 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1673 emit_insn (gen_callt_save_interrupt ());
1675 emit_insn (gen_save_interrupt ());
1677 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1679 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1680 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1682 /* Interrupt functions are not passed arguments, so no need to
1683 allocate space for split structure arguments. */
1684 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1687 /* Identify all of the saved registers. */
1689 for (i
= 1; i
< 32; i
++)
1691 if (((1L << i
) & reg_saved
) != 0)
1692 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1695 if (crtl
->args
.pretend_args_size
)
1699 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1703 increment_stack (- crtl
->args
.pretend_args_size
, true);
1706 /* See if we have an insn that allocates stack space and saves the particular
1707 registers we want to. Note that the helpers won't
1708 allocate additional space for registers GCC saves to complete a
1709 "split" structure argument. */
1710 save_all
= NULL_RTX
;
1711 if (TARGET_PROLOG_FUNCTION
1712 && !crtl
->args
.pretend_args_size
1715 if (use_prolog_function (num_save
, actual_fsize
))
1717 int alloc_stack
= 4 * num_save
;
1720 save_all
= gen_rtx_PARALLEL
1722 rtvec_alloc (num_save
+ 1
1723 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1725 XVECEXP (save_all
, 0, 0)
1726 = gen_rtx_SET (VOIDmode
,
1728 gen_rtx_PLUS (Pmode
,
1730 GEN_INT(-alloc_stack
)));
1731 for (i
= 0; i
< num_save
; i
++)
1734 XVECEXP (save_all
, 0, i
+1)
1735 = gen_rtx_SET (VOIDmode
,
1737 gen_rtx_PLUS (Pmode
,
1743 if (TARGET_DISABLE_CALLT
)
1745 XVECEXP (save_all
, 0, num_save
+ 1)
1746 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1748 if (TARGET_LONG_CALLS
)
1749 XVECEXP (save_all
, 0, num_save
+ 2)
1750 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1753 v850_all_frame_related (save_all
);
1755 code
= recog (save_all
, NULL_RTX
, NULL
);
1758 rtx insn
= emit_insn (save_all
);
1759 INSN_CODE (insn
) = code
;
1760 actual_fsize
-= alloc_stack
;
1764 save_all
= NULL_RTX
;
1768 /* If no prolog save function is available, store the registers the old
1769 fashioned way (one by one). */
1772 /* Special case interrupt functions that save all registers for a call. */
1773 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1775 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1776 emit_insn (gen_callt_save_all_interrupt ());
1778 emit_insn (gen_save_all_interrupt ());
1783 /* If the stack is too big, allocate it in chunks so we can do the
1784 register saves. We use the register save size so we use the ep
1786 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1787 init_stack_alloc
= compute_register_save_size (NULL
);
1789 init_stack_alloc
= actual_fsize
;
1791 /* Save registers at the beginning of the stack frame. */
1792 offset
= init_stack_alloc
- 4;
1794 if (init_stack_alloc
)
1795 increment_stack (- (signed) init_stack_alloc
, true);
1797 /* Save the return pointer first. */
1798 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1800 F (emit_move_insn (gen_rtx_MEM (SImode
,
1801 plus_constant (Pmode
,
1804 save_regs
[--num_save
]));
1808 for (i
= 0; i
< num_save
; i
++)
1810 F (emit_move_insn (gen_rtx_MEM (SImode
,
1811 plus_constant (Pmode
,
1820 /* Allocate the rest of the stack that was not allocated above (either it is
1821 > 32K or we just called a function to save the registers and needed more
1823 if (actual_fsize
> init_stack_alloc
)
1824 increment_stack (init_stack_alloc
- actual_fsize
, true);
1826 /* If we need a frame pointer, set it up now. */
1827 if (frame_pointer_needed
)
1828 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1833 expand_epilogue (void)
1836 unsigned int size
= get_frame_size ();
1838 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1839 rtx restore_regs
[32];
1841 unsigned int num_restore
;
1843 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1845 /* Eliminate the initial stack stored by interrupt functions. */
1846 if (interrupt_handler
)
1848 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1849 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1850 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1853 /* Cut off any dynamic stack created. */
1854 if (frame_pointer_needed
)
1855 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1857 /* Identify all of the saved registers. */
1859 for (i
= 1; i
< 32; i
++)
1861 if (((1L << i
) & reg_saved
) != 0)
1862 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1865 /* See if we have an insn that restores the particular registers we
1867 restore_all
= NULL_RTX
;
1869 if (TARGET_PROLOG_FUNCTION
1871 && !crtl
->args
.pretend_args_size
1872 && !interrupt_handler
)
1874 int alloc_stack
= (4 * num_restore
);
1876 /* Don't bother checking if we don't actually save any space. */
1877 if (use_prolog_function (num_restore
, actual_fsize
))
1880 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1881 rtvec_alloc (num_restore
+ 2));
1882 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1883 XVECEXP (restore_all
, 0, 1)
1884 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1885 gen_rtx_PLUS (Pmode
,
1887 GEN_INT (alloc_stack
)));
1889 offset
= alloc_stack
- 4;
1890 for (i
= 0; i
< num_restore
; i
++)
1892 XVECEXP (restore_all
, 0, i
+2)
1893 = gen_rtx_SET (VOIDmode
,
1896 gen_rtx_PLUS (Pmode
,
1902 code
= recog (restore_all
, NULL_RTX
, NULL
);
1908 actual_fsize
-= alloc_stack
;
1909 increment_stack (actual_fsize
, false);
1911 insn
= emit_jump_insn (restore_all
);
1912 INSN_CODE (insn
) = code
;
1915 restore_all
= NULL_RTX
;
1919 /* If no epilogue save function is available, restore the registers the
1920 old fashioned way (one by one). */
1923 unsigned int init_stack_free
;
1925 /* If the stack is large, we need to cut it down in 2 pieces. */
1926 if (interrupt_handler
)
1927 init_stack_free
= 0;
1928 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1929 init_stack_free
= 4 * num_restore
;
1931 init_stack_free
= (signed) actual_fsize
;
1933 /* Deallocate the rest of the stack if it is > 32K. */
1934 if ((unsigned int) actual_fsize
> init_stack_free
)
1935 increment_stack (actual_fsize
- init_stack_free
, false);
1937 /* Special case interrupt functions that save all registers
1939 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1941 if (! TARGET_DISABLE_CALLT
)
1942 emit_insn (gen_callt_restore_all_interrupt ());
1944 emit_insn (gen_restore_all_interrupt ());
1948 /* Restore registers from the beginning of the stack frame. */
1949 int offset
= init_stack_free
- 4;
1951 /* Restore the return pointer first. */
1953 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1955 emit_move_insn (restore_regs
[--num_restore
],
1956 gen_rtx_MEM (SImode
,
1957 plus_constant (Pmode
,
1963 for (i
= 0; i
< num_restore
; i
++)
1965 emit_move_insn (restore_regs
[i
],
1966 gen_rtx_MEM (SImode
,
1967 plus_constant (Pmode
,
1971 emit_use (restore_regs
[i
]);
1975 /* Cut back the remainder of the stack. */
1976 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1980 /* And return or use reti for interrupt handlers. */
1981 if (interrupt_handler
)
1983 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1984 emit_insn (gen_callt_return_interrupt ());
1986 emit_jump_insn (gen_return_interrupt ());
1988 else if (actual_fsize
)
1989 emit_jump_insn (gen_return_internal ());
1991 emit_jump_insn (gen_return_simple ());
1994 v850_interrupt_cache_p
= FALSE
;
1995 v850_interrupt_p
= FALSE
;
1998 /* Update the condition code from the insn. */
2000 notice_update_cc (rtx body
, rtx_insn
*insn
)
2002 switch (get_attr_cc (insn
))
2005 /* Insn does not affect CC at all. */
2009 /* Insn does not change CC, but the 0'th operand has been changed. */
2010 if (cc_status
.value1
!= 0
2011 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2012 cc_status
.value1
= 0;
2016 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2017 V,C is in an unusable state. */
2019 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2020 cc_status
.value1
= recog_data
.operand
[0];
2024 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2025 C is in an unusable state. */
2027 cc_status
.flags
|= CC_NO_CARRY
;
2028 cc_status
.value1
= recog_data
.operand
[0];
2032 /* The insn is a compare instruction. */
2034 cc_status
.value1
= SET_SRC (body
);
2038 /* Insn doesn't leave CC in a usable state. */
2047 /* Retrieve the data area that has been chosen for the given decl. */
2050 v850_get_data_area (tree decl
)
2052 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2053 return DATA_AREA_SDA
;
2055 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2056 return DATA_AREA_TDA
;
2058 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2059 return DATA_AREA_ZDA
;
2061 return DATA_AREA_NORMAL
;
2064 /* Store the indicated data area in the decl's attributes. */
2067 v850_set_data_area (tree decl
, v850_data_area data_area
)
2073 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2074 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2075 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2080 DECL_ATTRIBUTES (decl
) = tree_cons
2081 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2084 /* Handle an "interrupt" attribute; arguments as in
2085 struct attribute_spec.handler. */
2087 v850_handle_interrupt_attribute (tree
* node
,
2089 tree args ATTRIBUTE_UNUSED
,
2090 int flags ATTRIBUTE_UNUSED
,
2091 bool * no_add_attrs
)
2093 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2095 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2097 *no_add_attrs
= true;
2103 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2104 struct attribute_spec.handler. */
2106 v850_handle_data_area_attribute (tree
* node
,
2108 tree args ATTRIBUTE_UNUSED
,
2109 int flags ATTRIBUTE_UNUSED
,
2110 bool * no_add_attrs
)
2112 v850_data_area data_area
;
2113 v850_data_area area
;
2116 /* Implement data area attribute. */
2117 if (is_attribute_p ("sda", name
))
2118 data_area
= DATA_AREA_SDA
;
2119 else if (is_attribute_p ("tda", name
))
2120 data_area
= DATA_AREA_TDA
;
2121 else if (is_attribute_p ("zda", name
))
2122 data_area
= DATA_AREA_ZDA
;
2126 switch (TREE_CODE (decl
))
2129 if (current_function_decl
!= NULL_TREE
)
2131 error_at (DECL_SOURCE_LOCATION (decl
),
2132 "data area attributes cannot be specified for "
2134 *no_add_attrs
= true;
2140 area
= v850_get_data_area (decl
);
2141 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2143 error ("data area of %q+D conflicts with previous declaration",
2145 *no_add_attrs
= true;
2157 /* Return nonzero if FUNC is an interrupt function as specified
2158 by the "interrupt" attribute. */
2161 v850_interrupt_function_p (tree func
)
2166 if (v850_interrupt_cache_p
)
2167 return v850_interrupt_p
;
2169 if (TREE_CODE (func
) != FUNCTION_DECL
)
2172 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2178 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2179 ret
= a
!= NULL_TREE
;
2182 /* Its not safe to trust global variables until after function inlining has
2184 if (reload_completed
| reload_in_progress
)
2185 v850_interrupt_p
= ret
;
2192 v850_encode_data_area (tree decl
, rtx symbol
)
2196 /* Map explicit sections into the appropriate attribute */
2197 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2199 if (DECL_SECTION_NAME (decl
))
2201 const char *name
= DECL_SECTION_NAME (decl
);
2203 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2204 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2206 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2207 v850_set_data_area (decl
, DATA_AREA_SDA
);
2209 else if (streq (name
, ".tdata"))
2210 v850_set_data_area (decl
, DATA_AREA_TDA
);
2213 /* If no attribute, support -m{zda,sda,tda}=n */
2216 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2220 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2221 v850_set_data_area (decl
, DATA_AREA_TDA
);
2223 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2224 v850_set_data_area (decl
, DATA_AREA_SDA
);
2226 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2227 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2230 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2234 flags
= SYMBOL_REF_FLAGS (symbol
);
2235 switch (v850_get_data_area (decl
))
2237 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2238 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2239 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2240 default: gcc_unreachable ();
2242 SYMBOL_REF_FLAGS (symbol
) = flags
;
2246 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2248 default_encode_section_info (decl
, rtl
, first
);
2250 if (TREE_CODE (decl
) == VAR_DECL
2251 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2252 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2255 /* Construct a JR instruction to a routine that will perform the equivalent of
2256 the RTL passed in as an argument. This RTL is a function epilogue that
2257 pops registers off the stack and possibly releases some extra stack space
2258 as well. The code has already verified that the RTL matches these
2262 construct_restore_jr (rtx op
)
2264 int count
= XVECLEN (op
, 0);
2266 unsigned long int mask
;
2267 unsigned long int first
;
2268 unsigned long int last
;
2270 static char buff
[100]; /* XXX */
2274 error ("bogus JR construction: %d", count
);
2278 /* Work out how many bytes to pop off the stack before retrieving
2280 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2281 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2282 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2284 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2286 /* Each pop will remove 4 bytes from the stack.... */
2287 stack_bytes
-= (count
- 2) * 4;
2289 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2290 if (stack_bytes
!= 0)
2292 error ("bad amount of stack space removal: %d", stack_bytes
);
2296 /* Now compute the bit mask of registers to push. */
2298 for (i
= 2; i
< count
; i
++)
2300 rtx vector_element
= XVECEXP (op
, 0, i
);
2302 gcc_assert (GET_CODE (vector_element
) == SET
);
2303 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2304 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2307 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2310 /* Scan for the first register to pop. */
2311 for (first
= 0; first
< 32; first
++)
2313 if (mask
& (1 << first
))
2317 gcc_assert (first
< 32);
2319 /* Discover the last register to pop. */
2320 if (mask
& (1 << LINK_POINTER_REGNUM
))
2322 last
= LINK_POINTER_REGNUM
;
2326 gcc_assert (!stack_bytes
);
2327 gcc_assert (mask
& (1 << 29));
2332 /* Note, it is possible to have gaps in the register mask.
2333 We ignore this here, and generate a JR anyway. We will
2334 be popping more registers than is strictly necessary, but
2335 it does save code space. */
2337 if (TARGET_LONG_CALLS
)
2342 sprintf (name
, "__return_%s", reg_names
[first
]);
2344 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2346 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2352 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2354 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2361 /* Construct a JARL instruction to a routine that will perform the equivalent
2362 of the RTL passed as a parameter. This RTL is a function prologue that
2363 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2364 some stack space as well. The code has already verified that the RTL
2365 matches these requirements. */
2367 construct_save_jarl (rtx op
)
2369 int count
= XVECLEN (op
, 0);
2371 unsigned long int mask
;
2372 unsigned long int first
;
2373 unsigned long int last
;
2375 static char buff
[100]; /* XXX */
2377 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2379 error ("bogus JARL construction: %d", count
);
2384 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2385 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2386 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2387 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2389 /* Work out how many bytes to push onto the stack after storing the
2391 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2393 /* Each push will put 4 bytes from the stack.... */
2394 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2396 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2397 if (stack_bytes
!= 0)
2399 error ("bad amount of stack space removal: %d", stack_bytes
);
2403 /* Now compute the bit mask of registers to push. */
2405 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2407 rtx vector_element
= XVECEXP (op
, 0, i
);
2409 gcc_assert (GET_CODE (vector_element
) == SET
);
2410 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2411 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2414 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2417 /* Scan for the first register to push. */
2418 for (first
= 0; first
< 32; first
++)
2420 if (mask
& (1 << first
))
2424 gcc_assert (first
< 32);
2426 /* Discover the last register to push. */
2427 if (mask
& (1 << LINK_POINTER_REGNUM
))
2429 last
= LINK_POINTER_REGNUM
;
2433 gcc_assert (!stack_bytes
);
2434 gcc_assert (mask
& (1 << 29));
2439 /* Note, it is possible to have gaps in the register mask.
2440 We ignore this here, and generate a JARL anyway. We will
2441 be pushing more registers than is strictly necessary, but
2442 it does save code space. */
2444 if (TARGET_LONG_CALLS
)
2449 sprintf (name
, "__save_%s", reg_names
[first
]);
2451 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2453 if (TARGET_V850E3V5_UP
)
2454 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2456 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2462 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2464 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2471 /* A version of asm_output_aligned_bss() that copes with the special
2472 data areas of the v850. */
2474 v850_output_aligned_bss (FILE * file
,
2477 unsigned HOST_WIDE_INT size
,
2480 switch (v850_get_data_area (decl
))
2483 switch_to_section (zbss_section
);
2487 switch_to_section (sbss_section
);
2491 switch_to_section (tdata_section
);
2494 switch_to_section (bss_section
);
2498 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2499 #ifdef ASM_DECLARE_OBJECT_NAME
2500 last_assemble_variable_decl
= decl
;
2501 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2503 /* Standard thing is just output label for the object. */
2504 ASM_OUTPUT_LABEL (file
, name
);
2505 #endif /* ASM_DECLARE_OBJECT_NAME */
2506 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2509 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2511 v850_output_common (FILE * file
,
2517 if (decl
== NULL_TREE
)
2519 fprintf (file
, "%s", COMMON_ASM_OP
);
2523 switch (v850_get_data_area (decl
))
2526 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2530 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2534 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2538 fprintf (file
, "%s", COMMON_ASM_OP
);
2543 assemble_name (file
, name
);
2544 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2547 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2549 v850_output_local (FILE * file
,
2555 fprintf (file
, "%s", LOCAL_ASM_OP
);
2556 assemble_name (file
, name
);
2557 fprintf (file
, "\n");
2559 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2562 /* Add data area to the given declaration if a ghs data area pragma is
2563 currently in effect (#pragma ghs startXXX/endXXX). */
2565 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2568 && data_area_stack
->data_area
2569 && current_function_decl
== NULL_TREE
2570 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2571 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2572 v850_set_data_area (decl
, data_area_stack
->data_area
);
2574 /* Initialize the default names of the v850 specific sections,
2575 if this has not been done before. */
2577 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2579 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2582 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2585 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2588 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2591 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2595 if (current_function_decl
== NULL_TREE
2596 && (TREE_CODE (decl
) == VAR_DECL
2597 || TREE_CODE (decl
) == CONST_DECL
2598 || TREE_CODE (decl
) == FUNCTION_DECL
)
2599 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2600 && !DECL_SECTION_NAME (decl
))
2602 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2603 const char * chosen_section
;
2605 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2606 kind
= GHS_SECTION_KIND_TEXT
;
2609 /* First choose a section kind based on the data area of the decl. */
2610 switch (v850_get_data_area (decl
))
2616 kind
= ((TREE_READONLY (decl
))
2617 ? GHS_SECTION_KIND_ROSDATA
2618 : GHS_SECTION_KIND_SDATA
);
2622 kind
= GHS_SECTION_KIND_TDATA
;
2626 kind
= ((TREE_READONLY (decl
))
2627 ? GHS_SECTION_KIND_ROZDATA
2628 : GHS_SECTION_KIND_ZDATA
);
2631 case DATA_AREA_NORMAL
: /* default data area */
2632 if (TREE_READONLY (decl
))
2633 kind
= GHS_SECTION_KIND_RODATA
;
2634 else if (DECL_INITIAL (decl
))
2635 kind
= GHS_SECTION_KIND_DATA
;
2637 kind
= GHS_SECTION_KIND_BSS
;
2641 /* Now, if the section kind has been explicitly renamed,
2642 then attach a section attribute. */
2643 chosen_section
= GHS_current_section_names
[(int) kind
];
2645 /* Otherwise, if this kind of section needs an explicit section
2646 attribute, then also attach one. */
2647 if (chosen_section
== NULL
)
2648 chosen_section
= GHS_default_section_names
[(int) kind
];
2652 /* Only set the section name if specified by a pragma, because
2653 otherwise it will force those variables to get allocated storage
2654 in this module, rather than by the linker. */
2655 set_decl_section_name (decl
, chosen_section
);
2660 /* Construct a DISPOSE instruction that is the equivalent of
2661 the given RTX. We have already verified that this should
2665 construct_dispose_instruction (rtx op
)
2667 int count
= XVECLEN (op
, 0);
2669 unsigned long int mask
;
2671 static char buff
[ 100 ]; /* XXX */
2676 error ("bogus DISPOSE construction: %d", count
);
2680 /* Work out how many bytes to pop off the
2681 stack before retrieving registers. */
2682 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2683 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2684 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2686 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2688 /* Each pop will remove 4 bytes from the stack.... */
2689 stack_bytes
-= (count
- 2) * 4;
2691 /* Make sure that the amount we are popping
2692 will fit into the DISPOSE instruction. */
2693 if (stack_bytes
> 128)
2695 error ("too much stack space to dispose of: %d", stack_bytes
);
2699 /* Now compute the bit mask of registers to push. */
2702 for (i
= 2; i
< count
; i
++)
2704 rtx vector_element
= XVECEXP (op
, 0, i
);
2706 gcc_assert (GET_CODE (vector_element
) == SET
);
2707 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2708 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2711 if (REGNO (SET_DEST (vector_element
)) == 2)
2714 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2717 if (! TARGET_DISABLE_CALLT
2718 && (use_callt
|| stack_bytes
== 0))
2722 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2727 for (i
= 20; i
< 32; i
++)
2728 if (mask
& (1 << i
))
2732 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2734 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2735 i
, (mask
& (1 << 31)) ? "31c" : "29");
2740 static char regs
[100]; /* XXX */
2743 /* Generate the DISPOSE instruction. Note we could just issue the
2744 bit mask as a number as the assembler can cope with this, but for
2745 the sake of our readers we turn it into a textual description. */
2749 for (i
= 20; i
< 32; i
++)
2751 if (mask
& (1 << i
))
2756 strcat (regs
, ", ");
2761 strcat (regs
, reg_names
[ first
]);
2763 for (i
++; i
< 32; i
++)
2764 if ((mask
& (1 << i
)) == 0)
2769 strcat (regs
, " - ");
2770 strcat (regs
, reg_names
[ i
- 1 ] );
2775 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2781 /* Construct a PREPARE instruction that is the equivalent of
2782 the given RTL. We have already verified that this should
2786 construct_prepare_instruction (rtx op
)
2790 unsigned long int mask
;
2792 static char buff
[ 100 ]; /* XXX */
2795 if (XVECLEN (op
, 0) <= 1)
2797 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2801 /* Work out how many bytes to push onto
2802 the stack after storing the registers. */
2803 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2804 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2805 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2807 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2810 /* Make sure that the amount we are popping
2811 will fit into the DISPOSE instruction. */
2812 if (stack_bytes
< -128)
2814 error ("too much stack space to prepare: %d", stack_bytes
);
2818 /* Now compute the bit mask of registers to push. */
2821 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2823 rtx vector_element
= XVECEXP (op
, 0, i
);
2825 if (GET_CODE (vector_element
) == CLOBBER
)
2828 gcc_assert (GET_CODE (vector_element
) == SET
);
2829 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2830 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2833 if (REGNO (SET_SRC (vector_element
)) == 2)
2836 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2840 stack_bytes
+= count
* 4;
2842 if ((! TARGET_DISABLE_CALLT
)
2843 && (use_callt
|| stack_bytes
== 0))
2847 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2851 for (i
= 20; i
< 32; i
++)
2852 if (mask
& (1 << i
))
2856 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2858 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2859 i
, (mask
& (1 << 31)) ? "31c" : "29");
2863 static char regs
[100]; /* XXX */
2867 /* Generate the PREPARE instruction. Note we could just issue the
2868 bit mask as a number as the assembler can cope with this, but for
2869 the sake of our readers we turn it into a textual description. */
2873 for (i
= 20; i
< 32; i
++)
2875 if (mask
& (1 << i
))
2880 strcat (regs
, ", ");
2885 strcat (regs
, reg_names
[ first
]);
2887 for (i
++; i
< 32; i
++)
2888 if ((mask
& (1 << i
)) == 0)
2893 strcat (regs
, " - ");
2894 strcat (regs
, reg_names
[ i
- 1 ] );
2899 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2905 /* Return an RTX indicating where the return address to the
2906 calling function can be found. */
2909 v850_return_addr (int count
)
2914 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2917 /* Implement TARGET_ASM_INIT_SECTIONS. */
2920 v850_asm_init_sections (void)
2923 = get_unnamed_section (0, output_section_asm_op
,
2924 "\t.section .rosdata,\"a\"");
2927 = get_unnamed_section (0, output_section_asm_op
,
2928 "\t.section .rozdata,\"a\"");
2931 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2932 "\t.section .tdata,\"aw\"");
2935 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2936 "\t.section .zdata,\"aw\"");
2939 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2940 output_section_asm_op
,
2941 "\t.section .zbss,\"aw\"");
2945 v850_select_section (tree exp
,
2946 int reloc ATTRIBUTE_UNUSED
,
2947 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2949 if (TREE_CODE (exp
) == VAR_DECL
)
2952 if (!TREE_READONLY (exp
)
2953 || TREE_SIDE_EFFECTS (exp
)
2954 || !DECL_INITIAL (exp
)
2955 || (DECL_INITIAL (exp
) != error_mark_node
2956 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2961 switch (v850_get_data_area (exp
))
2964 return is_const
? rozdata_section
: zdata_section
;
2967 return tdata_section
;
2970 return is_const
? rosdata_section
: sdata_section
;
2973 return is_const
? readonly_data_section
: data_section
;
2976 return readonly_data_section
;
2979 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2982 v850_function_value_regno_p (const unsigned int regno
)
2984 return (regno
== 10);
2987 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2990 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2992 /* Return values > 8 bytes in length in memory. */
2993 return int_size_in_bytes (type
) > 8
2994 || TYPE_MODE (type
) == BLKmode
2995 /* With the rh850 ABI return all aggregates in memory. */
2996 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
3000 /* Worker function for TARGET_FUNCTION_VALUE. */
3003 v850_function_value (const_tree valtype
,
3004 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
3005 bool outgoing ATTRIBUTE_UNUSED
)
3007 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
3011 /* Worker function for TARGET_CAN_ELIMINATE. */
3014 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3016 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3019 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3021 If TARGET_APP_REGS is not defined then add r2 and r5 to
3022 the pool of fixed registers. See PR 14505. */
3025 v850_conditional_register_usage (void)
3027 if (TARGET_APP_REGS
)
3029 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3030 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emits the
   trampoline skeleton: load the static chain and target address from
   the two trailing words (filled in by v850_trampoline_init) and jump.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3048 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3051 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3053 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3055 emit_block_move (m_tramp
, assemble_trampoline_template (),
3056 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3058 mem
= adjust_address (m_tramp
, SImode
, 16);
3059 emit_move_insn (mem
, chain_value
);
3060 mem
= adjust_address (m_tramp
, SImode
, 20);
3061 emit_move_insn (mem
, fnaddr
);
3065 v850_issue_rate (void)
3067 return (TARGET_V850E2_UP
? 2 : 1);
3070 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3073 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3075 return (GET_CODE (x
) == CONST_DOUBLE
3076 || !(GET_CODE (x
) == CONST
3077 && GET_CODE (XEXP (x
, 0)) == PLUS
3078 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3079 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3080 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3084 v850_memory_move_cost (enum machine_mode mode
,
3085 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3088 switch (GET_MODE_SIZE (mode
))
3098 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3103 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3105 if (TARGET_V850E3V5_UP
)
3109 if (TARGET_LONG_CALLS
)
3111 /* call_internal_long, call_value_internal_long. */
3119 /* call_internal_short, call_value_internal_short. */
3128 /* V850 specific attributes. */
3130 static const struct attribute_spec v850_attribute_table
[] =
3132 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3133 affects_type_identity } */
3134 { "interrupt_handler", 0, 0, true, false, false,
3135 v850_handle_interrupt_attribute
, false },
3136 { "interrupt", 0, 0, true, false, false,
3137 v850_handle_interrupt_attribute
, false },
3138 { "sda", 0, 0, true, false, false,
3139 v850_handle_data_area_attribute
, false },
3140 { "tda", 0, 0, true, false, false,
3141 v850_handle_data_area_attribute
, false },
3142 { "zda", 0, 0, true, false, false,
3143 v850_handle_data_area_attribute
, false },
3144 { NULL
, 0, 0, false, false, false, NULL
, false }
3148 v850_option_override (void)
3150 if (flag_exceptions
|| flag_non_call_exceptions
)
3151 flag_omit_frame_pointer
= 0;
3153 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3154 if (! TARGET_GCC_ABI
)
3155 target_flags
|= MASK_DISABLE_CALLT
;
3159 v850_gen_movdi (rtx
* operands
)
3161 if (REG_P (operands
[0]))
3163 if (REG_P (operands
[1]))
3165 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3166 return "mov %1, %0; mov %R1, %R0";
3168 return "mov %R1, %R0; mov %1, %0";
3171 if (MEM_P (operands
[1]))
3173 if (REGNO (operands
[0]) & 1)
3174 /* Use two load word instructions to synthesise a load double. */
3175 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3177 return "ld.dw %1, %0";
3180 return "mov %1, %0; mov %R1, %R0";
3183 gcc_assert (REG_P (operands
[1]));
3185 if (REGNO (operands
[1]) & 1)
3186 /* Use two store word instructions to synthesise a store double. */
3187 return "st.w %1, %0 ; st.w %R1, %R0 ";
3189 return "st.dw %1, %0";
/* Initialize the GCC target structure: point each TARGET_* hook macro at
   this backend's implementation (or a generic hook) before targetm is
   built from TARGET_INITIALIZER below.  */

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3288 struct gcc_target targetm
= TARGET_INITIALIZER
;
3290 #include "gt-v850.h"