1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "stringpool.h"
31 #include "insn-config.h"
35 #include "diagnostic-core.h"
36 #include "stor-layout.h"
39 #include "conditions.h"
41 #include "insn-attr.h"
46 /* This file should be included last. */
47 #include "target-def.h"
/* True iff the NUL-terminated strings A and B compare equal.
   Each argument is evaluated exactly once (by strcmp).  */
50 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration: write the assembly form of an address operand
   to the given stream.  Defined later in this file.  */
53 static void v850_print_operand_address (FILE *, rtx
);
55 /* Names of the various data areas used on the v850. */
/* GHS section-name tables: the compiler defaults and the currently
   selected names, one slot per GHS section kind.  */
56 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
57 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
59 /* Track the current data area set by the data area pragma (which
60 can be nested). Tested by check_default_data_area. */
61 data_area_stack_element
* data_area_stack
= NULL
;
63 /* True if we don't need to check any more if the current
64 function is an interrupt handler. */
65 static int v850_interrupt_cache_p
= FALSE
;
/* Operands of the pending comparison, kept here between expansion
   of the compare and of the branch/set that uses it.  */
67 rtx v850_compare_op0
, v850_compare_op1
;
69 /* Whether current function is an interrupt handler. */
70 static int v850_interrupt_p
= FALSE
;
/* Cached section objects for the v850-specific data areas
   (read-only sdata / zdata, tiny data, zero data, zero bss).
   GTY(()) marks each pointer as a garbage-collector root.  */
72 static GTY(()) section
* rosdata_section
;
73 static GTY(()) section
* rozdata_section
;
74 static GTY(()) section
* tdata_section
;
75 static GTY(()) section
* zdata_section
;
76 static GTY(()) section
* zbss_section
;
78 /* We use this to wrap all emitted insns in the prologue. */
82 if (GET_CODE (x
) != CLOBBER
)
83 RTX_FRAME_RELATED_P (x
) = 1;
87 /* Mark all the subexpressions of the PARALLEL rtx PAR as
88 frame-related. Return PAR.
90 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
91 PARALLEL rtx other than the first if they do not have the
92 FRAME_RELATED flag set on them. */
95 v850_all_frame_related (rtx par
)
97 int len
= XVECLEN (par
, 0);
100 gcc_assert (GET_CODE (par
) == PARALLEL
);
101 for (i
= 0; i
< len
; i
++)
102 F (XVECEXP (par
, 0, i
));
107 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
108 Specify whether to pass the argument by reference. */
111 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
112 machine_mode mode
, const_tree type
,
113 bool named ATTRIBUTE_UNUSED
)
115 unsigned HOST_WIDE_INT size
;
121 size
= int_size_in_bytes (type
);
123 size
= GET_MODE_SIZE (mode
);
128 /* Return an RTX to represent where an argument with mode MODE
129 and type TYPE will be passed to a function. If the result
130 is NULL_RTX, the argument will be pushed. */
133 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
134 const_tree type
, bool named
)
136 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
137 rtx result
= NULL_RTX
;
144 size
= int_size_in_bytes (type
);
146 size
= GET_MODE_SIZE (mode
);
148 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
152 /* Once we have stopped using argument registers, do not start up again. */
153 cum
->nbytes
= 4 * UNITS_PER_WORD
;
158 align
= UNITS_PER_WORD
;
159 else if (size
<= UNITS_PER_WORD
&& type
)
160 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
164 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
166 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
169 if (type
== NULL_TREE
170 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
173 switch (cum
->nbytes
/ UNITS_PER_WORD
)
176 result
= gen_rtx_REG (mode
, 6);
179 result
= gen_rtx_REG (mode
, 7);
182 result
= gen_rtx_REG (mode
, 8);
185 result
= gen_rtx_REG (mode
, 9);
194 /* Return the number of bytes which must be put into registers
195 for values which are part in registers and part in memory. */
197 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
198 tree type
, bool named
)
200 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
207 size
= int_size_in_bytes (type
);
209 size
= GET_MODE_SIZE (mode
);
215 align
= UNITS_PER_WORD
;
217 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
221 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
223 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
226 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
229 if (type
== NULL_TREE
230 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
233 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
236 /* Update the data in CUM to advance over an argument
237 of mode MODE and data type TYPE.
238 (TYPE is null for libcalls where that information may not be available.) */
241 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
242 const_tree type
, bool named ATTRIBUTE_UNUSED
)
244 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
247 cum
->nbytes
+= (((mode
!= BLKmode
248 ? GET_MODE_SIZE (mode
)
249 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
252 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
253 ? GET_MODE_SIZE (Pmode
)
255 ? GET_MODE_SIZE (mode
)
256 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
260 /* Return the high and low words of a CONST_DOUBLE */
263 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
265 if (GET_CODE (x
) == CONST_DOUBLE
)
269 switch (GET_MODE (x
))
272 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x
), t
);
273 *p_high
= t
[1]; /* since v850 is little endian */
274 *p_low
= t
[0]; /* high is second word */
278 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), *p_high
);
284 *p_high
= CONST_DOUBLE_HIGH (x
);
285 *p_low
= CONST_DOUBLE_LOW (x
);
293 fatal_insn ("const_double_split got a bad insn:", x
);
297 /* Return the cost of the rtx R with code CODE. */
300 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
302 if (CONST_OK_FOR_I (value
))
304 else if (CONST_OK_FOR_J (value
))
306 else if (CONST_OK_FOR_K (value
))
313 const_costs (rtx r
, enum rtx_code c
)
315 HOST_WIDE_INT high
, low
;
320 return const_costs_int (INTVAL (r
), 0);
323 const_double_split (r
, &high
, &low
);
324 if (GET_MODE (r
) == SFmode
)
325 return const_costs_int (high
, 1);
327 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
343 v850_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
344 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
346 enum rtx_code code
= GET_CODE (x
);
355 *total
= COSTS_N_INSNS (const_costs (x
, code
));
362 if (TARGET_V850E
&& !speed
)
370 && (mode
== SImode
|| mode
== HImode
|| mode
== QImode
))
372 if (GET_CODE (XEXP (x
, 1)) == REG
)
374 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
376 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
378 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
387 if (outer_code
== COMPARE
)
396 /* Print operand X using operand code CODE to assembly language output file
400 v850_print_operand (FILE * file
, rtx x
, int code
)
402 HOST_WIDE_INT high
, low
;
407 /* We use 'c' operands with symbols for .vtinherit. */
408 if (GET_CODE (x
) == SYMBOL_REF
)
410 output_addr_const(file
, x
);
417 switch ((code
== 'B' || code
== 'C')
418 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
421 if (code
== 'c' || code
== 'C')
422 fprintf (file
, "nz");
424 fprintf (file
, "ne");
427 if (code
== 'c' || code
== 'C')
433 fprintf (file
, "ge");
436 fprintf (file
, "gt");
439 fprintf (file
, "le");
442 fprintf (file
, "lt");
445 fprintf (file
, "nl");
451 fprintf (file
, "nh");
460 case 'F': /* High word of CONST_DOUBLE. */
461 switch (GET_CODE (x
))
464 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
468 const_double_split (x
, &high
, &low
);
469 fprintf (file
, "%ld", (long) high
);
476 case 'G': /* Low word of CONST_DOUBLE. */
477 switch (GET_CODE (x
))
480 fprintf (file
, "%ld", (long) INTVAL (x
));
484 const_double_split (x
, &high
, &low
);
485 fprintf (file
, "%ld", (long) low
);
493 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
496 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
499 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
501 if (GET_CODE (x
) == CONST
)
502 x
= XEXP (XEXP (x
, 0), 0);
504 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
506 if (SYMBOL_REF_ZDA_P (x
))
507 fprintf (file
, "zdaoff");
508 else if (SYMBOL_REF_SDA_P (x
))
509 fprintf (file
, "sdaoff");
510 else if (SYMBOL_REF_TDA_P (x
))
511 fprintf (file
, "tdaoff");
516 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
517 output_addr_const (file
, x
);
520 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
522 if (GET_CODE (x
) == CONST
)
523 x
= XEXP (XEXP (x
, 0), 0);
525 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
527 if (SYMBOL_REF_ZDA_P (x
))
528 fprintf (file
, "r0");
529 else if (SYMBOL_REF_SDA_P (x
))
530 fprintf (file
, "gp");
531 else if (SYMBOL_REF_TDA_P (x
))
532 fprintf (file
, "ep");
536 case 'R': /* 2nd word of a double. */
537 switch (GET_CODE (x
))
540 fprintf (file
, reg_names
[REGNO (x
) + 1]);
543 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
544 v850_print_operand_address (file
, x
);
545 if (GET_CODE (x
) == CONST_INT
)
546 fprintf (file
, "[r0]");
551 unsigned HOST_WIDE_INT v
= INTVAL (x
);
553 /* Trickery to avoid problems with shifting
554 32-bits at a time on a 32-bit host. */
557 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
562 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
572 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
573 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
580 /* Like an 'S' operand above, but for unsigned loads only. */
581 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
586 case 'W': /* Print the instruction suffix. */
587 switch (GET_MODE (x
))
592 case QImode
: fputs (".b", file
); break;
593 case HImode
: fputs (".h", file
); break;
594 case SImode
: fputs (".w", file
); break;
595 case SFmode
: fputs (".w", file
); break;
598 case '.': /* Register r0. */
599 fputs (reg_names
[0], file
);
601 case 'z': /* Reg or zero. */
603 fputs (reg_names
[REGNO (x
)], file
);
604 else if ((GET_MODE(x
) == SImode
605 || GET_MODE(x
) == DFmode
606 || GET_MODE(x
) == SFmode
)
607 && x
== CONST0_RTX(GET_MODE(x
)))
608 fputs (reg_names
[0], file
);
611 gcc_assert (x
== const0_rtx
);
612 fputs (reg_names
[0], file
);
616 switch (GET_CODE (x
))
619 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
620 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
623 output_address (XEXP (x
, 0));
627 fputs (reg_names
[REGNO (x
)], file
);
630 fputs (reg_names
[subreg_regno (x
)], file
);
633 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
641 v850_print_operand_address (file
, x
);
652 /* Output assembly language output for the address ADDR to FILE. */
655 v850_print_operand_address (FILE * file
, rtx addr
)
657 switch (GET_CODE (addr
))
660 fprintf (file
, "0[");
661 v850_print_operand (file
, addr
, 0);
665 if (GET_CODE (XEXP (addr
, 0)) == REG
)
668 fprintf (file
, "lo(");
669 v850_print_operand (file
, XEXP (addr
, 1), 0);
670 fprintf (file
, ")[");
671 v850_print_operand (file
, XEXP (addr
, 0), 0);
676 if (GET_CODE (XEXP (addr
, 0)) == REG
677 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
680 v850_print_operand (file
, XEXP (addr
, 1), 0);
682 v850_print_operand (file
, XEXP (addr
, 0), 0);
687 v850_print_operand (file
, XEXP (addr
, 0), 0);
689 v850_print_operand (file
, XEXP (addr
, 1), 0);
694 const char *off_name
= NULL
;
695 const char *reg_name
= NULL
;
697 if (SYMBOL_REF_ZDA_P (addr
))
702 else if (SYMBOL_REF_SDA_P (addr
))
707 else if (SYMBOL_REF_TDA_P (addr
))
714 fprintf (file
, "%s(", off_name
);
715 output_addr_const (file
, addr
);
717 fprintf (file
, ")[%s]", reg_name
);
721 if (special_symbolref_operand (addr
, VOIDmode
))
723 rtx x
= XEXP (XEXP (addr
, 0), 0);
724 const char *off_name
;
725 const char *reg_name
;
727 if (SYMBOL_REF_ZDA_P (x
))
732 else if (SYMBOL_REF_SDA_P (x
))
737 else if (SYMBOL_REF_TDA_P (x
))
745 fprintf (file
, "%s(", off_name
);
746 output_addr_const (file
, addr
);
747 fprintf (file
, ")[%s]", reg_name
);
750 output_addr_const (file
, addr
);
753 output_addr_const (file
, addr
);
759 v850_print_operand_punct_valid_p (unsigned char code
)
764 /* When assemble_integer is used to emit the offsets for a switch
765 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
766 output_addr_const will normally barf at this, but it is OK to omit
767 the truncate and just emit the difference of the two labels. The
768 .hword directive will automatically handle the truncation for us.
770 Returns true if rtx was handled, false otherwise. */
773 v850_output_addr_const_extra (FILE * file
, rtx x
)
775 if (GET_CODE (x
) != TRUNCATE
)
780 /* We must also handle the case where the switch table was passed a
781 constant value and so has been collapsed. In this case the first
782 label will have been deleted. In such a case it is OK to emit
783 nothing, since the table will not be used.
784 (cf gcc.c-torture/compile/990801-1.c). */
785 if (GET_CODE (x
) == MINUS
786 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
788 rtx_code_label
*label
789 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
790 if (label
&& label
->deleted ())
794 output_addr_const (file
, x
);
798 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
802 output_move_single (rtx
* operands
)
804 rtx dst
= operands
[0];
805 rtx src
= operands
[1];
812 else if (GET_CODE (src
) == CONST_INT
)
814 HOST_WIDE_INT value
= INTVAL (src
);
816 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
819 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
820 return "movea %1,%.,%0";
822 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
823 return "movhi hi0(%1),%.,%0";
825 /* A random constant. */
826 else if (TARGET_V850E_UP
)
829 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
832 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
834 HOST_WIDE_INT high
, low
;
836 const_double_split (src
, &high
, &low
);
838 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
841 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
842 return "movea %F1,%.,%0";
844 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
845 return "movhi hi0(%F1),%.,%0";
847 /* A random constant. */
848 else if (TARGET_V850E_UP
)
852 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
855 else if (GET_CODE (src
) == MEM
)
856 return "%S1ld%W1 %1,%0";
858 else if (special_symbolref_operand (src
, VOIDmode
))
859 return "movea %O1(%P1),%Q1,%0";
861 else if (GET_CODE (src
) == LABEL_REF
862 || GET_CODE (src
) == SYMBOL_REF
863 || GET_CODE (src
) == CONST
)
866 return "mov hilo(%1),%0";
868 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
871 else if (GET_CODE (src
) == HIGH
)
872 return "movhi hi(%1),%.,%0";
874 else if (GET_CODE (src
) == LO_SUM
)
876 operands
[2] = XEXP (src
, 0);
877 operands
[3] = XEXP (src
, 1);
878 return "movea lo(%3),%2,%0";
882 else if (GET_CODE (dst
) == MEM
)
885 return "%S0st%W0 %1,%0";
887 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
888 return "%S0st%W0 %.,%0";
890 else if (GET_CODE (src
) == CONST_DOUBLE
891 && CONST0_RTX (GET_MODE (dst
)) == src
)
892 return "%S0st%W0 %.,%0";
895 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
900 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
902 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
907 return CC_FPU_LEmode
;
909 return CC_FPU_GEmode
;
911 return CC_FPU_LTmode
;
913 return CC_FPU_GTmode
;
915 return CC_FPU_EQmode
;
917 return CC_FPU_NEmode
;
926 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
928 if (GET_MODE (op0
) == DFmode
)
933 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
936 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
939 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
942 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
945 /* Note: There is no NE comparison operator. So we
946 perform an EQ comparison and invert the branch.
947 See v850_float_nz_comparison for how this is done. */
949 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
955 else if (GET_MODE (v850_compare_op0
) == SFmode
)
960 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
963 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
966 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
969 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
972 /* Note: There is no NE comparison operator. So we
973 perform an EQ comparison and invert the branch.
974 See v850_float_nz_comparison for how this is done. */
976 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
985 return v850_select_cc_mode (cond
, op0
, op1
);
989 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
991 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
993 emit_insn (gen_cmpsi_insn (op0
, op1
));
994 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
999 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1000 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1001 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1003 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1007 /* Return maximum offset supported for a short EP memory reference of mode
1008 MODE and signedness UNSIGNEDP. */
1011 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1018 if (TARGET_SMALL_SLD
)
1019 max_offset
= (1 << 4);
1020 else if ((TARGET_V850E_UP
)
1022 max_offset
= (1 << 4);
1024 max_offset
= (1 << 7);
1028 if (TARGET_SMALL_SLD
)
1029 max_offset
= (1 << 5);
1030 else if ((TARGET_V850E_UP
)
1032 max_offset
= (1 << 5);
1034 max_offset
= (1 << 8);
1039 max_offset
= (1 << 8);
1049 /* Return true if OP is a valid short EP memory reference */
1052 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1058 /* If we are not using the EP register on a per-function basis
1059 then do not allow this optimization at all. This is to
1060 prevent the use of the SLD/SST instructions which cannot be
1061 guaranteed to work properly due to a hardware bug. */
1065 if (GET_CODE (op
) != MEM
)
1068 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1070 mask
= GET_MODE_SIZE (mode
) - 1;
1072 addr
= XEXP (op
, 0);
1073 if (GET_CODE (addr
) == CONST
)
1074 addr
= XEXP (addr
, 0);
1076 switch (GET_CODE (addr
))
1082 return SYMBOL_REF_TDA_P (addr
);
1085 return REGNO (addr
) == EP_REGNUM
;
1088 op0
= XEXP (addr
, 0);
1089 op1
= XEXP (addr
, 1);
1090 if (GET_CODE (op1
) == CONST_INT
1091 && INTVAL (op1
) < max_offset
1092 && INTVAL (op1
) >= 0
1093 && (INTVAL (op1
) & mask
) == 0)
1095 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1098 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1107 /* Substitute memory references involving a pointer, to use the ep pointer,
1108 taking care to save and preserve the ep. */
1111 substitute_ep_register (rtx_insn
*first_insn
,
1112 rtx_insn
*last_insn
,
1118 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1123 df_set_regs_ever_live (1, true);
1124 *p_r1
= gen_rtx_REG (Pmode
, 1);
1125 *p_ep
= gen_rtx_REG (Pmode
, 30);
1130 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1131 2 * (uses
- 3), uses
, reg_names
[regno
],
1132 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1133 INSN_UID (first_insn
), INSN_UID (last_insn
));
1135 if (NOTE_P (first_insn
))
1136 first_insn
= next_nonnote_insn (first_insn
);
1138 last_insn
= next_nonnote_insn (last_insn
);
1139 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1141 if (NONJUMP_INSN_P (insn
))
1143 rtx pattern
= single_set (insn
);
1145 /* Replace the memory references. */
1149 /* Memory operands are signed by default. */
1150 int unsignedp
= FALSE
;
1152 if (GET_CODE (SET_DEST (pattern
)) == MEM
1153 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1156 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1157 p_mem
= &SET_DEST (pattern
);
1159 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1160 p_mem
= &SET_SRC (pattern
);
1162 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1163 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1164 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1166 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1167 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1169 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1177 rtx addr
= XEXP (*p_mem
, 0);
1179 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1180 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1182 else if (GET_CODE (addr
) == PLUS
1183 && GET_CODE (XEXP (addr
, 0)) == REG
1184 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1185 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1186 && ((INTVAL (XEXP (addr
, 1)))
1187 < ep_memory_offset (GET_MODE (*p_mem
),
1189 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1190 *p_mem
= change_address (*p_mem
, VOIDmode
,
1191 gen_rtx_PLUS (Pmode
,
1199 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1200 insn
= prev_nonnote_insn (first_insn
);
1201 if (insn
&& NONJUMP_INSN_P (insn
)
1202 && GET_CODE (PATTERN (insn
)) == SET
1203 && SET_DEST (PATTERN (insn
)) == *p_ep
1204 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1207 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1209 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1210 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1214 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1215 the -mep mode to copy heavily used pointers to ep to use the implicit
1224 rtx_insn
*first_insn
;
1225 rtx_insn
*last_insn
;
1227 regs
[FIRST_PSEUDO_REGISTER
];
1236 /* If not ep mode, just return now. */
1240 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1243 regs
[i
].first_insn
= NULL
;
1244 regs
[i
].last_insn
= NULL
;
1247 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1249 switch (GET_CODE (insn
))
1251 /* End of basic block */
1258 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1260 if (max_uses
< regs
[i
].uses
)
1262 max_uses
= regs
[i
].uses
;
1268 substitute_ep_register (regs
[max_regno
].first_insn
,
1269 regs
[max_regno
].last_insn
,
1270 max_uses
, max_regno
, &r1
, &ep
);
1274 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1277 regs
[i
].first_insn
= NULL
;
1278 regs
[i
].last_insn
= NULL
;
1286 pattern
= single_set (insn
);
1288 /* See if there are any memory references we can shorten. */
1291 rtx src
= SET_SRC (pattern
);
1292 rtx dest
= SET_DEST (pattern
);
1294 /* Memory operands are signed by default. */
1295 int unsignedp
= FALSE
;
1297 /* We might have (SUBREG (MEM)) here, so just get rid of the
1298 subregs to make this code simpler. */
1299 if (GET_CODE (dest
) == SUBREG
1300 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1301 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1302 alter_subreg (&dest
, false);
1303 if (GET_CODE (src
) == SUBREG
1304 && (GET_CODE (SUBREG_REG (src
)) == MEM
1305 || GET_CODE (SUBREG_REG (src
)) == REG
))
1306 alter_subreg (&src
, false);
1308 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1311 else if (GET_CODE (dest
) == MEM
)
1314 else if (GET_CODE (src
) == MEM
)
1317 else if (GET_CODE (src
) == SIGN_EXTEND
1318 && GET_CODE (XEXP (src
, 0)) == MEM
)
1319 mem
= XEXP (src
, 0);
1321 else if (GET_CODE (src
) == ZERO_EXTEND
1322 && GET_CODE (XEXP (src
, 0)) == MEM
)
1324 mem
= XEXP (src
, 0);
1330 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1333 else if (!use_ep
&& mem
1334 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1336 rtx addr
= XEXP (mem
, 0);
1340 if (GET_CODE (addr
) == REG
)
1343 regno
= REGNO (addr
);
1346 else if (GET_CODE (addr
) == PLUS
1347 && GET_CODE (XEXP (addr
, 0)) == REG
1348 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1349 && ((INTVAL (XEXP (addr
, 1)))
1350 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1351 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1354 regno
= REGNO (XEXP (addr
, 0));
1363 regs
[regno
].last_insn
= insn
;
1364 if (!regs
[regno
].first_insn
)
1365 regs
[regno
].first_insn
= insn
;
1369 /* Loading up a register in the basic block zaps any savings
1371 if (GET_CODE (dest
) == REG
)
1373 machine_mode mode
= GET_MODE (dest
);
1377 regno
= REGNO (dest
);
1378 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1382 /* See if we can use the pointer before this
1387 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1389 if (max_uses
< regs
[i
].uses
)
1391 max_uses
= regs
[i
].uses
;
1397 && max_regno
>= regno
1398 && max_regno
< endregno
)
1400 substitute_ep_register (regs
[max_regno
].first_insn
,
1401 regs
[max_regno
].last_insn
,
1402 max_uses
, max_regno
, &r1
,
1405 /* Since we made a substitution, zap all remembered
1407 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1410 regs
[i
].first_insn
= NULL
;
1411 regs
[i
].last_insn
= NULL
;
1416 for (i
= regno
; i
< endregno
; i
++)
1419 regs
[i
].first_insn
= NULL
;
1420 regs
[i
].last_insn
= NULL
;
1428 /* # of registers saved by the interrupt handler. */
1429 #define INTERRUPT_FIXED_NUM 5
1431 /* # of bytes for registers saved by the interrupt handler. */
1432 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1434 /* # of words saved for other registers. */
1435 #define INTERRUPT_ALL_SAVE_NUM \
1436 (30 - INTERRUPT_FIXED_NUM)
/* Byte size of the "save all registers" area: 4 bytes per saved word.  */
1438 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1441 compute_register_save_size (long * p_reg_saved
)
1445 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1446 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1449 /* Count space for the register saves. */
1450 if (interrupt_handler
)
1452 for (i
= 0; i
<= 31; i
++)
1456 if (df_regs_ever_live_p (i
) || call_p
)
1459 reg_saved
|= 1L << i
;
1463 /* We don't save/restore r0 or the stack pointer */
1465 case STACK_POINTER_REGNUM
:
1468 /* For registers with fixed use, we save them, set them to the
1469 appropriate value, and then restore them.
1470 These registers are handled specially, so don't list them
1471 on the list of registers to save in the prologue. */
1472 case 1: /* temp used to hold ep */
1474 case 10: /* temp used to call interrupt save/restore */
1475 case 11: /* temp used to call interrupt save/restore (long call) */
1476 case EP_REGNUM
: /* ep */
1483 /* Find the first register that needs to be saved. */
1484 for (i
= 0; i
<= 31; i
++)
1485 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1486 || i
== LINK_POINTER_REGNUM
))
1489 /* If it is possible that an out-of-line helper function might be
1490 used to generate the prologue for the current function, then we
1491 need to cover the possibility that such a helper function will
1492 be used, despite the fact that there might be gaps in the list of
1493 registers that need to be saved. To detect this we note that the
1494 helper functions always push at least register r29 (provided
1495 that the function is not an interrupt handler). */
1497 if (TARGET_PROLOG_FUNCTION
1498 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1503 reg_saved
|= 1L << i
;
1508 /* Helper functions save all registers between the starting
1509 register and the last register, regardless of whether they
1510 are actually used by the function or not. */
1511 for (; i
<= 29; i
++)
1514 reg_saved
|= 1L << i
;
1517 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1520 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1525 for (; i
<= 31; i
++)
1526 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1527 || i
== LINK_POINTER_REGNUM
))
1530 reg_saved
|= 1L << i
;
1536 *p_reg_saved
= reg_saved
;
1541 /* Typical stack layout should looks like this after the function's prologue:
1546 | | arguments saved | Increasing
1547 | | on the stack | addresses
1548 PARENT arg pointer -> | | /
1549 -------------------------- ---- -------------------
1550 | | - space for argument split between regs & stack
1552 CHILD | | \ <-- (return address here)
1557 frame pointer -> | | \ ___
1564 | | arguments | | Decreasing
1565 (hard) frame pointer | | / | | addresses
1566 and stack pointer -> | | / _|_ |
1567 -------------------------- ---- ------------------ V */
1570 compute_frame_size (int size
, long * p_reg_saved
)
1573 + compute_register_save_size (p_reg_saved
)
1574 + crtl
->outgoing_args_size
);
1578 use_prolog_function (int num_save
, int frame_size
)
1580 int alloc_stack
= (4 * num_save
);
1581 int unalloc_stack
= frame_size
- alloc_stack
;
1582 int save_func_len
, restore_func_len
;
1583 int save_normal_len
, restore_normal_len
;
1585 if (! TARGET_DISABLE_CALLT
)
1586 save_func_len
= restore_func_len
= 2;
1588 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1592 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1593 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1596 /* See if we would have used ep to save the stack. */
1597 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1598 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1600 save_normal_len
= restore_normal_len
= 4 * num_save
;
1602 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1603 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1605 /* Don't bother checking if we don't actually save any space.
1606 This happens for instance if one register is saved and additional
1607 stack space is allocated. */
1608 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1612 increment_stack (signed int amount
, bool in_prologue
)
1619 inc
= GEN_INT (amount
);
1621 if (! CONST_OK_FOR_K (amount
))
1623 rtx reg
= gen_rtx_REG (Pmode
, 12);
1625 inc
= emit_move_insn (reg
, inc
);
1631 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1637 expand_prologue (void)
1640 unsigned int size
= get_frame_size ();
1641 unsigned int actual_fsize
;
1642 unsigned int init_stack_alloc
= 0;
1645 unsigned int num_save
;
1647 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1650 actual_fsize
= compute_frame_size (size
, ®_saved
);
1652 if (flag_stack_usage_info
)
1653 current_function_static_stack_size
= actual_fsize
;
1655 /* Save/setup global registers for interrupt functions right now. */
1656 if (interrupt_handler
)
1658 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1659 emit_insn (gen_callt_save_interrupt ());
1661 emit_insn (gen_save_interrupt ());
1663 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1665 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1666 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1668 /* Interrupt functions are not passed arguments, so no need to
1669 allocate space for split structure arguments. */
1670 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1673 /* Identify all of the saved registers. */
1675 for (i
= 1; i
< 32; i
++)
1677 if (((1L << i
) & reg_saved
) != 0)
1678 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1681 if (crtl
->args
.pretend_args_size
)
1685 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1689 increment_stack (- crtl
->args
.pretend_args_size
, true);
1692 /* See if we have an insn that allocates stack space and saves the particular
1693 registers we want to. Note that the helpers won't
1694 allocate additional space for registers GCC saves to complete a
1695 "split" structure argument. */
1696 save_all
= NULL_RTX
;
1697 if (TARGET_PROLOG_FUNCTION
1698 && !crtl
->args
.pretend_args_size
1701 if (use_prolog_function (num_save
, actual_fsize
))
1703 int alloc_stack
= 4 * num_save
;
1706 save_all
= gen_rtx_PARALLEL
1708 rtvec_alloc (num_save
+ 1
1709 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1711 XVECEXP (save_all
, 0, 0)
1712 = gen_rtx_SET (stack_pointer_rtx
,
1713 gen_rtx_PLUS (Pmode
,
1715 GEN_INT(-alloc_stack
)));
1716 for (i
= 0; i
< num_save
; i
++)
1719 XVECEXP (save_all
, 0, i
+1)
1720 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1721 gen_rtx_PLUS (Pmode
,
1727 if (TARGET_DISABLE_CALLT
)
1729 XVECEXP (save_all
, 0, num_save
+ 1)
1730 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1732 if (TARGET_LONG_CALLS
)
1733 XVECEXP (save_all
, 0, num_save
+ 2)
1734 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1737 v850_all_frame_related (save_all
);
1739 code
= recog (save_all
, NULL_RTX
, NULL
);
1742 rtx insn
= emit_insn (save_all
);
1743 INSN_CODE (insn
) = code
;
1744 actual_fsize
-= alloc_stack
;
1748 save_all
= NULL_RTX
;
1752 /* If no prolog save function is available, store the registers the old
1753 fashioned way (one by one). */
1756 /* Special case interrupt functions that save all registers for a call. */
1757 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1759 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1760 emit_insn (gen_callt_save_all_interrupt ());
1762 emit_insn (gen_save_all_interrupt ());
1767 /* If the stack is too big, allocate it in chunks so we can do the
1768 register saves. We use the register save size so we use the ep
1770 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1771 init_stack_alloc
= compute_register_save_size (NULL
);
1773 init_stack_alloc
= actual_fsize
;
1775 /* Save registers at the beginning of the stack frame. */
1776 offset
= init_stack_alloc
- 4;
1778 if (init_stack_alloc
)
1779 increment_stack (- (signed) init_stack_alloc
, true);
1781 /* Save the return pointer first. */
1782 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1784 F (emit_move_insn (gen_rtx_MEM (SImode
,
1785 plus_constant (Pmode
,
1788 save_regs
[--num_save
]));
1792 for (i
= 0; i
< num_save
; i
++)
1794 F (emit_move_insn (gen_rtx_MEM (SImode
,
1795 plus_constant (Pmode
,
1804 /* Allocate the rest of the stack that was not allocated above (either it is
1805 > 32K or we just called a function to save the registers and needed more
1807 if (actual_fsize
> init_stack_alloc
)
1808 increment_stack (init_stack_alloc
- actual_fsize
, true);
1810 /* If we need a frame pointer, set it up now. */
1811 if (frame_pointer_needed
)
1812 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1817 expand_epilogue (void)
1820 unsigned int size
= get_frame_size ();
1822 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1823 rtx restore_regs
[32];
1825 unsigned int num_restore
;
1827 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1829 /* Eliminate the initial stack stored by interrupt functions. */
1830 if (interrupt_handler
)
1832 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1833 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1834 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1837 /* Cut off any dynamic stack created. */
1838 if (frame_pointer_needed
)
1839 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1841 /* Identify all of the saved registers. */
1843 for (i
= 1; i
< 32; i
++)
1845 if (((1L << i
) & reg_saved
) != 0)
1846 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1849 /* See if we have an insn that restores the particular registers we
1851 restore_all
= NULL_RTX
;
1853 if (TARGET_PROLOG_FUNCTION
1855 && !crtl
->args
.pretend_args_size
1856 && !interrupt_handler
)
1858 int alloc_stack
= (4 * num_restore
);
1860 /* Don't bother checking if we don't actually save any space. */
1861 if (use_prolog_function (num_restore
, actual_fsize
))
1864 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1865 rtvec_alloc (num_restore
+ 2));
1866 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1867 XVECEXP (restore_all
, 0, 1)
1868 = gen_rtx_SET (stack_pointer_rtx
,
1869 gen_rtx_PLUS (Pmode
,
1871 GEN_INT (alloc_stack
)));
1873 offset
= alloc_stack
- 4;
1874 for (i
= 0; i
< num_restore
; i
++)
1876 XVECEXP (restore_all
, 0, i
+2)
1877 = gen_rtx_SET (restore_regs
[i
],
1879 gen_rtx_PLUS (Pmode
,
1885 code
= recog (restore_all
, NULL_RTX
, NULL
);
1891 actual_fsize
-= alloc_stack
;
1892 increment_stack (actual_fsize
, false);
1894 insn
= emit_jump_insn (restore_all
);
1895 INSN_CODE (insn
) = code
;
1898 restore_all
= NULL_RTX
;
1902 /* If no epilogue save function is available, restore the registers the
1903 old fashioned way (one by one). */
1906 unsigned int init_stack_free
;
1908 /* If the stack is large, we need to cut it down in 2 pieces. */
1909 if (interrupt_handler
)
1910 init_stack_free
= 0;
1911 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1912 init_stack_free
= 4 * num_restore
;
1914 init_stack_free
= (signed) actual_fsize
;
1916 /* Deallocate the rest of the stack if it is > 32K. */
1917 if ((unsigned int) actual_fsize
> init_stack_free
)
1918 increment_stack (actual_fsize
- init_stack_free
, false);
1920 /* Special case interrupt functions that save all registers
1922 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1924 if (! TARGET_DISABLE_CALLT
)
1925 emit_insn (gen_callt_restore_all_interrupt ());
1927 emit_insn (gen_restore_all_interrupt ());
1931 /* Restore registers from the beginning of the stack frame. */
1932 int offset
= init_stack_free
- 4;
1934 /* Restore the return pointer first. */
1936 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1938 emit_move_insn (restore_regs
[--num_restore
],
1939 gen_rtx_MEM (SImode
,
1940 plus_constant (Pmode
,
1946 for (i
= 0; i
< num_restore
; i
++)
1948 emit_move_insn (restore_regs
[i
],
1949 gen_rtx_MEM (SImode
,
1950 plus_constant (Pmode
,
1954 emit_use (restore_regs
[i
]);
1958 /* Cut back the remainder of the stack. */
1959 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1963 /* And return or use reti for interrupt handlers. */
1964 if (interrupt_handler
)
1966 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1967 emit_insn (gen_callt_return_interrupt ());
1969 emit_jump_insn (gen_return_interrupt ());
1971 else if (actual_fsize
)
1972 emit_jump_insn (gen_return_internal ());
1974 emit_jump_insn (gen_return_simple ());
1977 v850_interrupt_cache_p
= FALSE
;
1978 v850_interrupt_p
= FALSE
;
1981 /* Update the condition code from the insn. */
1983 notice_update_cc (rtx body
, rtx_insn
*insn
)
1985 switch (get_attr_cc (insn
))
1988 /* Insn does not affect CC at all. */
1992 /* Insn does not change CC, but the 0'th operand has been changed. */
1993 if (cc_status
.value1
!= 0
1994 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
1995 cc_status
.value1
= 0;
1999 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2000 V,C is in an unusable state. */
2002 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2003 cc_status
.value1
= recog_data
.operand
[0];
2007 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2008 C is in an unusable state. */
2010 cc_status
.flags
|= CC_NO_CARRY
;
2011 cc_status
.value1
= recog_data
.operand
[0];
2015 /* The insn is a compare instruction. */
2017 cc_status
.value1
= SET_SRC (body
);
2021 /* Insn doesn't leave CC in a usable state. */
2030 /* Retrieve the data area that has been chosen for the given decl. */
2033 v850_get_data_area (tree decl
)
2035 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2036 return DATA_AREA_SDA
;
2038 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2039 return DATA_AREA_TDA
;
2041 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2042 return DATA_AREA_ZDA
;
2044 return DATA_AREA_NORMAL
;
2047 /* Store the indicated data area in the decl's attributes. */
2050 v850_set_data_area (tree decl
, v850_data_area data_area
)
2056 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2057 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2058 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2063 DECL_ATTRIBUTES (decl
) = tree_cons
2064 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2067 /* Handle an "interrupt" attribute; arguments as in
2068 struct attribute_spec.handler. */
2070 v850_handle_interrupt_attribute (tree
* node
,
2072 tree args ATTRIBUTE_UNUSED
,
2073 int flags ATTRIBUTE_UNUSED
,
2074 bool * no_add_attrs
)
2076 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2078 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2080 *no_add_attrs
= true;
2086 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2087 struct attribute_spec.handler. */
2089 v850_handle_data_area_attribute (tree
* node
,
2091 tree args ATTRIBUTE_UNUSED
,
2092 int flags ATTRIBUTE_UNUSED
,
2093 bool * no_add_attrs
)
2095 v850_data_area data_area
;
2096 v850_data_area area
;
2099 /* Implement data area attribute. */
2100 if (is_attribute_p ("sda", name
))
2101 data_area
= DATA_AREA_SDA
;
2102 else if (is_attribute_p ("tda", name
))
2103 data_area
= DATA_AREA_TDA
;
2104 else if (is_attribute_p ("zda", name
))
2105 data_area
= DATA_AREA_ZDA
;
2109 switch (TREE_CODE (decl
))
2112 if (current_function_decl
!= NULL_TREE
)
2114 error_at (DECL_SOURCE_LOCATION (decl
),
2115 "data area attributes cannot be specified for "
2117 *no_add_attrs
= true;
2123 area
= v850_get_data_area (decl
);
2124 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2126 error ("data area of %q+D conflicts with previous declaration",
2128 *no_add_attrs
= true;
2140 /* Return nonzero if FUNC is an interrupt function as specified
2141 by the "interrupt" attribute. */
2144 v850_interrupt_function_p (tree func
)
2149 if (v850_interrupt_cache_p
)
2150 return v850_interrupt_p
;
2152 if (TREE_CODE (func
) != FUNCTION_DECL
)
2155 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2161 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2162 ret
= a
!= NULL_TREE
;
2165 /* Its not safe to trust global variables until after function inlining has
2167 if (reload_completed
| reload_in_progress
)
2168 v850_interrupt_p
= ret
;
2175 v850_encode_data_area (tree decl
, rtx symbol
)
2179 /* Map explicit sections into the appropriate attribute */
2180 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2182 if (DECL_SECTION_NAME (decl
))
2184 const char *name
= DECL_SECTION_NAME (decl
);
2186 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2187 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2189 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2190 v850_set_data_area (decl
, DATA_AREA_SDA
);
2192 else if (streq (name
, ".tdata"))
2193 v850_set_data_area (decl
, DATA_AREA_TDA
);
2196 /* If no attribute, support -m{zda,sda,tda}=n */
2199 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2203 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2204 v850_set_data_area (decl
, DATA_AREA_TDA
);
2206 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2207 v850_set_data_area (decl
, DATA_AREA_SDA
);
2209 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2210 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2213 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2217 flags
= SYMBOL_REF_FLAGS (symbol
);
2218 switch (v850_get_data_area (decl
))
2220 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2221 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2222 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2223 default: gcc_unreachable ();
2225 SYMBOL_REF_FLAGS (symbol
) = flags
;
2229 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2231 default_encode_section_info (decl
, rtl
, first
);
2233 if (TREE_CODE (decl
) == VAR_DECL
2234 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2235 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2238 /* Construct a JR instruction to a routine that will perform the equivalent of
2239 the RTL passed in as an argument. This RTL is a function epilogue that
2240 pops registers off the stack and possibly releases some extra stack space
2241 as well. The code has already verified that the RTL matches these
2245 construct_restore_jr (rtx op
)
2247 int count
= XVECLEN (op
, 0);
2249 unsigned long int mask
;
2250 unsigned long int first
;
2251 unsigned long int last
;
2253 static char buff
[100]; /* XXX */
2257 error ("bogus JR construction: %d", count
);
2261 /* Work out how many bytes to pop off the stack before retrieving
2263 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2264 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2265 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2267 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2269 /* Each pop will remove 4 bytes from the stack.... */
2270 stack_bytes
-= (count
- 2) * 4;
2272 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2273 if (stack_bytes
!= 0)
2275 error ("bad amount of stack space removal: %d", stack_bytes
);
2279 /* Now compute the bit mask of registers to push. */
2281 for (i
= 2; i
< count
; i
++)
2283 rtx vector_element
= XVECEXP (op
, 0, i
);
2285 gcc_assert (GET_CODE (vector_element
) == SET
);
2286 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2287 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2290 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2293 /* Scan for the first register to pop. */
2294 for (first
= 0; first
< 32; first
++)
2296 if (mask
& (1 << first
))
2300 gcc_assert (first
< 32);
2302 /* Discover the last register to pop. */
2303 if (mask
& (1 << LINK_POINTER_REGNUM
))
2305 last
= LINK_POINTER_REGNUM
;
2309 gcc_assert (!stack_bytes
);
2310 gcc_assert (mask
& (1 << 29));
2315 /* Note, it is possible to have gaps in the register mask.
2316 We ignore this here, and generate a JR anyway. We will
2317 be popping more registers than is strictly necessary, but
2318 it does save code space. */
2320 if (TARGET_LONG_CALLS
)
2325 sprintf (name
, "__return_%s", reg_names
[first
]);
2327 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2329 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2335 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2337 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2344 /* Construct a JARL instruction to a routine that will perform the equivalent
2345 of the RTL passed as a parameter. This RTL is a function prologue that
2346 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2347 some stack space as well. The code has already verified that the RTL
2348 matches these requirements. */
2350 construct_save_jarl (rtx op
)
2352 int count
= XVECLEN (op
, 0);
2354 unsigned long int mask
;
2355 unsigned long int first
;
2356 unsigned long int last
;
2358 static char buff
[100]; /* XXX */
2360 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2362 error ("bogus JARL construction: %d", count
);
2367 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2368 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2369 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2370 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2372 /* Work out how many bytes to push onto the stack after storing the
2374 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2376 /* Each push will put 4 bytes from the stack.... */
2377 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2379 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2380 if (stack_bytes
!= 0)
2382 error ("bad amount of stack space removal: %d", stack_bytes
);
2386 /* Now compute the bit mask of registers to push. */
2388 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2390 rtx vector_element
= XVECEXP (op
, 0, i
);
2392 gcc_assert (GET_CODE (vector_element
) == SET
);
2393 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2394 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2397 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2400 /* Scan for the first register to push. */
2401 for (first
= 0; first
< 32; first
++)
2403 if (mask
& (1 << first
))
2407 gcc_assert (first
< 32);
2409 /* Discover the last register to push. */
2410 if (mask
& (1 << LINK_POINTER_REGNUM
))
2412 last
= LINK_POINTER_REGNUM
;
2416 gcc_assert (!stack_bytes
);
2417 gcc_assert (mask
& (1 << 29));
2422 /* Note, it is possible to have gaps in the register mask.
2423 We ignore this here, and generate a JARL anyway. We will
2424 be pushing more registers than is strictly necessary, but
2425 it does save code space. */
2427 if (TARGET_LONG_CALLS
)
2432 sprintf (name
, "__save_%s", reg_names
[first
]);
2434 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2436 if (TARGET_V850E3V5_UP
)
2437 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2439 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2445 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2447 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2454 /* A version of asm_output_aligned_bss() that copes with the special
2455 data areas of the v850. */
2457 v850_output_aligned_bss (FILE * file
,
2460 unsigned HOST_WIDE_INT size
,
2463 switch (v850_get_data_area (decl
))
2466 switch_to_section (zbss_section
);
2470 switch_to_section (sbss_section
);
2474 switch_to_section (tdata_section
);
2477 switch_to_section (bss_section
);
2481 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2482 #ifdef ASM_DECLARE_OBJECT_NAME
2483 last_assemble_variable_decl
= decl
;
2484 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2486 /* Standard thing is just output label for the object. */
2487 ASM_OUTPUT_LABEL (file
, name
);
2488 #endif /* ASM_DECLARE_OBJECT_NAME */
2489 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2492 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2494 v850_output_common (FILE * file
,
2500 if (decl
== NULL_TREE
)
2502 fprintf (file
, "%s", COMMON_ASM_OP
);
2506 switch (v850_get_data_area (decl
))
2509 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2513 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2517 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2521 fprintf (file
, "%s", COMMON_ASM_OP
);
2526 assemble_name (file
, name
);
2527 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2530 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2532 v850_output_local (FILE * file
,
2538 fprintf (file
, "%s", LOCAL_ASM_OP
);
2539 assemble_name (file
, name
);
2540 fprintf (file
, "\n");
2542 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2545 /* Add data area to the given declaration if a ghs data area pragma is
2546 currently in effect (#pragma ghs startXXX/endXXX). */
2548 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2551 && data_area_stack
->data_area
2552 && current_function_decl
== NULL_TREE
2553 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2554 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2555 v850_set_data_area (decl
, data_area_stack
->data_area
);
2557 /* Initialize the default names of the v850 specific sections,
2558 if this has not been done before. */
2560 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2562 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2565 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2568 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2571 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2574 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2578 if (current_function_decl
== NULL_TREE
2579 && (TREE_CODE (decl
) == VAR_DECL
2580 || TREE_CODE (decl
) == CONST_DECL
2581 || TREE_CODE (decl
) == FUNCTION_DECL
)
2582 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2583 && !DECL_SECTION_NAME (decl
))
2585 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2586 const char * chosen_section
;
2588 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2589 kind
= GHS_SECTION_KIND_TEXT
;
2592 /* First choose a section kind based on the data area of the decl. */
2593 switch (v850_get_data_area (decl
))
2599 kind
= ((TREE_READONLY (decl
))
2600 ? GHS_SECTION_KIND_ROSDATA
2601 : GHS_SECTION_KIND_SDATA
);
2605 kind
= GHS_SECTION_KIND_TDATA
;
2609 kind
= ((TREE_READONLY (decl
))
2610 ? GHS_SECTION_KIND_ROZDATA
2611 : GHS_SECTION_KIND_ZDATA
);
2614 case DATA_AREA_NORMAL
: /* default data area */
2615 if (TREE_READONLY (decl
))
2616 kind
= GHS_SECTION_KIND_RODATA
;
2617 else if (DECL_INITIAL (decl
))
2618 kind
= GHS_SECTION_KIND_DATA
;
2620 kind
= GHS_SECTION_KIND_BSS
;
2624 /* Now, if the section kind has been explicitly renamed,
2625 then attach a section attribute. */
2626 chosen_section
= GHS_current_section_names
[(int) kind
];
2628 /* Otherwise, if this kind of section needs an explicit section
2629 attribute, then also attach one. */
2630 if (chosen_section
== NULL
)
2631 chosen_section
= GHS_default_section_names
[(int) kind
];
2635 /* Only set the section name if specified by a pragma, because
2636 otherwise it will force those variables to get allocated storage
2637 in this module, rather than by the linker. */
2638 set_decl_section_name (decl
, chosen_section
);
2643 /* Construct a DISPOSE instruction that is the equivalent of
2644 the given RTX. We have already verified that this should
2648 construct_dispose_instruction (rtx op
)
2650 int count
= XVECLEN (op
, 0);
2652 unsigned long int mask
;
2654 static char buff
[ 100 ]; /* XXX */
2659 error ("bogus DISPOSE construction: %d", count
);
2663 /* Work out how many bytes to pop off the
2664 stack before retrieving registers. */
2665 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2666 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2667 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2669 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2671 /* Each pop will remove 4 bytes from the stack.... */
2672 stack_bytes
-= (count
- 2) * 4;
2674 /* Make sure that the amount we are popping
2675 will fit into the DISPOSE instruction. */
2676 if (stack_bytes
> 128)
2678 error ("too much stack space to dispose of: %d", stack_bytes
);
2682 /* Now compute the bit mask of registers to push. */
2685 for (i
= 2; i
< count
; i
++)
2687 rtx vector_element
= XVECEXP (op
, 0, i
);
2689 gcc_assert (GET_CODE (vector_element
) == SET
);
2690 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2691 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2694 if (REGNO (SET_DEST (vector_element
)) == 2)
2697 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2700 if (! TARGET_DISABLE_CALLT
2701 && (use_callt
|| stack_bytes
== 0))
2705 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2710 for (i
= 20; i
< 32; i
++)
2711 if (mask
& (1 << i
))
2715 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2717 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2718 i
, (mask
& (1 << 31)) ? "31c" : "29");
2723 static char regs
[100]; /* XXX */
2726 /* Generate the DISPOSE instruction. Note we could just issue the
2727 bit mask as a number as the assembler can cope with this, but for
2728 the sake of our readers we turn it into a textual description. */
2732 for (i
= 20; i
< 32; i
++)
2734 if (mask
& (1 << i
))
2739 strcat (regs
, ", ");
2744 strcat (regs
, reg_names
[ first
]);
2746 for (i
++; i
< 32; i
++)
2747 if ((mask
& (1 << i
)) == 0)
2752 strcat (regs
, " - ");
2753 strcat (regs
, reg_names
[ i
- 1 ] );
2758 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2764 /* Construct a PREPARE instruction that is the equivalent of
2765 the given RTL. We have already verified that this should
2769 construct_prepare_instruction (rtx op
)
2773 unsigned long int mask
;
2775 static char buff
[ 100 ]; /* XXX */
2778 if (XVECLEN (op
, 0) <= 1)
2780 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2784 /* Work out how many bytes to push onto
2785 the stack after storing the registers. */
2786 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2787 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2788 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2790 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2793 /* Make sure that the amount we are popping
2794 will fit into the DISPOSE instruction. */
2795 if (stack_bytes
< -128)
2797 error ("too much stack space to prepare: %d", stack_bytes
);
2801 /* Now compute the bit mask of registers to push. */
2804 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2806 rtx vector_element
= XVECEXP (op
, 0, i
);
2808 if (GET_CODE (vector_element
) == CLOBBER
)
2811 gcc_assert (GET_CODE (vector_element
) == SET
);
2812 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2813 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2816 if (REGNO (SET_SRC (vector_element
)) == 2)
2819 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2823 stack_bytes
+= count
* 4;
2825 if ((! TARGET_DISABLE_CALLT
)
2826 && (use_callt
|| stack_bytes
== 0))
2830 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2834 for (i
= 20; i
< 32; i
++)
2835 if (mask
& (1 << i
))
2839 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2841 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2842 i
, (mask
& (1 << 31)) ? "31c" : "29");
2846 static char regs
[100]; /* XXX */
2850 /* Generate the PREPARE instruction. Note we could just issue the
2851 bit mask as a number as the assembler can cope with this, but for
2852 the sake of our readers we turn it into a textual description. */
2856 for (i
= 20; i
< 32; i
++)
2858 if (mask
& (1 << i
))
2863 strcat (regs
, ", ");
2868 strcat (regs
, reg_names
[ first
]);
2870 for (i
++; i
< 32; i
++)
2871 if ((mask
& (1 << i
)) == 0)
2876 strcat (regs
, " - ");
2877 strcat (regs
, reg_names
[ i
- 1 ] );
2882 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2888 /* Return an RTX indicating where the return address to the
2889 calling function can be found. */
2892 v850_return_addr (int count
)
2897 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2900 /* Implement TARGET_ASM_INIT_SECTIONS. */
2903 v850_asm_init_sections (void)
2906 = get_unnamed_section (0, output_section_asm_op
,
2907 "\t.section .rosdata,\"a\"");
2910 = get_unnamed_section (0, output_section_asm_op
,
2911 "\t.section .rozdata,\"a\"");
2914 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2915 "\t.section .tdata,\"aw\"");
2918 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2919 "\t.section .zdata,\"aw\"");
2922 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2923 output_section_asm_op
,
2924 "\t.section .zbss,\"aw\"");
2928 v850_select_section (tree exp
,
2929 int reloc ATTRIBUTE_UNUSED
,
2930 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2932 if (TREE_CODE (exp
) == VAR_DECL
)
2935 if (!TREE_READONLY (exp
)
2936 || TREE_SIDE_EFFECTS (exp
)
2937 || !DECL_INITIAL (exp
)
2938 || (DECL_INITIAL (exp
) != error_mark_node
2939 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2944 switch (v850_get_data_area (exp
))
2947 return is_const
? rozdata_section
: zdata_section
;
2950 return tdata_section
;
2953 return is_const
? rosdata_section
: sdata_section
;
2956 return is_const
? readonly_data_section
: data_section
;
2959 return readonly_data_section
;
2962 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2965 v850_function_value_regno_p (const unsigned int regno
)
2967 return (regno
== RV_REGNUM
);
2970 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2973 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2975 /* Return values > 8 bytes in length in memory. */
2976 return int_size_in_bytes (type
) > 8
2977 || TYPE_MODE (type
) == BLKmode
2978 /* With the rh850 ABI return all aggregates in memory. */
2979 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
2983 /* Worker function for TARGET_FUNCTION_VALUE. */
2986 v850_function_value (const_tree valtype
,
2987 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2988 bool outgoing ATTRIBUTE_UNUSED
)
2990 return gen_rtx_REG (TYPE_MODE (valtype
), RV_REGNUM
);
2993 /* Implement TARGET_LIBCALL_VALUE. */
2996 v850_libcall_value (machine_mode mode
,
2997 const_rtx func ATTRIBUTE_UNUSED
)
2999 return gen_rtx_REG (mode
, RV_REGNUM
);
3003 /* Worker function for TARGET_CAN_ELIMINATE. */
3006 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3008 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3011 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3013 If TARGET_APP_REGS is not defined then add r2 and r5 to
3014 the pool of fixed registers. See PR 14505. */
3017 v850_conditional_register_usage (void)
3019 if (TARGET_APP_REGS
)
3021 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3022 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the
   trampoline code; the two trailing .long slots are patched at
   runtime with the static chain and target address.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3040 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3043 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3045 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3047 emit_block_move (m_tramp
, assemble_trampoline_template (),
3048 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3050 mem
= adjust_address (m_tramp
, SImode
, 16);
3051 emit_move_insn (mem
, chain_value
);
3052 mem
= adjust_address (m_tramp
, SImode
, 20);
3053 emit_move_insn (mem
, fnaddr
);
3057 v850_issue_rate (void)
3059 return (TARGET_V850E2_UP
? 2 : 1);
3062 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3065 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3067 return (GET_CODE (x
) == CONST_DOUBLE
3068 || !(GET_CODE (x
) == CONST
3069 && GET_CODE (XEXP (x
, 0)) == PLUS
3070 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3071 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3072 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3075 /* Helper function for `v850_legitimate_address_p'. */
3078 v850_reg_ok_for_base_p (const_rtx reg
, bool strict_p
)
3082 return REGNO_OK_FOR_BASE_P (REGNO (reg
));
3088 /* Accept either REG or SUBREG where a register is valid. */
3091 v850_rtx_ok_for_base_p (const_rtx x
, bool strict_p
)
3093 return ((REG_P (x
) && v850_reg_ok_for_base_p (x
, strict_p
))
3094 || (SUBREG_P (x
) && REG_P (SUBREG_REG (x
))
3095 && v850_reg_ok_for_base_p (SUBREG_REG (x
), strict_p
)));
3098 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
3101 v850_legitimate_address_p (machine_mode mode
, rtx x
, bool strict_p
,
3102 addr_space_t as ATTRIBUTE_UNUSED
)
3104 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
3106 if (v850_rtx_ok_for_base_p (x
, strict_p
))
3108 if (CONSTANT_ADDRESS_P (x
)
3109 && (mode
== QImode
|| INTVAL (x
) % 2 == 0)
3110 && (GET_MODE_SIZE (mode
) <= 4 || INTVAL (x
) % 4 == 0))
3112 if (GET_CODE (x
) == LO_SUM
3113 && REG_P (XEXP (x
, 0))
3114 && v850_reg_ok_for_base_p (XEXP (x
, 0), strict_p
)
3115 && CONSTANT_P (XEXP (x
, 1))
3116 && (!CONST_INT_P (XEXP (x
, 1))
3117 || ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
3118 && constraint_satisfied_p (XEXP (x
, 1), CONSTRAINT_K
)))
3119 && GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
))
3121 if (special_symbolref_operand (x
, mode
)
3122 && (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
)))
3124 if (GET_CODE (x
) == PLUS
3125 && v850_rtx_ok_for_base_p (XEXP (x
, 0), strict_p
)
3126 && constraint_satisfied_p (XEXP (x
,1), CONSTRAINT_K
)
3127 && ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
3128 && CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))
3129 + (GET_MODE_NUNITS (mode
) * UNITS_PER_WORD
))))
/* Implement TARGET_MEMORY_MOVE_COST.

   Return the relative cost of moving a value of MODE between a
   register and memory.  IN is true for a load (memory to register),
   false for a store.  REG_CLASS is ignored.
   NOTE(review): the interior case arms were elided in this extract
   and have been reconstructed -- confirm the per-size costs against
   upstream sources.  */

static int
v850_memory_move_cost (machine_mode mode,
		       reg_class_t reg_class ATTRIBUTE_UNUSED,
		       bool in)
{
  switch (GET_MODE_SIZE (mode))
    {
    case 0:
      return (in ? 24 : 8);
    case 1:
    case 2:
    case 3:
    case 4:
      /* Anything up to word size moves with a single access.  */
      return (in ? 6 : 2);
    default:
      /* Wider modes move in pieces; cost scales with the size.  */
      return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
    }
}
/* Adjust LENGTH, the length of INSN as computed from the insn
   attributes, and return the corrected value.  On V850E3V5 and up
   some call patterns assemble to shorter encodings than the
   conservative lengths declared in the machine description.
   NOTE(review): the concrete length substitutions were elided in
   this extract and have been reconstructed -- confirm against
   upstream sources.  */

int
v850_adjust_insn_length (rtx_insn *insn, int length)
{
  if (TARGET_V850E3V5_UP)
    {
      if (CALL_P (insn))
	{
	  if (TARGET_LONG_CALLS)
	    {
	      /* call_internal_long, call_value_internal_long.  */
	      if (length == 8)
		length = 4;
	      if (length == 16)
		length = 10;
	    }
	  else
	    {
	      /* call_internal_short, call_value_internal_short.  */
	      if (length == 8)
		length = 4;
	    }
	}
    }

  return length;
}
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Mark a function as an interrupt handler; both spellings are
     accepted and share one handler.  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* Place a declaration in one of the V850 data areas (small,
     tiny or zero data, respectively).  */
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
/* Implement TARGET_OPTION_OVERRIDE.  Adjust interdependent option
   state once all command line options have been parsed.  */

static void
v850_option_override (void)
{
  /* Keep the frame pointer when exceptions are enabled --
     presumably the unwinder relies on it (TODO confirm).  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;
}
3211 v850_gen_movdi (rtx
* operands
)
3213 if (REG_P (operands
[0]))
3215 if (REG_P (operands
[1]))
3217 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3218 return "mov %1, %0; mov %R1, %R0";
3220 return "mov %R1, %R0; mov %1, %0";
3223 if (MEM_P (operands
[1]))
3225 if (REGNO (operands
[0]) & 1)
3226 /* Use two load word instructions to synthesise a load double. */
3227 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3229 return "ld.dw %1, %0";
3232 return "mov %1, %0; mov %R1, %R0";
3235 gcc_assert (REG_P (operands
[1]));
3237 if (REGNO (operands
[1]) & 1)
3238 /* Use two store word instructions to synthesise a store double. */
3239 return "st.w %1, %0 ; st.w %R1, %R0 ";
3241 return "st.dw %1, %0";
/* Initialize the GCC target structure.  */

/* Option handling and costs.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembler output.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and sections.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* RTL costs and machine-dependent passes.  */

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame layout, registers, trampolines, addresses.  */

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* Instantiate the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collection roots for this file (generated by gengtype
   from the GTY(()) markers above).  */
#include "gt-v850.h"