1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "double-int.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
41 #include "hard-reg-set.h"
42 #include "insn-config.h"
43 #include "conditions.h"
45 #include "insn-attr.h"
50 #include "statistics.h"
52 #include "fixed-value.h"
59 #include "diagnostic-core.h"
63 #include "target-def.h"
64 #include "dominance.h"
70 #include "cfgcleanup.h"
72 #include "basic-block.h"
78 #define streq(a,b) (strcmp (a, b) == 0)
81 static void v850_print_operand_address (FILE *, rtx
);
83 /* Names of the various data areas used on the v850. */
84 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
85 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
87 /* Track the current data area set by the data area pragma (which
88 can be nested). Tested by check_default_data_area. */
89 data_area_stack_element
* data_area_stack
= NULL
;
91 /* True if we don't need to check any more if the current
92 function is an interrupt handler. */
93 static int v850_interrupt_cache_p
= FALSE
;
95 rtx v850_compare_op0
, v850_compare_op1
;
97 /* Whether current function is an interrupt handler. */
98 static int v850_interrupt_p
= FALSE
;
100 static GTY(()) section
* rosdata_section
;
101 static GTY(()) section
* rozdata_section
;
102 static GTY(()) section
* tdata_section
;
103 static GTY(()) section
* zdata_section
;
104 static GTY(()) section
* zbss_section
;
106 /* We use this to wrap all emitted insns in the prologue. */
110 if (GET_CODE (x
) != CLOBBER
)
111 RTX_FRAME_RELATED_P (x
) = 1;
115 /* Mark all the subexpressions of the PARALLEL rtx PAR as
116 frame-related. Return PAR.
118 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
119 PARALLEL rtx other than the first if they do not have the
120 FRAME_RELATED flag set on them. */
123 v850_all_frame_related (rtx par
)
125 int len
= XVECLEN (par
, 0);
128 gcc_assert (GET_CODE (par
) == PARALLEL
);
129 for (i
= 0; i
< len
; i
++)
130 F (XVECEXP (par
, 0, i
));
135 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
136 Specify whether to pass the argument by reference. */
139 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
140 machine_mode mode
, const_tree type
,
141 bool named ATTRIBUTE_UNUSED
)
143 unsigned HOST_WIDE_INT size
;
149 size
= int_size_in_bytes (type
);
151 size
= GET_MODE_SIZE (mode
);
156 /* Return an RTX to represent where an argument with mode MODE
157 and type TYPE will be passed to a function. If the result
158 is NULL_RTX, the argument will be pushed. */
161 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
162 const_tree type
, bool named
)
164 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
165 rtx result
= NULL_RTX
;
172 size
= int_size_in_bytes (type
);
174 size
= GET_MODE_SIZE (mode
);
176 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
180 /* Once we have stopped using argument registers, do not start up again. */
181 cum
->nbytes
= 4 * UNITS_PER_WORD
;
186 align
= UNITS_PER_WORD
;
187 else if (size
<= UNITS_PER_WORD
&& type
)
188 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
192 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
194 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
197 if (type
== NULL_TREE
198 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
201 switch (cum
->nbytes
/ UNITS_PER_WORD
)
204 result
= gen_rtx_REG (mode
, 6);
207 result
= gen_rtx_REG (mode
, 7);
210 result
= gen_rtx_REG (mode
, 8);
213 result
= gen_rtx_REG (mode
, 9);
222 /* Return the number of bytes which must be put into registers
223 for values which are part in registers and part in memory. */
225 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
226 tree type
, bool named
)
228 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
235 size
= int_size_in_bytes (type
);
237 size
= GET_MODE_SIZE (mode
);
243 align
= UNITS_PER_WORD
;
245 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
249 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
251 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
254 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
257 if (type
== NULL_TREE
258 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
261 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
264 /* Update the data in CUM to advance over an argument
265 of mode MODE and data type TYPE.
266 (TYPE is null for libcalls where that information may not be available.) */
269 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
270 const_tree type
, bool named ATTRIBUTE_UNUSED
)
272 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
275 cum
->nbytes
+= (((mode
!= BLKmode
276 ? GET_MODE_SIZE (mode
)
277 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
280 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
281 ? GET_MODE_SIZE (Pmode
)
283 ? GET_MODE_SIZE (mode
)
284 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
288 /* Return the high and low words of a CONST_DOUBLE */
291 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
293 if (GET_CODE (x
) == CONST_DOUBLE
)
298 switch (GET_MODE (x
))
301 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
302 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
303 *p_high
= t
[1]; /* since v850 is little endian */
304 *p_low
= t
[0]; /* high is second word */
308 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
309 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
315 *p_high
= CONST_DOUBLE_HIGH (x
);
316 *p_low
= CONST_DOUBLE_LOW (x
);
324 fatal_insn ("const_double_split got a bad insn:", x
);
328 /* Return the cost of the rtx R with code CODE. */
331 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
333 if (CONST_OK_FOR_I (value
))
335 else if (CONST_OK_FOR_J (value
))
337 else if (CONST_OK_FOR_K (value
))
344 const_costs (rtx r
, enum rtx_code c
)
346 HOST_WIDE_INT high
, low
;
351 return const_costs_int (INTVAL (r
), 0);
354 const_double_split (r
, &high
, &low
);
355 if (GET_MODE (r
) == SFmode
)
356 return const_costs_int (high
, 1);
358 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
374 v850_rtx_costs (rtx x
,
376 int outer_code ATTRIBUTE_UNUSED
,
377 int opno ATTRIBUTE_UNUSED
,
378 int * total
, bool speed
)
380 enum rtx_code code
= (enum rtx_code
) codearg
;
389 *total
= COSTS_N_INSNS (const_costs (x
, code
));
396 if (TARGET_V850E
&& !speed
)
404 && ( GET_MODE (x
) == SImode
405 || GET_MODE (x
) == HImode
406 || GET_MODE (x
) == QImode
))
408 if (GET_CODE (XEXP (x
, 1)) == REG
)
410 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
412 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
414 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
423 if (outer_code
== COMPARE
)
432 /* Print operand X using operand code CODE to assembly language output file
436 v850_print_operand (FILE * file
, rtx x
, int code
)
438 HOST_WIDE_INT high
, low
;
443 /* We use 'c' operands with symbols for .vtinherit. */
444 if (GET_CODE (x
) == SYMBOL_REF
)
446 output_addr_const(file
, x
);
453 switch ((code
== 'B' || code
== 'C')
454 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
457 if (code
== 'c' || code
== 'C')
458 fprintf (file
, "nz");
460 fprintf (file
, "ne");
463 if (code
== 'c' || code
== 'C')
469 fprintf (file
, "ge");
472 fprintf (file
, "gt");
475 fprintf (file
, "le");
478 fprintf (file
, "lt");
481 fprintf (file
, "nl");
487 fprintf (file
, "nh");
496 case 'F': /* High word of CONST_DOUBLE. */
497 switch (GET_CODE (x
))
500 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
504 const_double_split (x
, &high
, &low
);
505 fprintf (file
, "%ld", (long) high
);
512 case 'G': /* Low word of CONST_DOUBLE. */
513 switch (GET_CODE (x
))
516 fprintf (file
, "%ld", (long) INTVAL (x
));
520 const_double_split (x
, &high
, &low
);
521 fprintf (file
, "%ld", (long) low
);
529 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
532 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
535 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
537 if (GET_CODE (x
) == CONST
)
538 x
= XEXP (XEXP (x
, 0), 0);
540 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
542 if (SYMBOL_REF_ZDA_P (x
))
543 fprintf (file
, "zdaoff");
544 else if (SYMBOL_REF_SDA_P (x
))
545 fprintf (file
, "sdaoff");
546 else if (SYMBOL_REF_TDA_P (x
))
547 fprintf (file
, "tdaoff");
552 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
553 output_addr_const (file
, x
);
556 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
558 if (GET_CODE (x
) == CONST
)
559 x
= XEXP (XEXP (x
, 0), 0);
561 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
563 if (SYMBOL_REF_ZDA_P (x
))
564 fprintf (file
, "r0");
565 else if (SYMBOL_REF_SDA_P (x
))
566 fprintf (file
, "gp");
567 else if (SYMBOL_REF_TDA_P (x
))
568 fprintf (file
, "ep");
572 case 'R': /* 2nd word of a double. */
573 switch (GET_CODE (x
))
576 fprintf (file
, reg_names
[REGNO (x
) + 1]);
579 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
580 v850_print_operand_address (file
, x
);
581 if (GET_CODE (x
) == CONST_INT
)
582 fprintf (file
, "[r0]");
587 unsigned HOST_WIDE_INT v
= INTVAL (x
);
589 /* Trickery to avoid problems with shifting
590 32-bits at a time on a 32-bit host. */
593 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
598 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
608 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
609 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
616 /* Like an 'S' operand above, but for unsigned loads only. */
617 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
622 case 'W': /* Print the instruction suffix. */
623 switch (GET_MODE (x
))
628 case QImode
: fputs (".b", file
); break;
629 case HImode
: fputs (".h", file
); break;
630 case SImode
: fputs (".w", file
); break;
631 case SFmode
: fputs (".w", file
); break;
634 case '.': /* Register r0. */
635 fputs (reg_names
[0], file
);
637 case 'z': /* Reg or zero. */
639 fputs (reg_names
[REGNO (x
)], file
);
640 else if ((GET_MODE(x
) == SImode
641 || GET_MODE(x
) == DFmode
642 || GET_MODE(x
) == SFmode
)
643 && x
== CONST0_RTX(GET_MODE(x
)))
644 fputs (reg_names
[0], file
);
647 gcc_assert (x
== const0_rtx
);
648 fputs (reg_names
[0], file
);
652 switch (GET_CODE (x
))
655 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
656 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
659 output_address (XEXP (x
, 0));
663 fputs (reg_names
[REGNO (x
)], file
);
666 fputs (reg_names
[subreg_regno (x
)], file
);
669 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
677 v850_print_operand_address (file
, x
);
688 /* Output assembly language output for the address ADDR to FILE. */
691 v850_print_operand_address (FILE * file
, rtx addr
)
693 switch (GET_CODE (addr
))
696 fprintf (file
, "0[");
697 v850_print_operand (file
, addr
, 0);
701 if (GET_CODE (XEXP (addr
, 0)) == REG
)
704 fprintf (file
, "lo(");
705 v850_print_operand (file
, XEXP (addr
, 1), 0);
706 fprintf (file
, ")[");
707 v850_print_operand (file
, XEXP (addr
, 0), 0);
712 if (GET_CODE (XEXP (addr
, 0)) == REG
713 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
716 v850_print_operand (file
, XEXP (addr
, 1), 0);
718 v850_print_operand (file
, XEXP (addr
, 0), 0);
723 v850_print_operand (file
, XEXP (addr
, 0), 0);
725 v850_print_operand (file
, XEXP (addr
, 1), 0);
730 const char *off_name
= NULL
;
731 const char *reg_name
= NULL
;
733 if (SYMBOL_REF_ZDA_P (addr
))
738 else if (SYMBOL_REF_SDA_P (addr
))
743 else if (SYMBOL_REF_TDA_P (addr
))
750 fprintf (file
, "%s(", off_name
);
751 output_addr_const (file
, addr
);
753 fprintf (file
, ")[%s]", reg_name
);
757 if (special_symbolref_operand (addr
, VOIDmode
))
759 rtx x
= XEXP (XEXP (addr
, 0), 0);
760 const char *off_name
;
761 const char *reg_name
;
763 if (SYMBOL_REF_ZDA_P (x
))
768 else if (SYMBOL_REF_SDA_P (x
))
773 else if (SYMBOL_REF_TDA_P (x
))
781 fprintf (file
, "%s(", off_name
);
782 output_addr_const (file
, addr
);
783 fprintf (file
, ")[%s]", reg_name
);
786 output_addr_const (file
, addr
);
789 output_addr_const (file
, addr
);
795 v850_print_operand_punct_valid_p (unsigned char code
)
800 /* When assemble_integer is used to emit the offsets for a switch
801 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
802 output_addr_const will normally barf at this, but it is OK to omit
803 the truncate and just emit the difference of the two labels. The
804 .hword directive will automatically handle the truncation for us.
806 Returns true if rtx was handled, false otherwise. */
809 v850_output_addr_const_extra (FILE * file
, rtx x
)
811 if (GET_CODE (x
) != TRUNCATE
)
816 /* We must also handle the case where the switch table was passed a
817 constant value and so has been collapsed. In this case the first
818 label will have been deleted. In such a case it is OK to emit
819 nothing, since the table will not be used.
820 (cf gcc.c-torture/compile/990801-1.c). */
821 if (GET_CODE (x
) == MINUS
822 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
824 rtx_code_label
*label
825 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
826 if (label
&& label
->deleted ())
830 output_addr_const (file
, x
);
834 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
838 output_move_single (rtx
* operands
)
840 rtx dst
= operands
[0];
841 rtx src
= operands
[1];
848 else if (GET_CODE (src
) == CONST_INT
)
850 HOST_WIDE_INT value
= INTVAL (src
);
852 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
855 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
856 return "movea %1,%.,%0";
858 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
859 return "movhi hi0(%1),%.,%0";
861 /* A random constant. */
862 else if (TARGET_V850E_UP
)
865 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
868 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
870 HOST_WIDE_INT high
, low
;
872 const_double_split (src
, &high
, &low
);
874 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
877 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
878 return "movea %F1,%.,%0";
880 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
881 return "movhi hi0(%F1),%.,%0";
883 /* A random constant. */
884 else if (TARGET_V850E_UP
)
888 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
891 else if (GET_CODE (src
) == MEM
)
892 return "%S1ld%W1 %1,%0";
894 else if (special_symbolref_operand (src
, VOIDmode
))
895 return "movea %O1(%P1),%Q1,%0";
897 else if (GET_CODE (src
) == LABEL_REF
898 || GET_CODE (src
) == SYMBOL_REF
899 || GET_CODE (src
) == CONST
)
902 return "mov hilo(%1),%0";
904 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
907 else if (GET_CODE (src
) == HIGH
)
908 return "movhi hi(%1),%.,%0";
910 else if (GET_CODE (src
) == LO_SUM
)
912 operands
[2] = XEXP (src
, 0);
913 operands
[3] = XEXP (src
, 1);
914 return "movea lo(%3),%2,%0";
918 else if (GET_CODE (dst
) == MEM
)
921 return "%S0st%W0 %1,%0";
923 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
924 return "%S0st%W0 %.,%0";
926 else if (GET_CODE (src
) == CONST_DOUBLE
927 && CONST0_RTX (GET_MODE (dst
)) == src
)
928 return "%S0st%W0 %.,%0";
931 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
936 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
938 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
943 return CC_FPU_LEmode
;
945 return CC_FPU_GEmode
;
947 return CC_FPU_LTmode
;
949 return CC_FPU_GTmode
;
951 return CC_FPU_EQmode
;
953 return CC_FPU_NEmode
;
962 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
964 if (GET_MODE (op0
) == DFmode
)
969 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
972 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
975 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
978 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
981 /* Note: There is no NE comparison operator. So we
982 perform an EQ comparison and invert the branch.
983 See v850_float_nz_comparison for how this is done. */
985 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
991 else if (GET_MODE (v850_compare_op0
) == SFmode
)
996 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
999 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
1002 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
1005 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
1008 /* Note: There is no NE comparison operator. So we
1009 perform an EQ comparison and invert the branch.
1010 See v850_float_nz_comparison for how this is done. */
1012 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
1021 return v850_select_cc_mode (cond
, op0
, op1
);
1025 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
1027 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
1029 emit_insn (gen_cmpsi_insn (op0
, op1
));
1030 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1035 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1036 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1037 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1039 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1043 /* Return maximum offset supported for a short EP memory reference of mode
1044 MODE and signedness UNSIGNEDP. */
1047 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1054 if (TARGET_SMALL_SLD
)
1055 max_offset
= (1 << 4);
1056 else if ((TARGET_V850E_UP
)
1058 max_offset
= (1 << 4);
1060 max_offset
= (1 << 7);
1064 if (TARGET_SMALL_SLD
)
1065 max_offset
= (1 << 5);
1066 else if ((TARGET_V850E_UP
)
1068 max_offset
= (1 << 5);
1070 max_offset
= (1 << 8);
1075 max_offset
= (1 << 8);
1085 /* Return true if OP is a valid short EP memory reference */
1088 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1094 /* If we are not using the EP register on a per-function basis
1095 then do not allow this optimization at all. This is to
1096 prevent the use of the SLD/SST instructions which cannot be
1097 guaranteed to work properly due to a hardware bug. */
1101 if (GET_CODE (op
) != MEM
)
1104 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1106 mask
= GET_MODE_SIZE (mode
) - 1;
1108 addr
= XEXP (op
, 0);
1109 if (GET_CODE (addr
) == CONST
)
1110 addr
= XEXP (addr
, 0);
1112 switch (GET_CODE (addr
))
1118 return SYMBOL_REF_TDA_P (addr
);
1121 return REGNO (addr
) == EP_REGNUM
;
1124 op0
= XEXP (addr
, 0);
1125 op1
= XEXP (addr
, 1);
1126 if (GET_CODE (op1
) == CONST_INT
1127 && INTVAL (op1
) < max_offset
1128 && INTVAL (op1
) >= 0
1129 && (INTVAL (op1
) & mask
) == 0)
1131 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1134 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1143 /* Substitute memory references involving a pointer, to use the ep pointer,
1144 taking care to save and preserve the ep. */
1147 substitute_ep_register (rtx_insn
*first_insn
,
1148 rtx_insn
*last_insn
,
1154 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1159 df_set_regs_ever_live (1, true);
1160 *p_r1
= gen_rtx_REG (Pmode
, 1);
1161 *p_ep
= gen_rtx_REG (Pmode
, 30);
1166 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1167 2 * (uses
- 3), uses
, reg_names
[regno
],
1168 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1169 INSN_UID (first_insn
), INSN_UID (last_insn
));
1171 if (NOTE_P (first_insn
))
1172 first_insn
= next_nonnote_insn (first_insn
);
1174 last_insn
= next_nonnote_insn (last_insn
);
1175 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1177 if (NONJUMP_INSN_P (insn
))
1179 rtx pattern
= single_set (insn
);
1181 /* Replace the memory references. */
1185 /* Memory operands are signed by default. */
1186 int unsignedp
= FALSE
;
1188 if (GET_CODE (SET_DEST (pattern
)) == MEM
1189 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1192 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1193 p_mem
= &SET_DEST (pattern
);
1195 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1196 p_mem
= &SET_SRC (pattern
);
1198 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1199 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1200 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1202 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1203 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1205 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1213 rtx addr
= XEXP (*p_mem
, 0);
1215 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1216 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1218 else if (GET_CODE (addr
) == PLUS
1219 && GET_CODE (XEXP (addr
, 0)) == REG
1220 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1221 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1222 && ((INTVAL (XEXP (addr
, 1)))
1223 < ep_memory_offset (GET_MODE (*p_mem
),
1225 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1226 *p_mem
= change_address (*p_mem
, VOIDmode
,
1227 gen_rtx_PLUS (Pmode
,
1235 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1236 insn
= prev_nonnote_insn (first_insn
);
1237 if (insn
&& NONJUMP_INSN_P (insn
)
1238 && GET_CODE (PATTERN (insn
)) == SET
1239 && SET_DEST (PATTERN (insn
)) == *p_ep
1240 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1243 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1245 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1246 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1250 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1251 the -mep mode to copy heavily used pointers to ep to use the implicit
1260 rtx_insn
*first_insn
;
1261 rtx_insn
*last_insn
;
1263 regs
[FIRST_PSEUDO_REGISTER
];
1272 /* If not ep mode, just return now. */
1276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 regs
[i
].first_insn
= NULL
;
1280 regs
[i
].last_insn
= NULL
;
1283 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1285 switch (GET_CODE (insn
))
1287 /* End of basic block */
1294 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1296 if (max_uses
< regs
[i
].uses
)
1298 max_uses
= regs
[i
].uses
;
1304 substitute_ep_register (regs
[max_regno
].first_insn
,
1305 regs
[max_regno
].last_insn
,
1306 max_uses
, max_regno
, &r1
, &ep
);
1310 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1313 regs
[i
].first_insn
= NULL
;
1314 regs
[i
].last_insn
= NULL
;
1322 pattern
= single_set (insn
);
1324 /* See if there are any memory references we can shorten. */
1327 rtx src
= SET_SRC (pattern
);
1328 rtx dest
= SET_DEST (pattern
);
1330 /* Memory operands are signed by default. */
1331 int unsignedp
= FALSE
;
1333 /* We might have (SUBREG (MEM)) here, so just get rid of the
1334 subregs to make this code simpler. */
1335 if (GET_CODE (dest
) == SUBREG
1336 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1337 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1338 alter_subreg (&dest
, false);
1339 if (GET_CODE (src
) == SUBREG
1340 && (GET_CODE (SUBREG_REG (src
)) == MEM
1341 || GET_CODE (SUBREG_REG (src
)) == REG
))
1342 alter_subreg (&src
, false);
1344 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1347 else if (GET_CODE (dest
) == MEM
)
1350 else if (GET_CODE (src
) == MEM
)
1353 else if (GET_CODE (src
) == SIGN_EXTEND
1354 && GET_CODE (XEXP (src
, 0)) == MEM
)
1355 mem
= XEXP (src
, 0);
1357 else if (GET_CODE (src
) == ZERO_EXTEND
1358 && GET_CODE (XEXP (src
, 0)) == MEM
)
1360 mem
= XEXP (src
, 0);
1366 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1369 else if (!use_ep
&& mem
1370 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1372 rtx addr
= XEXP (mem
, 0);
1376 if (GET_CODE (addr
) == REG
)
1379 regno
= REGNO (addr
);
1382 else if (GET_CODE (addr
) == PLUS
1383 && GET_CODE (XEXP (addr
, 0)) == REG
1384 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1385 && ((INTVAL (XEXP (addr
, 1)))
1386 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1387 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1390 regno
= REGNO (XEXP (addr
, 0));
1399 regs
[regno
].last_insn
= insn
;
1400 if (!regs
[regno
].first_insn
)
1401 regs
[regno
].first_insn
= insn
;
1405 /* Loading up a register in the basic block zaps any savings
1407 if (GET_CODE (dest
) == REG
)
1409 machine_mode mode
= GET_MODE (dest
);
1413 regno
= REGNO (dest
);
1414 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1418 /* See if we can use the pointer before this
1423 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1425 if (max_uses
< regs
[i
].uses
)
1427 max_uses
= regs
[i
].uses
;
1433 && max_regno
>= regno
1434 && max_regno
< endregno
)
1436 substitute_ep_register (regs
[max_regno
].first_insn
,
1437 regs
[max_regno
].last_insn
,
1438 max_uses
, max_regno
, &r1
,
1441 /* Since we made a substitution, zap all remembered
1443 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1446 regs
[i
].first_insn
= NULL
;
1447 regs
[i
].last_insn
= NULL
;
1452 for (i
= regno
; i
< endregno
; i
++)
1455 regs
[i
].first_insn
= NULL
;
1456 regs
[i
].last_insn
= NULL
;
1464 /* # of registers saved by the interrupt handler. */
1465 #define INTERRUPT_FIXED_NUM 5
1467 /* # of bytes for registers saved by the interrupt handler. */
1468 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1470 /* # of words saved for other registers. */
1471 #define INTERRUPT_ALL_SAVE_NUM \
1472 (30 - INTERRUPT_FIXED_NUM)
1474 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1477 compute_register_save_size (long * p_reg_saved
)
1481 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1482 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1485 /* Count space for the register saves. */
1486 if (interrupt_handler
)
1488 for (i
= 0; i
<= 31; i
++)
1492 if (df_regs_ever_live_p (i
) || call_p
)
1495 reg_saved
|= 1L << i
;
1499 /* We don't save/restore r0 or the stack pointer */
1501 case STACK_POINTER_REGNUM
:
1504 /* For registers with fixed use, we save them, set them to the
1505 appropriate value, and then restore them.
1506 These registers are handled specially, so don't list them
1507 on the list of registers to save in the prologue. */
1508 case 1: /* temp used to hold ep */
1510 case 10: /* temp used to call interrupt save/restore */
1511 case 11: /* temp used to call interrupt save/restore (long call) */
1512 case EP_REGNUM
: /* ep */
1519 /* Find the first register that needs to be saved. */
1520 for (i
= 0; i
<= 31; i
++)
1521 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1522 || i
== LINK_POINTER_REGNUM
))
1525 /* If it is possible that an out-of-line helper function might be
1526 used to generate the prologue for the current function, then we
1527 need to cover the possibility that such a helper function will
1528 be used, despite the fact that there might be gaps in the list of
1529 registers that need to be saved. To detect this we note that the
1530 helper functions always push at least register r29 (provided
1531 that the function is not an interrupt handler). */
1533 if (TARGET_PROLOG_FUNCTION
1534 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1539 reg_saved
|= 1L << i
;
1544 /* Helper functions save all registers between the starting
1545 register and the last register, regardless of whether they
1546 are actually used by the function or not. */
1547 for (; i
<= 29; i
++)
1550 reg_saved
|= 1L << i
;
1553 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1556 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1561 for (; i
<= 31; i
++)
1562 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1563 || i
== LINK_POINTER_REGNUM
))
1566 reg_saved
|= 1L << i
;
1572 *p_reg_saved
= reg_saved
;
1577 /* Typical stack layout should looks like this after the function's prologue:
1582 | | arguments saved | Increasing
1583 | | on the stack | addresses
1584 PARENT arg pointer -> | | /
1585 -------------------------- ---- -------------------
1586 | | - space for argument split between regs & stack
1588 CHILD | | \ <-- (return address here)
1593 frame pointer -> | | \ ___
1600 | | arguments | | Decreasing
1601 (hard) frame pointer | | / | | addresses
1602 and stack pointer -> | | / _|_ |
1603 -------------------------- ---- ------------------ V */
1606 compute_frame_size (int size
, long * p_reg_saved
)
1609 + compute_register_save_size (p_reg_saved
)
1610 + crtl
->outgoing_args_size
);
1614 use_prolog_function (int num_save
, int frame_size
)
1616 int alloc_stack
= (4 * num_save
);
1617 int unalloc_stack
= frame_size
- alloc_stack
;
1618 int save_func_len
, restore_func_len
;
1619 int save_normal_len
, restore_normal_len
;
1621 if (! TARGET_DISABLE_CALLT
)
1622 save_func_len
= restore_func_len
= 2;
1624 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1628 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1629 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1632 /* See if we would have used ep to save the stack. */
1633 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1634 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1636 save_normal_len
= restore_normal_len
= 4 * num_save
;
1638 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1639 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1641 /* Don't bother checking if we don't actually save any space.
1642 This happens for instance if one register is saved and additional
1643 stack space is allocated. */
1644 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1648 increment_stack (signed int amount
, bool in_prologue
)
1655 inc
= GEN_INT (amount
);
1657 if (! CONST_OK_FOR_K (amount
))
1659 rtx reg
= gen_rtx_REG (Pmode
, 12);
1661 inc
= emit_move_insn (reg
, inc
);
1667 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1673 expand_prologue (void)
1676 unsigned int size
= get_frame_size ();
1677 unsigned int actual_fsize
;
1678 unsigned int init_stack_alloc
= 0;
1681 unsigned int num_save
;
1683 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1686 actual_fsize
= compute_frame_size (size
, ®_saved
);
1688 if (flag_stack_usage_info
)
1689 current_function_static_stack_size
= actual_fsize
;
1691 /* Save/setup global registers for interrupt functions right now. */
1692 if (interrupt_handler
)
1694 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1695 emit_insn (gen_callt_save_interrupt ());
1697 emit_insn (gen_save_interrupt ());
1699 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1701 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1702 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1704 /* Interrupt functions are not passed arguments, so no need to
1705 allocate space for split structure arguments. */
1706 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1709 /* Identify all of the saved registers. */
1711 for (i
= 1; i
< 32; i
++)
1713 if (((1L << i
) & reg_saved
) != 0)
1714 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1717 if (crtl
->args
.pretend_args_size
)
1721 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1725 increment_stack (- crtl
->args
.pretend_args_size
, true);
1728 /* See if we have an insn that allocates stack space and saves the particular
1729 registers we want to. Note that the helpers won't
1730 allocate additional space for registers GCC saves to complete a
1731 "split" structure argument. */
1732 save_all
= NULL_RTX
;
1733 if (TARGET_PROLOG_FUNCTION
1734 && !crtl
->args
.pretend_args_size
1737 if (use_prolog_function (num_save
, actual_fsize
))
1739 int alloc_stack
= 4 * num_save
;
1742 save_all
= gen_rtx_PARALLEL
1744 rtvec_alloc (num_save
+ 1
1745 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1747 XVECEXP (save_all
, 0, 0)
1748 = gen_rtx_SET (VOIDmode
,
1750 gen_rtx_PLUS (Pmode
,
1752 GEN_INT(-alloc_stack
)));
1753 for (i
= 0; i
< num_save
; i
++)
1756 XVECEXP (save_all
, 0, i
+1)
1757 = gen_rtx_SET (VOIDmode
,
1759 gen_rtx_PLUS (Pmode
,
1765 if (TARGET_DISABLE_CALLT
)
1767 XVECEXP (save_all
, 0, num_save
+ 1)
1768 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1770 if (TARGET_LONG_CALLS
)
1771 XVECEXP (save_all
, 0, num_save
+ 2)
1772 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1775 v850_all_frame_related (save_all
);
1777 code
= recog (save_all
, NULL_RTX
, NULL
);
1780 rtx insn
= emit_insn (save_all
);
1781 INSN_CODE (insn
) = code
;
1782 actual_fsize
-= alloc_stack
;
1786 save_all
= NULL_RTX
;
1790 /* If no prolog save function is available, store the registers the old
1791 fashioned way (one by one). */
1794 /* Special case interrupt functions that save all registers for a call. */
1795 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1797 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1798 emit_insn (gen_callt_save_all_interrupt ());
1800 emit_insn (gen_save_all_interrupt ());
1805 /* If the stack is too big, allocate it in chunks so we can do the
1806 register saves. We use the register save size so we use the ep
1808 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1809 init_stack_alloc
= compute_register_save_size (NULL
);
1811 init_stack_alloc
= actual_fsize
;
1813 /* Save registers at the beginning of the stack frame. */
1814 offset
= init_stack_alloc
- 4;
1816 if (init_stack_alloc
)
1817 increment_stack (- (signed) init_stack_alloc
, true);
1819 /* Save the return pointer first. */
1820 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1822 F (emit_move_insn (gen_rtx_MEM (SImode
,
1823 plus_constant (Pmode
,
1826 save_regs
[--num_save
]));
1830 for (i
= 0; i
< num_save
; i
++)
1832 F (emit_move_insn (gen_rtx_MEM (SImode
,
1833 plus_constant (Pmode
,
1842 /* Allocate the rest of the stack that was not allocated above (either it is
1843 > 32K or we just called a function to save the registers and needed more
1845 if (actual_fsize
> init_stack_alloc
)
1846 increment_stack (init_stack_alloc
- actual_fsize
, true);
1848 /* If we need a frame pointer, set it up now. */
1849 if (frame_pointer_needed
)
1850 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1855 expand_epilogue (void)
1858 unsigned int size
= get_frame_size ();
1860 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1861 rtx restore_regs
[32];
1863 unsigned int num_restore
;
1865 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1867 /* Eliminate the initial stack stored by interrupt functions. */
1868 if (interrupt_handler
)
1870 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1871 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1872 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1875 /* Cut off any dynamic stack created. */
1876 if (frame_pointer_needed
)
1877 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1879 /* Identify all of the saved registers. */
1881 for (i
= 1; i
< 32; i
++)
1883 if (((1L << i
) & reg_saved
) != 0)
1884 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1887 /* See if we have an insn that restores the particular registers we
1889 restore_all
= NULL_RTX
;
1891 if (TARGET_PROLOG_FUNCTION
1893 && !crtl
->args
.pretend_args_size
1894 && !interrupt_handler
)
1896 int alloc_stack
= (4 * num_restore
);
1898 /* Don't bother checking if we don't actually save any space. */
1899 if (use_prolog_function (num_restore
, actual_fsize
))
1902 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1903 rtvec_alloc (num_restore
+ 2));
1904 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1905 XVECEXP (restore_all
, 0, 1)
1906 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1907 gen_rtx_PLUS (Pmode
,
1909 GEN_INT (alloc_stack
)));
1911 offset
= alloc_stack
- 4;
1912 for (i
= 0; i
< num_restore
; i
++)
1914 XVECEXP (restore_all
, 0, i
+2)
1915 = gen_rtx_SET (VOIDmode
,
1918 gen_rtx_PLUS (Pmode
,
1924 code
= recog (restore_all
, NULL_RTX
, NULL
);
1930 actual_fsize
-= alloc_stack
;
1931 increment_stack (actual_fsize
, false);
1933 insn
= emit_jump_insn (restore_all
);
1934 INSN_CODE (insn
) = code
;
1937 restore_all
= NULL_RTX
;
1941 /* If no epilogue save function is available, restore the registers the
1942 old fashioned way (one by one). */
1945 unsigned int init_stack_free
;
1947 /* If the stack is large, we need to cut it down in 2 pieces. */
1948 if (interrupt_handler
)
1949 init_stack_free
= 0;
1950 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1951 init_stack_free
= 4 * num_restore
;
1953 init_stack_free
= (signed) actual_fsize
;
1955 /* Deallocate the rest of the stack if it is > 32K. */
1956 if ((unsigned int) actual_fsize
> init_stack_free
)
1957 increment_stack (actual_fsize
- init_stack_free
, false);
1959 /* Special case interrupt functions that save all registers
1961 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1963 if (! TARGET_DISABLE_CALLT
)
1964 emit_insn (gen_callt_restore_all_interrupt ());
1966 emit_insn (gen_restore_all_interrupt ());
1970 /* Restore registers from the beginning of the stack frame. */
1971 int offset
= init_stack_free
- 4;
1973 /* Restore the return pointer first. */
1975 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1977 emit_move_insn (restore_regs
[--num_restore
],
1978 gen_rtx_MEM (SImode
,
1979 plus_constant (Pmode
,
1985 for (i
= 0; i
< num_restore
; i
++)
1987 emit_move_insn (restore_regs
[i
],
1988 gen_rtx_MEM (SImode
,
1989 plus_constant (Pmode
,
1993 emit_use (restore_regs
[i
]);
1997 /* Cut back the remainder of the stack. */
1998 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
2002 /* And return or use reti for interrupt handlers. */
2003 if (interrupt_handler
)
2005 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
2006 emit_insn (gen_callt_return_interrupt ());
2008 emit_jump_insn (gen_return_interrupt ());
2010 else if (actual_fsize
)
2011 emit_jump_insn (gen_return_internal ());
2013 emit_jump_insn (gen_return_simple ());
2016 v850_interrupt_cache_p
= FALSE
;
2017 v850_interrupt_p
= FALSE
;
2020 /* Update the condition code from the insn. */
2022 notice_update_cc (rtx body
, rtx_insn
*insn
)
2024 switch (get_attr_cc (insn
))
2027 /* Insn does not affect CC at all. */
2031 /* Insn does not change CC, but the 0'th operand has been changed. */
2032 if (cc_status
.value1
!= 0
2033 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2034 cc_status
.value1
= 0;
2038 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2039 V,C is in an unusable state. */
2041 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2042 cc_status
.value1
= recog_data
.operand
[0];
2046 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2047 C is in an unusable state. */
2049 cc_status
.flags
|= CC_NO_CARRY
;
2050 cc_status
.value1
= recog_data
.operand
[0];
2054 /* The insn is a compare instruction. */
2056 cc_status
.value1
= SET_SRC (body
);
2060 /* Insn doesn't leave CC in a usable state. */
2069 /* Retrieve the data area that has been chosen for the given decl. */
2072 v850_get_data_area (tree decl
)
2074 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2075 return DATA_AREA_SDA
;
2077 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2078 return DATA_AREA_TDA
;
2080 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2081 return DATA_AREA_ZDA
;
2083 return DATA_AREA_NORMAL
;
2086 /* Store the indicated data area in the decl's attributes. */
2089 v850_set_data_area (tree decl
, v850_data_area data_area
)
2095 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2096 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2097 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2102 DECL_ATTRIBUTES (decl
) = tree_cons
2103 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2106 /* Handle an "interrupt" attribute; arguments as in
2107 struct attribute_spec.handler. */
2109 v850_handle_interrupt_attribute (tree
* node
,
2111 tree args ATTRIBUTE_UNUSED
,
2112 int flags ATTRIBUTE_UNUSED
,
2113 bool * no_add_attrs
)
2115 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2117 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2119 *no_add_attrs
= true;
2125 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2126 struct attribute_spec.handler. */
2128 v850_handle_data_area_attribute (tree
* node
,
2130 tree args ATTRIBUTE_UNUSED
,
2131 int flags ATTRIBUTE_UNUSED
,
2132 bool * no_add_attrs
)
2134 v850_data_area data_area
;
2135 v850_data_area area
;
2138 /* Implement data area attribute. */
2139 if (is_attribute_p ("sda", name
))
2140 data_area
= DATA_AREA_SDA
;
2141 else if (is_attribute_p ("tda", name
))
2142 data_area
= DATA_AREA_TDA
;
2143 else if (is_attribute_p ("zda", name
))
2144 data_area
= DATA_AREA_ZDA
;
2148 switch (TREE_CODE (decl
))
2151 if (current_function_decl
!= NULL_TREE
)
2153 error_at (DECL_SOURCE_LOCATION (decl
),
2154 "data area attributes cannot be specified for "
2156 *no_add_attrs
= true;
2162 area
= v850_get_data_area (decl
);
2163 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2165 error ("data area of %q+D conflicts with previous declaration",
2167 *no_add_attrs
= true;
2179 /* Return nonzero if FUNC is an interrupt function as specified
2180 by the "interrupt" attribute. */
2183 v850_interrupt_function_p (tree func
)
2188 if (v850_interrupt_cache_p
)
2189 return v850_interrupt_p
;
2191 if (TREE_CODE (func
) != FUNCTION_DECL
)
2194 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2200 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2201 ret
= a
!= NULL_TREE
;
2204 /* Its not safe to trust global variables until after function inlining has
2206 if (reload_completed
| reload_in_progress
)
2207 v850_interrupt_p
= ret
;
2214 v850_encode_data_area (tree decl
, rtx symbol
)
2218 /* Map explicit sections into the appropriate attribute */
2219 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2221 if (DECL_SECTION_NAME (decl
))
2223 const char *name
= DECL_SECTION_NAME (decl
);
2225 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2226 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2228 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2229 v850_set_data_area (decl
, DATA_AREA_SDA
);
2231 else if (streq (name
, ".tdata"))
2232 v850_set_data_area (decl
, DATA_AREA_TDA
);
2235 /* If no attribute, support -m{zda,sda,tda}=n */
2238 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2242 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2243 v850_set_data_area (decl
, DATA_AREA_TDA
);
2245 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2246 v850_set_data_area (decl
, DATA_AREA_SDA
);
2248 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2249 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2252 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2256 flags
= SYMBOL_REF_FLAGS (symbol
);
2257 switch (v850_get_data_area (decl
))
2259 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2260 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2261 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2262 default: gcc_unreachable ();
2264 SYMBOL_REF_FLAGS (symbol
) = flags
;
2268 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2270 default_encode_section_info (decl
, rtl
, first
);
2272 if (TREE_CODE (decl
) == VAR_DECL
2273 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2274 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2277 /* Construct a JR instruction to a routine that will perform the equivalent of
2278 the RTL passed in as an argument. This RTL is a function epilogue that
2279 pops registers off the stack and possibly releases some extra stack space
2280 as well. The code has already verified that the RTL matches these
2284 construct_restore_jr (rtx op
)
2286 int count
= XVECLEN (op
, 0);
2288 unsigned long int mask
;
2289 unsigned long int first
;
2290 unsigned long int last
;
2292 static char buff
[100]; /* XXX */
2296 error ("bogus JR construction: %d", count
);
2300 /* Work out how many bytes to pop off the stack before retrieving
2302 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2303 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2304 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2306 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2308 /* Each pop will remove 4 bytes from the stack.... */
2309 stack_bytes
-= (count
- 2) * 4;
2311 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2312 if (stack_bytes
!= 0)
2314 error ("bad amount of stack space removal: %d", stack_bytes
);
2318 /* Now compute the bit mask of registers to push. */
2320 for (i
= 2; i
< count
; i
++)
2322 rtx vector_element
= XVECEXP (op
, 0, i
);
2324 gcc_assert (GET_CODE (vector_element
) == SET
);
2325 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2326 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2329 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2332 /* Scan for the first register to pop. */
2333 for (first
= 0; first
< 32; first
++)
2335 if (mask
& (1 << first
))
2339 gcc_assert (first
< 32);
2341 /* Discover the last register to pop. */
2342 if (mask
& (1 << LINK_POINTER_REGNUM
))
2344 last
= LINK_POINTER_REGNUM
;
2348 gcc_assert (!stack_bytes
);
2349 gcc_assert (mask
& (1 << 29));
2354 /* Note, it is possible to have gaps in the register mask.
2355 We ignore this here, and generate a JR anyway. We will
2356 be popping more registers than is strictly necessary, but
2357 it does save code space. */
2359 if (TARGET_LONG_CALLS
)
2364 sprintf (name
, "__return_%s", reg_names
[first
]);
2366 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2368 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2374 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2376 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2383 /* Construct a JARL instruction to a routine that will perform the equivalent
2384 of the RTL passed as a parameter. This RTL is a function prologue that
2385 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2386 some stack space as well. The code has already verified that the RTL
2387 matches these requirements. */
2389 construct_save_jarl (rtx op
)
2391 int count
= XVECLEN (op
, 0);
2393 unsigned long int mask
;
2394 unsigned long int first
;
2395 unsigned long int last
;
2397 static char buff
[100]; /* XXX */
2399 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2401 error ("bogus JARL construction: %d", count
);
2406 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2407 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2408 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2409 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2411 /* Work out how many bytes to push onto the stack after storing the
2413 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2415 /* Each push will put 4 bytes from the stack.... */
2416 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2418 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2419 if (stack_bytes
!= 0)
2421 error ("bad amount of stack space removal: %d", stack_bytes
);
2425 /* Now compute the bit mask of registers to push. */
2427 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2429 rtx vector_element
= XVECEXP (op
, 0, i
);
2431 gcc_assert (GET_CODE (vector_element
) == SET
);
2432 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2433 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2436 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2439 /* Scan for the first register to push. */
2440 for (first
= 0; first
< 32; first
++)
2442 if (mask
& (1 << first
))
2446 gcc_assert (first
< 32);
2448 /* Discover the last register to push. */
2449 if (mask
& (1 << LINK_POINTER_REGNUM
))
2451 last
= LINK_POINTER_REGNUM
;
2455 gcc_assert (!stack_bytes
);
2456 gcc_assert (mask
& (1 << 29));
2461 /* Note, it is possible to have gaps in the register mask.
2462 We ignore this here, and generate a JARL anyway. We will
2463 be pushing more registers than is strictly necessary, but
2464 it does save code space. */
2466 if (TARGET_LONG_CALLS
)
2471 sprintf (name
, "__save_%s", reg_names
[first
]);
2473 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2475 if (TARGET_V850E3V5_UP
)
2476 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2478 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2484 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2486 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2493 /* A version of asm_output_aligned_bss() that copes with the special
2494 data areas of the v850. */
2496 v850_output_aligned_bss (FILE * file
,
2499 unsigned HOST_WIDE_INT size
,
2502 switch (v850_get_data_area (decl
))
2505 switch_to_section (zbss_section
);
2509 switch_to_section (sbss_section
);
2513 switch_to_section (tdata_section
);
2516 switch_to_section (bss_section
);
2520 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2521 #ifdef ASM_DECLARE_OBJECT_NAME
2522 last_assemble_variable_decl
= decl
;
2523 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2525 /* Standard thing is just output label for the object. */
2526 ASM_OUTPUT_LABEL (file
, name
);
2527 #endif /* ASM_DECLARE_OBJECT_NAME */
2528 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2531 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2533 v850_output_common (FILE * file
,
2539 if (decl
== NULL_TREE
)
2541 fprintf (file
, "%s", COMMON_ASM_OP
);
2545 switch (v850_get_data_area (decl
))
2548 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2552 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2556 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2560 fprintf (file
, "%s", COMMON_ASM_OP
);
2565 assemble_name (file
, name
);
2566 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2569 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2571 v850_output_local (FILE * file
,
2577 fprintf (file
, "%s", LOCAL_ASM_OP
);
2578 assemble_name (file
, name
);
2579 fprintf (file
, "\n");
2581 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2584 /* Add data area to the given declaration if a ghs data area pragma is
2585 currently in effect (#pragma ghs startXXX/endXXX). */
2587 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2590 && data_area_stack
->data_area
2591 && current_function_decl
== NULL_TREE
2592 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2593 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2594 v850_set_data_area (decl
, data_area_stack
->data_area
);
2596 /* Initialize the default names of the v850 specific sections,
2597 if this has not been done before. */
2599 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2601 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2604 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2607 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2610 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2613 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2617 if (current_function_decl
== NULL_TREE
2618 && (TREE_CODE (decl
) == VAR_DECL
2619 || TREE_CODE (decl
) == CONST_DECL
2620 || TREE_CODE (decl
) == FUNCTION_DECL
)
2621 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2622 && !DECL_SECTION_NAME (decl
))
2624 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2625 const char * chosen_section
;
2627 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2628 kind
= GHS_SECTION_KIND_TEXT
;
2631 /* First choose a section kind based on the data area of the decl. */
2632 switch (v850_get_data_area (decl
))
2638 kind
= ((TREE_READONLY (decl
))
2639 ? GHS_SECTION_KIND_ROSDATA
2640 : GHS_SECTION_KIND_SDATA
);
2644 kind
= GHS_SECTION_KIND_TDATA
;
2648 kind
= ((TREE_READONLY (decl
))
2649 ? GHS_SECTION_KIND_ROZDATA
2650 : GHS_SECTION_KIND_ZDATA
);
2653 case DATA_AREA_NORMAL
: /* default data area */
2654 if (TREE_READONLY (decl
))
2655 kind
= GHS_SECTION_KIND_RODATA
;
2656 else if (DECL_INITIAL (decl
))
2657 kind
= GHS_SECTION_KIND_DATA
;
2659 kind
= GHS_SECTION_KIND_BSS
;
2663 /* Now, if the section kind has been explicitly renamed,
2664 then attach a section attribute. */
2665 chosen_section
= GHS_current_section_names
[(int) kind
];
2667 /* Otherwise, if this kind of section needs an explicit section
2668 attribute, then also attach one. */
2669 if (chosen_section
== NULL
)
2670 chosen_section
= GHS_default_section_names
[(int) kind
];
2674 /* Only set the section name if specified by a pragma, because
2675 otherwise it will force those variables to get allocated storage
2676 in this module, rather than by the linker. */
2677 set_decl_section_name (decl
, chosen_section
);
2682 /* Construct a DISPOSE instruction that is the equivalent of
2683 the given RTX. We have already verified that this should
2687 construct_dispose_instruction (rtx op
)
2689 int count
= XVECLEN (op
, 0);
2691 unsigned long int mask
;
2693 static char buff
[ 100 ]; /* XXX */
2698 error ("bogus DISPOSE construction: %d", count
);
2702 /* Work out how many bytes to pop off the
2703 stack before retrieving registers. */
2704 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2705 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2706 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2708 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2710 /* Each pop will remove 4 bytes from the stack.... */
2711 stack_bytes
-= (count
- 2) * 4;
2713 /* Make sure that the amount we are popping
2714 will fit into the DISPOSE instruction. */
2715 if (stack_bytes
> 128)
2717 error ("too much stack space to dispose of: %d", stack_bytes
);
2721 /* Now compute the bit mask of registers to push. */
2724 for (i
= 2; i
< count
; i
++)
2726 rtx vector_element
= XVECEXP (op
, 0, i
);
2728 gcc_assert (GET_CODE (vector_element
) == SET
);
2729 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2730 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2733 if (REGNO (SET_DEST (vector_element
)) == 2)
2736 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2739 if (! TARGET_DISABLE_CALLT
2740 && (use_callt
|| stack_bytes
== 0))
2744 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2749 for (i
= 20; i
< 32; i
++)
2750 if (mask
& (1 << i
))
2754 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2756 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2757 i
, (mask
& (1 << 31)) ? "31c" : "29");
2762 static char regs
[100]; /* XXX */
2765 /* Generate the DISPOSE instruction. Note we could just issue the
2766 bit mask as a number as the assembler can cope with this, but for
2767 the sake of our readers we turn it into a textual description. */
2771 for (i
= 20; i
< 32; i
++)
2773 if (mask
& (1 << i
))
2778 strcat (regs
, ", ");
2783 strcat (regs
, reg_names
[ first
]);
2785 for (i
++; i
< 32; i
++)
2786 if ((mask
& (1 << i
)) == 0)
2791 strcat (regs
, " - ");
2792 strcat (regs
, reg_names
[ i
- 1 ] );
2797 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2803 /* Construct a PREPARE instruction that is the equivalent of
2804 the given RTL. We have already verified that this should
2808 construct_prepare_instruction (rtx op
)
2812 unsigned long int mask
;
2814 static char buff
[ 100 ]; /* XXX */
2817 if (XVECLEN (op
, 0) <= 1)
2819 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2823 /* Work out how many bytes to push onto
2824 the stack after storing the registers. */
2825 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2826 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2827 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2829 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2832 /* Make sure that the amount we are popping
2833 will fit into the DISPOSE instruction. */
2834 if (stack_bytes
< -128)
2836 error ("too much stack space to prepare: %d", stack_bytes
);
2840 /* Now compute the bit mask of registers to push. */
2843 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2845 rtx vector_element
= XVECEXP (op
, 0, i
);
2847 if (GET_CODE (vector_element
) == CLOBBER
)
2850 gcc_assert (GET_CODE (vector_element
) == SET
);
2851 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2852 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2855 if (REGNO (SET_SRC (vector_element
)) == 2)
2858 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2862 stack_bytes
+= count
* 4;
2864 if ((! TARGET_DISABLE_CALLT
)
2865 && (use_callt
|| stack_bytes
== 0))
2869 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2873 for (i
= 20; i
< 32; i
++)
2874 if (mask
& (1 << i
))
2878 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2880 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2881 i
, (mask
& (1 << 31)) ? "31c" : "29");
2885 static char regs
[100]; /* XXX */
2889 /* Generate the PREPARE instruction. Note we could just issue the
2890 bit mask as a number as the assembler can cope with this, but for
2891 the sake of our readers we turn it into a textual description. */
2895 for (i
= 20; i
< 32; i
++)
2897 if (mask
& (1 << i
))
2902 strcat (regs
, ", ");
2907 strcat (regs
, reg_names
[ first
]);
2909 for (i
++; i
< 32; i
++)
2910 if ((mask
& (1 << i
)) == 0)
2915 strcat (regs
, " - ");
2916 strcat (regs
, reg_names
[ i
- 1 ] );
2921 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2927 /* Return an RTX indicating where the return address to the
2928 calling function can be found. */
2931 v850_return_addr (int count
)
2936 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2939 /* Implement TARGET_ASM_INIT_SECTIONS. */
2942 v850_asm_init_sections (void)
2945 = get_unnamed_section (0, output_section_asm_op
,
2946 "\t.section .rosdata,\"a\"");
2949 = get_unnamed_section (0, output_section_asm_op
,
2950 "\t.section .rozdata,\"a\"");
2953 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2954 "\t.section .tdata,\"aw\"");
2957 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2958 "\t.section .zdata,\"aw\"");
2961 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2962 output_section_asm_op
,
2963 "\t.section .zbss,\"aw\"");
2967 v850_select_section (tree exp
,
2968 int reloc ATTRIBUTE_UNUSED
,
2969 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2971 if (TREE_CODE (exp
) == VAR_DECL
)
2974 if (!TREE_READONLY (exp
)
2975 || TREE_SIDE_EFFECTS (exp
)
2976 || !DECL_INITIAL (exp
)
2977 || (DECL_INITIAL (exp
) != error_mark_node
2978 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2983 switch (v850_get_data_area (exp
))
2986 return is_const
? rozdata_section
: zdata_section
;
2989 return tdata_section
;
2992 return is_const
? rosdata_section
: sdata_section
;
2995 return is_const
? readonly_data_section
: data_section
;
2998 return readonly_data_section
;
3001 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
3004 v850_function_value_regno_p (const unsigned int regno
)
3006 return (regno
== 10);
3009 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3012 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3014 /* Return values > 8 bytes in length in memory. */
3015 return int_size_in_bytes (type
) > 8
3016 || TYPE_MODE (type
) == BLKmode
3017 /* With the rh850 ABI return all aggregates in memory. */
3018 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
3022 /* Worker function for TARGET_FUNCTION_VALUE. */
3025 v850_function_value (const_tree valtype
,
3026 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
3027 bool outgoing ATTRIBUTE_UNUSED
)
3029 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
3033 /* Worker function for TARGET_CAN_ELIMINATE. */
3036 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3038 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3041 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3043 If TARGET_APP_REGS is not defined then add r2 and r5 to
3044 the pool of fixed registers. See PR 14505. */
3047 v850_conditional_register_usage (void)
3049 if (TARGET_APP_REGS
)
3051 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3052 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.
   Emit the trampoline skeleton: load the static chain (offset 12)
   and target address (offset 16) relative to r12 and jump; the two
   trailing .long slots are filled in by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3070 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3073 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3075 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3077 emit_block_move (m_tramp
, assemble_trampoline_template (),
3078 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3080 mem
= adjust_address (m_tramp
, SImode
, 16);
3081 emit_move_insn (mem
, chain_value
);
3082 mem
= adjust_address (m_tramp
, SImode
, 20);
3083 emit_move_insn (mem
, fnaddr
);
3087 v850_issue_rate (void)
3089 return (TARGET_V850E2_UP
? 2 : 1);
3092 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3095 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3097 return (GET_CODE (x
) == CONST_DOUBLE
3098 || !(GET_CODE (x
) == CONST
3099 && GET_CODE (XEXP (x
, 0)) == PLUS
3100 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3101 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3102 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3106 v850_memory_move_cost (machine_mode mode
,
3107 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3110 switch (GET_MODE_SIZE (mode
))
3120 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3125 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3127 if (TARGET_V850E3V5_UP
)
3131 if (TARGET_LONG_CALLS
)
3133 /* call_internal_long, call_value_internal_long. */
3141 /* call_internal_short, call_value_internal_short. */
3150 /* V850 specific attributes. */
3152 static const struct attribute_spec v850_attribute_table
[] =
3154 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3155 affects_type_identity } */
3156 { "interrupt_handler", 0, 0, true, false, false,
3157 v850_handle_interrupt_attribute
, false },
3158 { "interrupt", 0, 0, true, false, false,
3159 v850_handle_interrupt_attribute
, false },
3160 { "sda", 0, 0, true, false, false,
3161 v850_handle_data_area_attribute
, false },
3162 { "tda", 0, 0, true, false, false,
3163 v850_handle_data_area_attribute
, false },
3164 { "zda", 0, 0, true, false, false,
3165 v850_handle_data_area_attribute
, false },
3166 { NULL
, 0, 0, false, false, false, NULL
, false }
3170 v850_option_override (void)
3172 if (flag_exceptions
|| flag_non_call_exceptions
)
3173 flag_omit_frame_pointer
= 0;
3175 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3176 if (! TARGET_GCC_ABI
)
3177 target_flags
|= MASK_DISABLE_CALLT
;
3181 v850_gen_movdi (rtx
* operands
)
3183 if (REG_P (operands
[0]))
3185 if (REG_P (operands
[1]))
3187 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3188 return "mov %1, %0; mov %R1, %R0";
3190 return "mov %R1, %R0; mov %1, %0";
3193 if (MEM_P (operands
[1]))
3195 if (REGNO (operands
[0]) & 1)
3196 /* Use two load word instructions to synthesise a load double. */
3197 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3199 return "ld.dw %1, %0";
3202 return "mov %1, %0; mov %R1, %R0";
3205 gcc_assert (REG_P (operands
[1]));
3207 if (REGNO (operands
[1]) & 1)
3208 /* Use two store word instructions to synthesise a store double. */
3209 return "st.w %1, %0 ; st.w %R1, %R0 ";
3211 return "st.dw %1, %0";
/* Initialize the GCC target structure.  Each #define below installs a
   v850-specific implementation (or a stock hook) for one target hook
   before TARGET_INITIALIZER expands them all into `targetm'.  */

/* Option handling and costs.  */
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembler output.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and section placement.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* RTL costs and scheduling.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame elimination and register usage.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated for this file.  */
#include "gt-v850.h"