1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
/* Nonzero iff strings A and B are identical.  */
48 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration: emit the assembly form of an address operand.  */
51 static void v850_print_operand_address (FILE *, rtx
);
53 /* Names of the various data areas used on the v850. */
/* Section-name trees indexed by the GHS section-kind enumeration.  */
54 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
55 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
57 /* Track the current data area set by the data area pragma (which
58 can be nested). Tested by check_default_data_area. */
59 data_area_stack_element
* data_area_stack
= NULL
;
61 /* True if we don't need to check any more if the current
62 function is an interrupt handler. */
63 static int v850_interrupt_cache_p
= FALSE
;
/* NOTE(review): presumably the operands of the pending comparison set by
   the compare expanders -- confirm against the v850.md patterns.  */
65 rtx v850_compare_op0
, v850_compare_op1
;
67 /* Whether current function is an interrupt handler. */
68 static int v850_interrupt_p
= FALSE
;
/* GC-rooted handles for the extra v850 small-data sections
   (rosdata/rozdata/tdata/zdata/zbss, judging by the names).  */
70 static GTY(()) section
* rosdata_section
;
71 static GTY(()) section
* rozdata_section
;
72 static GTY(()) section
* tdata_section
;
73 static GTY(()) section
* zdata_section
;
74 static GTY(()) section
* zbss_section
;
76 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
77 Specify whether to pass the argument by reference. */
/* NOTE(review): this extract is missing source lines (the embedded original
   line numbers jump); the return type, braces and return statements of this
   hook are not visible here.  The visible code only computes the argument
   size from TYPE (when present) or from MODE.  */
80 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
81 enum machine_mode mode
, const_tree type
,
82 bool named ATTRIBUTE_UNUSED
)
84 unsigned HOST_WIDE_INT size
;
/* Size from the tree type when one is supplied ... */
87 size
= int_size_in_bytes (type
);
/* ... otherwise fall back to the machine mode's size.  */
89 size
= GET_MODE_SIZE (mode
);
94 /* Implementing the Varargs Macros. */
/* TARGET_STRICT_ARGUMENT_NAMING hook: treat all arguments as named
   except when compiling for GHS compatibility.  */
97 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED
)
99 return !TARGET_GHS
? true : false;
102 /* Return an RTX to represent where an argument with mode MODE
103 and type TYPE will be passed to a function. If the result
104 is NULL_RTX, the argument will be pushed. */
/* NOTE(review): lines are missing from this extract (original numbering
   jumps); the surrounding control flow of the fragments below is not fully
   visible.  Visible logic: compute the rounded argument size, align
   cum->nbytes, and hand back one of r6..r9 depending on how many argument
   bytes have been consumed.  */
107 v850_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
108 const_tree type
, bool named
)
110 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
111 rtx result
= NULL_RTX
;
118 size
= int_size_in_bytes (type
);
120 size
= GET_MODE_SIZE (mode
);
/* Round the size up to a whole number of words.  */
122 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
126 /* Once we have stopped using argument registers, do not start up again. */
127 cum
->nbytes
= 4 * UNITS_PER_WORD
;
131 if (size
<= UNITS_PER_WORD
&& type
)
132 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
/* Align the running byte count to the argument's alignment.  */
136 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
/* Only the first four words (r6..r9) are passed in registers.  */
138 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
141 if (type
== NULL_TREE
142 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
/* Pick the argument register from the word offset consumed so far.  */
145 switch (cum
->nbytes
/ UNITS_PER_WORD
)
148 result
= gen_rtx_REG (mode
, 6);
151 result
= gen_rtx_REG (mode
, 7);
154 result
= gen_rtx_REG (mode
, 8);
157 result
= gen_rtx_REG (mode
, 9);
166 /* Return the number of bytes which must be put into registers
167 for values which are part in registers and part in memory. */
/* NOTE(review): source lines are missing from this extract; the control
   flow between the fragments below is incomplete.  The visible tail
   computes how many of the 4 argument-register words remain.  */
169 v850_arg_partial_bytes (cumulative_args_t cum_v
, enum machine_mode mode
,
170 tree type
, bool named
)
172 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* Unnamed (variadic) arguments under GHS compatibility are handled
   separately -- the branch body is not visible in this extract.  */
175 if (TARGET_GHS
&& !named
)
179 size
= int_size_in_bytes (type
);
181 size
= GET_MODE_SIZE (mode
);
187 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
/* Align the running argument byte count.  */
191 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
193 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
196 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
199 if (type
== NULL_TREE
200 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
/* Bytes that still fit in the register block.  */
203 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
206 /* Update the data in CUM to advance over an argument
207 of mode MODE and data type TYPE.
208 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): lines missing from the extract -- the full conditional
   expression below is truncated.  Visible intent: bump cum->nbytes by the
   argument's word-rounded size (large aggregates advance by a pointer).  */
211 v850_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
212 const_tree type
, bool named ATTRIBUTE_UNUSED
)
214 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
216 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
217 ? GET_MODE_SIZE (Pmode
)
219 ? GET_MODE_SIZE (mode
)
220 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
224 /* Return the high and low words of a CONST_DOUBLE */
/* Splits X into *P_HIGH / *P_LOW.  Floating modes go through the
   REAL_VALUE target conversion macros; integral CONST_DOUBLEs use the
   CONST_DOUBLE_HIGH/LOW fields directly.  Anything else is fatal.  */
227 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
229 if (GET_CODE (x
) == CONST_DOUBLE
)
234 switch (GET_MODE (x
))
/* DFmode: convert to the target's two-word double layout.  */
237 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
238 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
239 *p_high
= t
[1]; /* since v850 is little endian */
240 *p_low
= t
[0]; /* high is second word */
/* SFmode: a single word, returned in *P_HIGH.  */
244 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
245 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
/* Integral CONST_DOUBLE (e.g. VOIDmode): copy the raw words.  */
251 *p_high
= CONST_DOUBLE_HIGH (x
);
252 *p_low
= CONST_DOUBLE_LOW (x
);
260 fatal_insn ("const_double_split got a bad insn:", x
);
264 /* Return the cost of the rtx R with code CODE. */
/* Cost of integer constant VALUE, classified by which immediate
   constraint (I/J/K) it satisfies; ZERO_COST is presumably returned for
   the cheapest class (the return statements are not visible here).  */
267 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
269 if (CONST_OK_FOR_I (value
))
271 else if (CONST_OK_FOR_J (value
))
273 else if (CONST_OK_FOR_K (value
))
/* Cost of constant rtx R with code C.  CONST_INTs go straight to
   const_costs_int; CONST_DOUBLEs are split into words and each word is
   costed (SFmode needs only the high word).  */
280 const_costs (rtx r
, enum rtx_code c
)
282 HOST_WIDE_INT high
, low
;
287 return const_costs_int (INTVAL (r
), 0);
290 const_double_split (r
, &high
, &low
);
291 if (GET_MODE (r
) == SFmode
)
292 return const_costs_int (high
, 1);
294 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
/* TARGET_RTX_COSTS hook.  NOTE(review): the extract is missing lines --
   the outer switch and several case labels are not visible, so the
   fragments below cannot be fully ordered.  Visible logic: constants are
   costed via const_costs; mult-like operations on SI/HI/QImode check
   whether operand 1 is a register or an immediate in the O/K ranges.  */
310 v850_rtx_costs (rtx x
,
312 int outer_code ATTRIBUTE_UNUSED
,
313 int * total
, bool speed
)
315 enum rtx_code code
= (enum rtx_code
) codearg
;
324 *total
= COSTS_N_INSNS (const_costs (x
, code
));
331 if (TARGET_V850E
&& !speed
)
339 && ( GET_MODE (x
) == SImode
340 || GET_MODE (x
) == HImode
341 || GET_MODE (x
) == QImode
))
343 if (GET_CODE (XEXP (x
, 1)) == REG
)
345 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
358 if (outer_code
== COMPARE
)
367 /* Print operand X using operand code CODE to assembly language output file
371 v850_print_operand (FILE * file
, rtx x
, int code
)
373 HOST_WIDE_INT high
, low
;
378 /* We use 'c' operands with symbols for .vtinherit */
379 if (GET_CODE (x
) == SYMBOL_REF
)
381 output_addr_const(file
, x
);
388 switch ((code
== 'B' || code
== 'C')
389 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
392 if (code
== 'c' || code
== 'C')
393 fprintf (file
, "nz");
395 fprintf (file
, "ne");
398 if (code
== 'c' || code
== 'C')
404 fprintf (file
, "ge");
407 fprintf (file
, "gt");
410 fprintf (file
, "le");
413 fprintf (file
, "lt");
416 fprintf (file
, "nl");
422 fprintf (file
, "nh");
431 case 'F': /* high word of CONST_DOUBLE */
432 switch (GET_CODE (x
))
435 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
439 const_double_split (x
, &high
, &low
);
440 fprintf (file
, "%ld", (long) high
);
447 case 'G': /* low word of CONST_DOUBLE */
448 switch (GET_CODE (x
))
451 fprintf (file
, "%ld", (long) INTVAL (x
));
455 const_double_split (x
, &high
, &low
);
456 fprintf (file
, "%ld", (long) low
);
464 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
467 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
470 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
472 if (GET_CODE (x
) == CONST
)
473 x
= XEXP (XEXP (x
, 0), 0);
475 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
477 if (SYMBOL_REF_ZDA_P (x
))
478 fprintf (file
, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x
))
480 fprintf (file
, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x
))
482 fprintf (file
, "tdaoff");
487 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
488 output_addr_const (file
, x
);
491 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
493 if (GET_CODE (x
) == CONST
)
494 x
= XEXP (XEXP (x
, 0), 0);
496 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
498 if (SYMBOL_REF_ZDA_P (x
))
499 fprintf (file
, "r0");
500 else if (SYMBOL_REF_SDA_P (x
))
501 fprintf (file
, "gp");
502 else if (SYMBOL_REF_TDA_P (x
))
503 fprintf (file
, "ep");
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x
))
511 fprintf (file
, reg_names
[REGNO (x
) + 1]);
514 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
515 v850_print_operand_address (file
, x
);
516 if (GET_CODE (x
) == CONST_INT
)
517 fprintf (file
, "[r0]");
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
540 case 'W': /* print the instruction suffix */
541 switch (GET_MODE (x
))
546 case QImode
: fputs (".b", file
); break;
547 case HImode
: fputs (".h", file
); break;
548 case SImode
: fputs (".w", file
); break;
549 case SFmode
: fputs (".w", file
); break;
552 case '.': /* register r0 */
553 fputs (reg_names
[0], file
);
555 case 'z': /* reg or zero */
556 if (GET_CODE (x
) == REG
)
557 fputs (reg_names
[REGNO (x
)], file
);
558 else if ((GET_MODE(x
) == SImode
559 || GET_MODE(x
) == DFmode
560 || GET_MODE(x
) == SFmode
)
561 && x
== CONST0_RTX(GET_MODE(x
)))
562 fputs (reg_names
[0], file
);
565 gcc_assert (x
== const0_rtx
);
566 fputs (reg_names
[0], file
);
570 switch (GET_CODE (x
))
573 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
574 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
577 output_address (XEXP (x
, 0));
581 fputs (reg_names
[REGNO (x
)], file
);
584 fputs (reg_names
[subreg_regno (x
)], file
);
591 v850_print_operand_address (file
, x
);
602 /* Output assembly language output for the address ADDR to FILE. */
605 v850_print_operand_address (FILE * file
, rtx addr
)
607 switch (GET_CODE (addr
))
610 fprintf (file
, "0[");
611 v850_print_operand (file
, addr
, 0);
615 if (GET_CODE (XEXP (addr
, 0)) == REG
)
618 fprintf (file
, "lo(");
619 v850_print_operand (file
, XEXP (addr
, 1), 0);
620 fprintf (file
, ")[");
621 v850_print_operand (file
, XEXP (addr
, 0), 0);
626 if (GET_CODE (XEXP (addr
, 0)) == REG
627 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
630 v850_print_operand (file
, XEXP (addr
, 1), 0);
632 v850_print_operand (file
, XEXP (addr
, 0), 0);
637 v850_print_operand (file
, XEXP (addr
, 0), 0);
639 v850_print_operand (file
, XEXP (addr
, 1), 0);
644 const char *off_name
= NULL
;
645 const char *reg_name
= NULL
;
647 if (SYMBOL_REF_ZDA_P (addr
))
652 else if (SYMBOL_REF_SDA_P (addr
))
657 else if (SYMBOL_REF_TDA_P (addr
))
664 fprintf (file
, "%s(", off_name
);
665 output_addr_const (file
, addr
);
667 fprintf (file
, ")[%s]", reg_name
);
671 if (special_symbolref_operand (addr
, VOIDmode
))
673 rtx x
= XEXP (XEXP (addr
, 0), 0);
674 const char *off_name
;
675 const char *reg_name
;
677 if (SYMBOL_REF_ZDA_P (x
))
682 else if (SYMBOL_REF_SDA_P (x
))
687 else if (SYMBOL_REF_TDA_P (x
))
695 fprintf (file
, "%s(", off_name
);
696 output_addr_const (file
, addr
);
697 fprintf (file
, ")[%s]", reg_name
);
700 output_addr_const (file
, addr
);
703 output_addr_const (file
, addr
);
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook; body not visible in this
   extract.  */
709 v850_print_operand_punct_valid_p (unsigned char code
)
714 /* When assemble_integer is used to emit the offsets for a switch
715 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
716 output_addr_const will normally barf at this, but it is OK to omit
717 the truncate and just emit the difference of the two labels. The
718 .hword directive will automatically handle the truncation for us.
720 Returns true if rtx was handled, false otherwise. */
723 v850_output_addr_const_extra (FILE * file
, rtx x
)
/* Only TRUNCATE expressions get special handling here.  */
725 if (GET_CODE (x
) != TRUNCATE
)
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x
) == MINUS
736 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
/* Emit the label difference with the TRUNCATE stripped.  */
741 output_addr_const (file
, x
);
745 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
/* Returns an assembler template string for a single-word move.
   NOTE(review): lines are missing from this extract (some return
   statements and the register/register case are not visible).  The
   visible cases: CONST_INT and SFmode CONST_DOUBLE immediates chosen by
   immediate-range constraints (J = signed 5-bit, K = signed 16-bit,
   L = high-half), memory loads/stores, small-data symbols, and
   HIGH/LO_SUM address halves.  */
749 output_move_single (rtx
* operands
)
751 rtx dst
= operands
[0];
752 rtx src
= operands
[1];
759 else if (GET_CODE (src
) == CONST_INT
)
761 HOST_WIDE_INT value
= INTVAL (src
);
763 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
766 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
767 return "movea %1,%.,%0";
769 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
770 return "movhi hi0(%1),%.,%0";
772 /* A random constant. */
773 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
776 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
779 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
781 HOST_WIDE_INT high
, low
;
783 const_double_split (src
, &high
, &low
);
785 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
788 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
789 return "movea %F1,%.,%0";
791 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
792 return "movhi hi0(%F1),%.,%0";
794 /* A random constant. */
795 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
799 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Memory source: load with the size/signedness suffixes (%S/%W).  */
802 else if (GET_CODE (src
) == MEM
)
803 return "%S1ld%W1 %1,%0";
/* Small-data-area symbol: one movea off the area's base register.  */
805 else if (special_symbolref_operand (src
, VOIDmode
))
806 return "movea %O1(%P1),%Q1,%0";
808 else if (GET_CODE (src
) == LABEL_REF
809 || GET_CODE (src
) == SYMBOL_REF
810 || GET_CODE (src
) == CONST
)
812 if (TARGET_V850E
|| TARGET_V850E2_ALL
)
813 return "mov hilo(%1),%0";
815 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
818 else if (GET_CODE (src
) == HIGH
)
819 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src
) == LO_SUM
)
823 operands
[2] = XEXP (src
, 0);
824 operands
[3] = XEXP (src
, 1);
825 return "movea lo(%3),%2,%0";
829 else if (GET_CODE (dst
) == MEM
)
832 return "%S0st%W0 %1,%0";
/* Storing zero uses r0 (%.) as the source.  */
834 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
835 return "%S0st%W0 %.,%0";
837 else if (GET_CODE (src
) == CONST_DOUBLE
838 && CONST0_RTX (GET_MODE (dst
)) == src
)
839 return "%S0st%W0 %.,%0";
842 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
846 /* Generate comparison code. */
/* Predicate: OP is a comparison of the FP condition-code register
   against zero whose mode is one of the "z" set (LT/LE/EQ).
   NOTE(review): the return statements are not visible in this extract.  */
848 v850_float_z_comparison_operator (rtx op
, enum machine_mode mode
)
850 enum rtx_code code
= GET_CODE (op
);
/* Must be a (possibly commutative) comparison rtx ...  */
852 if (GET_RTX_CLASS (code
) != RTX_COMPARE
853 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
856 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
/* ... of the condition-code register against zero.  */
859 if ((GET_CODE (XEXP (op
, 0)) != REG
860 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
861 || XEXP (op
, 1) != const0_rtx
)
864 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LTmode
)
866 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LEmode
)
868 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_EQmode
)
/* Predicate: like v850_float_z_comparison_operator but for the "nz"
   condition set (GT/GE/NE).  Return statements are not visible in this
   extract.  */
875 v850_float_nz_comparison_operator (rtx op
, enum machine_mode mode
)
877 enum rtx_code code
= GET_CODE (op
);
879 if (GET_RTX_CLASS (code
) != RTX_COMPARE
880 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
883 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
/* Must compare the condition-code register against zero.  */
886 if ((GET_CODE (XEXP (op
, 0)) != REG
887 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
888 || XEXP (op
, 1) != const0_rtx
)
891 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GTmode
)
893 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GEmode
)
895 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_NEmode
)
/* Map comparison code COND on OP0 to the condition-code mode used for
   it.  Floating comparisons get a per-condition CC_FPU_* mode; the
   integer fall-through (not visible here) presumably returns CCmode.  */
902 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
904 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
909 return CC_FPU_LEmode
;
911 return CC_FPU_GEmode
;
913 return CC_FPU_LTmode
;
915 return CC_FPU_GTmode
;
917 return CC_FPU_EQmode
;
919 return CC_FPU_NEmode
;
/* Emit the FPU compare insn for COND on OP0/OP1 (DFmode or SFmode) and
   return the CC mode describing the result (via v850_select_cc_mode).
   The switch-case labels between the emits are not visible in this
   extract.  */
928 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
930 if (GET_MODE(op0
) == DFmode
)
935 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
938 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
941 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
944 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
947 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
950 emit_insn (gen_cmpdf_ne_insn (op0
, op1
));
/* NOTE(review): this branch tests the global v850_compare_op0 while the
   DFmode branch above tests the parameter OP0 -- looks inconsistent;
   confirm against upstream v850.c whether this is intentional.  */
956 else if (GET_MODE(v850_compare_op0
) == SFmode
)
961 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
964 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
967 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
970 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
973 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
976 emit_insn (gen_cmpsf_ne_insn(op0
, op1
));
987 return v850_select_cc_mode (cond
, op0
, op1
);
/* Emit a comparison of OP0/OP1 and return an rtx for COND applied to
   the resulting condition-code register.  Integer compares set CCmode's
   CC_REGNUM directly; float compares go through v850_gen_float_compare
   and copy FCC_REGNUM into CC_REGNUM.  */
991 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
993 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
995 emit_insn (gen_cmpsi_insn (op0
, op1
));
996 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
/* Floating-point path: MODE becomes the CC_FPU_* mode of the compare.  */
1001 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1002 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1003 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1005 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
/* NOTE(review): the switch-case labels on the mode are not visible in
   this extract; by the offsets used, the groups appear to be byte
   (2^4/2^7), halfword (2^5/2^8), and word (2^8) -- confirm upstream.
   TARGET_SMALL_SLD restricts sld/sst to the short displacement forms.  */
1013 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1020 if (TARGET_SMALL_SLD
)
1021 max_offset
= (1 << 4);
1022 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1024 max_offset
= (1 << 4);
1026 max_offset
= (1 << 7);
1030 if (TARGET_SMALL_SLD
)
1031 max_offset
= (1 << 5);
1032 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1034 max_offset
= (1 << 5);
1036 max_offset
= (1 << 8);
1041 max_offset
= (1 << 8);
1051 /* Return true if OP is a valid short EP memory reference */
/* A short EP reference is a MEM whose address is either a tiny-data
   symbol, the EP register itself, or EP/TDA-symbol plus a small,
   properly aligned, non-negative constant offset (bounded by
   ep_memory_offset).  */
1054 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1060 /* If we are not using the EP register on a per-function basis
1061 then do not allow this optimization at all. This is to
1062 prevent the use of the SLD/SST instructions which cannot be
1063 guaranteed to work properly due to a hardware bug. */
1067 if (GET_CODE (op
) != MEM
)
1070 max_offset
= ep_memory_offset (mode
, unsigned_load
);
/* Offset must be aligned to the access size.  */
1072 mask
= GET_MODE_SIZE (mode
) - 1;
1074 addr
= XEXP (op
, 0);
1075 if (GET_CODE (addr
) == CONST
)
1076 addr
= XEXP (addr
, 0);
1078 switch (GET_CODE (addr
))
/* Bare symbol: OK only for tiny-data-area symbols.  */
1084 return SYMBOL_REF_TDA_P (addr
);
/* Bare register: must be EP itself.  */
1087 return REGNO (addr
) == EP_REGNUM
;
/* PLUS: base + small constant offset.  */
1090 op0
= XEXP (addr
, 0);
1091 op1
= XEXP (addr
, 1);
1092 if (GET_CODE (op1
) == CONST_INT
1093 && INTVAL (op1
) < max_offset
1094 && INTVAL (op1
) >= 0
1095 && (INTVAL (op1
) & mask
) == 0)
1097 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1100 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1109 /* Substitute memory references involving a pointer, to use the ep pointer,
1110 taking care to save and preserve the ep. */
1113 substitute_ep_register (rtx first_insn
,
1120 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1125 df_set_regs_ever_live (1, true);
1126 *p_r1
= gen_rtx_REG (Pmode
, 1);
1127 *p_ep
= gen_rtx_REG (Pmode
, 30);
1132 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1133 2 * (uses
- 3), uses
, reg_names
[regno
],
1134 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1135 INSN_UID (first_insn
), INSN_UID (last_insn
));
1137 if (GET_CODE (first_insn
) == NOTE
)
1138 first_insn
= next_nonnote_insn (first_insn
);
1140 last_insn
= next_nonnote_insn (last_insn
);
1141 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1143 if (GET_CODE (insn
) == INSN
)
1145 rtx pattern
= single_set (insn
);
1147 /* Replace the memory references. */
1151 /* Memory operands are signed by default. */
1152 int unsignedp
= FALSE
;
1154 if (GET_CODE (SET_DEST (pattern
)) == MEM
1155 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1158 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1159 p_mem
= &SET_DEST (pattern
);
1161 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1162 p_mem
= &SET_SRC (pattern
);
1164 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1166 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1168 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1171 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1179 rtx addr
= XEXP (*p_mem
, 0);
1181 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1182 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1184 else if (GET_CODE (addr
) == PLUS
1185 && GET_CODE (XEXP (addr
, 0)) == REG
1186 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1187 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1188 && ((INTVAL (XEXP (addr
, 1)))
1189 < ep_memory_offset (GET_MODE (*p_mem
),
1191 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1192 *p_mem
= change_address (*p_mem
, VOIDmode
,
1193 gen_rtx_PLUS (Pmode
,
1201 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1202 insn
= prev_nonnote_insn (first_insn
);
1203 if (insn
&& GET_CODE (insn
) == INSN
1204 && GET_CODE (PATTERN (insn
)) == SET
1205 && SET_DEST (PATTERN (insn
)) == *p_ep
1206 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1209 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1211 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1212 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1217 the -mep mode to copy heavily used pointers to ep to use the implicit
1229 regs
[FIRST_PSEUDO_REGISTER
];
1238 /* If not ep mode, just return now. */
1242 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1245 regs
[i
].first_insn
= NULL_RTX
;
1246 regs
[i
].last_insn
= NULL_RTX
;
1249 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1251 switch (GET_CODE (insn
))
1253 /* End of basic block */
1260 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1262 if (max_uses
< regs
[i
].uses
)
1264 max_uses
= regs
[i
].uses
;
1270 substitute_ep_register (regs
[max_regno
].first_insn
,
1271 regs
[max_regno
].last_insn
,
1272 max_uses
, max_regno
, &r1
, &ep
);
1276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 regs
[i
].first_insn
= NULL_RTX
;
1280 regs
[i
].last_insn
= NULL_RTX
;
1288 pattern
= single_set (insn
);
1290 /* See if there are any memory references we can shorten */
1293 rtx src
= SET_SRC (pattern
);
1294 rtx dest
= SET_DEST (pattern
);
1296 /* Memory operands are signed by default. */
1297 int unsignedp
= FALSE
;
1299 /* We might have (SUBREG (MEM)) here, so just get rid of the
1300 subregs to make this code simpler. */
1301 if (GET_CODE (dest
) == SUBREG
1302 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1303 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1304 alter_subreg (&dest
);
1305 if (GET_CODE (src
) == SUBREG
1306 && (GET_CODE (SUBREG_REG (src
)) == MEM
1307 || GET_CODE (SUBREG_REG (src
)) == REG
))
1308 alter_subreg (&src
);
1310 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1313 else if (GET_CODE (dest
) == MEM
)
1316 else if (GET_CODE (src
) == MEM
)
1319 else if (GET_CODE (src
) == SIGN_EXTEND
1320 && GET_CODE (XEXP (src
, 0)) == MEM
)
1321 mem
= XEXP (src
, 0);
1323 else if (GET_CODE (src
) == ZERO_EXTEND
1324 && GET_CODE (XEXP (src
, 0)) == MEM
)
1326 mem
= XEXP (src
, 0);
1332 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1335 else if (!use_ep
&& mem
1336 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1338 rtx addr
= XEXP (mem
, 0);
1342 if (GET_CODE (addr
) == REG
)
1345 regno
= REGNO (addr
);
1348 else if (GET_CODE (addr
) == PLUS
1349 && GET_CODE (XEXP (addr
, 0)) == REG
1350 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1351 && ((INTVAL (XEXP (addr
, 1)))
1352 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1353 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1356 regno
= REGNO (XEXP (addr
, 0));
1365 regs
[regno
].last_insn
= insn
;
1366 if (!regs
[regno
].first_insn
)
1367 regs
[regno
].first_insn
= insn
;
1371 /* Loading up a register in the basic block zaps any savings
1373 if (GET_CODE (dest
) == REG
)
1375 enum machine_mode mode
= GET_MODE (dest
);
1379 regno
= REGNO (dest
);
1380 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1384 /* See if we can use the pointer before this
1389 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1391 if (max_uses
< regs
[i
].uses
)
1393 max_uses
= regs
[i
].uses
;
1399 && max_regno
>= regno
1400 && max_regno
< endregno
)
1402 substitute_ep_register (regs
[max_regno
].first_insn
,
1403 regs
[max_regno
].last_insn
,
1404 max_uses
, max_regno
, &r1
,
1407 /* Since we made a substitution, zap all remembered
1409 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1412 regs
[i
].first_insn
= NULL_RTX
;
1413 regs
[i
].last_insn
= NULL_RTX
;
1418 for (i
= regno
; i
< endregno
; i
++)
1421 regs
[i
].first_insn
= NULL_RTX
;
1422 regs
[i
].last_insn
= NULL_RTX
;
1430 /* # of registers saved by the interrupt handler. */
1431 #define INTERRUPT_FIXED_NUM 5
1433 /* # of bytes for registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 /* # of words saved for other registers. */
1437 #define INTERRUPT_ALL_SAVE_NUM \
1438 (30 - INTERRUPT_FIXED_NUM)
/* Bytes for the non-fixed registers saved by an interrupt handler.  */
1440 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Compute the byte size needed to save callee registers, and record in
   *P_REG_SAVED a bitmask of the registers that must be saved.
   NOTE(review): the extract is missing lines (switch statements, return,
   and several loop bodies are truncated); fragments are annotated as-is.  */
1443 compute_register_save_size (long * p_reg_saved
)
1447 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1448 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1451 /* Count the return pointer if we need to save it. */
1452 if (crtl
->profile
&& !call_p
)
1454 df_set_regs_ever_live (LINK_POINTER_REGNUM
, true);
1458 /* Count space for the register saves. */
1459 if (interrupt_handler
)
/* Interrupt handlers: consider every register r0..r31.  */
1461 for (i
= 0; i
<= 31; i
++)
1465 if (df_regs_ever_live_p (i
) || call_p
)
1468 reg_saved
|= 1L << i
;
1472 /* We don't save/restore r0 or the stack pointer */
1474 case STACK_POINTER_REGNUM
:
1477 /* For registers with fixed use, we save them, set them to the
1478 appropriate value, and then restore them.
1479 These registers are handled specially, so don't list them
1480 on the list of registers to save in the prologue. */
1481 case 1: /* temp used to hold ep */
1483 case 10: /* temp used to call interrupt save/restore */
1484 case 11: /* temp used to call interrupt save/restore (long call) */
1485 case EP_REGNUM
: /* ep */
1492 /* Find the first register that needs to be saved. */
1493 for (i
= 0; i
<= 31; i
++)
1494 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1495 || i
== LINK_POINTER_REGNUM
))
1498 /* If it is possible that an out-of-line helper function might be
1499 used to generate the prologue for the current function, then we
1500 need to cover the possibility that such a helper function will
1501 be used, despite the fact that there might be gaps in the list of
1502 registers that need to be saved. To detect this we note that the
1503 helper functions always push at least register r29 (provided
1504 that the function is not an interrupt handler). */
1506 if (TARGET_PROLOG_FUNCTION
1507 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1512 reg_saved
|= 1L << i
;
1517 /* Helper functions save all registers between the starting
1518 register and the last register, regardless of whether they
1519 are actually used by the function or not. */
1520 for (; i
<= 29; i
++)
1523 reg_saved
|= 1L << i
;
1526 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1529 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
/* Non-helper path: scan the remaining registers individually.  */
1534 for (; i
<= 31; i
++)
1535 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1536 || i
== LINK_POINTER_REGNUM
))
1539 reg_saved
|= 1L << i
;
1545 *p_reg_saved
= reg_saved
;
/* Total frame size: local SIZE plus saved registers plus outgoing
   argument area.  The leading terms of the sum are not visible in this
   extract.  */
1551 compute_frame_size (int size
, long * p_reg_saved
)
1554 + compute_register_save_size (p_reg_saved
)
1555 + crtl
->outgoing_args_size
);
/* Decide whether calling an out-of-line prologue/restore helper is
   shorter (in code bytes) than saving NUM_SAVE registers inline for a
   frame of FRAME_SIZE bytes.  The *_len variables are rough byte-length
   estimates of each sequence.  */
1559 use_prolog_function (int num_save
, int frame_size
)
1561 int alloc_stack
= (4 * num_save
);
1562 int unalloc_stack
= frame_size
- alloc_stack
;
1563 int save_func_len
, restore_func_len
;
1564 int save_normal_len
, restore_normal_len
;
/* A callt helper costs 2 bytes; otherwise a jarl (or long-call
   sequence) is needed.  */
1566 if (! TARGET_DISABLE_CALLT
)
1567 save_func_len
= restore_func_len
= 2;
1569 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
/* Extra stack not covered by the helper must be adjusted separately.  */
1573 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1574 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1579 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1581 save_normal_len
= restore_normal_len
= 4 * num_save
;
1583 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1584 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1593 expand_prologue (void)
1596 unsigned int size
= get_frame_size ();
1597 unsigned int actual_fsize
;
1598 unsigned int init_stack_alloc
= 0;
1601 unsigned int num_save
;
1603 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1606 actual_fsize
= compute_frame_size (size
, ®_saved
);
1608 /* Save/setup global registers for interrupt functions right now. */
1609 if (interrupt_handler
)
1611 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1612 emit_insn (gen_callt_save_interrupt ());
1614 emit_insn (gen_save_interrupt ());
1616 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1618 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1619 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1622 /* Identify all of the saved registers. */
1624 for (i
= 1; i
< 32; i
++)
1626 if (((1L << i
) & reg_saved
) != 0)
1627 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1630 /* See if we have an insn that allocates stack space and saves the particular
1631 registers we want to. */
1632 save_all
= NULL_RTX
;
1633 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0)
1635 if (use_prolog_function (num_save
, actual_fsize
))
1637 int alloc_stack
= 4 * num_save
;
1640 save_all
= gen_rtx_PARALLEL
1642 rtvec_alloc (num_save
+ 1
1643 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1645 XVECEXP (save_all
, 0, 0)
1646 = gen_rtx_SET (VOIDmode
,
1648 gen_rtx_PLUS (Pmode
,
1650 GEN_INT(-alloc_stack
)));
1651 for (i
= 0; i
< num_save
; i
++)
1654 XVECEXP (save_all
, 0, i
+1)
1655 = gen_rtx_SET (VOIDmode
,
1657 gen_rtx_PLUS (Pmode
,
1663 if (TARGET_DISABLE_CALLT
)
1665 XVECEXP (save_all
, 0, num_save
+ 1)
1666 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1668 if (TARGET_LONG_CALLS
)
1669 XVECEXP (save_all
, 0, num_save
+ 2)
1670 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1673 code
= recog (save_all
, NULL_RTX
, NULL
);
1676 rtx insn
= emit_insn (save_all
);
1677 INSN_CODE (insn
) = code
;
1678 actual_fsize
-= alloc_stack
;
1682 save_all
= NULL_RTX
;
1686 /* If no prolog save function is available, store the registers the old
1687 fashioned way (one by one). */
1690 /* Special case interrupt functions that save all registers for a call. */
1691 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1693 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1694 emit_insn (gen_callt_save_all_interrupt ());
1696 emit_insn (gen_save_all_interrupt ());
1701 /* If the stack is too big, allocate it in chunks so we can do the
1702 register saves. We use the register save size so we use the ep
1704 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1705 init_stack_alloc
= compute_register_save_size (NULL
);
1707 init_stack_alloc
= actual_fsize
;
1709 /* Save registers at the beginning of the stack frame. */
1710 offset
= init_stack_alloc
- 4;
1712 if (init_stack_alloc
)
1713 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1715 GEN_INT (- (signed) init_stack_alloc
)));
1717 /* Save the return pointer first. */
1718 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1720 emit_move_insn (gen_rtx_MEM (SImode
,
1721 plus_constant (stack_pointer_rtx
,
1723 save_regs
[--num_save
]);
1727 for (i
= 0; i
< num_save
; i
++)
1729 emit_move_insn (gen_rtx_MEM (SImode
,
1730 plus_constant (stack_pointer_rtx
,
1738 /* Allocate the rest of the stack that was not allocated above (either it is
1739 > 32K or we just called a function to save the registers and needed more
1741 if (actual_fsize
> init_stack_alloc
)
1743 int diff
= actual_fsize
- init_stack_alloc
;
1744 if (CONST_OK_FOR_K (-diff
))
1745 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1750 rtx reg
= gen_rtx_REG (Pmode
, 12);
1751 emit_move_insn (reg
, GEN_INT (-diff
));
1752 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, reg
));
1756 /* If we need a frame pointer, set it up now. */
1757 if (frame_pointer_needed
)
1758 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1763 expand_epilogue (void)
1766 unsigned int size
= get_frame_size ();
1768 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1769 rtx restore_regs
[32];
1771 unsigned int num_restore
;
1773 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1775 /* Eliminate the initial stack stored by interrupt functions. */
1776 if (interrupt_handler
)
1778 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1779 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1780 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1783 /* Cut off any dynamic stack created. */
1784 if (frame_pointer_needed
)
1785 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1787 /* Identify all of the saved registers. */
1789 for (i
= 1; i
< 32; i
++)
1791 if (((1L << i
) & reg_saved
) != 0)
1792 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1795 /* See if we have an insn that restores the particular registers we
1797 restore_all
= NULL_RTX
;
1799 if (TARGET_PROLOG_FUNCTION
1801 && !interrupt_handler
)
1803 int alloc_stack
= (4 * num_restore
);
1805 /* Don't bother checking if we don't actually save any space. */
1806 if (use_prolog_function (num_restore
, actual_fsize
))
1809 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1810 rtvec_alloc (num_restore
+ 2));
1811 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1812 XVECEXP (restore_all
, 0, 1)
1813 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1814 gen_rtx_PLUS (Pmode
,
1816 GEN_INT (alloc_stack
)));
1818 offset
= alloc_stack
- 4;
1819 for (i
= 0; i
< num_restore
; i
++)
1821 XVECEXP (restore_all
, 0, i
+2)
1822 = gen_rtx_SET (VOIDmode
,
1825 gen_rtx_PLUS (Pmode
,
1831 code
= recog (restore_all
, NULL_RTX
, NULL
);
1837 actual_fsize
-= alloc_stack
;
1840 if (CONST_OK_FOR_K (actual_fsize
))
1841 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1843 GEN_INT (actual_fsize
)));
1846 rtx reg
= gen_rtx_REG (Pmode
, 12);
1847 emit_move_insn (reg
, GEN_INT (actual_fsize
));
1848 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1854 insn
= emit_jump_insn (restore_all
);
1855 INSN_CODE (insn
) = code
;
1859 restore_all
= NULL_RTX
;
1863 /* If no epilogue save function is available, restore the registers the
1864 old fashioned way (one by one). */
1867 unsigned int init_stack_free
;
1869 /* If the stack is large, we need to cut it down in 2 pieces. */
1870 if (interrupt_handler
)
1871 init_stack_free
= 0;
1872 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1873 init_stack_free
= 4 * num_restore
;
1875 init_stack_free
= (signed) actual_fsize
;
1877 /* Deallocate the rest of the stack if it is > 32K. */
1878 if ((unsigned int) actual_fsize
> init_stack_free
)
1882 diff
= actual_fsize
- init_stack_free
;
1884 if (CONST_OK_FOR_K (diff
))
1885 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1890 rtx reg
= gen_rtx_REG (Pmode
, 12);
1891 emit_move_insn (reg
, GEN_INT (diff
));
1892 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1898 /* Special case interrupt functions that save all registers
1900 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1902 if (! TARGET_DISABLE_CALLT
)
1903 emit_insn (gen_callt_restore_all_interrupt ());
1905 emit_insn (gen_restore_all_interrupt ());
1909 /* Restore registers from the beginning of the stack frame. */
1910 int offset
= init_stack_free
- 4;
1912 /* Restore the return pointer first. */
1914 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1916 emit_move_insn (restore_regs
[--num_restore
],
1917 gen_rtx_MEM (SImode
,
1918 plus_constant (stack_pointer_rtx
,
1923 for (i
= 0; i
< num_restore
; i
++)
1925 emit_move_insn (restore_regs
[i
],
1926 gen_rtx_MEM (SImode
,
1927 plus_constant (stack_pointer_rtx
,
1930 emit_use (restore_regs
[i
]);
1934 /* Cut back the remainder of the stack. */
1935 if (init_stack_free
)
1936 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1938 GEN_INT (init_stack_free
)));
1941 /* And return or use reti for interrupt handlers. */
1942 if (interrupt_handler
)
1944 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1945 emit_insn (gen_callt_return_interrupt ());
1947 emit_jump_insn (gen_return_interrupt ());
1949 else if (actual_fsize
)
1950 emit_jump_insn (gen_return_internal ());
1952 emit_jump_insn (gen_return_simple ());
1955 v850_interrupt_cache_p
= FALSE
;
1956 v850_interrupt_p
= FALSE
;
1959 /* Update the condition code from the insn. */
1961 notice_update_cc (rtx body
, rtx insn
)
1963 switch (get_attr_cc (insn
))
1966 /* Insn does not affect CC at all. */
1970 /* Insn does not change CC, but the 0'th operand has been changed. */
1971 if (cc_status
.value1
!= 0
1972 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
1973 cc_status
.value1
= 0;
1977 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1978 V,C is in an unusable state. */
1980 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
1981 cc_status
.value1
= recog_data
.operand
[0];
1985 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1986 C is in an unusable state. */
1988 cc_status
.flags
|= CC_NO_CARRY
;
1989 cc_status
.value1
= recog_data
.operand
[0];
1993 /* The insn is a compare instruction. */
1995 cc_status
.value1
= SET_SRC (body
);
1999 /* Insn doesn't leave CC in a usable state. */
2008 /* Retrieve the data area that has been chosen for the given decl. */
2011 v850_get_data_area (tree decl
)
2013 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2014 return DATA_AREA_SDA
;
2016 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2017 return DATA_AREA_TDA
;
2019 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2020 return DATA_AREA_ZDA
;
2022 return DATA_AREA_NORMAL
;
2025 /* Store the indicated data area in the decl's attributes. */
2028 v850_set_data_area (tree decl
, v850_data_area data_area
)
2034 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2035 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2036 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2041 DECL_ATTRIBUTES (decl
) = tree_cons
2042 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2045 /* Handle an "interrupt" attribute; arguments as in
2046 struct attribute_spec.handler. */
2048 v850_handle_interrupt_attribute (tree
* node
,
2050 tree args ATTRIBUTE_UNUSED
,
2051 int flags ATTRIBUTE_UNUSED
,
2052 bool * no_add_attrs
)
2054 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2056 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2058 *no_add_attrs
= true;
2064 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2065 struct attribute_spec.handler. */
2067 v850_handle_data_area_attribute (tree
* node
,
2069 tree args ATTRIBUTE_UNUSED
,
2070 int flags ATTRIBUTE_UNUSED
,
2071 bool * no_add_attrs
)
2073 v850_data_area data_area
;
2074 v850_data_area area
;
2077 /* Implement data area attribute. */
2078 if (is_attribute_p ("sda", name
))
2079 data_area
= DATA_AREA_SDA
;
2080 else if (is_attribute_p ("tda", name
))
2081 data_area
= DATA_AREA_TDA
;
2082 else if (is_attribute_p ("zda", name
))
2083 data_area
= DATA_AREA_ZDA
;
2087 switch (TREE_CODE (decl
))
2090 if (current_function_decl
!= NULL_TREE
)
2092 error_at (DECL_SOURCE_LOCATION (decl
),
2093 "data area attributes cannot be specified for "
2095 *no_add_attrs
= true;
2101 area
= v850_get_data_area (decl
);
2102 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2104 error ("data area of %q+D conflicts with previous declaration",
2106 *no_add_attrs
= true;
2118 /* Return nonzero if FUNC is an interrupt function as specified
2119 by the "interrupt" attribute. */
2122 v850_interrupt_function_p (tree func
)
2127 if (v850_interrupt_cache_p
)
2128 return v850_interrupt_p
;
2130 if (TREE_CODE (func
) != FUNCTION_DECL
)
2133 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2139 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2140 ret
= a
!= NULL_TREE
;
2143 /* Its not safe to trust global variables until after function inlining has
2145 if (reload_completed
| reload_in_progress
)
2146 v850_interrupt_p
= ret
;
2153 v850_encode_data_area (tree decl
, rtx symbol
)
2157 /* Map explicit sections into the appropriate attribute */
2158 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2160 if (DECL_SECTION_NAME (decl
))
2162 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2164 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2165 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2167 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2168 v850_set_data_area (decl
, DATA_AREA_SDA
);
2170 else if (streq (name
, ".tdata"))
2171 v850_set_data_area (decl
, DATA_AREA_TDA
);
2174 /* If no attribute, support -m{zda,sda,tda}=n */
2177 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2181 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2182 v850_set_data_area (decl
, DATA_AREA_TDA
);
2184 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2185 v850_set_data_area (decl
, DATA_AREA_SDA
);
2187 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2188 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2191 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2195 flags
= SYMBOL_REF_FLAGS (symbol
);
2196 switch (v850_get_data_area (decl
))
2198 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2199 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2200 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2201 default: gcc_unreachable ();
2203 SYMBOL_REF_FLAGS (symbol
) = flags
;
2207 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2209 default_encode_section_info (decl
, rtl
, first
);
2211 if (TREE_CODE (decl
) == VAR_DECL
2212 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2213 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2216 /* Construct a JR instruction to a routine that will perform the equivalent of
2217 the RTL passed in as an argument. This RTL is a function epilogue that
2218 pops registers off the stack and possibly releases some extra stack space
2219 as well. The code has already verified that the RTL matches these
2223 construct_restore_jr (rtx op
)
2225 int count
= XVECLEN (op
, 0);
2227 unsigned long int mask
;
2228 unsigned long int first
;
2229 unsigned long int last
;
2231 static char buff
[100]; /* XXX */
2235 error ("bogus JR construction: %d", count
);
2239 /* Work out how many bytes to pop off the stack before retrieving
2241 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2242 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2243 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2245 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2247 /* Each pop will remove 4 bytes from the stack.... */
2248 stack_bytes
-= (count
- 2) * 4;
2250 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2251 if (stack_bytes
!= 0)
2253 error ("bad amount of stack space removal: %d", stack_bytes
);
2257 /* Now compute the bit mask of registers to push. */
2259 for (i
= 2; i
< count
; i
++)
2261 rtx vector_element
= XVECEXP (op
, 0, i
);
2263 gcc_assert (GET_CODE (vector_element
) == SET
);
2264 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2265 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2268 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2271 /* Scan for the first register to pop. */
2272 for (first
= 0; first
< 32; first
++)
2274 if (mask
& (1 << first
))
2278 gcc_assert (first
< 32);
2280 /* Discover the last register to pop. */
2281 if (mask
& (1 << LINK_POINTER_REGNUM
))
2283 last
= LINK_POINTER_REGNUM
;
2287 gcc_assert (!stack_bytes
);
2288 gcc_assert (mask
& (1 << 29));
2293 /* Note, it is possible to have gaps in the register mask.
2294 We ignore this here, and generate a JR anyway. We will
2295 be popping more registers than is strictly necessary, but
2296 it does save code space. */
2298 if (TARGET_LONG_CALLS
)
2303 sprintf (name
, "__return_%s", reg_names
[first
]);
2305 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2307 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2313 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2315 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2322 /* Construct a JARL instruction to a routine that will perform the equivalent
2323 of the RTL passed as a parameter. This RTL is a function prologue that
2324 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2325 some stack space as well. The code has already verified that the RTL
2326 matches these requirements. */
2328 construct_save_jarl (rtx op
)
2330 int count
= XVECLEN (op
, 0);
2332 unsigned long int mask
;
2333 unsigned long int first
;
2334 unsigned long int last
;
2336 static char buff
[100]; /* XXX */
2338 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2340 error ("bogus JARL construction: %d", count
);
2345 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2346 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2347 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2348 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2350 /* Work out how many bytes to push onto the stack after storing the
2352 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2354 /* Each push will put 4 bytes from the stack.... */
2355 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2357 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2358 if (stack_bytes
!= 0)
2360 error ("bad amount of stack space removal: %d", stack_bytes
);
2364 /* Now compute the bit mask of registers to push. */
2366 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2368 rtx vector_element
= XVECEXP (op
, 0, i
);
2370 gcc_assert (GET_CODE (vector_element
) == SET
);
2371 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2372 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2375 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2378 /* Scan for the first register to push. */
2379 for (first
= 0; first
< 32; first
++)
2381 if (mask
& (1 << first
))
2385 gcc_assert (first
< 32);
2387 /* Discover the last register to push. */
2388 if (mask
& (1 << LINK_POINTER_REGNUM
))
2390 last
= LINK_POINTER_REGNUM
;
2394 gcc_assert (!stack_bytes
);
2395 gcc_assert (mask
& (1 << 29));
2400 /* Note, it is possible to have gaps in the register mask.
2401 We ignore this here, and generate a JARL anyway. We will
2402 be pushing more registers than is strictly necessary, but
2403 it does save code space. */
2405 if (TARGET_LONG_CALLS
)
2410 sprintf (name
, "__save_%s", reg_names
[first
]);
2412 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2414 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2420 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2422 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2429 extern tree last_assemble_variable_decl
;
2430 extern int size_directive_output
;
2432 /* A version of asm_output_aligned_bss() that copes with the special
2433 data areas of the v850. */
2435 v850_output_aligned_bss (FILE * file
,
2438 unsigned HOST_WIDE_INT size
,
2441 switch (v850_get_data_area (decl
))
2444 switch_to_section (zbss_section
);
2448 switch_to_section (sbss_section
);
2452 switch_to_section (tdata_section
);
2455 switch_to_section (bss_section
);
2459 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2460 #ifdef ASM_DECLARE_OBJECT_NAME
2461 last_assemble_variable_decl
= decl
;
2462 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2464 /* Standard thing is just output label for the object. */
2465 ASM_OUTPUT_LABEL (file
, name
);
2466 #endif /* ASM_DECLARE_OBJECT_NAME */
2467 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2470 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2472 v850_output_common (FILE * file
,
2478 if (decl
== NULL_TREE
)
2480 fprintf (file
, "%s", COMMON_ASM_OP
);
2484 switch (v850_get_data_area (decl
))
2487 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2491 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2495 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2499 fprintf (file
, "%s", COMMON_ASM_OP
);
2504 assemble_name (file
, name
);
2505 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2508 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2510 v850_output_local (FILE * file
,
2516 fprintf (file
, "%s", LOCAL_ASM_OP
);
2517 assemble_name (file
, name
);
2518 fprintf (file
, "\n");
2520 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2523 /* Add data area to the given declaration if a ghs data area pragma is
2524 currently in effect (#pragma ghs startXXX/endXXX). */
2526 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2529 && data_area_stack
->data_area
2530 && current_function_decl
== NULL_TREE
2531 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2532 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2533 v850_set_data_area (decl
, data_area_stack
->data_area
);
2535 /* Initialize the default names of the v850 specific sections,
2536 if this has not been done before. */
2538 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2540 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2541 = build_string (sizeof (".sdata")-1, ".sdata");
2543 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2544 = build_string (sizeof (".rosdata")-1, ".rosdata");
2546 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2547 = build_string (sizeof (".tdata")-1, ".tdata");
2549 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2550 = build_string (sizeof (".zdata")-1, ".zdata");
2552 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2553 = build_string (sizeof (".rozdata")-1, ".rozdata");
2556 if (current_function_decl
== NULL_TREE
2557 && (TREE_CODE (decl
) == VAR_DECL
2558 || TREE_CODE (decl
) == CONST_DECL
2559 || TREE_CODE (decl
) == FUNCTION_DECL
)
2560 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2561 && !DECL_SECTION_NAME (decl
))
2563 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2564 tree chosen_section
;
2566 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2567 kind
= GHS_SECTION_KIND_TEXT
;
2570 /* First choose a section kind based on the data area of the decl. */
2571 switch (v850_get_data_area (decl
))
2577 kind
= ((TREE_READONLY (decl
))
2578 ? GHS_SECTION_KIND_ROSDATA
2579 : GHS_SECTION_KIND_SDATA
);
2583 kind
= GHS_SECTION_KIND_TDATA
;
2587 kind
= ((TREE_READONLY (decl
))
2588 ? GHS_SECTION_KIND_ROZDATA
2589 : GHS_SECTION_KIND_ZDATA
);
2592 case DATA_AREA_NORMAL
: /* default data area */
2593 if (TREE_READONLY (decl
))
2594 kind
= GHS_SECTION_KIND_RODATA
;
2595 else if (DECL_INITIAL (decl
))
2596 kind
= GHS_SECTION_KIND_DATA
;
2598 kind
= GHS_SECTION_KIND_BSS
;
2602 /* Now, if the section kind has been explicitly renamed,
2603 then attach a section attribute. */
2604 chosen_section
= GHS_current_section_names
[(int) kind
];
2606 /* Otherwise, if this kind of section needs an explicit section
2607 attribute, then also attach one. */
2608 if (chosen_section
== NULL
)
2609 chosen_section
= GHS_default_section_names
[(int) kind
];
2613 /* Only set the section name if specified by a pragma, because
2614 otherwise it will force those variables to get allocated storage
2615 in this module, rather than by the linker. */
2616 DECL_SECTION_NAME (decl
) = chosen_section
;
2621 /* Construct a DISPOSE instruction that is the equivalent of
2622 the given RTX. We have already verified that this should
2626 construct_dispose_instruction (rtx op
)
2628 int count
= XVECLEN (op
, 0);
2630 unsigned long int mask
;
2632 static char buff
[ 100 ]; /* XXX */
2637 error ("bogus DISPOSE construction: %d", count
);
2641 /* Work out how many bytes to pop off the
2642 stack before retrieving registers. */
2643 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2644 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2645 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2647 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2649 /* Each pop will remove 4 bytes from the stack.... */
2650 stack_bytes
-= (count
- 2) * 4;
2652 /* Make sure that the amount we are popping
2653 will fit into the DISPOSE instruction. */
2654 if (stack_bytes
> 128)
2656 error ("too much stack space to dispose of: %d", stack_bytes
);
2660 /* Now compute the bit mask of registers to push. */
2663 for (i
= 2; i
< count
; i
++)
2665 rtx vector_element
= XVECEXP (op
, 0, i
);
2667 gcc_assert (GET_CODE (vector_element
) == SET
);
2668 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2669 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2672 if (REGNO (SET_DEST (vector_element
)) == 2)
2675 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2678 if (! TARGET_DISABLE_CALLT
2679 && (use_callt
|| stack_bytes
== 0))
2683 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2688 for (i
= 20; i
< 32; i
++)
2689 if (mask
& (1 << i
))
2693 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2695 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2696 i
, (mask
& (1 << 31)) ? "31c" : "29");
2701 static char regs
[100]; /* XXX */
2704 /* Generate the DISPOSE instruction. Note we could just issue the
2705 bit mask as a number as the assembler can cope with this, but for
2706 the sake of our readers we turn it into a textual description. */
2710 for (i
= 20; i
< 32; i
++)
2712 if (mask
& (1 << i
))
2717 strcat (regs
, ", ");
2722 strcat (regs
, reg_names
[ first
]);
2724 for (i
++; i
< 32; i
++)
2725 if ((mask
& (1 << i
)) == 0)
2730 strcat (regs
, " - ");
2731 strcat (regs
, reg_names
[ i
- 1 ] );
2736 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2742 /* Construct a PREPARE instruction that is the equivalent of
2743 the given RTL. We have already verified that this should
2747 construct_prepare_instruction (rtx op
)
2751 unsigned long int mask
;
2753 static char buff
[ 100 ]; /* XXX */
2756 if (XVECLEN (op
, 0) <= 1)
2758 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2762 /* Work out how many bytes to push onto
2763 the stack after storing the registers. */
2764 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2765 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2766 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2768 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2771 /* Make sure that the amount we are popping
2772 will fit into the DISPOSE instruction. */
2773 if (stack_bytes
< -128)
2775 error ("too much stack space to prepare: %d", stack_bytes
);
2779 /* Now compute the bit mask of registers to push. */
2782 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2784 rtx vector_element
= XVECEXP (op
, 0, i
);
2786 if (GET_CODE (vector_element
) == CLOBBER
)
2789 gcc_assert (GET_CODE (vector_element
) == SET
);
2790 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2791 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2794 if (REGNO (SET_SRC (vector_element
)) == 2)
2797 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2801 stack_bytes
+= count
* 4;
2803 if ((! TARGET_DISABLE_CALLT
)
2804 && (use_callt
|| stack_bytes
== 0))
2808 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2812 for (i
= 20; i
< 32; i
++)
2813 if (mask
& (1 << i
))
2817 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2819 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2820 i
, (mask
& (1 << 31)) ? "31c" : "29");
2824 static char regs
[100]; /* XXX */
2828 /* Generate the PREPARE instruction. Note we could just issue the
2829 bit mask as a number as the assembler can cope with this, but for
2830 the sake of our readers we turn it into a textual description. */
2834 for (i
= 20; i
< 32; i
++)
2836 if (mask
& (1 << i
))
2841 strcat (regs
, ", ");
2846 strcat (regs
, reg_names
[ first
]);
2848 for (i
++; i
< 32; i
++)
2849 if ((mask
& (1 << i
)) == 0)
2854 strcat (regs
, " - ");
2855 strcat (regs
, reg_names
[ i
- 1 ] );
2860 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2866 /* Return an RTX indicating where the return address to the
2867 calling function can be found. */
2870 v850_return_addr (int count
)
2875 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2878 /* Implement TARGET_ASM_INIT_SECTIONS. */
2881 v850_asm_init_sections (void)
2884 = get_unnamed_section (0, output_section_asm_op
,
2885 "\t.section .rosdata,\"a\"");
2888 = get_unnamed_section (0, output_section_asm_op
,
2889 "\t.section .rozdata,\"a\"");
2892 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2893 "\t.section .tdata,\"aw\"");
2896 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2897 "\t.section .zdata,\"aw\"");
2900 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2901 output_section_asm_op
,
2902 "\t.section .zbss,\"aw\"");
2906 v850_select_section (tree exp
,
2907 int reloc ATTRIBUTE_UNUSED
,
2908 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2910 if (TREE_CODE (exp
) == VAR_DECL
)
2913 if (!TREE_READONLY (exp
)
2914 || TREE_SIDE_EFFECTS (exp
)
2915 || !DECL_INITIAL (exp
)
2916 || (DECL_INITIAL (exp
) != error_mark_node
2917 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2922 switch (v850_get_data_area (exp
))
2925 return is_const
? rozdata_section
: zdata_section
;
2928 return tdata_section
;
2931 return is_const
? rosdata_section
: sdata_section
;
2934 return is_const
? readonly_data_section
: data_section
;
2937 return readonly_data_section
;
2940 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2943 v850_function_value_regno_p (const unsigned int regno
)
2945 return (regno
== 10);
2948 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2951 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2953 /* Return values > 8 bytes in length in memory. */
2954 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
2957 /* Worker function for TARGET_FUNCTION_VALUE. */
2960 v850_function_value (const_tree valtype
,
2961 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2962 bool outgoing ATTRIBUTE_UNUSED
)
2964 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
2968 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2971 v850_setup_incoming_varargs (cumulative_args_t ca
,
2972 enum machine_mode mode ATTRIBUTE_UNUSED
,
2973 tree type ATTRIBUTE_UNUSED
,
2974 int *pretend_arg_size ATTRIBUTE_UNUSED
,
2975 int second_time ATTRIBUTE_UNUSED
)
2977 get_cumulative_args (ca
)->anonymous_args
= (!TARGET_GHS
? 1 : 0);
2980 /* Worker function for TARGET_CAN_ELIMINATE. */
2983 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
2985 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
2988 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2990 If TARGET_APP_REGS is not defined then add r2 and r5 to
2991 the pool of fixed registers. See PR 14505. */
2994 v850_conditional_register_usage (void)
2996 if (TARGET_APP_REGS
)
2998 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
2999 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the fixed
   part of the trampoline; the two trailing .long slots are filled in
   by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3017 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3020 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3022 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3024 emit_block_move (m_tramp
, assemble_trampoline_template (),
3025 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3027 mem
= adjust_address (m_tramp
, SImode
, 16);
3028 emit_move_insn (mem
, chain_value
);
3029 mem
= adjust_address (m_tramp
, SImode
, 20);
3030 emit_move_insn (mem
, fnaddr
);
3034 v850_issue_rate (void)
3036 return (TARGET_V850E2_ALL
? 2 : 1);
3039 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3042 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3044 return (GET_CODE (x
) == CONST_DOUBLE
3045 || !(GET_CODE (x
) == CONST
3046 && GET_CODE (XEXP (x
, 0)) == PLUS
3047 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3048 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3049 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
/* Implements TARGET_MEMORY_MOVE_COST: relative cost of moving a value
   of MODE between a register and memory; IN selects the load
   direction.  NOTE(review): this extraction is lossy -- the switch's
   case labels and their return values (original lines 3058-3066) are
   missing here; only the header, the switch and its default arm are
   visible.  Recover the case arms from the repository before editing.  */
3053 v850_memory_move_cost (enum machine_mode mode
,
3054 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3057 switch (GET_MODE_SIZE (mode
))
/* Default arm: presumably ~3 cost units per halfword for loads and 1
   for stores -- TODO confirm against the full source.  */
3067 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3071 /* V850 specific attributes. */
3073 static const struct attribute_spec v850_attribute_table
[] =
3075 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3076 affects_type_identity } */
3077 { "interrupt_handler", 0, 0, true, false, false,
3078 v850_handle_interrupt_attribute
, false },
3079 { "interrupt", 0, 0, true, false, false,
3080 v850_handle_interrupt_attribute
, false },
3081 { "sda", 0, 0, true, false, false,
3082 v850_handle_data_area_attribute
, false },
3083 { "tda", 0, 0, true, false, false,
3084 v850_handle_data_area_attribute
, false },
3085 { "zda", 0, 0, true, false, false,
3086 v850_handle_data_area_attribute
, false },
3087 { NULL
, 0, 0, false, false, false, NULL
, false }
3090 /* Initialize the GCC target structure. */
3092 #undef TARGET_MEMORY_MOVE_COST
3093 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3095 #undef TARGET_ASM_ALIGNED_HI_OP
3096 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3098 #undef TARGET_PRINT_OPERAND
3099 #define TARGET_PRINT_OPERAND v850_print_operand
3100 #undef TARGET_PRINT_OPERAND_ADDRESS
3101 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3102 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3103 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3105 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3106 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3108 #undef TARGET_ATTRIBUTE_TABLE
3109 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3111 #undef TARGET_INSERT_ATTRIBUTES
3112 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3114 #undef TARGET_ASM_SELECT_SECTION
3115 #define TARGET_ASM_SELECT_SECTION v850_select_section
3117 /* The assembler supports switchable .bss sections, but
3118 v850_select_section doesn't yet make use of them. */
3119 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3120 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3122 #undef TARGET_ENCODE_SECTION_INFO
3123 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3125 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3126 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3128 #undef TARGET_RTX_COSTS
3129 #define TARGET_RTX_COSTS v850_rtx_costs
3131 #undef TARGET_ADDRESS_COST
3132 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3134 #undef TARGET_MACHINE_DEPENDENT_REORG
3135 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3137 #undef TARGET_SCHED_ISSUE_RATE
3138 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3140 #undef TARGET_FUNCTION_VALUE_REGNO_P
3141 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3142 #undef TARGET_FUNCTION_VALUE
3143 #define TARGET_FUNCTION_VALUE v850_function_value
3145 #undef TARGET_PROMOTE_PROTOTYPES
3146 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3148 #undef TARGET_RETURN_IN_MEMORY
3149 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3151 #undef TARGET_PASS_BY_REFERENCE
3152 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3154 #undef TARGET_CALLEE_COPIES
3155 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3157 #undef TARGET_SETUP_INCOMING_VARARGS
3158 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3160 #undef TARGET_ARG_PARTIAL_BYTES
3161 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3163 #undef TARGET_FUNCTION_ARG
3164 #define TARGET_FUNCTION_ARG v850_function_arg
3166 #undef TARGET_FUNCTION_ARG_ADVANCE
3167 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3169 #undef TARGET_CAN_ELIMINATE
3170 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3172 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3173 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3175 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3176 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3177 #undef TARGET_TRAMPOLINE_INIT
3178 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3180 #undef TARGET_STRICT_ARGUMENT_NAMING
3181 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3183 #undef TARGET_LEGITIMATE_CONSTANT_P
3184 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3186 struct gcc_target targetm
= TARGET_INITIALIZER
;
3188 #include "gt-v850.h"