1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
48 #define streq(a,b) (strcmp (a, b) == 0)
51 static void v850_print_operand_address (FILE *, rtx
);
53 /* Names of the various data areas used on the v850. */
54 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
55 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
57 /* Track the current data area set by the data area pragma (which
58 can be nested). Tested by check_default_data_area. */
59 data_area_stack_element
* data_area_stack
= NULL
;
61 /* True if we don't need to check any more if the current
62 function is an interrupt handler. */
63 static int v850_interrupt_cache_p
= FALSE
;
65 rtx v850_compare_op0
, v850_compare_op1
;
67 /* Whether current function is an interrupt handler. */
68 static int v850_interrupt_p
= FALSE
;
70 static GTY(()) section
* rosdata_section
;
71 static GTY(()) section
* rozdata_section
;
72 static GTY(()) section
* tdata_section
;
73 static GTY(()) section
* zdata_section
;
74 static GTY(()) section
* zbss_section
;
76 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
77 Specify whether to pass the argument by reference. */
80 v850_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
81 enum machine_mode mode
, const_tree type
,
82 bool named ATTRIBUTE_UNUSED
)
84 unsigned HOST_WIDE_INT size
;
87 size
= int_size_in_bytes (type
);
89 size
= GET_MODE_SIZE (mode
);
94 /* Implementing the Varargs Macros. */
97 v850_strict_argument_naming (CUMULATIVE_ARGS
* ca ATTRIBUTE_UNUSED
)
99 return !TARGET_GHS
? true : false;
102 /* Return an RTX to represent where an argument with mode MODE
103 and type TYPE will be passed to a function. If the result
104 is NULL_RTX, the argument will be pushed. */
107 v850_function_arg (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
108 const_tree type
, bool named
)
110 rtx result
= NULL_RTX
;
117 size
= int_size_in_bytes (type
);
119 size
= GET_MODE_SIZE (mode
);
121 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum
->nbytes
= 4 * UNITS_PER_WORD
;
130 if (size
<= UNITS_PER_WORD
&& type
)
131 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
135 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
137 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
140 if (type
== NULL_TREE
141 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
144 switch (cum
->nbytes
/ UNITS_PER_WORD
)
147 result
= gen_rtx_REG (mode
, 6);
150 result
= gen_rtx_REG (mode
, 7);
153 result
= gen_rtx_REG (mode
, 8);
156 result
= gen_rtx_REG (mode
, 9);
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
168 v850_arg_partial_bytes (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
169 tree type
, bool named
)
173 if (TARGET_GHS
&& !named
)
177 size
= int_size_in_bytes (type
);
179 size
= GET_MODE_SIZE (mode
);
185 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
189 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
191 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
194 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
197 if (type
== NULL_TREE
198 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
201 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
204 /* Update the data in CUM to advance over an argument
205 of mode MODE and data type TYPE.
206 (TYPE is null for libcalls where that information may not be available.) */
209 v850_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
210 const_tree type
, bool named ATTRIBUTE_UNUSED
)
212 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
213 ? GET_MODE_SIZE (Pmode
)
215 ? GET_MODE_SIZE (mode
)
216 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
220 /* Return the high and low words of a CONST_DOUBLE */
223 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
225 if (GET_CODE (x
) == CONST_DOUBLE
)
230 switch (GET_MODE (x
))
233 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
234 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
235 *p_high
= t
[1]; /* since v850 is little endian */
236 *p_low
= t
[0]; /* high is second word */
240 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
241 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
247 *p_high
= CONST_DOUBLE_HIGH (x
);
248 *p_low
= CONST_DOUBLE_LOW (x
);
256 fatal_insn ("const_double_split got a bad insn:", x
);
260 /* Return the cost of the rtx R with code CODE. */
263 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
265 if (CONST_OK_FOR_I (value
))
267 else if (CONST_OK_FOR_J (value
))
269 else if (CONST_OK_FOR_K (value
))
276 const_costs (rtx r
, enum rtx_code c
)
278 HOST_WIDE_INT high
, low
;
283 return const_costs_int (INTVAL (r
), 0);
286 const_double_split (r
, &high
, &low
);
287 if (GET_MODE (r
) == SFmode
)
288 return const_costs_int (high
, 1);
290 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
306 v850_rtx_costs (rtx x
,
308 int outer_code ATTRIBUTE_UNUSED
,
309 int * total
, bool speed
)
311 enum rtx_code code
= (enum rtx_code
) codearg
;
320 *total
= COSTS_N_INSNS (const_costs (x
, code
));
327 if (TARGET_V850E
&& !speed
)
335 && ( GET_MODE (x
) == SImode
336 || GET_MODE (x
) == HImode
337 || GET_MODE (x
) == QImode
))
339 if (GET_CODE (XEXP (x
, 1)) == REG
)
341 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
343 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
345 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
354 if (outer_code
== COMPARE
)
363 /* Print operand X using operand code CODE to assembly language output file
367 v850_print_operand (FILE * file
, rtx x
, int code
)
369 HOST_WIDE_INT high
, low
;
374 /* We use 'c' operands with symbols for .vtinherit */
375 if (GET_CODE (x
) == SYMBOL_REF
)
377 output_addr_const(file
, x
);
384 switch ((code
== 'B' || code
== 'C')
385 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
388 if (code
== 'c' || code
== 'C')
389 fprintf (file
, "nz");
391 fprintf (file
, "ne");
394 if (code
== 'c' || code
== 'C')
400 fprintf (file
, "ge");
403 fprintf (file
, "gt");
406 fprintf (file
, "le");
409 fprintf (file
, "lt");
412 fprintf (file
, "nl");
418 fprintf (file
, "nh");
427 case 'F': /* high word of CONST_DOUBLE */
428 switch (GET_CODE (x
))
431 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
435 const_double_split (x
, &high
, &low
);
436 fprintf (file
, "%ld", (long) high
);
443 case 'G': /* low word of CONST_DOUBLE */
444 switch (GET_CODE (x
))
447 fprintf (file
, "%ld", (long) INTVAL (x
));
451 const_double_split (x
, &high
, &low
);
452 fprintf (file
, "%ld", (long) low
);
460 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
463 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
466 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
468 if (GET_CODE (x
) == CONST
)
469 x
= XEXP (XEXP (x
, 0), 0);
471 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
473 if (SYMBOL_REF_ZDA_P (x
))
474 fprintf (file
, "zdaoff");
475 else if (SYMBOL_REF_SDA_P (x
))
476 fprintf (file
, "sdaoff");
477 else if (SYMBOL_REF_TDA_P (x
))
478 fprintf (file
, "tdaoff");
483 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
484 output_addr_const (file
, x
);
487 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
489 if (GET_CODE (x
) == CONST
)
490 x
= XEXP (XEXP (x
, 0), 0);
492 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
494 if (SYMBOL_REF_ZDA_P (x
))
495 fprintf (file
, "r0");
496 else if (SYMBOL_REF_SDA_P (x
))
497 fprintf (file
, "gp");
498 else if (SYMBOL_REF_TDA_P (x
))
499 fprintf (file
, "ep");
503 case 'R': /* 2nd word of a double. */
504 switch (GET_CODE (x
))
507 fprintf (file
, reg_names
[REGNO (x
) + 1]);
510 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
511 v850_print_operand_address (file
, x
);
512 if (GET_CODE (x
) == CONST_INT
)
513 fprintf (file
, "[r0]");
522 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
523 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
530 /* Like an 'S' operand above, but for unsigned loads only. */
531 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
536 case 'W': /* print the instruction suffix */
537 switch (GET_MODE (x
))
542 case QImode
: fputs (".b", file
); break;
543 case HImode
: fputs (".h", file
); break;
544 case SImode
: fputs (".w", file
); break;
545 case SFmode
: fputs (".w", file
); break;
548 case '.': /* register r0 */
549 fputs (reg_names
[0], file
);
551 case 'z': /* reg or zero */
552 if (GET_CODE (x
) == REG
)
553 fputs (reg_names
[REGNO (x
)], file
);
554 else if ((GET_MODE(x
) == SImode
555 || GET_MODE(x
) == DFmode
556 || GET_MODE(x
) == SFmode
)
557 && x
== CONST0_RTX(GET_MODE(x
)))
558 fputs (reg_names
[0], file
);
561 gcc_assert (x
== const0_rtx
);
562 fputs (reg_names
[0], file
);
566 switch (GET_CODE (x
))
569 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
570 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
573 output_address (XEXP (x
, 0));
577 fputs (reg_names
[REGNO (x
)], file
);
580 fputs (reg_names
[subreg_regno (x
)], file
);
587 v850_print_operand_address (file
, x
);
598 /* Output assembly language output for the address ADDR to FILE. */
601 v850_print_operand_address (FILE * file
, rtx addr
)
603 switch (GET_CODE (addr
))
606 fprintf (file
, "0[");
607 v850_print_operand (file
, addr
, 0);
611 if (GET_CODE (XEXP (addr
, 0)) == REG
)
614 fprintf (file
, "lo(");
615 v850_print_operand (file
, XEXP (addr
, 1), 0);
616 fprintf (file
, ")[");
617 v850_print_operand (file
, XEXP (addr
, 0), 0);
622 if (GET_CODE (XEXP (addr
, 0)) == REG
623 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
626 v850_print_operand (file
, XEXP (addr
, 1), 0);
628 v850_print_operand (file
, XEXP (addr
, 0), 0);
633 v850_print_operand (file
, XEXP (addr
, 0), 0);
635 v850_print_operand (file
, XEXP (addr
, 1), 0);
640 const char *off_name
= NULL
;
641 const char *reg_name
= NULL
;
643 if (SYMBOL_REF_ZDA_P (addr
))
648 else if (SYMBOL_REF_SDA_P (addr
))
653 else if (SYMBOL_REF_TDA_P (addr
))
660 fprintf (file
, "%s(", off_name
);
661 output_addr_const (file
, addr
);
663 fprintf (file
, ")[%s]", reg_name
);
667 if (special_symbolref_operand (addr
, VOIDmode
))
669 rtx x
= XEXP (XEXP (addr
, 0), 0);
670 const char *off_name
;
671 const char *reg_name
;
673 if (SYMBOL_REF_ZDA_P (x
))
678 else if (SYMBOL_REF_SDA_P (x
))
683 else if (SYMBOL_REF_TDA_P (x
))
691 fprintf (file
, "%s(", off_name
);
692 output_addr_const (file
, addr
);
693 fprintf (file
, ")[%s]", reg_name
);
696 output_addr_const (file
, addr
);
699 output_addr_const (file
, addr
);
705 v850_print_operand_punct_valid_p (unsigned char code
)
710 /* When assemble_integer is used to emit the offsets for a switch
711 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
712 output_addr_const will normally barf at this, but it is OK to omit
713 the truncate and just emit the difference of the two labels. The
714 .hword directive will automatically handle the truncation for us.
716 Returns true if rtx was handled, false otherwise. */
719 v850_output_addr_const_extra (FILE * file
, rtx x
)
721 if (GET_CODE (x
) != TRUNCATE
)
726 /* We must also handle the case where the switch table was passed a
727 constant value and so has been collapsed. In this case the first
728 label will have been deleted. In such a case it is OK to emit
729 nothing, since the table will not be used.
730 (cf gcc.c-torture/compile/990801-1.c). */
731 if (GET_CODE (x
) == MINUS
732 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
733 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
734 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
737 output_addr_const (file
, x
);
741 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
745 output_move_single (rtx
* operands
)
747 rtx dst
= operands
[0];
748 rtx src
= operands
[1];
755 else if (GET_CODE (src
) == CONST_INT
)
757 HOST_WIDE_INT value
= INTVAL (src
);
759 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
762 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
763 return "movea %1,%.,%0";
765 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
766 return "movhi hi0(%1),%.,%0";
768 /* A random constant. */
769 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
772 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
775 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
777 HOST_WIDE_INT high
, low
;
779 const_double_split (src
, &high
, &low
);
781 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
784 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
785 return "movea %F1,%.,%0";
787 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
788 return "movhi hi0(%F1),%.,%0";
790 /* A random constant. */
791 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
795 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
798 else if (GET_CODE (src
) == MEM
)
799 return "%S1ld%W1 %1,%0";
801 else if (special_symbolref_operand (src
, VOIDmode
))
802 return "movea %O1(%P1),%Q1,%0";
804 else if (GET_CODE (src
) == LABEL_REF
805 || GET_CODE (src
) == SYMBOL_REF
806 || GET_CODE (src
) == CONST
)
808 if (TARGET_V850E
|| TARGET_V850E2_ALL
)
809 return "mov hilo(%1),%0";
811 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
814 else if (GET_CODE (src
) == HIGH
)
815 return "movhi hi(%1),%.,%0";
817 else if (GET_CODE (src
) == LO_SUM
)
819 operands
[2] = XEXP (src
, 0);
820 operands
[3] = XEXP (src
, 1);
821 return "movea lo(%3),%2,%0";
825 else if (GET_CODE (dst
) == MEM
)
828 return "%S0st%W0 %1,%0";
830 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
831 return "%S0st%W0 %.,%0";
833 else if (GET_CODE (src
) == CONST_DOUBLE
834 && CONST0_RTX (GET_MODE (dst
)) == src
)
835 return "%S0st%W0 %.,%0";
838 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
842 /* Generate comparison code. */
844 v850_float_z_comparison_operator (rtx op
, enum machine_mode mode
)
846 enum rtx_code code
= GET_CODE (op
);
848 if (GET_RTX_CLASS (code
) != RTX_COMPARE
849 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
852 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
855 if ((GET_CODE (XEXP (op
, 0)) != REG
856 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
857 || XEXP (op
, 1) != const0_rtx
)
860 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LTmode
)
862 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LEmode
)
864 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_EQmode
)
871 v850_float_nz_comparison_operator (rtx op
, enum machine_mode mode
)
873 enum rtx_code code
= GET_CODE (op
);
875 if (GET_RTX_CLASS (code
) != RTX_COMPARE
876 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
879 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
882 if ((GET_CODE (XEXP (op
, 0)) != REG
883 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
884 || XEXP (op
, 1) != const0_rtx
)
887 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GTmode
)
889 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GEmode
)
891 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_NEmode
)
898 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
900 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
905 return CC_FPU_LEmode
;
907 return CC_FPU_GEmode
;
909 return CC_FPU_LTmode
;
911 return CC_FPU_GTmode
;
913 return CC_FPU_EQmode
;
915 return CC_FPU_NEmode
;
924 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
926 if (GET_MODE(op0
) == DFmode
)
931 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
934 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
937 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
940 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
943 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
946 emit_insn (gen_cmpdf_ne_insn (op0
, op1
));
952 else if (GET_MODE(v850_compare_op0
) == SFmode
)
957 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
960 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
963 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
966 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
969 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
972 emit_insn (gen_cmpsf_ne_insn(op0
, op1
));
983 return v850_select_cc_mode (cond
, op0
, op1
);
987 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
989 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
991 emit_insn (gen_cmpsi_insn (op0
, op1
));
992 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
997 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
998 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
999 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1001 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1005 /* Return maximum offset supported for a short EP memory reference of mode
1006 MODE and signedness UNSIGNEDP. */
1009 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1016 if (TARGET_SMALL_SLD
)
1017 max_offset
= (1 << 4);
1018 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1020 max_offset
= (1 << 4);
1022 max_offset
= (1 << 7);
1026 if (TARGET_SMALL_SLD
)
1027 max_offset
= (1 << 5);
1028 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1030 max_offset
= (1 << 5);
1032 max_offset
= (1 << 8);
1037 max_offset
= (1 << 8);
1047 /* Return true if OP is a valid short EP memory reference */
1050 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1056 /* If we are not using the EP register on a per-function basis
1057 then do not allow this optimization at all. This is to
1058 prevent the use of the SLD/SST instructions which cannot be
1059 guaranteed to work properly due to a hardware bug. */
1063 if (GET_CODE (op
) != MEM
)
1066 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1068 mask
= GET_MODE_SIZE (mode
) - 1;
1070 addr
= XEXP (op
, 0);
1071 if (GET_CODE (addr
) == CONST
)
1072 addr
= XEXP (addr
, 0);
1074 switch (GET_CODE (addr
))
1080 return SYMBOL_REF_TDA_P (addr
);
1083 return REGNO (addr
) == EP_REGNUM
;
1086 op0
= XEXP (addr
, 0);
1087 op1
= XEXP (addr
, 1);
1088 if (GET_CODE (op1
) == CONST_INT
1089 && INTVAL (op1
) < max_offset
1090 && INTVAL (op1
) >= 0
1091 && (INTVAL (op1
) & mask
) == 0)
1093 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1096 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1105 /* Substitute memory references involving a pointer, to use the ep pointer,
1106 taking care to save and preserve the ep. */
1109 substitute_ep_register (rtx first_insn
,
1116 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1121 df_set_regs_ever_live (1, true);
1122 *p_r1
= gen_rtx_REG (Pmode
, 1);
1123 *p_ep
= gen_rtx_REG (Pmode
, 30);
1128 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1129 2 * (uses
- 3), uses
, reg_names
[regno
],
1130 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1131 INSN_UID (first_insn
), INSN_UID (last_insn
));
1133 if (GET_CODE (first_insn
) == NOTE
)
1134 first_insn
= next_nonnote_insn (first_insn
);
1136 last_insn
= next_nonnote_insn (last_insn
);
1137 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1139 if (GET_CODE (insn
) == INSN
)
1141 rtx pattern
= single_set (insn
);
1143 /* Replace the memory references. */
1147 /* Memory operands are signed by default. */
1148 int unsignedp
= FALSE
;
1150 if (GET_CODE (SET_DEST (pattern
)) == MEM
1151 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1154 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1155 p_mem
= &SET_DEST (pattern
);
1157 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1158 p_mem
= &SET_SRC (pattern
);
1160 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1161 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1162 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1164 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1167 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1175 rtx addr
= XEXP (*p_mem
, 0);
1177 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1178 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1180 else if (GET_CODE (addr
) == PLUS
1181 && GET_CODE (XEXP (addr
, 0)) == REG
1182 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1183 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1184 && ((INTVAL (XEXP (addr
, 1)))
1185 < ep_memory_offset (GET_MODE (*p_mem
),
1187 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1188 *p_mem
= change_address (*p_mem
, VOIDmode
,
1189 gen_rtx_PLUS (Pmode
,
1197 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1198 insn
= prev_nonnote_insn (first_insn
);
1199 if (insn
&& GET_CODE (insn
) == INSN
1200 && GET_CODE (PATTERN (insn
)) == SET
1201 && SET_DEST (PATTERN (insn
)) == *p_ep
1202 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1205 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1207 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1208 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1212 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1213 the -mep mode to copy heavily used pointers to ep to use the implicit
1225 regs
[FIRST_PSEUDO_REGISTER
];
1234 /* If not ep mode, just return now. */
1238 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1241 regs
[i
].first_insn
= NULL_RTX
;
1242 regs
[i
].last_insn
= NULL_RTX
;
1245 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1247 switch (GET_CODE (insn
))
1249 /* End of basic block */
1256 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1258 if (max_uses
< regs
[i
].uses
)
1260 max_uses
= regs
[i
].uses
;
1266 substitute_ep_register (regs
[max_regno
].first_insn
,
1267 regs
[max_regno
].last_insn
,
1268 max_uses
, max_regno
, &r1
, &ep
);
1272 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1275 regs
[i
].first_insn
= NULL_RTX
;
1276 regs
[i
].last_insn
= NULL_RTX
;
1284 pattern
= single_set (insn
);
1286 /* See if there are any memory references we can shorten */
1289 rtx src
= SET_SRC (pattern
);
1290 rtx dest
= SET_DEST (pattern
);
1292 /* Memory operands are signed by default. */
1293 int unsignedp
= FALSE
;
1295 /* We might have (SUBREG (MEM)) here, so just get rid of the
1296 subregs to make this code simpler. */
1297 if (GET_CODE (dest
) == SUBREG
1298 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1299 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1300 alter_subreg (&dest
);
1301 if (GET_CODE (src
) == SUBREG
1302 && (GET_CODE (SUBREG_REG (src
)) == MEM
1303 || GET_CODE (SUBREG_REG (src
)) == REG
))
1304 alter_subreg (&src
);
1306 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1309 else if (GET_CODE (dest
) == MEM
)
1312 else if (GET_CODE (src
) == MEM
)
1315 else if (GET_CODE (src
) == SIGN_EXTEND
1316 && GET_CODE (XEXP (src
, 0)) == MEM
)
1317 mem
= XEXP (src
, 0);
1319 else if (GET_CODE (src
) == ZERO_EXTEND
1320 && GET_CODE (XEXP (src
, 0)) == MEM
)
1322 mem
= XEXP (src
, 0);
1328 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1331 else if (!use_ep
&& mem
1332 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1334 rtx addr
= XEXP (mem
, 0);
1338 if (GET_CODE (addr
) == REG
)
1341 regno
= REGNO (addr
);
1344 else if (GET_CODE (addr
) == PLUS
1345 && GET_CODE (XEXP (addr
, 0)) == REG
1346 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1347 && ((INTVAL (XEXP (addr
, 1)))
1348 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1349 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1352 regno
= REGNO (XEXP (addr
, 0));
1361 regs
[regno
].last_insn
= insn
;
1362 if (!regs
[regno
].first_insn
)
1363 regs
[regno
].first_insn
= insn
;
1367 /* Loading up a register in the basic block zaps any savings
1369 if (GET_CODE (dest
) == REG
)
1371 enum machine_mode mode
= GET_MODE (dest
);
1375 regno
= REGNO (dest
);
1376 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1380 /* See if we can use the pointer before this
1385 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1387 if (max_uses
< regs
[i
].uses
)
1389 max_uses
= regs
[i
].uses
;
1395 && max_regno
>= regno
1396 && max_regno
< endregno
)
1398 substitute_ep_register (regs
[max_regno
].first_insn
,
1399 regs
[max_regno
].last_insn
,
1400 max_uses
, max_regno
, &r1
,
1403 /* Since we made a substitution, zap all remembered
1405 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1408 regs
[i
].first_insn
= NULL_RTX
;
1409 regs
[i
].last_insn
= NULL_RTX
;
1414 for (i
= regno
; i
< endregno
; i
++)
1417 regs
[i
].first_insn
= NULL_RTX
;
1418 regs
[i
].last_insn
= NULL_RTX
;
1426 /* # of registers saved by the interrupt handler. */
1427 #define INTERRUPT_FIXED_NUM 5
1429 /* # of bytes for registers saved by the interrupt handler. */
1430 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1432 /* # of words saved for other registers. */
1433 #define INTERRUPT_ALL_SAVE_NUM \
1434 (30 - INTERRUPT_FIXED_NUM)
1436 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1439 compute_register_save_size (long * p_reg_saved
)
1443 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1444 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1447 /* Count the return pointer if we need to save it. */
1448 if (crtl
->profile
&& !call_p
)
1450 df_set_regs_ever_live (LINK_POINTER_REGNUM
, true);
1454 /* Count space for the register saves. */
1455 if (interrupt_handler
)
1457 for (i
= 0; i
<= 31; i
++)
1461 if (df_regs_ever_live_p (i
) || call_p
)
1464 reg_saved
|= 1L << i
;
1468 /* We don't save/restore r0 or the stack pointer */
1470 case STACK_POINTER_REGNUM
:
1473 /* For registers with fixed use, we save them, set them to the
1474 appropriate value, and then restore them.
1475 These registers are handled specially, so don't list them
1476 on the list of registers to save in the prologue. */
1477 case 1: /* temp used to hold ep */
1479 case 10: /* temp used to call interrupt save/restore */
1480 case 11: /* temp used to call interrupt save/restore (long call) */
1481 case EP_REGNUM
: /* ep */
1488 /* Find the first register that needs to be saved. */
1489 for (i
= 0; i
<= 31; i
++)
1490 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1491 || i
== LINK_POINTER_REGNUM
))
1494 /* If it is possible that an out-of-line helper function might be
1495 used to generate the prologue for the current function, then we
1496 need to cover the possibility that such a helper function will
1497 be used, despite the fact that there might be gaps in the list of
1498 registers that need to be saved. To detect this we note that the
1499 helper functions always push at least register r29 (provided
1500 that the function is not an interrupt handler). */
1502 if (TARGET_PROLOG_FUNCTION
1503 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1508 reg_saved
|= 1L << i
;
1513 /* Helper functions save all registers between the starting
1514 register and the last register, regardless of whether they
1515 are actually used by the function or not. */
1516 for (; i
<= 29; i
++)
1519 reg_saved
|= 1L << i
;
1522 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1525 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1530 for (; i
<= 31; i
++)
1531 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1532 || i
== LINK_POINTER_REGNUM
))
1535 reg_saved
|= 1L << i
;
1541 *p_reg_saved
= reg_saved
;
1547 compute_frame_size (int size
, long * p_reg_saved
)
1550 + compute_register_save_size (p_reg_saved
)
1551 + crtl
->outgoing_args_size
);
1555 use_prolog_function (int num_save
, int frame_size
)
1557 int alloc_stack
= (4 * num_save
);
1558 int unalloc_stack
= frame_size
- alloc_stack
;
1559 int save_func_len
, restore_func_len
;
1560 int save_normal_len
, restore_normal_len
;
1562 if (! TARGET_DISABLE_CALLT
)
1563 save_func_len
= restore_func_len
= 2;
1565 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1569 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1570 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1573 /* See if we would have used ep to save the stack. */
1574 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1575 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1577 save_normal_len
= restore_normal_len
= 4 * num_save
;
1579 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1580 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1582 /* Don't bother checking if we don't actually save any space.
1583 This happens for instance if one register is saved and additional
1584 stack space is allocated. */
1585 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1589 expand_prologue (void)
1592 unsigned int size
= get_frame_size ();
1593 unsigned int actual_fsize
;
1594 unsigned int init_stack_alloc
= 0;
1597 unsigned int num_save
;
1599 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1602 actual_fsize
= compute_frame_size (size
, ®_saved
);
1604 /* Save/setup global registers for interrupt functions right now. */
1605 if (interrupt_handler
)
1607 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1608 emit_insn (gen_callt_save_interrupt ());
1610 emit_insn (gen_save_interrupt ());
1612 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1614 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1615 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1618 /* Identify all of the saved registers. */
1620 for (i
= 1; i
< 32; i
++)
1622 if (((1L << i
) & reg_saved
) != 0)
1623 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1626 /* See if we have an insn that allocates stack space and saves the particular
1627 registers we want to. */
1628 save_all
= NULL_RTX
;
1629 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0)
1631 if (use_prolog_function (num_save
, actual_fsize
))
1633 int alloc_stack
= 4 * num_save
;
1636 save_all
= gen_rtx_PARALLEL
1638 rtvec_alloc (num_save
+ 1
1639 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1641 XVECEXP (save_all
, 0, 0)
1642 = gen_rtx_SET (VOIDmode
,
1644 gen_rtx_PLUS (Pmode
,
1646 GEN_INT(-alloc_stack
)));
1647 for (i
= 0; i
< num_save
; i
++)
1650 XVECEXP (save_all
, 0, i
+1)
1651 = gen_rtx_SET (VOIDmode
,
1653 gen_rtx_PLUS (Pmode
,
1659 if (TARGET_DISABLE_CALLT
)
1661 XVECEXP (save_all
, 0, num_save
+ 1)
1662 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1664 if (TARGET_LONG_CALLS
)
1665 XVECEXP (save_all
, 0, num_save
+ 2)
1666 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1669 code
= recog (save_all
, NULL_RTX
, NULL
);
1672 rtx insn
= emit_insn (save_all
);
1673 INSN_CODE (insn
) = code
;
1674 actual_fsize
-= alloc_stack
;
1678 save_all
= NULL_RTX
;
1682 /* If no prolog save function is available, store the registers the old
1683 fashioned way (one by one). */
1686 /* Special case interrupt functions that save all registers for a call. */
1687 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1689 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1690 emit_insn (gen_callt_save_all_interrupt ());
1692 emit_insn (gen_save_all_interrupt ());
1697 /* If the stack is too big, allocate it in chunks so we can do the
1698 register saves. We use the register save size so we use the ep
1700 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1701 init_stack_alloc
= compute_register_save_size (NULL
);
1703 init_stack_alloc
= actual_fsize
;
1705 /* Save registers at the beginning of the stack frame. */
1706 offset
= init_stack_alloc
- 4;
1708 if (init_stack_alloc
)
1709 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1711 GEN_INT (- (signed) init_stack_alloc
)));
1713 /* Save the return pointer first. */
1714 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1716 emit_move_insn (gen_rtx_MEM (SImode
,
1717 plus_constant (stack_pointer_rtx
,
1719 save_regs
[--num_save
]);
1723 for (i
= 0; i
< num_save
; i
++)
1725 emit_move_insn (gen_rtx_MEM (SImode
,
1726 plus_constant (stack_pointer_rtx
,
1734 /* Allocate the rest of the stack that was not allocated above (either it is
1735 > 32K or we just called a function to save the registers and needed more
1737 if (actual_fsize
> init_stack_alloc
)
1739 int diff
= actual_fsize
- init_stack_alloc
;
1740 if (CONST_OK_FOR_K (-diff
))
1741 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1746 rtx reg
= gen_rtx_REG (Pmode
, 12);
1747 emit_move_insn (reg
, GEN_INT (-diff
));
1748 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, reg
));
1752 /* If we need a frame pointer, set it up now. */
1753 if (frame_pointer_needed
)
1754 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1759 expand_epilogue (void)
1762 unsigned int size
= get_frame_size ();
1764 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1765 rtx restore_regs
[32];
1767 unsigned int num_restore
;
1769 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1771 /* Eliminate the initial stack stored by interrupt functions. */
1772 if (interrupt_handler
)
1774 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1775 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1776 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1779 /* Cut off any dynamic stack created. */
1780 if (frame_pointer_needed
)
1781 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1783 /* Identify all of the saved registers. */
1785 for (i
= 1; i
< 32; i
++)
1787 if (((1L << i
) & reg_saved
) != 0)
1788 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1791 /* See if we have an insn that restores the particular registers we
1793 restore_all
= NULL_RTX
;
1795 if (TARGET_PROLOG_FUNCTION
1797 && !interrupt_handler
)
1799 int alloc_stack
= (4 * num_restore
);
1801 /* Don't bother checking if we don't actually save any space. */
1802 if (use_prolog_function (num_restore
, actual_fsize
))
1805 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1806 rtvec_alloc (num_restore
+ 2));
1807 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1808 XVECEXP (restore_all
, 0, 1)
1809 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1810 gen_rtx_PLUS (Pmode
,
1812 GEN_INT (alloc_stack
)));
1814 offset
= alloc_stack
- 4;
1815 for (i
= 0; i
< num_restore
; i
++)
1817 XVECEXP (restore_all
, 0, i
+2)
1818 = gen_rtx_SET (VOIDmode
,
1821 gen_rtx_PLUS (Pmode
,
1827 code
= recog (restore_all
, NULL_RTX
, NULL
);
1833 actual_fsize
-= alloc_stack
;
1836 if (CONST_OK_FOR_K (actual_fsize
))
1837 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1839 GEN_INT (actual_fsize
)));
1842 rtx reg
= gen_rtx_REG (Pmode
, 12);
1843 emit_move_insn (reg
, GEN_INT (actual_fsize
));
1844 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1850 insn
= emit_jump_insn (restore_all
);
1851 INSN_CODE (insn
) = code
;
1855 restore_all
= NULL_RTX
;
1859 /* If no epilogue save function is available, restore the registers the
1860 old fashioned way (one by one). */
1863 unsigned int init_stack_free
;
1865 /* If the stack is large, we need to cut it down in 2 pieces. */
1866 if (interrupt_handler
)
1867 init_stack_free
= 0;
1868 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1869 init_stack_free
= 4 * num_restore
;
1871 init_stack_free
= (signed) actual_fsize
;
1873 /* Deallocate the rest of the stack if it is > 32K. */
1874 if ((unsigned int) actual_fsize
> init_stack_free
)
1878 diff
= actual_fsize
- init_stack_free
;
1880 if (CONST_OK_FOR_K (diff
))
1881 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1886 rtx reg
= gen_rtx_REG (Pmode
, 12);
1887 emit_move_insn (reg
, GEN_INT (diff
));
1888 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1894 /* Special case interrupt functions that save all registers
1896 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1898 if (! TARGET_DISABLE_CALLT
)
1899 emit_insn (gen_callt_restore_all_interrupt ());
1901 emit_insn (gen_restore_all_interrupt ());
1905 /* Restore registers from the beginning of the stack frame. */
1906 int offset
= init_stack_free
- 4;
1908 /* Restore the return pointer first. */
1910 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1912 emit_move_insn (restore_regs
[--num_restore
],
1913 gen_rtx_MEM (SImode
,
1914 plus_constant (stack_pointer_rtx
,
1919 for (i
= 0; i
< num_restore
; i
++)
1921 emit_move_insn (restore_regs
[i
],
1922 gen_rtx_MEM (SImode
,
1923 plus_constant (stack_pointer_rtx
,
1926 emit_use (restore_regs
[i
]);
1930 /* Cut back the remainder of the stack. */
1931 if (init_stack_free
)
1932 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1934 GEN_INT (init_stack_free
)));
1937 /* And return or use reti for interrupt handlers. */
1938 if (interrupt_handler
)
1940 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1941 emit_insn (gen_callt_return_interrupt ());
1943 emit_jump_insn (gen_return_interrupt ());
1945 else if (actual_fsize
)
1946 emit_jump_insn (gen_return_internal ());
1948 emit_jump_insn (gen_return_simple ());
1951 v850_interrupt_cache_p
= FALSE
;
1952 v850_interrupt_p
= FALSE
;
1955 /* Update the condition code from the insn. */
1957 notice_update_cc (rtx body
, rtx insn
)
1959 switch (get_attr_cc (insn
))
1962 /* Insn does not affect CC at all. */
1966 /* Insn does not change CC, but the 0'th operand has been changed. */
1967 if (cc_status
.value1
!= 0
1968 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
1969 cc_status
.value1
= 0;
1973 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1974 V,C is in an unusable state. */
1976 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
1977 cc_status
.value1
= recog_data
.operand
[0];
1981 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1982 C is in an unusable state. */
1984 cc_status
.flags
|= CC_NO_CARRY
;
1985 cc_status
.value1
= recog_data
.operand
[0];
1989 /* The insn is a compare instruction. */
1991 cc_status
.value1
= SET_SRC (body
);
1995 /* Insn doesn't leave CC in a usable state. */
2004 /* Retrieve the data area that has been chosen for the given decl. */
2007 v850_get_data_area (tree decl
)
2009 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2010 return DATA_AREA_SDA
;
2012 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2013 return DATA_AREA_TDA
;
2015 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2016 return DATA_AREA_ZDA
;
2018 return DATA_AREA_NORMAL
;
2021 /* Store the indicated data area in the decl's attributes. */
2024 v850_set_data_area (tree decl
, v850_data_area data_area
)
2030 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2031 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2032 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2037 DECL_ATTRIBUTES (decl
) = tree_cons
2038 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2041 /* Handle an "interrupt" attribute; arguments as in
2042 struct attribute_spec.handler. */
2044 v850_handle_interrupt_attribute (tree
* node
,
2046 tree args ATTRIBUTE_UNUSED
,
2047 int flags ATTRIBUTE_UNUSED
,
2048 bool * no_add_attrs
)
2050 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2052 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2054 *no_add_attrs
= true;
2060 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2061 struct attribute_spec.handler. */
2063 v850_handle_data_area_attribute (tree
* node
,
2065 tree args ATTRIBUTE_UNUSED
,
2066 int flags ATTRIBUTE_UNUSED
,
2067 bool * no_add_attrs
)
2069 v850_data_area data_area
;
2070 v850_data_area area
;
2073 /* Implement data area attribute. */
2074 if (is_attribute_p ("sda", name
))
2075 data_area
= DATA_AREA_SDA
;
2076 else if (is_attribute_p ("tda", name
))
2077 data_area
= DATA_AREA_TDA
;
2078 else if (is_attribute_p ("zda", name
))
2079 data_area
= DATA_AREA_ZDA
;
2083 switch (TREE_CODE (decl
))
2086 if (current_function_decl
!= NULL_TREE
)
2088 error_at (DECL_SOURCE_LOCATION (decl
),
2089 "data area attributes cannot be specified for "
2091 *no_add_attrs
= true;
2097 area
= v850_get_data_area (decl
);
2098 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2100 error ("data area of %q+D conflicts with previous declaration",
2102 *no_add_attrs
= true;
2114 /* Return nonzero if FUNC is an interrupt function as specified
2115 by the "interrupt" attribute. */
2118 v850_interrupt_function_p (tree func
)
2123 if (v850_interrupt_cache_p
)
2124 return v850_interrupt_p
;
2126 if (TREE_CODE (func
) != FUNCTION_DECL
)
2129 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2135 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2136 ret
= a
!= NULL_TREE
;
2139 /* Its not safe to trust global variables until after function inlining has
2141 if (reload_completed
| reload_in_progress
)
2142 v850_interrupt_p
= ret
;
2149 v850_encode_data_area (tree decl
, rtx symbol
)
2153 /* Map explicit sections into the appropriate attribute */
2154 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2156 if (DECL_SECTION_NAME (decl
))
2158 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2160 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2161 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2163 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2164 v850_set_data_area (decl
, DATA_AREA_SDA
);
2166 else if (streq (name
, ".tdata"))
2167 v850_set_data_area (decl
, DATA_AREA_TDA
);
2170 /* If no attribute, support -m{zda,sda,tda}=n */
2173 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2177 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2178 v850_set_data_area (decl
, DATA_AREA_TDA
);
2180 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2181 v850_set_data_area (decl
, DATA_AREA_SDA
);
2183 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2184 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2187 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2191 flags
= SYMBOL_REF_FLAGS (symbol
);
2192 switch (v850_get_data_area (decl
))
2194 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2195 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2196 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2197 default: gcc_unreachable ();
2199 SYMBOL_REF_FLAGS (symbol
) = flags
;
2203 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2205 default_encode_section_info (decl
, rtl
, first
);
2207 if (TREE_CODE (decl
) == VAR_DECL
2208 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2209 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2212 /* Construct a JR instruction to a routine that will perform the equivalent of
2213 the RTL passed in as an argument. This RTL is a function epilogue that
2214 pops registers off the stack and possibly releases some extra stack space
2215 as well. The code has already verified that the RTL matches these
2219 construct_restore_jr (rtx op
)
2221 int count
= XVECLEN (op
, 0);
2223 unsigned long int mask
;
2224 unsigned long int first
;
2225 unsigned long int last
;
2227 static char buff
[100]; /* XXX */
2231 error ("bogus JR construction: %d", count
);
2235 /* Work out how many bytes to pop off the stack before retrieving
2237 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2238 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2239 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2241 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2243 /* Each pop will remove 4 bytes from the stack.... */
2244 stack_bytes
-= (count
- 2) * 4;
2246 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2247 if (stack_bytes
!= 0)
2249 error ("bad amount of stack space removal: %d", stack_bytes
);
2253 /* Now compute the bit mask of registers to push. */
2255 for (i
= 2; i
< count
; i
++)
2257 rtx vector_element
= XVECEXP (op
, 0, i
);
2259 gcc_assert (GET_CODE (vector_element
) == SET
);
2260 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2261 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2264 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2267 /* Scan for the first register to pop. */
2268 for (first
= 0; first
< 32; first
++)
2270 if (mask
& (1 << first
))
2274 gcc_assert (first
< 32);
2276 /* Discover the last register to pop. */
2277 if (mask
& (1 << LINK_POINTER_REGNUM
))
2279 last
= LINK_POINTER_REGNUM
;
2283 gcc_assert (!stack_bytes
);
2284 gcc_assert (mask
& (1 << 29));
2289 /* Note, it is possible to have gaps in the register mask.
2290 We ignore this here, and generate a JR anyway. We will
2291 be popping more registers than is strictly necessary, but
2292 it does save code space. */
2294 if (TARGET_LONG_CALLS
)
2299 sprintf (name
, "__return_%s", reg_names
[first
]);
2301 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2303 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2309 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2311 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2318 /* Construct a JARL instruction to a routine that will perform the equivalent
2319 of the RTL passed as a parameter. This RTL is a function prologue that
2320 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2321 some stack space as well. The code has already verified that the RTL
2322 matches these requirements. */
2324 construct_save_jarl (rtx op
)
2326 int count
= XVECLEN (op
, 0);
2328 unsigned long int mask
;
2329 unsigned long int first
;
2330 unsigned long int last
;
2332 static char buff
[100]; /* XXX */
2334 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2336 error ("bogus JARL construction: %d", count
);
2341 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2342 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2343 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2344 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2346 /* Work out how many bytes to push onto the stack after storing the
2348 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2350 /* Each push will put 4 bytes from the stack.... */
2351 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2353 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2354 if (stack_bytes
!= 0)
2356 error ("bad amount of stack space removal: %d", stack_bytes
);
2360 /* Now compute the bit mask of registers to push. */
2362 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2364 rtx vector_element
= XVECEXP (op
, 0, i
);
2366 gcc_assert (GET_CODE (vector_element
) == SET
);
2367 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2368 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2371 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2374 /* Scan for the first register to push. */
2375 for (first
= 0; first
< 32; first
++)
2377 if (mask
& (1 << first
))
2381 gcc_assert (first
< 32);
2383 /* Discover the last register to push. */
2384 if (mask
& (1 << LINK_POINTER_REGNUM
))
2386 last
= LINK_POINTER_REGNUM
;
2390 gcc_assert (!stack_bytes
);
2391 gcc_assert (mask
& (1 << 29));
2396 /* Note, it is possible to have gaps in the register mask.
2397 We ignore this here, and generate a JARL anyway. We will
2398 be pushing more registers than is strictly necessary, but
2399 it does save code space. */
2401 if (TARGET_LONG_CALLS
)
2406 sprintf (name
, "__save_%s", reg_names
[first
]);
2408 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2410 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2416 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2418 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2425 extern tree last_assemble_variable_decl
;
2426 extern int size_directive_output
;
2428 /* A version of asm_output_aligned_bss() that copes with the special
2429 data areas of the v850. */
2431 v850_output_aligned_bss (FILE * file
,
2434 unsigned HOST_WIDE_INT size
,
2437 switch (v850_get_data_area (decl
))
2440 switch_to_section (zbss_section
);
2444 switch_to_section (sbss_section
);
2448 switch_to_section (tdata_section
);
2451 switch_to_section (bss_section
);
2455 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2456 #ifdef ASM_DECLARE_OBJECT_NAME
2457 last_assemble_variable_decl
= decl
;
2458 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2460 /* Standard thing is just output label for the object. */
2461 ASM_OUTPUT_LABEL (file
, name
);
2462 #endif /* ASM_DECLARE_OBJECT_NAME */
2463 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2466 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2468 v850_output_common (FILE * file
,
2474 if (decl
== NULL_TREE
)
2476 fprintf (file
, "%s", COMMON_ASM_OP
);
2480 switch (v850_get_data_area (decl
))
2483 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2487 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2491 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2495 fprintf (file
, "%s", COMMON_ASM_OP
);
2500 assemble_name (file
, name
);
2501 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2504 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2506 v850_output_local (FILE * file
,
2512 fprintf (file
, "%s", LOCAL_ASM_OP
);
2513 assemble_name (file
, name
);
2514 fprintf (file
, "\n");
2516 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2519 /* Add data area to the given declaration if a ghs data area pragma is
2520 currently in effect (#pragma ghs startXXX/endXXX). */
2522 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2525 && data_area_stack
->data_area
2526 && current_function_decl
== NULL_TREE
2527 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2528 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2529 v850_set_data_area (decl
, data_area_stack
->data_area
);
2531 /* Initialize the default names of the v850 specific sections,
2532 if this has not been done before. */
2534 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2536 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2537 = build_string (sizeof (".sdata")-1, ".sdata");
2539 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2540 = build_string (sizeof (".rosdata")-1, ".rosdata");
2542 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2543 = build_string (sizeof (".tdata")-1, ".tdata");
2545 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2546 = build_string (sizeof (".zdata")-1, ".zdata");
2548 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2549 = build_string (sizeof (".rozdata")-1, ".rozdata");
2552 if (current_function_decl
== NULL_TREE
2553 && (TREE_CODE (decl
) == VAR_DECL
2554 || TREE_CODE (decl
) == CONST_DECL
2555 || TREE_CODE (decl
) == FUNCTION_DECL
)
2556 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2557 && !DECL_SECTION_NAME (decl
))
2559 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2560 tree chosen_section
;
2562 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2563 kind
= GHS_SECTION_KIND_TEXT
;
2566 /* First choose a section kind based on the data area of the decl. */
2567 switch (v850_get_data_area (decl
))
2573 kind
= ((TREE_READONLY (decl
))
2574 ? GHS_SECTION_KIND_ROSDATA
2575 : GHS_SECTION_KIND_SDATA
);
2579 kind
= GHS_SECTION_KIND_TDATA
;
2583 kind
= ((TREE_READONLY (decl
))
2584 ? GHS_SECTION_KIND_ROZDATA
2585 : GHS_SECTION_KIND_ZDATA
);
2588 case DATA_AREA_NORMAL
: /* default data area */
2589 if (TREE_READONLY (decl
))
2590 kind
= GHS_SECTION_KIND_RODATA
;
2591 else if (DECL_INITIAL (decl
))
2592 kind
= GHS_SECTION_KIND_DATA
;
2594 kind
= GHS_SECTION_KIND_BSS
;
2598 /* Now, if the section kind has been explicitly renamed,
2599 then attach a section attribute. */
2600 chosen_section
= GHS_current_section_names
[(int) kind
];
2602 /* Otherwise, if this kind of section needs an explicit section
2603 attribute, then also attach one. */
2604 if (chosen_section
== NULL
)
2605 chosen_section
= GHS_default_section_names
[(int) kind
];
2609 /* Only set the section name if specified by a pragma, because
2610 otherwise it will force those variables to get allocated storage
2611 in this module, rather than by the linker. */
2612 DECL_SECTION_NAME (decl
) = chosen_section
;
2617 /* Construct a DISPOSE instruction that is the equivalent of
2618 the given RTX. We have already verified that this should
2622 construct_dispose_instruction (rtx op
)
2624 int count
= XVECLEN (op
, 0);
2626 unsigned long int mask
;
2628 static char buff
[ 100 ]; /* XXX */
2633 error ("bogus DISPOSE construction: %d", count
);
2637 /* Work out how many bytes to pop off the
2638 stack before retrieving registers. */
2639 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2640 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2641 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2643 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2645 /* Each pop will remove 4 bytes from the stack.... */
2646 stack_bytes
-= (count
- 2) * 4;
2648 /* Make sure that the amount we are popping
2649 will fit into the DISPOSE instruction. */
2650 if (stack_bytes
> 128)
2652 error ("too much stack space to dispose of: %d", stack_bytes
);
2656 /* Now compute the bit mask of registers to push. */
2659 for (i
= 2; i
< count
; i
++)
2661 rtx vector_element
= XVECEXP (op
, 0, i
);
2663 gcc_assert (GET_CODE (vector_element
) == SET
);
2664 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2665 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2668 if (REGNO (SET_DEST (vector_element
)) == 2)
2671 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2674 if (! TARGET_DISABLE_CALLT
2675 && (use_callt
|| stack_bytes
== 0))
2679 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2684 for (i
= 20; i
< 32; i
++)
2685 if (mask
& (1 << i
))
2689 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2691 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2692 i
, (mask
& (1 << 31)) ? "31c" : "29");
2697 static char regs
[100]; /* XXX */
2700 /* Generate the DISPOSE instruction. Note we could just issue the
2701 bit mask as a number as the assembler can cope with this, but for
2702 the sake of our readers we turn it into a textual description. */
2706 for (i
= 20; i
< 32; i
++)
2708 if (mask
& (1 << i
))
2713 strcat (regs
, ", ");
2718 strcat (regs
, reg_names
[ first
]);
2720 for (i
++; i
< 32; i
++)
2721 if ((mask
& (1 << i
)) == 0)
2726 strcat (regs
, " - ");
2727 strcat (regs
, reg_names
[ i
- 1 ] );
2732 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2738 /* Construct a PREPARE instruction that is the equivalent of
2739 the given RTL. We have already verified that this should
2743 construct_prepare_instruction (rtx op
)
2747 unsigned long int mask
;
2749 static char buff
[ 100 ]; /* XXX */
2752 if (XVECLEN (op
, 0) <= 1)
2754 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2758 /* Work out how many bytes to push onto
2759 the stack after storing the registers. */
2760 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2761 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2762 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2764 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2767 /* Make sure that the amount we are popping
2768 will fit into the DISPOSE instruction. */
2769 if (stack_bytes
< -128)
2771 error ("too much stack space to prepare: %d", stack_bytes
);
2775 /* Now compute the bit mask of registers to push. */
2778 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2780 rtx vector_element
= XVECEXP (op
, 0, i
);
2782 if (GET_CODE (vector_element
) == CLOBBER
)
2785 gcc_assert (GET_CODE (vector_element
) == SET
);
2786 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2787 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2790 if (REGNO (SET_SRC (vector_element
)) == 2)
2793 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2797 stack_bytes
+= count
* 4;
2799 if ((! TARGET_DISABLE_CALLT
)
2800 && (use_callt
|| stack_bytes
== 0))
2804 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2808 for (i
= 20; i
< 32; i
++)
2809 if (mask
& (1 << i
))
2813 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2815 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2816 i
, (mask
& (1 << 31)) ? "31c" : "29");
2820 static char regs
[100]; /* XXX */
2824 /* Generate the PREPARE instruction. Note we could just issue the
2825 bit mask as a number as the assembler can cope with this, but for
2826 the sake of our readers we turn it into a textual description. */
2830 for (i
= 20; i
< 32; i
++)
2832 if (mask
& (1 << i
))
2837 strcat (regs
, ", ");
2842 strcat (regs
, reg_names
[ first
]);
2844 for (i
++; i
< 32; i
++)
2845 if ((mask
& (1 << i
)) == 0)
2850 strcat (regs
, " - ");
2851 strcat (regs
, reg_names
[ i
- 1 ] );
2856 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2862 /* Return an RTX indicating where the return address to the
2863 calling function can be found. */
2866 v850_return_addr (int count
)
2871 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2874 /* Implement TARGET_ASM_INIT_SECTIONS. */
2877 v850_asm_init_sections (void)
2880 = get_unnamed_section (0, output_section_asm_op
,
2881 "\t.section .rosdata,\"a\"");
2884 = get_unnamed_section (0, output_section_asm_op
,
2885 "\t.section .rozdata,\"a\"");
2888 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2889 "\t.section .tdata,\"aw\"");
2892 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2893 "\t.section .zdata,\"aw\"");
2896 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2897 output_section_asm_op
,
2898 "\t.section .zbss,\"aw\"");
2902 v850_select_section (tree exp
,
2903 int reloc ATTRIBUTE_UNUSED
,
2904 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2906 if (TREE_CODE (exp
) == VAR_DECL
)
2909 if (!TREE_READONLY (exp
)
2910 || TREE_SIDE_EFFECTS (exp
)
2911 || !DECL_INITIAL (exp
)
2912 || (DECL_INITIAL (exp
) != error_mark_node
2913 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2918 switch (v850_get_data_area (exp
))
2921 return is_const
? rozdata_section
: zdata_section
;
2924 return tdata_section
;
2927 return is_const
? rosdata_section
: sdata_section
;
2930 return is_const
? readonly_data_section
: data_section
;
2933 return readonly_data_section
;
2936 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2939 v850_function_value_regno_p (const unsigned int regno
)
2941 return (regno
== 10);
2944 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2947 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2949 /* Return values > 8 bytes in length in memory. */
2950 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
2953 /* Worker function for TARGET_FUNCTION_VALUE. */
2956 v850_function_value (const_tree valtype
,
2957 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2958 bool outgoing ATTRIBUTE_UNUSED
)
2960 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
2964 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2967 v850_setup_incoming_varargs (CUMULATIVE_ARGS
*ca
,
2968 enum machine_mode mode ATTRIBUTE_UNUSED
,
2969 tree type ATTRIBUTE_UNUSED
,
2970 int *pretend_arg_size ATTRIBUTE_UNUSED
,
2971 int second_time ATTRIBUTE_UNUSED
)
2973 ca
->anonymous_args
= (!TARGET_GHS
? 1 : 0);
2976 /* Worker function for TARGET_CAN_ELIMINATE. */
2979 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
2981 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
2984 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2986 If TARGET_APP_REGS is not defined then add r2 and r5 to
2987 the pool of fixed registers. See PR 14505. */
2990 v850_conditional_register_usage (void)
2992 if (TARGET_APP_REGS
)
2994 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
2995 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE: the trailing two
   .long slots are patched by v850_trampoline_init with the static chain
   and target address.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3013 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3016 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3018 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3020 emit_block_move (m_tramp
, assemble_trampoline_template (),
3021 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3023 mem
= adjust_address (m_tramp
, SImode
, 16);
3024 emit_move_insn (mem
, chain_value
);
3025 mem
= adjust_address (m_tramp
, SImode
, 20);
3026 emit_move_insn (mem
, fnaddr
);
3030 v850_issue_rate (void)
3032 return (TARGET_V850E2_ALL
? 2 : 1);
3035 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3038 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3040 return (GET_CODE (x
) == CONST_DOUBLE
3041 || !(GET_CODE (x
) == CONST
3042 && GET_CODE (XEXP (x
, 0)) == PLUS
3043 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3044 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3045 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3049 v850_memory_move_cost (enum machine_mode mode
,
3050 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3053 switch (GET_MODE_SIZE (mode
))
3063 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3067 /* V850 specific attributes. */
3069 static const struct attribute_spec v850_attribute_table
[] =
3071 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3072 affects_type_identity } */
3073 { "interrupt_handler", 0, 0, true, false, false,
3074 v850_handle_interrupt_attribute
, false },
3075 { "interrupt", 0, 0, true, false, false,
3076 v850_handle_interrupt_attribute
, false },
3077 { "sda", 0, 0, true, false, false,
3078 v850_handle_data_area_attribute
, false },
3079 { "tda", 0, 0, true, false, false,
3080 v850_handle_data_area_attribute
, false },
3081 { "zda", 0, 0, true, false, false,
3082 v850_handle_data_area_attribute
, false },
3083 { NULL
, 0, 0, false, false, false, NULL
, false }
/* Initialize the GCC target structure.  Each pair below overrides one
   default hook in TARGET_INITIALIZER with the V850 implementation.  */

/* Costs and assembler output.  */
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and section placement.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* RTL costs and machine-dependent passes.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Function values and calling conventions.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame layout, registers and trampolines.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3182 struct gcc_target targetm
= TARGET_INITIALIZER
;
3184 #include "gt-v850.h"