1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
41 #include "diagnostic-core.h"
44 #include "target-def.h"
47 /* This is used by GOTaddr2picreg to uniquely identify
   the labels it emits.  NOTE(review): the continuation of this comment
   is missing from this copy of the file -- confirm wording against the
   upstream mn10300.c before relying on it.  */
49 int mn10300_unspec_int_label_counter
;
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52    names are not prefixed by underscores, to tell whether to prefix a
53    label with a plus sign or not, so that the assembler can tell
54    symbol names from register names. */
55 int mn10300_protect_label
;
57 /* The selected processor (set via -mam33/-mam33-2/-mam34 handling in
   mn10300_handle_option; defaults to PROCESSOR_DEFAULT).  */
58 enum processor_type mn10300_processor
= PROCESSOR_DEFAULT
;
60 /* Raw string value of -mtune=, recorded by mn10300_handle_option and
   parsed later in mn10300_option_override.  NULL when -mtune= was not
   given.  */
61 static const char * mn10300_tune_string
= NULL
;
63 /* Selected processor type for tuning, derived from
   mn10300_tune_string in mn10300_option_override.  */
64 enum processor_type mn10300_tune_cpu
= PROCESSOR_DEFAULT
;
66 /* The size of the callee register save area.  Right now we save everything
67    on entry since it costs us nothing in code size.  It does cost us from a
68    speed standpoint, so we want to optimize this sooner or later.  */
/* Four bytes for each of the individually-savable callee-saved registers
   (hard regs 2, 3, 6 and 7 -- presumably d2, d3, a2, a3, matching the
   store order listed in mn10300_gen_multiple_store), plus a single
   16-byte slot for the extended registers 14-17, which can only be
   saved and restored as a group.  NOTE(review): this macro evaluates
   df_regs_ever_live_p multiple times and is only valid once the
   dataflow info is up to date.  */
69 #define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
70 + 4 * df_regs_ever_live_p (3) \
71 + 4 * df_regs_ever_live_p (6) \
72 + 4 * df_regs_ever_live_p (7) \
73 + 16 * (df_regs_ever_live_p (14) \
74 || df_regs_ever_live_p (15) \
75 || df_regs_ever_live_p (16) \
76 || df_regs_ever_live_p (17)))
78 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
79 static const struct default_options mn10300_option_optimization_table
[] =
81 { OPT_LEVELS_1_PLUS
, OPT_fomit_frame_pointer
, NULL
, 1 },
82 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
85 /* Implement TARGET_HANDLE_OPTION. */
88 mn10300_handle_option (size_t code
,
89 const char *arg ATTRIBUTE_UNUSED
,
95 mn10300_processor
= value
? PROCESSOR_AM33
: PROCESSOR_MN10300
;
99 mn10300_processor
= (value
101 : MIN (PROCESSOR_AM33
, PROCESSOR_DEFAULT
));
105 mn10300_processor
= (value
? PROCESSOR_AM34
: PROCESSOR_DEFAULT
);
109 mn10300_tune_string
= arg
;
117 /* Implement TARGET_OPTION_OVERRIDE. */
120 mn10300_option_override (void)
123 target_flags
&= ~MASK_MULT_BUG
;
126 /* Disable scheduling for the MN10300 as we do
127 not have timing information available for it. */
128 flag_schedule_insns
= 0;
129 flag_schedule_insns_after_reload
= 0;
131 /* Force enable splitting of wide types, as otherwise it is trivial
132 to run out of registers. Indeed, this works so well that register
133 allocation problems are now more common *without* optimization,
134 when this flag is not enabled by default. */
135 flag_split_wide_types
= 1;
138 if (mn10300_tune_string
)
140 if (strcasecmp (mn10300_tune_string
, "mn10300") == 0)
141 mn10300_tune_cpu
= PROCESSOR_MN10300
;
142 else if (strcasecmp (mn10300_tune_string
, "am33") == 0)
143 mn10300_tune_cpu
= PROCESSOR_AM33
;
144 else if (strcasecmp (mn10300_tune_string
, "am33-2") == 0)
145 mn10300_tune_cpu
= PROCESSOR_AM33_2
;
146 else if (strcasecmp (mn10300_tune_string
, "am34") == 0)
147 mn10300_tune_cpu
= PROCESSOR_AM34
;
149 error ("-mtune= expects mn10300, am33, am33-2, or am34");
154 mn10300_file_start (void)
156 default_file_start ();
159 fprintf (asm_out_file
, "\t.am33_2\n");
160 else if (TARGET_AM33
)
161 fprintf (asm_out_file
, "\t.am33\n");
164 /* Print operand X using operand code CODE to assembly language output file
168 mn10300_print_operand (FILE *file
, rtx x
, int code
)
174 if (GET_MODE (XEXP (x
, 0)) == CC_FLOATmode
)
176 switch (code
== 'b' ? GET_CODE (x
)
177 : reverse_condition_maybe_unordered (GET_CODE (x
)))
180 fprintf (file
, "ne");
183 fprintf (file
, "eq");
186 fprintf (file
, "ge");
189 fprintf (file
, "gt");
192 fprintf (file
, "le");
195 fprintf (file
, "lt");
198 fprintf (file
, "lge");
201 fprintf (file
, "uo");
204 fprintf (file
, "lg");
207 fprintf (file
, "ue");
210 fprintf (file
, "uge");
213 fprintf (file
, "ug");
216 fprintf (file
, "ule");
219 fprintf (file
, "ul");
226 /* These are normal and reversed branches. */
227 switch (code
== 'b' ? GET_CODE (x
) : reverse_condition (GET_CODE (x
)))
230 fprintf (file
, "ne");
233 fprintf (file
, "eq");
236 fprintf (file
, "ge");
239 fprintf (file
, "gt");
242 fprintf (file
, "le");
245 fprintf (file
, "lt");
248 fprintf (file
, "cc");
251 fprintf (file
, "hi");
254 fprintf (file
, "ls");
257 fprintf (file
, "cs");
264 /* This is used for the operand to a call instruction;
265 if it's a REG, enclose it in parens, else output
266 the operand normally. */
270 mn10300_print_operand (file
, x
, 0);
274 mn10300_print_operand (file
, x
, 0);
278 switch (GET_CODE (x
))
282 output_address (XEXP (x
, 0));
287 fprintf (file
, "fd%d", REGNO (x
) - 18);
295 /* These are the least significant word in a 64bit value. */
297 switch (GET_CODE (x
))
301 output_address (XEXP (x
, 0));
306 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
310 fprintf (file
, "%s", reg_names
[subreg_regno (x
)]);
318 switch (GET_MODE (x
))
321 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
322 REAL_VALUE_TO_TARGET_DOUBLE (rv
, val
);
323 fprintf (file
, "0x%lx", val
[0]);
326 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
327 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
[0]);
328 fprintf (file
, "0x%lx", val
[0]);
332 mn10300_print_operand_address (file
,
333 GEN_INT (CONST_DOUBLE_LOW (x
)));
344 split_double (x
, &low
, &high
);
345 fprintf (file
, "%ld", (long)INTVAL (low
));
354 /* Similarly, but for the most significant word. */
356 switch (GET_CODE (x
))
360 x
= adjust_address (x
, SImode
, 4);
361 output_address (XEXP (x
, 0));
366 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
370 fprintf (file
, "%s", reg_names
[subreg_regno (x
) + 1]);
378 switch (GET_MODE (x
))
381 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
382 REAL_VALUE_TO_TARGET_DOUBLE (rv
, val
);
383 fprintf (file
, "0x%lx", val
[1]);
389 mn10300_print_operand_address (file
,
390 GEN_INT (CONST_DOUBLE_HIGH (x
)));
401 split_double (x
, &low
, &high
);
402 fprintf (file
, "%ld", (long)INTVAL (high
));
413 if (REG_P (XEXP (x
, 0)))
414 output_address (gen_rtx_PLUS (SImode
, XEXP (x
, 0), const0_rtx
));
416 output_address (XEXP (x
, 0));
421 gcc_assert (INTVAL (x
) >= -128 && INTVAL (x
) <= 255);
422 fprintf (file
, "%d", (int)((~INTVAL (x
)) & 0xff));
426 gcc_assert (INTVAL (x
) >= -128 && INTVAL (x
) <= 255);
427 fprintf (file
, "%d", (int)(INTVAL (x
) & 0xff));
430 /* For shift counts. The hardware ignores the upper bits of
431 any immediate, but the assembler will flag an out of range
432 shift count as an error. So we mask off the high bits
433 of the immediate here. */
437 fprintf (file
, "%d", (int)(INTVAL (x
) & 0x1f));
443 switch (GET_CODE (x
))
447 output_address (XEXP (x
, 0));
456 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
460 fprintf (file
, "%s", reg_names
[subreg_regno (x
)]);
463 /* This will only be single precision.... */
469 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
470 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
471 fprintf (file
, "0x%lx", val
);
481 mn10300_print_operand_address (file
, x
);
490 /* Output assembly language output for the address ADDR to FILE. */
493 mn10300_print_operand_address (FILE *file
, rtx addr
)
495 switch (GET_CODE (addr
))
498 mn10300_print_operand (file
, XEXP (addr
, 0), 0);
503 mn10300_print_operand (file
, XEXP (addr
, 0), 0);
506 mn10300_print_operand (file
, XEXP (addr
, 1), 0);
510 mn10300_print_operand (file
, addr
, 0);
514 rtx base
= XEXP (addr
, 0);
515 rtx index
= XEXP (addr
, 1);
517 if (REG_P (index
) && !REG_OK_FOR_INDEX_P (index
))
523 gcc_assert (REG_P (index
) && REG_OK_FOR_INDEX_P (index
));
525 gcc_assert (REG_OK_FOR_BASE_P (base
));
527 mn10300_print_operand (file
, index
, 0);
529 mn10300_print_operand (file
, base
, 0);
533 output_addr_const (file
, addr
);
536 output_addr_const (file
, addr
);
541 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
543 Used for PIC-specific UNSPECs. */
546 mn10300_asm_output_addr_const_extra (FILE *file
, rtx x
)
548 if (GET_CODE (x
) == UNSPEC
)
552 case UNSPEC_INT_LABEL
:
553 asm_fprintf (file
, ".%LLIL" HOST_WIDE_INT_PRINT_DEC
,
554 INTVAL (XVECEXP (x
, 0, 0)));
557 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
558 output_addr_const (file
, XVECEXP (x
, 0, 0));
561 output_addr_const (file
, XVECEXP (x
, 0, 0));
562 fputs ("@GOT", file
);
565 output_addr_const (file
, XVECEXP (x
, 0, 0));
566 fputs ("@GOTOFF", file
);
569 output_addr_const (file
, XVECEXP (x
, 0, 0));
570 fputs ("@PLT", file
);
572 case UNSPEC_GOTSYM_OFF
:
573 assemble_name (file
, GOT_SYMBOL_NAME
);
575 output_addr_const (file
, XVECEXP (x
, 0, 0));
587 /* Count the number of FP registers that have to be saved. */
589 fp_regs_to_save (void)
596 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
597 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
603 /* Print a set of registers in the format required by "movm" and "ret".
604 Register K is saved if bit K of MASK is set. The data and address
605 registers can be stored individually, but the extended registers cannot.
606 We assume that the mask already takes that into account. For instance,
607 bits 14 to 17 must have the same value. */
610 mn10300_print_reg_list (FILE *file
, int mask
)
618 for (i
= 0; i
< FIRST_EXTENDED_REGNUM
; i
++)
619 if ((mask
& (1 << i
)) != 0)
623 fputs (reg_names
[i
], file
);
627 if ((mask
& 0x3c000) != 0)
629 gcc_assert ((mask
& 0x3c000) == 0x3c000);
632 fputs ("exreg1", file
);
640 mn10300_can_use_return_insn (void)
642 /* size includes the fixed stack space needed for function calls. */
643 int size
= get_frame_size () + crtl
->outgoing_args_size
;
645 /* And space for the return pointer. */
646 size
+= crtl
->outgoing_args_size
? 4 : 0;
648 return (reload_completed
650 && !df_regs_ever_live_p (2)
651 && !df_regs_ever_live_p (3)
652 && !df_regs_ever_live_p (6)
653 && !df_regs_ever_live_p (7)
654 && !df_regs_ever_live_p (14)
655 && !df_regs_ever_live_p (15)
656 && !df_regs_ever_live_p (16)
657 && !df_regs_ever_live_p (17)
658 && fp_regs_to_save () == 0
659 && !frame_pointer_needed
);
662 /* Returns the set of live, callee-saved registers as a bitmask. The
663 callee-saved extended registers cannot be stored individually, so
664 all of them will be included in the mask if any one of them is used. */
667 mn10300_get_live_callee_saved_regs (void)
673 for (i
= 0; i
<= LAST_EXTENDED_REGNUM
; i
++)
674 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
676 if ((mask
& 0x3c000) != 0)
685 RTX_FRAME_RELATED_P (r
) = 1;
689 /* Generate an instruction that pushes several registers onto the stack.
690 Register K will be saved if bit K in MASK is set. The function does
691 nothing if MASK is zero.
693 To be compatible with the "movm" instruction, the lowest-numbered
694 register must be stored in the lowest slot. If MASK is the set
695 { R1,...,RN }, where R1...RN are ordered least first, the generated
696 instruction will have the form:
699 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
700 (set (mem:SI (plus:SI (reg:SI 9)
704 (set (mem:SI (plus:SI (reg:SI 9)
709 mn10300_gen_multiple_store (unsigned int mask
)
711 /* The order in which registers are stored, from SP-4 through SP-N*4. */
712 static const unsigned int store_order
[8] = {
713 /* e2, e3: never saved */
714 FIRST_EXTENDED_REGNUM
+ 4,
715 FIRST_EXTENDED_REGNUM
+ 5,
716 FIRST_EXTENDED_REGNUM
+ 6,
717 FIRST_EXTENDED_REGNUM
+ 7,
718 /* e0, e1, mdrq, mcrh, mcrl, mcvf: never saved. */
719 FIRST_DATA_REGNUM
+ 2,
720 FIRST_DATA_REGNUM
+ 3,
721 FIRST_ADDRESS_REGNUM
+ 2,
722 FIRST_ADDRESS_REGNUM
+ 3,
723 /* d0, d1, a0, a1, mdr, lir, lar: never saved. */
733 for (i
= count
= 0; i
< ARRAY_SIZE(store_order
); ++i
)
735 unsigned regno
= store_order
[i
];
737 if (((mask
>> regno
) & 1) == 0)
741 x
= plus_constant (stack_pointer_rtx
, count
* -4);
742 x
= gen_frame_mem (SImode
, x
);
743 x
= gen_rtx_SET (VOIDmode
, x
, gen_rtx_REG (SImode
, regno
));
746 /* Remove the register from the mask so that... */
747 mask
&= ~(1u << regno
);
750 /* ... we can make sure that we didn't try to use a register
751 not listed in the store order. */
752 gcc_assert (mask
== 0);
754 /* Create the instruction that updates the stack pointer. */
755 x
= plus_constant (stack_pointer_rtx
, count
* -4);
756 x
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
, x
);
759 /* We need one PARALLEL element to update the stack pointer and
760 an additional element for each register that is stored. */
761 x
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (count
+ 1, elts
));
766 mn10300_expand_prologue (void)
770 /* SIZE includes the fixed stack space needed for function calls. */
771 size
= get_frame_size () + crtl
->outgoing_args_size
;
772 size
+= (crtl
->outgoing_args_size
? 4 : 0);
774 /* If we use any of the callee-saved registers, save them now. */
775 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
777 if (TARGET_AM33_2
&& fp_regs_to_save ())
779 int num_regs_to_save
= fp_regs_to_save (), i
;
785 save_sp_partial_merge
,
789 unsigned int strategy_size
= (unsigned)-1, this_strategy_size
;
792 /* We have several different strategies to save FP registers.
793 We can store them using SP offsets, which is beneficial if
794 there are just a few registers to save, or we can use `a0' in
795 post-increment mode (`a0' is the only call-clobbered address
796 register that is never used to pass information to a
797 function). Furthermore, if we don't need a frame pointer, we
798 can merge the two SP adds into a single one, but this isn't
799 always beneficial; sometimes we can just split the two adds
800 so that we don't exceed a 16-bit constant size. The code
801 below will select which strategy to use, so as to generate
802 smallest code. Ties are broken in favor or shorter sequences
803 (in terms of number of instructions). */
805 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
806 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
807 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
808 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
810 /* We add 0 * (S) in two places to promote to the type of S,
811 so that all arms of the conditional have the same type. */
812 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
813 (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
814 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
815 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
817 #define SIZE_FMOV_SP_(S,N) \
818 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
819 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
820 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
821 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
823 /* Consider alternative save_sp_merge only if we don't need the
824 frame pointer and size is nonzero. */
825 if (! frame_pointer_needed
&& size
)
827 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
828 this_strategy_size
= SIZE_ADD_SP (-(size
+ 4 * num_regs_to_save
));
829 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
830 this_strategy_size
+= SIZE_FMOV_SP (size
, num_regs_to_save
);
832 if (this_strategy_size
< strategy_size
)
834 strategy
= save_sp_merge
;
835 strategy_size
= this_strategy_size
;
839 /* Consider alternative save_sp_no_merge unconditionally. */
840 /* Insn: add -4 * num_regs_to_save, sp. */
841 this_strategy_size
= SIZE_ADD_SP (-4 * num_regs_to_save
);
842 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
843 this_strategy_size
+= SIZE_FMOV_SP (0, num_regs_to_save
);
846 /* Insn: add -size, sp. */
847 this_strategy_size
+= SIZE_ADD_SP (-size
);
850 if (this_strategy_size
< strategy_size
)
852 strategy
= save_sp_no_merge
;
853 strategy_size
= this_strategy_size
;
856 /* Consider alternative save_sp_partial_merge only if we don't
857 need a frame pointer and size is reasonably large. */
858 if (! frame_pointer_needed
&& size
+ 4 * num_regs_to_save
> 128)
860 /* Insn: add -128, sp. */
861 this_strategy_size
= SIZE_ADD_SP (-128);
862 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
863 this_strategy_size
+= SIZE_FMOV_SP (128 - 4 * num_regs_to_save
,
867 /* Insn: add 128-size, sp. */
868 this_strategy_size
+= SIZE_ADD_SP (128 - size
);
871 if (this_strategy_size
< strategy_size
)
873 strategy
= save_sp_partial_merge
;
874 strategy_size
= this_strategy_size
;
878 /* Consider alternative save_a0_merge only if we don't need a
879 frame pointer, size is nonzero and the user hasn't
880 changed the calling conventions of a0. */
881 if (! frame_pointer_needed
&& size
882 && call_really_used_regs
[FIRST_ADDRESS_REGNUM
]
883 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
])
885 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
886 this_strategy_size
= SIZE_ADD_SP (-(size
+ 4 * num_regs_to_save
));
887 /* Insn: mov sp, a0. */
888 this_strategy_size
++;
891 /* Insn: add size, a0. */
892 this_strategy_size
+= SIZE_ADD_AX (size
);
894 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
895 this_strategy_size
+= 3 * num_regs_to_save
;
897 if (this_strategy_size
< strategy_size
)
899 strategy
= save_a0_merge
;
900 strategy_size
= this_strategy_size
;
904 /* Consider alternative save_a0_no_merge if the user hasn't
905 changed the calling conventions of a0. */
906 if (call_really_used_regs
[FIRST_ADDRESS_REGNUM
]
907 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
])
909 /* Insn: add -4 * num_regs_to_save, sp. */
910 this_strategy_size
= SIZE_ADD_SP (-4 * num_regs_to_save
);
911 /* Insn: mov sp, a0. */
912 this_strategy_size
++;
913 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
914 this_strategy_size
+= 3 * num_regs_to_save
;
917 /* Insn: add -size, sp. */
918 this_strategy_size
+= SIZE_ADD_SP (-size
);
921 if (this_strategy_size
< strategy_size
)
923 strategy
= save_a0_no_merge
;
924 strategy_size
= this_strategy_size
;
928 /* Emit the initial SP add, common to all strategies. */
931 case save_sp_no_merge
:
932 case save_a0_no_merge
:
933 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
935 GEN_INT (-4 * num_regs_to_save
))));
939 case save_sp_partial_merge
:
940 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
943 xsize
= 128 - 4 * num_regs_to_save
;
949 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
951 GEN_INT (-(size
+ 4 * num_regs_to_save
)))));
952 /* We'll have to adjust FP register saves according to the
955 /* Since we've already created the stack frame, don't do it
956 again at the end of the function. */
964 /* Now prepare register a0, if we have decided to use it. */
968 case save_sp_no_merge
:
969 case save_sp_partial_merge
:
974 case save_a0_no_merge
:
975 reg
= gen_rtx_REG (SImode
, FIRST_ADDRESS_REGNUM
);
976 F (emit_insn (gen_movsi (reg
, stack_pointer_rtx
)));
978 F (emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (xsize
))));
979 reg
= gen_rtx_POST_INC (SImode
, reg
);
986 /* Now actually save the FP registers. */
987 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
988 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
996 /* If we aren't using `a0', use an SP offset. */
999 addr
= gen_rtx_PLUS (SImode
,
1004 addr
= stack_pointer_rtx
;
1009 F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode
, addr
),
1010 gen_rtx_REG (SFmode
, i
))));
1014 /* Now put the frame pointer into the frame pointer register. */
1015 if (frame_pointer_needed
)
1016 F (emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
1018 /* Allocate stack for this frame. */
1020 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
1024 if (flag_pic
&& df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM
))
1025 emit_insn (gen_GOTaddr2picreg ());
1029 mn10300_expand_epilogue (void)
1033 /* SIZE includes the fixed stack space needed for function calls. */
1034 size
= get_frame_size () + crtl
->outgoing_args_size
;
1035 size
+= (crtl
->outgoing_args_size
? 4 : 0);
1037 if (TARGET_AM33_2
&& fp_regs_to_save ())
1039 int num_regs_to_save
= fp_regs_to_save (), i
;
1042 /* We have several options to restore FP registers. We could
1043 load them from SP offsets, but, if there are enough FP
1044 registers to restore, we win if we use a post-increment
1047 /* If we have a frame pointer, it's the best option, because we
1048 already know it has the value we want. */
1049 if (frame_pointer_needed
)
1050 reg
= gen_rtx_REG (SImode
, FRAME_POINTER_REGNUM
);
1051 /* Otherwise, we may use `a1', since it's call-clobbered and
1052 it's never used for return values. But only do so if it's
1053 smaller than using SP offsets. */
1056 enum { restore_sp_post_adjust
,
1057 restore_sp_pre_adjust
,
1058 restore_sp_partial_adjust
,
1059 restore_a1
} strategy
;
1060 unsigned int this_strategy_size
, strategy_size
= (unsigned)-1;
1062 /* Consider using sp offsets before adjusting sp. */
1063 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1064 this_strategy_size
= SIZE_FMOV_SP (size
, num_regs_to_save
);
1065 /* If size is too large, we'll have to adjust SP with an
1067 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1069 /* Insn: add size + 4 * num_regs_to_save, sp. */
1070 this_strategy_size
+= SIZE_ADD_SP (size
+ 4 * num_regs_to_save
);
1072 /* If we don't have to restore any non-FP registers,
1073 we'll be able to save one byte by using rets. */
1074 if (! REG_SAVE_BYTES
)
1075 this_strategy_size
--;
1077 if (this_strategy_size
< strategy_size
)
1079 strategy
= restore_sp_post_adjust
;
1080 strategy_size
= this_strategy_size
;
1083 /* Consider using sp offsets after adjusting sp. */
1084 /* Insn: add size, sp. */
1085 this_strategy_size
= SIZE_ADD_SP (size
);
1086 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1087 this_strategy_size
+= SIZE_FMOV_SP (0, num_regs_to_save
);
1088 /* We're going to use ret to release the FP registers
1089 save area, so, no savings. */
1091 if (this_strategy_size
< strategy_size
)
1093 strategy
= restore_sp_pre_adjust
;
1094 strategy_size
= this_strategy_size
;
1097 /* Consider using sp offsets after partially adjusting sp.
1098 When size is close to 32Kb, we may be able to adjust SP
1099 with an imm16 add instruction while still using fmov
1101 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1103 /* Insn: add size + 4 * num_regs_to_save
1104 + REG_SAVE_BYTES - 252,sp. */
1105 this_strategy_size
= SIZE_ADD_SP (size
+ 4 * num_regs_to_save
1106 + REG_SAVE_BYTES
- 252);
1107 /* Insn: fmov (##,sp),fs#, fo each fs# to be restored. */
1108 this_strategy_size
+= SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1109 - 4 * num_regs_to_save
,
1111 /* We're going to use ret to release the FP registers
1112 save area, so, no savings. */
1114 if (this_strategy_size
< strategy_size
)
1116 strategy
= restore_sp_partial_adjust
;
1117 strategy_size
= this_strategy_size
;
1121 /* Consider using a1 in post-increment mode, as long as the
1122 user hasn't changed the calling conventions of a1. */
1123 if (call_really_used_regs
[FIRST_ADDRESS_REGNUM
+ 1]
1124 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
+1])
1126 /* Insn: mov sp,a1. */
1127 this_strategy_size
= 1;
1130 /* Insn: add size,a1. */
1131 this_strategy_size
+= SIZE_ADD_AX (size
);
1133 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1134 this_strategy_size
+= 3 * num_regs_to_save
;
1135 /* If size is large enough, we may be able to save a
1137 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1139 /* Insn: mov a1,sp. */
1140 this_strategy_size
+= 2;
1142 /* If we don't have to restore any non-FP registers,
1143 we'll be able to save one byte by using rets. */
1144 if (! REG_SAVE_BYTES
)
1145 this_strategy_size
--;
1147 if (this_strategy_size
< strategy_size
)
1149 strategy
= restore_a1
;
1150 strategy_size
= this_strategy_size
;
1156 case restore_sp_post_adjust
:
1159 case restore_sp_pre_adjust
:
1160 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1166 case restore_sp_partial_adjust
:
1167 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1169 GEN_INT (size
+ 4 * num_regs_to_save
1170 + REG_SAVE_BYTES
- 252)));
1171 size
= 252 - REG_SAVE_BYTES
- 4 * num_regs_to_save
;
1175 reg
= gen_rtx_REG (SImode
, FIRST_ADDRESS_REGNUM
+ 1);
1176 emit_insn (gen_movsi (reg
, stack_pointer_rtx
));
1178 emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
1186 /* Adjust the selected register, if any, for post-increment. */
1188 reg
= gen_rtx_POST_INC (SImode
, reg
);
1190 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
1191 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
1199 /* If we aren't using a post-increment register, use an
1201 addr
= gen_rtx_PLUS (SImode
,
1206 addr
= stack_pointer_rtx
;
1210 emit_insn (gen_movsf (gen_rtx_REG (SFmode
, i
),
1211 gen_rtx_MEM (SFmode
, addr
)));
1214 /* If we were using the restore_a1 strategy and the number of
1215 bytes to be released won't fit in the `ret' byte, copy `a1'
1216 to `sp', to avoid having to use `add' to adjust it. */
1217 if (! frame_pointer_needed
&& reg
&& size
+ REG_SAVE_BYTES
> 255)
1219 emit_move_insn (stack_pointer_rtx
, XEXP (reg
, 0));
1224 /* Maybe cut back the stack, except for the register save area.
1226 If the frame pointer exists, then use the frame pointer to
1229 If the stack size + register save area is more than 255 bytes,
1230 then the stack must be cut back here since the size + register
1231 save size is too big for a ret/retf instruction.
1233 Else leave it alone, it will be cut back as part of the
1234 ret/retf instruction, or there wasn't any stack to begin with.
1236 Under no circumstances should the register save area be
1237 deallocated here, that would leave a window where an interrupt
1238 could occur and trash the register save area. */
1239 if (frame_pointer_needed
)
1241 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1244 else if (size
+ REG_SAVE_BYTES
> 255)
1246 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1252 /* Adjust the stack and restore callee-saved registers, if any. */
1253 if (size
|| df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1254 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1255 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1256 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1257 || frame_pointer_needed
)
1258 emit_jump_insn (gen_return_internal_regs
1259 (GEN_INT (size
+ REG_SAVE_BYTES
)));
1261 emit_jump_insn (gen_return_internal ());
1264 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1265 This function is for MATCH_PARALLEL and so assumes OP is known to be
1266 parallel. If OP is a multiple store, return a mask indicating which
1267 registers it saves. Return 0 otherwise. */
1270 mn10300_store_multiple_operation (rtx op
,
1271 enum machine_mode mode ATTRIBUTE_UNUSED
)
1279 count
= XVECLEN (op
, 0);
1283 /* Check that first instruction has the form (set (sp) (plus A B)) */
1284 elt
= XVECEXP (op
, 0, 0);
1285 if (GET_CODE (elt
) != SET
1286 || (! REG_P (SET_DEST (elt
)))
1287 || REGNO (SET_DEST (elt
)) != STACK_POINTER_REGNUM
1288 || GET_CODE (SET_SRC (elt
)) != PLUS
)
1291 /* Check that A is the stack pointer and B is the expected stack size.
1292 For OP to match, each subsequent instruction should push a word onto
1293 the stack. We therefore expect the first instruction to create
1294 COUNT-1 stack slots. */
1295 elt
= SET_SRC (elt
);
1296 if ((! REG_P (XEXP (elt
, 0)))
1297 || REGNO (XEXP (elt
, 0)) != STACK_POINTER_REGNUM
1298 || (! CONST_INT_P (XEXP (elt
, 1)))
1299 || INTVAL (XEXP (elt
, 1)) != -(count
- 1) * 4)
1303 for (i
= 1; i
< count
; i
++)
1305 /* Check that element i is a (set (mem M) R). */
1306 /* ??? Validate the register order a-la mn10300_gen_multiple_store.
1307 Remember: the ordering is *not* monotonic. */
1308 elt
= XVECEXP (op
, 0, i
);
1309 if (GET_CODE (elt
) != SET
1310 || (! MEM_P (SET_DEST (elt
)))
1311 || (! REG_P (SET_SRC (elt
))))
1314 /* Remember which registers are to be saved. */
1315 last
= REGNO (SET_SRC (elt
));
1316 mask
|= (1 << last
);
1318 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1319 elt
= XEXP (SET_DEST (elt
), 0);
1320 if (GET_CODE (elt
) != PLUS
1321 || (! REG_P (XEXP (elt
, 0)))
1322 || REGNO (XEXP (elt
, 0)) != STACK_POINTER_REGNUM
1323 || (! CONST_INT_P (XEXP (elt
, 1)))
1324 || INTVAL (XEXP (elt
, 1)) != -i
* 4)
1328 /* All or none of the callee-saved extended registers must be in the set. */
1329 if ((mask
& 0x3c000) != 0
1330 && (mask
& 0x3c000) != 0x3c000)
1336 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1339 mn10300_preferred_reload_class (rtx x
, reg_class_t rclass
)
1341 if (x
== stack_pointer_rtx
&& rclass
!= SP_REGS
)
1342 return (TARGET_AM33
? GENERAL_REGS
: ADDRESS_REGS
);
1345 && !HARD_REGISTER_P (x
))
1346 || (GET_CODE (x
) == SUBREG
1347 && REG_P (SUBREG_REG (x
))
1348 && !HARD_REGISTER_P (SUBREG_REG (x
))))
1349 return LIMIT_RELOAD_CLASS (GET_MODE (x
), rclass
);
1354 /* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
1357 mn10300_preferred_output_reload_class (rtx x
, reg_class_t rclass
)
1359 if (x
== stack_pointer_rtx
&& rclass
!= SP_REGS
)
1360 return (TARGET_AM33
? GENERAL_REGS
: ADDRESS_REGS
);
1364 /* Implement TARGET_SECONDARY_RELOAD. */
1367 mn10300_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
1368 enum machine_mode mode
, secondary_reload_info
*sri
)
1370 enum reg_class rclass
= (enum reg_class
) rclass_i
;
1371 enum reg_class xclass
= NO_REGS
;
1372 unsigned int xregno
= INVALID_REGNUM
;
1377 if (xregno
>= FIRST_PSEUDO_REGISTER
)
1378 xregno
= true_regnum (x
);
1379 if (xregno
!= INVALID_REGNUM
)
1380 xclass
= REGNO_REG_CLASS (xregno
);
1385 /* Memory load/stores less than a full word wide can't have an
1386 address or stack pointer destination. They must use a data
1387 register as an intermediate register. */
1388 if (rclass
!= DATA_REGS
1389 && (mode
== QImode
|| mode
== HImode
)
1390 && xclass
== NO_REGS
)
1393 /* We can only move SP to/from an address register. */
1395 && rclass
== SP_REGS
1396 && xclass
!= ADDRESS_REGS
)
1397 return ADDRESS_REGS
;
1399 && xclass
== SP_REGS
1400 && rclass
!= ADDRESS_REGS
1401 && rclass
!= SP_OR_ADDRESS_REGS
)
1402 return ADDRESS_REGS
;
1405 /* We can't directly load sp + const_int into a register;
1406 we must use an address register as an scratch. */
1408 && rclass
!= SP_REGS
1409 && rclass
!= SP_OR_ADDRESS_REGS
1410 && rclass
!= SP_OR_GENERAL_REGS
1411 && GET_CODE (x
) == PLUS
1412 && (XEXP (x
, 0) == stack_pointer_rtx
1413 || XEXP (x
, 1) == stack_pointer_rtx
))
1415 sri
->icode
= CODE_FOR_reload_plus_sp_const
;
1419 /* We can't load/store an FP register from a constant address. */
1421 && (rclass
== FP_REGS
|| xclass
== FP_REGS
)
1422 && (xclass
== NO_REGS
|| rclass
== NO_REGS
))
1426 if (xregno
>= FIRST_PSEUDO_REGISTER
&& xregno
!= INVALID_REGNUM
)
1428 addr
= reg_equiv_mem
[xregno
];
1430 addr
= XEXP (addr
, 0);
1435 if (addr
&& CONSTANT_ADDRESS_P (addr
))
1436 return GENERAL_REGS
;
1439 /* Otherwise assume no secondary reloads are needed. */
1444 mn10300_initial_offset (int from
, int to
)
1446 /* The difference between the argument pointer and the frame pointer
1447 is the size of the callee register save area. */
1448 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
1450 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1451 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1452 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1453 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1454 || fp_regs_to_save ()
1455 || frame_pointer_needed
)
1456 return REG_SAVE_BYTES
1457 + 4 * fp_regs_to_save ();
1462 /* The difference between the argument pointer and the stack pointer is
1463 the sum of the size of this function's frame, the callee register save
1464 area, and the fixed stack space needed for function calls (if any). */
1465 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1467 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1468 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1469 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1470 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1471 || fp_regs_to_save ()
1472 || frame_pointer_needed
)
1473 return (get_frame_size () + REG_SAVE_BYTES
1474 + 4 * fp_regs_to_save ()
1475 + (crtl
->outgoing_args_size
1476 ? crtl
->outgoing_args_size
+ 4 : 0));
1478 return (get_frame_size ()
1479 + (crtl
->outgoing_args_size
1480 ? crtl
->outgoing_args_size
+ 4 : 0));
1483 /* The difference between the frame pointer and stack pointer is the sum
1484 of the size of this function's frame and the fixed stack space needed
1485 for function calls (if any). */
1486 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1487 return (get_frame_size ()
1488 + (crtl
->outgoing_args_size
1489 ? crtl
->outgoing_args_size
+ 4 : 0));
1494 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1497 mn10300_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1499 /* Return values > 8 bytes in length in memory. */
1500 return (int_size_in_bytes (type
) > 8
1501 || int_size_in_bytes (type
) == 0
1502 || TYPE_MODE (type
) == BLKmode
);
1505 /* Flush the argument registers to the stack for a stdarg function;
1506 return the new argument pointer. */
1508 mn10300_builtin_saveregs (void)
1511 tree fntype
= TREE_TYPE (current_function_decl
);
1512 int argadj
= ((!stdarg_p (fntype
))
1513 ? UNITS_PER_WORD
: 0);
1514 alias_set_type set
= get_varargs_alias_set ();
1517 offset
= plus_constant (crtl
->args
.arg_offset_rtx
, argadj
);
1519 offset
= crtl
->args
.arg_offset_rtx
;
1521 mem
= gen_rtx_MEM (SImode
, crtl
->args
.internal_arg_pointer
);
1522 set_mem_alias_set (mem
, set
);
1523 emit_move_insn (mem
, gen_rtx_REG (SImode
, 0));
1525 mem
= gen_rtx_MEM (SImode
,
1526 plus_constant (crtl
->args
.internal_arg_pointer
, 4));
1527 set_mem_alias_set (mem
, set
);
1528 emit_move_insn (mem
, gen_rtx_REG (SImode
, 1));
1530 return copy_to_reg (expand_binop (Pmode
, add_optab
,
1531 crtl
->args
.internal_arg_pointer
,
1532 offset
, 0, 0, OPTAB_LIB_WIDEN
));
1536 mn10300_va_start (tree valist
, rtx nextarg
)
1538 nextarg
= expand_builtin_saveregs ();
1539 std_expand_builtin_va_start (valist
, nextarg
);
1542 /* Return true when a parameter should be passed by reference. */
1545 mn10300_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
1546 enum machine_mode mode
, const_tree type
,
1547 bool named ATTRIBUTE_UNUSED
)
1549 unsigned HOST_WIDE_INT size
;
1552 size
= int_size_in_bytes (type
);
1554 size
= GET_MODE_SIZE (mode
);
1556 return (size
> 8 || size
== 0);
1559 /* Return an RTX to represent where a value with mode MODE will be returned
1560 from a function. If the result is NULL_RTX, the argument is pushed. */
1563 mn10300_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1564 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1566 rtx result
= NULL_RTX
;
1569 /* We only support using 2 data registers as argument registers. */
1572 /* Figure out the size of the object to be passed. */
1573 if (mode
== BLKmode
)
1574 size
= int_size_in_bytes (type
);
1576 size
= GET_MODE_SIZE (mode
);
1578 cum
->nbytes
= (cum
->nbytes
+ 3) & ~3;
1580 /* Don't pass this arg via a register if all the argument registers
1582 if (cum
->nbytes
> nregs
* UNITS_PER_WORD
)
1585 /* Don't pass this arg via a register if it would be split between
1586 registers and memory. */
1587 if (type
== NULL_TREE
1588 && cum
->nbytes
+ size
> nregs
* UNITS_PER_WORD
)
1591 switch (cum
->nbytes
/ UNITS_PER_WORD
)
1594 result
= gen_rtx_REG (mode
, FIRST_ARGUMENT_REGNUM
);
1597 result
= gen_rtx_REG (mode
, FIRST_ARGUMENT_REGNUM
+ 1);
1606 /* Update the data in CUM to advance over an argument
1607 of mode MODE and data type TYPE.
1608 (TYPE is null for libcalls where that information may not be available.) */
1611 mn10300_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1612 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1614 cum
->nbytes
+= (mode
!= BLKmode
1615 ? (GET_MODE_SIZE (mode
) + 3) & ~3
1616 : (int_size_in_bytes (type
) + 3) & ~3);
1619 /* Return the number of bytes of registers to use for an argument passed
1620 partially in registers and partially in memory. */
1623 mn10300_arg_partial_bytes (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1624 tree type
, bool named ATTRIBUTE_UNUSED
)
1628 /* We only support using 2 data registers as argument registers. */
1631 /* Figure out the size of the object to be passed. */
1632 if (mode
== BLKmode
)
1633 size
= int_size_in_bytes (type
);
1635 size
= GET_MODE_SIZE (mode
);
1637 cum
->nbytes
= (cum
->nbytes
+ 3) & ~3;
1639 /* Don't pass this arg via a register if all the argument registers
1641 if (cum
->nbytes
> nregs
* UNITS_PER_WORD
)
1644 if (cum
->nbytes
+ size
<= nregs
* UNITS_PER_WORD
)
1647 /* Don't pass this arg via a register if it would be split between
1648 registers and memory. */
1649 if (type
== NULL_TREE
1650 && cum
->nbytes
+ size
> nregs
* UNITS_PER_WORD
)
1653 return nregs
* UNITS_PER_WORD
- cum
->nbytes
;
1656 /* Return the location of the function's value. This will be either
1657 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1658 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1659 we only return the PARALLEL for outgoing values; we do not want
1660 callers relying on this extra copy. */
1663 mn10300_function_value (const_tree valtype
,
1664 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
1668 enum machine_mode mode
= TYPE_MODE (valtype
);
1670 if (! POINTER_TYPE_P (valtype
))
1671 return gen_rtx_REG (mode
, FIRST_DATA_REGNUM
);
1672 else if (! TARGET_PTR_A0D0
|| ! outgoing
1673 || cfun
->returns_struct
)
1674 return gen_rtx_REG (mode
, FIRST_ADDRESS_REGNUM
);
1676 rv
= gen_rtx_PARALLEL (mode
, rtvec_alloc (2));
1678 = gen_rtx_EXPR_LIST (VOIDmode
,
1679 gen_rtx_REG (mode
, FIRST_ADDRESS_REGNUM
),
1683 = gen_rtx_EXPR_LIST (VOIDmode
,
1684 gen_rtx_REG (mode
, FIRST_DATA_REGNUM
),
1689 /* Implements TARGET_LIBCALL_VALUE. */
1692 mn10300_libcall_value (enum machine_mode mode
,
1693 const_rtx fun ATTRIBUTE_UNUSED
)
1695 return gen_rtx_REG (mode
, FIRST_DATA_REGNUM
);
1698 /* Implements FUNCTION_VALUE_REGNO_P. */
1701 mn10300_function_value_regno_p (const unsigned int regno
)
1703 return (regno
== FIRST_DATA_REGNUM
|| regno
== FIRST_ADDRESS_REGNUM
);
1706 /* Output a compare insn. */
1709 mn10300_output_cmp (rtx operand
, rtx insn
)
1714 /* We can save a byte if we can find a register which has the value
1716 temp
= PREV_INSN (insn
);
1717 while (optimize
&& temp
)
1721 /* We allow the search to go through call insns. We record
1722 the fact that we've past a CALL_INSN and reject matches which
1723 use call clobbered registers. */
1726 || GET_CODE (temp
) == BARRIER
)
1732 if (GET_CODE (temp
) == NOTE
)
1734 temp
= PREV_INSN (temp
);
1738 /* It must be an insn, see if it is a simple set. */
1739 set
= single_set (temp
);
1742 temp
= PREV_INSN (temp
);
1746 /* Are we setting a data register to zero (this does not win for
1749 If it's a call clobbered register, have we past a call?
1751 Make sure the register we find isn't the same as ourself;
1752 the mn10300 can't encode that.
1754 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
1755 so the code to detect calls here isn't doing anything useful. */
1756 if (REG_P (SET_DEST (set
))
1757 && SET_SRC (set
) == CONST0_RTX (GET_MODE (SET_DEST (set
)))
1758 && !reg_set_between_p (SET_DEST (set
), temp
, insn
)
1759 && (REGNO_REG_CLASS (REGNO (SET_DEST (set
)))
1760 == REGNO_REG_CLASS (REGNO (operand
)))
1761 && REGNO_REG_CLASS (REGNO (SET_DEST (set
))) != EXTENDED_REGS
1762 && REGNO (SET_DEST (set
)) != REGNO (operand
)
1764 || ! call_really_used_regs
[REGNO (SET_DEST (set
))]))
1767 xoperands
[0] = operand
;
1768 xoperands
[1] = SET_DEST (set
);
1770 output_asm_insn ("cmp %1,%0", xoperands
);
1774 if (REGNO_REG_CLASS (REGNO (operand
)) == EXTENDED_REGS
1775 && REG_P (SET_DEST (set
))
1776 && SET_SRC (set
) == CONST0_RTX (GET_MODE (SET_DEST (set
)))
1777 && !reg_set_between_p (SET_DEST (set
), temp
, insn
)
1778 && (REGNO_REG_CLASS (REGNO (SET_DEST (set
)))
1779 != REGNO_REG_CLASS (REGNO (operand
)))
1780 && REGNO_REG_CLASS (REGNO (SET_DEST (set
))) == EXTENDED_REGS
1781 && REGNO (SET_DEST (set
)) != REGNO (operand
)
1783 || ! call_really_used_regs
[REGNO (SET_DEST (set
))]))
1786 xoperands
[0] = operand
;
1787 xoperands
[1] = SET_DEST (set
);
1789 output_asm_insn ("cmp %1,%0", xoperands
);
1792 temp
= PREV_INSN (temp
);
1797 /* Return 1 if X contains a symbolic expression. We know these
1798 expressions will have one of a few well defined forms, so
1799 we need only check those forms. */
1802 mn10300_symbolic_operand (rtx op
,
1803 enum machine_mode mode ATTRIBUTE_UNUSED
)
1805 switch (GET_CODE (op
))
1812 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1813 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1814 && CONST_INT_P (XEXP (op
, 1)));
1820 /* Try machine dependent ways of modifying an illegitimate address
1821 to be legitimate. If we find one, return the new valid address.
1822 This macro is used in only one place: `memory_address' in explow.c.
1824 OLDX is the address as it was before break_out_memory_refs was called.
1825 In some cases it is useful to look at this to decide what needs to be done.
1827 Normally it is always safe for this macro to do nothing. It exists to
1828 recognize opportunities to optimize the output.
1830 But on a few ports with segmented architectures and indexed addressing
1831 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1834 mn10300_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1835 enum machine_mode mode ATTRIBUTE_UNUSED
)
1837 if (flag_pic
&& ! mn10300_legitimate_pic_operand_p (x
))
1838 x
= mn10300_legitimize_pic_address (oldx
, NULL_RTX
);
1840 /* Uh-oh. We might have an address for x[n-100000]. This needs
1841 special handling to avoid creating an indexed memory address
1842 with x-100000 as the base. */
1843 if (GET_CODE (x
) == PLUS
1844 && mn10300_symbolic_operand (XEXP (x
, 1), VOIDmode
))
1846 /* Ugly. We modify things here so that the address offset specified
1847 by the index expression is computed first, then added to x to form
1848 the entire address. */
1850 rtx regx1
, regy1
, regy2
, y
;
1852 /* Strip off any CONST. */
1854 if (GET_CODE (y
) == CONST
)
1857 if (GET_CODE (y
) == PLUS
|| GET_CODE (y
) == MINUS
)
1859 regx1
= force_reg (Pmode
, force_operand (XEXP (x
, 0), 0));
1860 regy1
= force_reg (Pmode
, force_operand (XEXP (y
, 0), 0));
1861 regy2
= force_reg (Pmode
, force_operand (XEXP (y
, 1), 0));
1862 regx1
= force_reg (Pmode
,
1863 gen_rtx_fmt_ee (GET_CODE (y
), Pmode
, regx1
,
1865 return force_reg (Pmode
, gen_rtx_PLUS (Pmode
, regx1
, regy1
));
1871 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1872 @GOTOFF in `reg'. */
1875 mn10300_legitimize_pic_address (rtx orig
, rtx reg
)
1879 if (GET_CODE (orig
) == LABEL_REF
1880 || (GET_CODE (orig
) == SYMBOL_REF
1881 && (CONSTANT_POOL_ADDRESS_P (orig
)
1882 || ! MN10300_GLOBAL_P (orig
))))
1885 reg
= gen_reg_rtx (Pmode
);
1887 x
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, orig
), UNSPEC_GOTOFF
);
1888 x
= gen_rtx_CONST (SImode
, x
);
1889 emit_move_insn (reg
, x
);
1891 x
= emit_insn (gen_addsi3 (reg
, reg
, pic_offset_table_rtx
));
1893 else if (GET_CODE (orig
) == SYMBOL_REF
)
1896 reg
= gen_reg_rtx (Pmode
);
1898 x
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, orig
), UNSPEC_GOT
);
1899 x
= gen_rtx_CONST (SImode
, x
);
1900 x
= gen_rtx_PLUS (SImode
, pic_offset_table_rtx
, x
);
1901 x
= gen_const_mem (SImode
, x
);
1903 x
= emit_move_insn (reg
, x
);
1908 set_unique_reg_note (x
, REG_EQUAL
, orig
);
1912 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1913 isn't protected by a PIC unspec; nonzero otherwise. */
1916 mn10300_legitimate_pic_operand_p (rtx x
)
1921 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
)
1924 if (GET_CODE (x
) == UNSPEC
1925 && (XINT (x
, 1) == UNSPEC_PIC
1926 || XINT (x
, 1) == UNSPEC_GOT
1927 || XINT (x
, 1) == UNSPEC_GOTOFF
1928 || XINT (x
, 1) == UNSPEC_PLT
1929 || XINT (x
, 1) == UNSPEC_GOTSYM_OFF
))
1932 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
1933 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
1939 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1940 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x
, i
, j
)))
1943 else if (fmt
[i
] == 'e'
1944 && ! mn10300_legitimate_pic_operand_p (XEXP (x
, i
)))
1951 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1952 legitimate, and FALSE otherwise.
1954 On the mn10300, the value in the address register must be
1955 in the same memory space/segment as the effective address.
1957 This is problematical for reload since it does not understand
1958 that base+index != index+base in a memory reference.
1960 Note it is still possible to use reg+reg addressing modes,
1961 it's just much more difficult. For a discussion of a possible
1962 workaround and solution, see the comments in pa.c before the
1963 function record_unscaled_index_insn_codes. */
1966 mn10300_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1970 if (CONSTANT_ADDRESS_P (x
))
1971 return !flag_pic
|| mn10300_legitimate_pic_operand_p (x
);
1973 if (RTX_OK_FOR_BASE_P (x
, strict
))
1976 if (TARGET_AM33
&& (mode
== SImode
|| mode
== SFmode
|| mode
== HImode
))
1978 if (GET_CODE (x
) == POST_INC
)
1979 return RTX_OK_FOR_BASE_P (XEXP (x
, 0), strict
);
1980 if (GET_CODE (x
) == POST_MODIFY
)
1981 return (RTX_OK_FOR_BASE_P (XEXP (x
, 0), strict
)
1982 && CONSTANT_ADDRESS_P (XEXP (x
, 1)));
1985 if (GET_CODE (x
) != PLUS
)
1989 index
= XEXP (x
, 1);
1995 /* ??? Without AM33 generalized (Ri,Rn) addressing, reg+reg
1996 addressing is hard to satisfy. */
2000 return (REGNO_GENERAL_P (REGNO (base
), strict
)
2001 && REGNO_GENERAL_P (REGNO (index
), strict
));
2004 if (!REGNO_STRICT_OK_FOR_BASE_P (REGNO (base
), strict
))
2007 if (CONST_INT_P (index
))
2008 return IN_RANGE (INTVAL (index
), -1 - 0x7fffffff, 0x7fffffff);
2010 if (CONSTANT_ADDRESS_P (index
))
2011 return !flag_pic
|| mn10300_legitimate_pic_operand_p (index
);
2017 mn10300_regno_in_class_p (unsigned regno
, int rclass
, bool strict
)
2019 if (regno
>= FIRST_PSEUDO_REGISTER
)
2025 regno
= reg_renumber
[regno
];
2027 return TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
);
2031 mn10300_legitimize_reload_address (rtx x
,
2032 enum machine_mode mode ATTRIBUTE_UNUSED
,
2033 int opnum
, int type
,
2034 int ind_levels ATTRIBUTE_UNUSED
)
2036 bool any_change
= false;
2038 /* See above re disabling reg+reg addressing for MN103. */
2042 if (GET_CODE (x
) != PLUS
)
2045 if (XEXP (x
, 0) == stack_pointer_rtx
)
2047 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2048 GENERAL_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2049 opnum
, (enum reload_type
) type
);
2052 if (XEXP (x
, 1) == stack_pointer_rtx
)
2054 push_reload (XEXP (x
, 1), NULL_RTX
, &XEXP (x
, 1), NULL
,
2055 GENERAL_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2056 opnum
, (enum reload_type
) type
);
2060 return any_change
? x
: NULL_RTX
;
2063 /* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
2064 constant. Note that some "constants" aren't valid, such as TLS
2065 symbols and unconverted GOT-based references, so we eliminate
2069 mn10300_legitimate_constant_p (rtx x
)
2071 switch (GET_CODE (x
))
2076 if (GET_CODE (x
) == PLUS
)
2078 if (! CONST_INT_P (XEXP (x
, 1)))
2083 /* Only some unspecs are valid as "constants". */
2084 if (GET_CODE (x
) == UNSPEC
)
2086 switch (XINT (x
, 1))
2088 case UNSPEC_INT_LABEL
:
2099 /* We must have drilled down to a symbol. */
2100 if (! mn10300_symbolic_operand (x
, Pmode
))
2111 /* Undo pic address legitimization for the benefit of debug info. */
2114 mn10300_delegitimize_address (rtx orig_x
)
2116 rtx x
= orig_x
, ret
, addend
= NULL
;
2121 if (GET_CODE (x
) != PLUS
|| GET_MODE (x
) != Pmode
)
2124 if (XEXP (x
, 0) == pic_offset_table_rtx
)
2126 /* With the REG+REG addressing of AM33, var-tracking can re-assemble
2127 some odd-looking "addresses" that were never valid in the first place.
2128 We need to look harder to avoid warnings being emitted. */
2129 else if (GET_CODE (XEXP (x
, 0)) == PLUS
)
2131 rtx x0
= XEXP (x
, 0);
2132 rtx x00
= XEXP (x0
, 0);
2133 rtx x01
= XEXP (x0
, 1);
2135 if (x00
== pic_offset_table_rtx
)
2137 else if (x01
== pic_offset_table_rtx
)
2147 if (GET_CODE (x
) != CONST
)
2150 if (GET_CODE (x
) != UNSPEC
)
2153 ret
= XVECEXP (x
, 0, 0);
2154 if (XINT (x
, 1) == UNSPEC_GOTOFF
)
2156 else if (XINT (x
, 1) == UNSPEC_GOT
)
2161 gcc_assert (GET_CODE (ret
) == SYMBOL_REF
);
2162 if (need_mem
!= MEM_P (orig_x
))
2164 if (need_mem
&& addend
)
2167 ret
= gen_rtx_PLUS (Pmode
, addend
, ret
);
2171 /* For addresses, costs are relative to "MOV (Rm),Rn". For AM33 this is
2172 the 3-byte fully general instruction; for MN103 this is the 2-byte form
2173 with an address register. */
2176 mn10300_address_cost (rtx x
, bool speed
)
2181 switch (GET_CODE (x
))
2186 /* We assume all of these require a 32-bit constant, even though
2187 some symbol and label references can be relaxed. */
2188 return speed
? 1 : 4;
2196 /* Assume any symbolic offset is a 32-bit constant. */
2197 i
= (CONST_INT_P (XEXP (x
, 1)) ? INTVAL (XEXP (x
, 1)) : 0x12345678);
2198 if (IN_RANGE (i
, -128, 127))
2199 return speed
? 0 : 1;
2202 if (IN_RANGE (i
, -0x800000, 0x7fffff))
2208 index
= XEXP (x
, 1);
2209 if (register_operand (index
, SImode
))
2211 /* Attempt to minimize the number of registers in the address.
2212 This is similar to what other ports do. */
2213 if (register_operand (base
, SImode
))
2217 index
= XEXP (x
, 0);
2220 /* Assume any symbolic offset is a 32-bit constant. */
2221 i
= (CONST_INT_P (XEXP (x
, 1)) ? INTVAL (XEXP (x
, 1)) : 0x12345678);
2222 if (IN_RANGE (i
, -128, 127))
2223 return speed
? 0 : 1;
2224 if (IN_RANGE (i
, -32768, 32767))
2225 return speed
? 0 : 2;
2226 return speed
? 2 : 6;
2229 return rtx_cost (x
, MEM
, speed
);
2233 /* Implement the TARGET_REGISTER_MOVE_COST hook.
2235 Recall that the base value of 2 is required by assumptions elsewhere
2236 in the body of the compiler, and that cost 2 is special-cased as an
2237 early exit from reload meaning no work is required. */
2240 mn10300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2241 reg_class_t ifrom
, reg_class_t ito
)
2243 enum reg_class from
= (enum reg_class
) ifrom
;
2244 enum reg_class to
= (enum reg_class
) ito
;
2245 enum reg_class scratch
, test
;
2247 /* Simplify the following code by unifying the fp register classes. */
2248 if (to
== FP_ACC_REGS
)
2250 if (from
== FP_ACC_REGS
)
2253 /* Diagnose invalid moves by costing them as two moves. */
2258 scratch
= (TARGET_AM33
? GENERAL_REGS
: ADDRESS_REGS
);
2259 else if (to
== FP_REGS
&& to
!= from
)
2260 scratch
= GENERAL_REGS
;
2264 if (from
== SP_REGS
)
2265 scratch
= (TARGET_AM33
? GENERAL_REGS
: ADDRESS_REGS
);
2266 else if (from
== FP_REGS
&& to
!= from
)
2267 scratch
= GENERAL_REGS
;
2269 if (scratch
!= NO_REGS
&& !reg_class_subset_p (test
, scratch
))
2270 return (mn10300_register_move_cost (VOIDmode
, from
, scratch
)
2271 + mn10300_register_move_cost (VOIDmode
, scratch
, to
));
2273 /* From here on, all we need consider are legal combinations. */
2277 /* The scale here is bytes * 2. */
2279 if (from
== to
&& (to
== ADDRESS_REGS
|| to
== DATA_REGS
))
2282 if (from
== SP_REGS
)
2283 return (to
== ADDRESS_REGS
? 2 : 6);
2285 /* For MN103, all remaining legal moves are two bytes. */
2290 return (from
== ADDRESS_REGS
? 4 : 6);
2292 if ((from
== ADDRESS_REGS
|| from
== DATA_REGS
)
2293 && (to
== ADDRESS_REGS
|| to
== DATA_REGS
))
2296 if (to
== EXTENDED_REGS
)
2297 return (to
== from
? 6 : 4);
2299 /* What's left are SP_REGS, FP_REGS, or combinations of the above. */
2304 /* The scale here is cycles * 2. */
2308 if (from
== FP_REGS
)
2311 /* All legal moves between integral registers are single cycle. */
2316 /* Implement the TARGET_MEMORY_MOVE_COST hook.
2318 Given lack of the form of the address, this must be speed-relative,
2319 though we should never be less expensive than a size-relative register
2320 move cost above. This is not a problem. */
2323 mn10300_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2324 reg_class_t iclass
, bool in ATTRIBUTE_UNUSED
)
2326 enum reg_class rclass
= (enum reg_class
) iclass
;
2328 if (rclass
== FP_REGS
)
2333 /* Implement the TARGET_RTX_COSTS hook.
2335 Speed-relative costs are relative to COSTS_N_INSNS, which is intended
2336 to represent cycles. Size-relative costs are in bytes. */
2339 mn10300_rtx_costs (rtx x
, int code
, int outer_code
, int *ptotal
, bool speed
)
2341 /* This value is used for SYMBOL_REF etc where we want to pretend
2342 we have a full 32-bit constant. */
2343 HOST_WIDE_INT i
= 0x12345678;
2353 if (outer_code
== SET
)
2355 /* 16-bit integer loads have latency 1, 32-bit loads 2. */
2356 if (IN_RANGE (i
, -32768, 32767))
2357 total
= COSTS_N_INSNS (1);
2359 total
= COSTS_N_INSNS (2);
2363 /* 16-bit integer operands don't affect latency;
2364 24-bit and 32-bit operands add a cycle. */
2365 if (IN_RANGE (i
, -32768, 32767))
2368 total
= COSTS_N_INSNS (1);
2373 if (outer_code
== SET
)
2377 else if (IN_RANGE (i
, -128, 127))
2379 else if (IN_RANGE (i
, -32768, 32767))
2386 /* Reference here is ADD An,Dn, vs ADD imm,Dn. */
2387 if (IN_RANGE (i
, -128, 127))
2389 else if (IN_RANGE (i
, -32768, 32767))
2391 else if (TARGET_AM33
&& IN_RANGE (i
, -0x01000000, 0x00ffffff))
2403 /* We assume all of these require a 32-bit constant, even though
2404 some symbol and label references can be relaxed. */
2408 switch (XINT (x
, 1))
2414 case UNSPEC_GOTSYM_OFF
:
2415 /* The PIC unspecs also resolve to a 32-bit constant. */
2419 /* Assume any non-listed unspec is some sort of arithmetic. */
2420 goto do_arith_costs
;
2424 /* Notice the size difference of INC and INC4. */
2425 if (!speed
&& outer_code
== SET
&& CONST_INT_P (XEXP (x
, 1)))
2427 i
= INTVAL (XEXP (x
, 1));
2428 if (i
== 1 || i
== 4)
2430 total
= 1 + rtx_cost (XEXP (x
, 0), PLUS
, speed
);
2434 goto do_arith_costs
;
2448 total
= (speed
? COSTS_N_INSNS (1) : 2);
2452 /* Notice the size difference of ASL2 and variants. */
2453 if (!speed
&& CONST_INT_P (XEXP (x
, 1)))
2454 switch (INTVAL (XEXP (x
, 1)))
2469 total
= (speed
? COSTS_N_INSNS (1) : 3);
2473 total
= (speed
? COSTS_N_INSNS (3) : 2);
2480 total
= (speed
? COSTS_N_INSNS (39)
2481 /* Include space to load+retrieve MDR. */
2482 : code
== MOD
|| code
== UMOD
? 6 : 4);
2486 total
= mn10300_address_cost (XEXP (x
, 0), speed
);
2488 total
= COSTS_N_INSNS (2 + total
);
2492 /* Probably not implemented. Assume external call. */
2493 total
= (speed
? COSTS_N_INSNS (10) : 7);
2505 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2506 may access it using GOTOFF instead of GOT. */
2509 mn10300_encode_section_info (tree decl
, rtx rtl
, int first ATTRIBUTE_UNUSED
)
2515 symbol
= XEXP (rtl
, 0);
2516 if (GET_CODE (symbol
) != SYMBOL_REF
)
2520 SYMBOL_REF_FLAG (symbol
) = (*targetm
.binds_local_p
) (decl
);
2523 /* Dispatch tables on the mn10300 are extremely expensive in terms of code
2524 and readonly data size. So we crank up the case threshold value to
2525 encourage a series of if/else comparisons to implement many small switch
2526 statements. In theory, this value could be increased much more if we
2527 were solely optimizing for space, but we keep it "reasonable" to avoid
2528 serious code efficiency lossage. */
2531 mn10300_case_values_threshold (void)
2536 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2539 mn10300_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
2541 rtx mem
, disp
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2543 /* This is a strict alignment target, which means that we play
2544 some games to make sure that the locations at which we need
2545 to store <chain> and <disp> wind up at aligned addresses.
2548 0xfc 0xdd mov chain,a1
2550 0xf8 0xed 0x00 btst 0,d1
2554 Note that the two extra insns are effectively nops; they
2555 clobber the flags but do not affect the contents of D0 or D1. */
2557 disp
= expand_binop (SImode
, sub_optab
, fnaddr
,
2558 plus_constant (XEXP (m_tramp
, 0), 11),
2559 NULL_RTX
, 1, OPTAB_DIRECT
);
2561 mem
= adjust_address (m_tramp
, SImode
, 0);
2562 emit_move_insn (mem
, gen_int_mode (0xddfc0028, SImode
));
2563 mem
= adjust_address (m_tramp
, SImode
, 4);
2564 emit_move_insn (mem
, chain_value
);
2565 mem
= adjust_address (m_tramp
, SImode
, 8);
2566 emit_move_insn (mem
, gen_int_mode (0xdc00edf8, SImode
));
2567 mem
= adjust_address (m_tramp
, SImode
, 12);
2568 emit_move_insn (mem
, disp
);
2571 /* Output the assembler code for a C++ thunk function.
2572 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2573 is the decl for the target function. DELTA is an immediate constant
2574 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2575 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2576 additionally added to THIS. Finally jump to the entry point of
2580 mn10300_asm_output_mi_thunk (FILE * file
,
2581 tree thunk_fndecl ATTRIBUTE_UNUSED
,
2582 HOST_WIDE_INT delta
,
2583 HOST_WIDE_INT vcall_offset
,
2588 /* Get the register holding the THIS parameter. Handle the case
2589 where there is a hidden first argument for a returned structure. */
2590 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
2591 _this
= reg_names
[FIRST_ARGUMENT_REGNUM
+ 1];
2593 _this
= reg_names
[FIRST_ARGUMENT_REGNUM
];
2595 fprintf (file
, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START
);
2598 fprintf (file
, "\tadd %d, %s\n", (int) delta
, _this
);
2602 const char * scratch
= reg_names
[FIRST_ADDRESS_REGNUM
+ 1];
2604 fprintf (file
, "\tmov %s, %s\n", _this
, scratch
);
2605 fprintf (file
, "\tmov (%s), %s\n", scratch
, scratch
);
2606 fprintf (file
, "\tadd %d, %s\n", (int) vcall_offset
, scratch
);
2607 fprintf (file
, "\tmov (%s), %s\n", scratch
, scratch
);
2608 fprintf (file
, "\tadd %s, %s\n", scratch
, _this
);
2611 fputs ("\tjmp ", file
);
2612 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
2616 /* Return true if mn10300_output_mi_thunk would be able to output the
2617 assembler code for the thunk function specified by the arguments
2618 it is passed, and false otherwise. */
2621 mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED
,
2622 HOST_WIDE_INT delta ATTRIBUTE_UNUSED
,
2623 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
2624 const_tree function ATTRIBUTE_UNUSED
)
2630 mn10300_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
2632 if (REGNO_REG_CLASS (regno
) == FP_REGS
2633 || REGNO_REG_CLASS (regno
) == FP_ACC_REGS
)
2634 /* Do not store integer values in FP registers. */
2635 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& ((regno
& 1) == 0);
2637 if (((regno
) & 1) == 0 || GET_MODE_SIZE (mode
) == 4)
2640 if (REGNO_REG_CLASS (regno
) == DATA_REGS
2641 || (TARGET_AM33
&& REGNO_REG_CLASS (regno
) == ADDRESS_REGS
)
2642 || REGNO_REG_CLASS (regno
) == EXTENDED_REGS
)
2643 return GET_MODE_SIZE (mode
) <= 4;
2649 mn10300_modes_tieable (enum machine_mode mode1
, enum machine_mode mode2
)
2651 if (GET_MODE_CLASS (mode1
) == MODE_FLOAT
2652 && GET_MODE_CLASS (mode2
) != MODE_FLOAT
)
2655 if (GET_MODE_CLASS (mode2
) == MODE_FLOAT
2656 && GET_MODE_CLASS (mode1
) != MODE_FLOAT
)
2661 || (GET_MODE_SIZE (mode1
) <= 4 && GET_MODE_SIZE (mode2
) <= 4))
2668 mn10300_select_cc_mode (rtx x
)
2670 return (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
) ? CC_FLOATmode
: CCmode
;
2674 is_load_insn (rtx insn
)
2676 if (GET_CODE (PATTERN (insn
)) != SET
)
2679 return MEM_P (SET_SRC (PATTERN (insn
)));
2683 is_store_insn (rtx insn
)
2685 if (GET_CODE (PATTERN (insn
)) != SET
)
2688 return MEM_P (SET_DEST (PATTERN (insn
)));
2691 /* Update scheduling costs for situations that cannot be
2692 described using the attributes and DFA machinery.
2693 DEP is the insn being scheduled.
2694 INSN is the previous insn.
2695 COST is the current cycle cost for DEP. */
2698 mn10300_adjust_sched_cost (rtx insn
, rtx link
, rtx dep
, int cost
)
2700 int timings
= get_attr_timings (insn
);
2705 if (GET_CODE (insn
) == PARALLEL
)
2706 insn
= XVECEXP (insn
, 0, 0);
2708 if (GET_CODE (dep
) == PARALLEL
)
2709 dep
= XVECEXP (dep
, 0, 0);
2711 /* For the AM34 a load instruction that follows a
2712 store instruction incurs an extra cycle of delay. */
2713 if (mn10300_tune_cpu
== PROCESSOR_AM34
2714 && is_load_insn (dep
)
2715 && is_store_insn (insn
))
2718 /* For the AM34 a non-store, non-branch FPU insn that follows
2719 another FPU insn incurs a one cycle throughput increase. */
2720 else if (mn10300_tune_cpu
== PROCESSOR_AM34
2721 && ! is_store_insn (insn
)
2723 && GET_CODE (PATTERN (dep
)) == SET
2724 && GET_CODE (PATTERN (insn
)) == SET
2725 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep
)))) == MODE_FLOAT
2726 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn
)))) == MODE_FLOAT
)
2729 /* Resolve the conflict described in section 1-7-4 of
2730 Chapter 3 of the MN103E Series Instruction Manual
2733 "When the preceeding instruction is a CPU load or
2734 store instruction, a following FPU instruction
2735 cannot be executed until the CPU completes the
2736 latency period even though there are no register
2737 or flag dependencies between them." */
2739 /* Only the AM33-2 (and later) CPUs have FPU instructions. */
2740 if (! TARGET_AM33_2
)
2743 /* If a data dependence already exists then the cost is correct. */
2744 if (REG_NOTE_KIND (link
) == 0)
2747 /* Check that the instruction about to scheduled is an FPU instruction. */
2748 if (GET_CODE (PATTERN (dep
)) != SET
)
2751 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep
)))) != MODE_FLOAT
)
2754 /* Now check to see if the previous instruction is a load or store. */
2755 if (! is_load_insn (insn
) && ! is_store_insn (insn
))
2758 /* XXX: Verify: The text of 1-7-4 implies that the restriction
2759 only applies when an INTEGER load/store preceeds an FPU
2760 instruction, but is this true ? For now we assume that it is. */
2761 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn
)))) != MODE_INT
)
2764 /* Extract the latency value from the timings attribute. */
2765 return timings
< 100 ? (timings
% 10) : (timings
% 100);
2769 mn10300_conditional_register_usage (void)
2775 for (i
= FIRST_EXTENDED_REGNUM
;
2776 i
<= LAST_EXTENDED_REGNUM
; i
++)
2777 fixed_regs
[i
] = call_used_regs
[i
] = 1;
2781 for (i
= FIRST_FP_REGNUM
;
2782 i
<= LAST_FP_REGNUM
; i
++)
2783 fixed_regs
[i
] = call_used_regs
[i
] = 1;
2786 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] =
2787 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2790 /* Worker function for TARGET_MD_ASM_CLOBBERS.
2791 We do this in the mn10300 backend to maintain source compatibility
2792 with the old cc0-based compiler. */
2795 mn10300_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED
,
2796 tree inputs ATTRIBUTE_UNUSED
,
2799 clobbers
= tree_cons (NULL_TREE
, build_string (5, "EPSW"),
2804 /* Initialize the GCC target structure. */
/* Exception handling: only setjmp/longjmp-based unwinding is
   supported on this port.  */
2806 #undef TARGET_EXCEPT_UNWIND_INFO
2807 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
/* Assembler output: emit aligned 16-bit data with .hword.  */
2809 #undef TARGET_ASM_ALIGNED_HI_OP
2810 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
/* Address handling and cost model hooks.  */
2812 #undef TARGET_LEGITIMIZE_ADDRESS
2813 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
2815 #undef TARGET_ADDRESS_COST
2816 #define TARGET_ADDRESS_COST mn10300_address_cost
2817 #undef TARGET_REGISTER_MOVE_COST
2818 #define TARGET_REGISTER_MOVE_COST mn10300_register_move_cost
2819 #undef TARGET_MEMORY_MOVE_COST
2820 #define TARGET_MEMORY_MOVE_COST mn10300_memory_move_cost
2821 #undef TARGET_RTX_COSTS
2822 #define TARGET_RTX_COSTS mn10300_rtx_costs
/* Assembly file framing.  */
2824 #undef TARGET_ASM_FILE_START
2825 #define TARGET_ASM_FILE_START mn10300_file_start
2826 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
2827 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
2829 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
2830 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra
/* Command-line option handling and default target flags.  */
2832 #undef TARGET_DEFAULT_TARGET_FLAGS
2833 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
2834 #undef TARGET_HANDLE_OPTION
2835 #define TARGET_HANDLE_OPTION mn10300_handle_option
2836 #undef TARGET_OPTION_OVERRIDE
2837 #define TARGET_OPTION_OVERRIDE mn10300_option_override
2838 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2839 #define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table
2841 #undef TARGET_ENCODE_SECTION_INFO
2842 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
/* Calling conventions: argument passing, return values, varargs.  */
2844 #undef TARGET_PROMOTE_PROTOTYPES
2845 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2846 #undef TARGET_RETURN_IN_MEMORY
2847 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
2848 #undef TARGET_PASS_BY_REFERENCE
2849 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
2850 #undef TARGET_CALLEE_COPIES
2851 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
2852 #undef TARGET_ARG_PARTIAL_BYTES
2853 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
2854 #undef TARGET_FUNCTION_ARG
2855 #define TARGET_FUNCTION_ARG mn10300_function_arg
2856 #undef TARGET_FUNCTION_ARG_ADVANCE
2857 #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
2859 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
2860 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
2861 #undef TARGET_EXPAND_BUILTIN_VA_START
2862 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
2864 #undef TARGET_CASE_VALUES_THRESHOLD
2865 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
/* Address validity and (de)legitimization.  */
2867 #undef TARGET_LEGITIMATE_ADDRESS_P
2868 #define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
2869 #undef TARGET_DELEGITIMIZE_ADDRESS
2870 #define TARGET_DELEGITIMIZE_ADDRESS mn10300_delegitimize_address
/* Reload / register class preferences.  */
2872 #undef TARGET_PREFERRED_RELOAD_CLASS
2873 #define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
2874 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2875 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS \
2876 mn10300_preferred_output_reload_class
2877 #undef TARGET_SECONDARY_RELOAD
2878 #define TARGET_SECONDARY_RELOAD mn10300_secondary_reload
/* Trampolines for nested functions.  */
2880 #undef TARGET_TRAMPOLINE_INIT
2881 #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
/* Function and libcall return value locations.  */
2883 #undef TARGET_FUNCTION_VALUE
2884 #define TARGET_FUNCTION_VALUE mn10300_function_value
2885 #undef TARGET_LIBCALL_VALUE
2886 #define TARGET_LIBCALL_VALUE mn10300_libcall_value
/* Thunk emission for C++ virtual-call adjustment.  */
2888 #undef TARGET_ASM_OUTPUT_MI_THUNK
2889 #define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
2890 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2891 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk
/* Instruction scheduling cost adjustment.  */
2893 #undef TARGET_SCHED_ADJUST_COST
2894 #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
/* Per-CPU register availability (see mn10300_conditional_register_usage).  */
2896 #undef TARGET_CONDITIONAL_REGISTER_USAGE
2897 #define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
/* Implicit clobbers for inline asm (see mn10300_md_asm_clobbers).  */
2899 #undef TARGET_MD_ASM_CLOBBERS
2900 #define TARGET_MD_ASM_CLOBBERS mn10300_md_asm_clobbers
/* The target hook vector for this backend, instantiated from the
   TARGET_* macro overrides defined above.  */
2902 struct gcc_target targetm
= TARGET_INITIALIZER
;