/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
/* Machine-specific symbol_ref flags.  */
#define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
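
/* The ALIGN1 flag is set on symbols that may be only 1-byte aligned;
   larl_operand below refuses such symbols, since LARL can only form
   even (2-byte aligned) addresses.  */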
static bool s390_assemble_integer (rtx, unsigned int, int);
static void s390_select_rtx_section (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static void s390_encode_section_info (tree, rtx, int);
static bool s390_cannot_force_const_mem (rtx);
static rtx s390_delegitimize_address (rtx);
static bool s390_return_in_memory (tree, tree);
static void s390_init_builtins (void);
static rtx s390_expand_builtin (tree, rtx, rtx,
                                enum machine_mode, int);
static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                  HOST_WIDE_INT, tree);
static enum attr_type s390_safe_attr_type (rtx);

static int s390_adjust_cost (rtx, rtx, rtx, int);
static int s390_adjust_priority (rtx, int);
static int s390_issue_rate (void);
static int s390_use_dfa_pipeline_interface (void);
static int s390_first_cycle_multipass_dfa_lookahead (void);
static bool s390_rtx_costs (rtx, int, int, int *);
static int s390_address_cost (rtx);
static void s390_reorg (void);
static bool s390_valid_pointer_mode (enum machine_mode);
static tree s390_build_builtin_va_list (void);
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef  TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef  TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION  s390_select_rtx_section

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST s390_adjust_cost
#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
#undef  TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
struct gcc_target targetm = TARGET_INITIALIZER;

extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.  */
static int s390_sr_alias_set = 0;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  int pointer;
};
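
/* For example, the assembler operand 160(%r3,%r11) corresponds to
   base %r11, index %r3 and displacement 160; a simple (reg) address
   decomposes to just a base with no index or displacement.  */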
/* Which cpu are we tuning for.  */
enum processor_type s390_tune;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

/* Strings to hold which cpu and instruction set architecture to use.  */
const char *s390_tune_string;		/* for -mtune=<xxx> */
const char *s390_arch_string;		/* for -march=<xxx> */
/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  /* Set, if some of the fprs 8-15 need to be saved (64 bit abi).  */
  int save_fprs_p;

  /* Set if return address needs to be saved.  */
  bool save_return_addr_p;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;
};
static int s390_match_ccmode_set (rtx, enum machine_mode);
static int s390_branch_condition_mask (rtx);
static const char *s390_branch_condition_mnemonic (rtx, int);
static int check_mode (rtx, enum machine_mode *);
static int general_s_operand (rtx, enum machine_mode, int);
static int s390_short_displacement (rtx);
static int s390_decompose_address (rtx, struct s390_address *);
static rtx get_thread_pointer (void);
static rtx legitimize_tls_address (rtx, rtx);
static void print_shift_count_operand (FILE *, rtx);
static const char *get_some_local_dynamic_name (void);
static int get_some_local_dynamic_name_1 (rtx *, void *);
static int reg_used_in_mem_p (int, rtx);
static int addr_generation_dependency_p (rtx, rtx);
static int s390_split_branches (void);
static void find_constant_pool_ref (rtx, rtx *);
static void replace_constant_pool_ref (rtx *, rtx, rtx);
static rtx find_ltrel_base (rtx);
static void replace_ltrel_base (rtx *, rtx);
static void s390_optimize_prolog (bool);
static int find_unused_clobbered_reg (void);
static void s390_frame_info (void);
static rtx save_fpr (rtx, int, int);
static rtx restore_fpr (rtx, int, int);
static rtx save_gprs (rtx, int, int, int);
static rtx restore_gprs (rtx, int, int, int);
static int s390_function_arg_size (enum machine_mode, tree);
static bool s390_function_arg_float (enum machine_mode, tree);
static struct machine_function * s390_init_machine_status (void);
/* Check whether integer displacement is in range.  */
#define DISP_IN_RANGE(d) \
  (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
                           : ((d) >= 0 && (d) <= 4095))
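
/* For example, DISP_IN_RANGE (4000) holds on all targets, while
   DISP_IN_RANGE (-8) and DISP_IN_RANGE (8192) hold only with the
   long-displacement facility (z990 and up), whose instructions carry
   a signed 20-bit rather than an unsigned 12-bit displacement.  */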
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static int
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  if (GET_CODE (set) != SET)
    abort ();

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      abort ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

int
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return 0;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return 0;
      }

  return 1;
}
/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */
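
/* For example, (compare (and x 0x30) (const_int 0x10)) selects two
   bits with mixed zeroes and ones, which a TMxx instruction reports
   as CC1 or CC2 depending on which of the selected bits is the one;
   hence CCT1mode resp. CCT2mode below.  */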
enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, int mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.  */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.  */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      abort ();
    }
}
/* Return nonzero if OP is a valid comparison operator
   for an ALC condition in mode MODE.  */

int
s390_alc_comparison (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (!COMPARISON_P (op))
    return 0;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return 0;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == LTU;

    case CCL2mode:
      return GET_CODE (op) == LEU;

    case CCUmode:
      return GET_CODE (op) == GTU;

    case CCURmode:
      return GET_CODE (op) == LTU;

    case CCSmode:
      return GET_CODE (op) == UNGT;

    case CCSRmode:
      return GET_CODE (op) == UNLT;

    default:
      return 0;
    }
}

/* Return nonzero if OP is a valid comparison operator
   for an SLB condition in mode MODE.  */

int
s390_slb_comparison (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (!COMPARISON_P (op))
    return 0;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return 0;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == GEU;

    case CCL2mode:
      return GET_CODE (op) == GTU;

    case CCUmode:
      return GET_CODE (op) == LEU;

    case CCURmode:
      return GET_CODE (op) == GEU;

    case CCSmode:
      return GET_CODE (op) == LE;

    case CCSRmode:
      return GET_CODE (op) == GE;

    default:
      return 0;
    }
}
/* Return branch condition mask to implement a branch
   specified by CODE.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        default:
          abort ();
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC1;
        case NE:	return CC0 | CC2 | CC3;
        default:
          abort ();
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC2;
        case NE:	return CC0 | CC1 | CC3;
        default:
          abort ();
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC3;
        case NE:	return CC0 | CC1 | CC2;
        default:
          abort ();
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0 | CC2;
        case NE:	return CC1 | CC3;
        default:
          abort ();
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU:	return CC2 | CC3;  /* carry */
        case GEU:	return CC0 | CC1;  /* no carry */
        default:
          abort ();
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU:	return CC0 | CC1;  /* borrow */
        case LEU:	return CC2 | CC3;  /* no borrow */
        default:
          abort ();
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC2;
        case LEU:	return CC0 | CC1;
        case GEU:	return CC0 | CC2;
        default:
          abort ();
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LTU:	return CC2;
        case GTU:	return CC1;
        case LEU:	return CC0 | CC2;
        case GEU:	return CC0 | CC1;
        default:
          abort ();
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1 | CC3;
        case GT:	return CC2;
        case LE:	return CC0 | CC1 | CC3;
        case GE:	return CC0 | CC2;
        default:
          abort ();
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2 | CC3;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2 | CC3;
        default:
          abort ();
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC1 | CC2;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC1 | CC3;
        case UNGT:	return CC2 | CC3;
        case UNLE:	return CC0 | CC1 | CC3;
        case UNGE:	return CC0 | CC2 | CC3;
        case LTGT:	return CC1 | CC2;
        default:
          abort ();
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LT:	return CC2;
        case GT:	return CC1;
        case LE:	return CC0 | CC2;
        case GE:	return CC0 | CC1;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC2 | CC1;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC2 | CC3;
        case UNGT:	return CC1 | CC3;
        case UNLE:	return CC0 | CC2 | CC3;
        case UNGE:	return CC0 | CC1 | CC3;
        case LTGT:	return CC2 | CC1;
        default:
          abort ();
        }
      break;

    default:
      abort ();
    }
}

/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);

  if (inv)
    mask ^= 15;

  if (mask < 1 || mask > 14)
    abort ();

  return mnemonic[mask];
}
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */
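
/* For example, for OP (const_int 0x12340000), MODE HImode and DEF 0,
   the only 16-bit part that differs from zero is 0x1234, which is
   the value returned.  */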
unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  abort ();
}
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */
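
/* Parts are counted from the most significant end, so for
   OP (const_int 0x12340000) in SImode with PART_MODE HImode and
   DEF 0, the non-default part 0x1234 is part number 0.  */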
int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }

  return part == -1 ? -1 : n_parts - 1 - part;
}

/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}
/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;
}

void
override_options (void)
{
  int i;
  static struct pta
    {
      const char *const name;		/* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
                                    | PF_LONG_DISPLACEMENT},
    };

  int const pta_size = ARRAY_SIZE (processor_alias_table);

  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();

  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    s390_arch_string = TARGET_ZARCH? "z900" : "g5";

  for (i = 0; i < pta_size; i++)
    if (! strcmp (s390_arch_string, processor_alias_table[i].name))
      {
        s390_arch = processor_alias_table[i].processor;
        s390_arch_flags = processor_alias_table[i].flags;
        break;
      }
  if (i == pta_size)
    error ("Unknown cpu used in -march=%s.", s390_arch_string);

  /* Determine processor to tune for.  */
  if (!s390_tune_string)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
      s390_tune_string = s390_arch_string;
    }
  else
    {
      for (i = 0; i < pta_size; i++)
        if (! strcmp (s390_tune_string, processor_alias_table[i].name))
          {
            s390_tune = processor_alias_table[i].processor;
            s390_tune_flags = processor_alias_table[i].flags;
            break;
          }
      if (i == pta_size)
        error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
    error ("z/Architecture mode not supported on %s.", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode.");
}
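
/* Note that -march also selects the default tuning target: plain
   -march=z990 both generates for and schedules for the z990, while
   -march=z900 -mtune=z990 restricts the instruction set to the z900
   but tunes the schedule for the z990 pipeline.  */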
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    NO_REGS,   ADDR_REGS
};

/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}
/* Return true if OP a (const_int 0) operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
const0_operand (register rtx op, enum machine_mode mode)
{
  return op == CONST0_RTX (mode);
}

/* Return true if OP is constant.
   OP is the current operation.
   MODE is the current operation mode.  */

int
consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return CONSTANT_P (op);
}

/* Return true if the mode of operand OP matches MODE.
   If MODE is set to VOIDmode, set it to the mode of OP.  */

static int
check_mode (register rtx op, enum machine_mode *mode)
{
  if (*mode == VOIDmode)
    *mode = GET_MODE (op);
  else
    {
      if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
        return 0;
    }
  return 1;
}

/* Return true if OP a valid operand for the LARL instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (register rtx op, enum machine_mode mode)
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return 0;
#if HOST_BITS_PER_WIDE_INT > 32
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
          || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
        return 0;
#endif
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return 1;

  return 0;
}
/* Helper routine to implement s_operand and s_imm_operand.
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (register rtx op, enum machine_mode mode,
                   int allow_immediate)
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants are OK as s-operand if ALLOW_IMMEDIATE
         is true and we are still before reload.  */
      case CONST_INT:
      case CONST_DOUBLE:
        if (!allow_immediate || reload_completed)
          return 0;
        return 1;

      /* Memory operands are OK unless they already use an
         index register.  */
      case MEM:
        if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
          return 1;
        if (!s390_decompose_address (XEXP (op, 0), &addr))
          return 0;
        if (addr.indx)
          return 0;
        /* Do not allow literal pool references unless ALLOW_IMMEDIATE
           is true.  This prevents compares between two literal pool
           entries from being accepted.  */
        if (!allow_immediate
            && addr.base && REGNO (addr.base) == BASE_REGISTER)
          return 0;
        return 1;

      default:
        break;
    }

  return 0;
}

/* Return true if OP is a valid S-type operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_operand (register rtx op, enum machine_mode mode)
{
  return general_s_operand (op, mode, 0);
}

/* Return true if OP is a valid S-type operand or an immediate
   operand that can be addressed as S-type operand by forcing
   it into the literal pool.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_imm_operand (register rtx op, enum machine_mode mode)
{
  return general_s_operand (op, mode, 1);
}

/* Return true if OP a valid shift count operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
shift_count_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset = 0;

  if (! check_mode (op, &mode))
    return 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  Note that reload already checks
     that any register present is an address register, so
     we just check for any register here.  */
  if (GET_CODE (op) == CONST_INT)
    {
      offset = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (op && GET_CODE (op) != REG)
    return 0;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return 0;

  return 1;
}

/* Return true if DISP is a valid short displacement.  */

static int
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return 1;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
    return 0;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return 1;

  return 0;
}
/* Return true if OP is a valid operand for a C constraint.  */
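
/* The letters handled below follow the s390 machine description:
   Q and R are memory operands with short (12-bit) displacement,
   without resp. with an index register; S and T are the analogous
   long-displacement forms; U and W accept addresses with short
   resp. long displacement; Y accepts shift count operands.  */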
int
s390_extra_constraint_str (rtx op, int c, const char * str)
{
  struct s390_address addr;

  if (c != str[0])
    abort ();

  switch (c)
    {
    case 'Q':
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'R':
      if (GET_CODE (op) != MEM)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (XEXP (op, 0), &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (XEXP (op, 0), &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (op, &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'Y':
      return shift_count_operand (op, VOIDmode);

    default:
      return 0;
    }

  return 1;
}
/* Return true if VALUE matches the constraint STR.  */

int
s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
                                int c,
                                const char * str)
{
  enum machine_mode mode, part_mode;
  int def, part;

  if (c != str[0])
    abort ();

  switch (str[0])
    {
    case 'I':
      return (unsigned int)value < 256;

    case 'J':
      return (unsigned int)value < 4096;

    case 'K':
      return value >= -32768 && value < 32768;

    case 'L':
      return (TARGET_LONG_DISPLACEMENT ?
              (value >= -524288 && value <= 524287)
              : (value >= 0 && value <= 4095));

    case 'M':
      return value == 2147483647;

    case 'N':
      part = str[1] - '0';

      switch (str[2])
        {
        case 'H': part_mode = HImode; break;
        case 'Q': part_mode = QImode; break;
        default:  return 0;
        }

      switch (str[3])
        {
        case 'H': mode = HImode; break;
        case 'S': mode = SImode; break;
        case 'D': mode = DImode; break;
        default:  return 0;
        }

      switch (str[4])
        {
        case '0': def = 0;  break;
        case 'F': def = -1; break;
        default:  return 0;
        }

      if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
        return 0;

      if (s390_single_part (GEN_INT (value), mode, part_mode, def) != part)
        return 0;

      break;

    default:
      return 0;
    }

  return 1;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
      if (GET_CODE (XEXP (x, 0)) == MINUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
        *total = 1000;
      else
        *total = 0;
      return true;

    case CONST_INT:
      /* Force_const_mem does not work out of reload, because the
         saveable_obstack is set to reload_obstack, which does not
         live long enough.  Because of this we cannot use force_const_mem
         in addsi3.  This leads to problems with gen_add2_insn with a
         constant greater than a short.  Because of that we give an
         addition of greater constants a cost of 3 (reload1.c 10096).  */
      /* ??? saveable_obstack no longer exists.  */
      if (outer_code == PLUS
          && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
        *total = COSTS_N_INSNS (3);
      else
        *total = 0;
      return true;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case PLUS:
    case AND:
    case IOR:
    case XOR:
    case MINUS:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      if (GET_MODE (XEXP (x, 0)) == DImode)
        *total = COSTS_N_INSNS (40);
      else
        *total = COSTS_N_INSNS (7);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (33);
      return true;

    default:
      return false;
    }
}

/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}

/* Return true if OP is a valid operand for the BRAS instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  register enum rtx_code code = GET_CODE (op);

  /* Allow SYMBOL_REFs.  */
  if (code == SYMBOL_REF)
    return 1;

  /* Allow @PLT stubs.  */
  if (code == CONST
      && GET_CODE (XEXP (op, 0)) == UNSPEC
      && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
    return 1;
  return 0;
}

/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

int
tls_symbolic_operand (register rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */
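
/* A typical candidate is the pattern for LM %r6,%r8,0(%r15):
   (parallel [(set (reg 6) (mem (reg 15)))
              (set (reg 7) (mem (plus (reg 15) (const_int 4))))
              (set (reg 8) (mem (plus (reg 15) (const_int 8))))])  */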
int
load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
           && GET_CODE (XEXP (src_addr, 0)) == REG
           && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return 0;
    }

  return 1;
}

/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
           && GET_CODE (XEXP (dest_addr, 0)) == REG
           && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || REGNO (SET_SRC (elt)) != src_regno + i
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return 0;
    }

  return 1;
}
/* Return true if OP contains a symbol reference */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return 1;
        }

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
        return 1;
    }

  return 0;
}

/* Return true if OP contains a reference to a thread-local symbol.  */

int
tls_symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF)
    return tls_symbolic_operand (op);

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return 1;
        }

      else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
        return 1;
    }

  return 0;
}
/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (register rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_symbolic_move.  */
  return 0;
}

/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_constant_p (register rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Accept immediate LARL operands.  */
  if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
    return 1;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return 0;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return 1;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return 0;
}
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).  */

static bool
s390_cannot_force_const_mem (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
         non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
        return true;
      else
        return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (XEXP (x, 0));
    case PLUS:
    case MINUS:
      return s390_cannot_force_const_mem (XEXP (x, 0))
             || s390_cannot_force_const_mem (XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
        {
        /* Only lt-relative or GOT-relative UNSPECs are OK.  */
        case UNSPEC_LTREL_OFFSET:
        case UNSPEC_GOT:
        case UNSPEC_GOTOFF:
        case UNSPEC_PLTOFF:
        case UNSPEC_TLSGD:
        case UNSPEC_TLSLDM:
        case UNSPEC_NTPOFF:
        case UNSPEC_DTPOFF:
        case UNSPEC_GOTNTPOFF:
        case UNSPEC_INDNTPOFF:
          return false;

        default:
          return true;
        }
      break;

    default:
      abort ();
    }
}
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.  */

int
legitimate_reload_constant_p (register rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return 1;

  /* Accept l(g)hi operands.  */
  if (GET_CODE (op) == CONST_INT
      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
    return 1;

  /* Accept lliXX operands.  */
  if (TARGET_ZARCH
      && s390_single_part (op, DImode, HImode, 0) >= 0)
    return 1;

  /* Accept larl operands.  */
  if (TARGET_CPU_ZARCH
      && larl_operand (op, VOIDmode))
    return 1;

  /* Everything else cannot be handled without reload.  */
  return 0;
}
/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  */

enum reg_class
s390_preferred_reload_class (rtx op, enum reg_class class)
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
         literal pool.  */

      case CONST_DOUBLE:
      case CONST_INT:
        if (legitimate_reload_constant_p (op))
          return class;
        else
          return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
         it is most likely being used as an address, so
         prefer ADDR_REGS.  If 'class' is not a superset
         of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
        if (reg_class_subset_p (ADDR_REGS, class))
          return ADDR_REGS;
        else
          return NO_REGS;

      default:
        break;
    }

  return class;
}

/* Return the register class of a scratch register needed to
   load IN into a register of class CLASS in MODE.

   We need a temporary when loading a PLUS expression which
   is not a legitimate operand of the LOAD ADDRESS instruction.  */

enum reg_class
s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
                                   enum machine_mode mode, rtx in)
{
  if (s390_plus_operand (in, mode))
    return ADDR_REGS;

  return NO_REGS;
}

/* Return the register class of a scratch register needed to
   store a register of class CLASS in MODE into OUT:

   We need a temporary when storing a double-word to a
   non-offsettable memory address.  */

enum reg_class
s390_secondary_output_reload_class (enum reg_class class,
                                    enum machine_mode mode, rtx out)
{
  if ((TARGET_64BIT ? mode == TImode
                    : (mode == DImode || mode == DFmode))
      && reg_classes_intersect_p (GENERAL_REGS, class)
      && GET_CODE (out) == MEM
      && !offsettable_memref_p (out)
      && !s_operand (out, VOIDmode))
    return ADDR_REGS;

  return NO_REGS;
}
/* Return true if OP is a PLUS that is not a legitimate
   operand for the LA instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s390_plus_operand (register rtx op, enum machine_mode mode)
{
  if (!check_mode (op, &mode) || mode != Pmode)
    return FALSE;

  if (GET_CODE (op) != PLUS)
    return FALSE;

  if (legitimate_la_operand_p (op))
    return FALSE;

  return TRUE;
}

/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (register rtx target, register rtx src,
                          register rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
    abort ();

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    {
      /* Otherwise, one of the operands cannot be an address register;
         we reload its value into the scratch register.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
        {
          emit_move_insn (scratch, sum1);
          sum1 = scratch;
        }
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
        {
          emit_move_insn (scratch, sum2);
          sum2 = scratch;
        }

      /* According to the way these invalid addresses are generated
         in reload.c, it should never happen (at least on s390) that
         *neither* of the PLUS components, after find_replacements
         was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
        {
          debug_rtx (src);
          abort ();
        }

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns 0 if ADDR is not a valid memory address, nonzero
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */
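
/* Canonical form means that when both index and base are present,
   they are nested as (plus (plus index base) disp); the variant
   (plus index (plus base disp)) is not recognized here.  */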
static int
s390_decompose_address (register rtx addr, struct s390_address *out)
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  int pointer = FALSE;
  int base_ptr = FALSE;
  int indx_ptr = FALSE;

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;	/* index + base */
              base = op1;
            }
          else
            {
              base = op0;	/* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0);	/* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        return FALSE;
    }

  else
    disp = addr;		/* displacement */


  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        {
          if (XVECLEN (base, 0) != 1 || XINT (base, 1) != UNSPEC_LTREL_BASE)
            return FALSE;
          base = gen_rtx_REG (Pmode, BASE_REGISTER);
        }

      if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
        return FALSE;

      if (REGNO (base) == BASE_REGISTER
          || REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = TRUE;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        {
          if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != UNSPEC_LTREL_BASE)
            return FALSE;
          indx = gen_rtx_REG (Pmode, BASE_REGISTER);
        }

      if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
        return FALSE;

      if (REGNO (indx) == BASE_REGISTER
          || REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = TRUE;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (disp)
    {
      /* Allow integer constant in range.  */
      if (GET_CODE (disp) == CONST_INT)
        {
          /* If the argument pointer is involved, the displacement will change
             later anyway as the argument pointer gets eliminated.  This could
             make a valid displacement invalid, but it is more likely to make
             an invalid displacement valid, because we sometimes access the
             register save area via negative offsets to the arg pointer.
             Thus we don't check the displacement for validity here.  If after
             elimination the displacement turns out to be invalid after all,
             this is fixed up by reload in any case.  */
          if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
            {
              if (!DISP_IN_RANGE (INTVAL (disp)))
                return FALSE;
            }
        }

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == UNSPEC
               && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
                   || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
        {
          if (flag_pic != 1)
            return FALSE;

          pointer = TRUE;
        }

      /* Accept chunkfied literal pool symbol references.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == MINUS
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
        {
          pointer = TRUE;
        }

      /* Likewise if a constant offset is present.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
        {
          pointer = TRUE;
        }

      /* We can convert literal pool addresses to
         displacements by basing them off the base register.  */
      else
        {
          /* In some cases, we can accept an additional
             small constant offset.  Split these off here.  */

          unsigned int offset = 0;

          if (GET_CODE (disp) == CONST
              && GET_CODE (XEXP (disp, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
            {
              offset = INTVAL (XEXP (XEXP (disp, 0), 1));
              disp = XEXP (XEXP (disp, 0), 0);
            }

          /* Now we must have a literal pool address.  */
          if (GET_CODE (disp) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (disp))
            return FALSE;

          /* If we have an offset, make sure it does not
             exceed the size of the constant pool entry.  */
          if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
            return FALSE;

          /* Either base or index must be free to
             hold the base register.  */
          if (base && indx)
            return FALSE;

          /* Convert the address.  */
          if (base)
            indx = gen_rtx_REG (Pmode, BASE_REGISTER);
          else
            base = gen_rtx_REG (Pmode, BASE_REGISTER);

          disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                                 UNSPEC_LTREL_OFFSET);
          disp = gen_rtx_CONST (Pmode, disp);

          if (offset)
            disp = plus_constant (disp, offset);

          pointer = TRUE;
        }
    }

  if (!base && !indx)
    pointer = TRUE;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = disp;
      out->pointer = pointer;
    }

  return TRUE;
}
/* Return nonzero if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

int
legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                      register rtx addr, int strict)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return FALSE;

  if (strict)
    {
      if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
        return FALSE;
      if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
        return FALSE;
    }
  else
    {
      if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
        return FALSE;
      if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
        return FALSE;
    }

  return TRUE;
}

/* Return 1 if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

int
legitimate_la_operand_p (register rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return FALSE;

  if (TARGET_64BIT || addr.pointer)
    return TRUE;

  return FALSE;
}

/* Return 1 if OP is a valid operand for the LA instruction,
   and we prefer to use LA over addition to compute it.  */

int
preferred_la_operand_p (register rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return FALSE;

  if (!TARGET_64BIT && !addr.pointer)
    return FALSE;

  if (addr.pointer)
    return TRUE;

  if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
      || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
    return TRUE;

  return FALSE;
}

/* Emit a forced load-address operation to load SRC into DST.
   This will use the LOAD ADDRESS instruction even in situations
   where legitimate_la_operand_p (SRC) returns false.  */

void
s390_load_address (rtx dst, rtx src)
{
  if (TARGET_64BIT)
    emit_move_insn (dst, src);
  else
    emit_insn (gen_force_la_31 (dst, src));
}
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.  */
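
/* Roughly, with -fpic (small GOT) a global is reached through a
   small x@GOT displacement off the GOT pointer in %r12; with -fPIC
   the GOT slot is located PC-relative via LARL of x@GOTENT on
   z/Architecture, or its offset is loaded from the literal pool
   on 31-bit ESA/390 targets.  */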
2335 legitimize_pic_address (rtx orig
, rtx reg
)
2341 if (GET_CODE (addr
) == LABEL_REF
2342 || (GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (addr
)))
2344 /* This is a local symbol. */
2345 if (TARGET_CPU_ZARCH
&& larl_operand (addr
, VOIDmode
))
2347 /* Access local symbols PC-relative via LARL.
2348 This is the same as in the non-PIC case, so it is
2349 handled automatically ... */
2353 /* Access local symbols relative to the GOT. */
2355 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2357 if (reload_in_progress
|| reload_completed
)
2358 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2360 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTOFF
);
2361 addr
= gen_rtx_CONST (Pmode
, addr
);
2362 addr
= force_const_mem (Pmode
, addr
);
2363 emit_move_insn (temp
, addr
);
2365 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2368 emit_move_insn (reg
, new);
2373 else if (GET_CODE (addr
) == SYMBOL_REF
)
2376 reg
= gen_reg_rtx (Pmode
);
2380 /* Assume GOT offset < 4k. This is handled the same way
2381 in both 31- and 64-bit code (@GOT). */
          if (reload_in_progress || reload_completed)
            regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
          new = gen_rtx_CONST (Pmode, new);
          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else if (TARGET_CPU_ZARCH)
        {
          /* If the GOT offset might be >= 4k, we determine the position
             of the GOT entry via a PC-relative LARL (@GOTENT).  */

          rtx temp = gen_reg_rtx (Pmode);

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
          new = gen_rtx_CONST (Pmode, new);
          emit_move_insn (temp, new);

          new = gen_rtx_MEM (Pmode, temp);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else
        {
          /* If the GOT offset might be >= 4k, we have to load it
             from the literal pool (@GOT).  */

          rtx temp = gen_reg_rtx (Pmode);

          if (reload_in_progress || reload_completed)
            regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
          addr = gen_rtx_CONST (Pmode, addr);
          addr = force_const_mem (Pmode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
    }
  else
    {
      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          if (GET_CODE (addr) == UNSPEC)
            {
              if (XVECLEN (addr, 0) != 1)
                abort ();
              switch (XINT (addr, 1))
                {
                /* If someone moved a GOT-relative UNSPEC
                   out of the literal pool, force them back in.  */
                case UNSPEC_GOTOFF:
                case UNSPEC_PLTOFF:
                  new = force_const_mem (Pmode, orig);
                  break;

                /* @GOT is OK as is if small.  */
                case UNSPEC_GOT:
                  if (flag_pic == 2)
                    new = force_const_mem (Pmode, orig);
                  break;

                /* @GOTENT is OK as is.  */
                case UNSPEC_GOTENT:
                  break;

                /* @PLT is OK as is on 64-bit, must be converted to
                   GOT-relative @PLTOFF on 31-bit.  */
                case UNSPEC_PLT:
                  if (!TARGET_CPU_ZARCH)
                    {
                      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

                      if (reload_in_progress || reload_completed)
                        regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

                      addr = XVECEXP (addr, 0, 0);
                      addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
                                             UNSPEC_PLTOFF);
                      addr = gen_rtx_CONST (Pmode, addr);
                      addr = force_const_mem (Pmode, addr);
                      emit_move_insn (temp, addr);

                      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  break;

                /* Everything else cannot happen.  */
                default:
                  abort ();
                }
            }
          else if (GET_CODE (addr) != PLUS)
            abort ();
        }
      if (GET_CODE (addr) == PLUS)
        {
          rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
          /* Check first to see if this is a constant offset
             from a local symbol reference.  */
          if ((GET_CODE (op0) == LABEL_REF
               || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
              && GET_CODE (op1) == CONST_INT)
            {
              if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
                {
                  if (INTVAL (op1) & 1)
                    {
                      /* LARL can't handle odd offsets, so emit a
                         pair of LARL and LA.  */
                      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

                      if (!DISP_IN_RANGE (INTVAL (op1)))
                        {
                          int even = INTVAL (op1) - 1;
                          op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
                          op0 = gen_rtx_CONST (Pmode, op0);
                          op1 = const1_rtx;
                        }

                      emit_move_insn (temp, op0);
                      new = gen_rtx_PLUS (Pmode, temp, op1);

                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  else
                    {
                      /* If the offset is even, we can just use LARL.
                         This will happen automatically.  */
                    }
                }
              else
                {
                  /* Access local symbols relative to the GOT.  */

                  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

                  if (reload_in_progress || reload_completed)
                    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

                  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
                                         UNSPEC_GOTOFF);
                  addr = gen_rtx_PLUS (Pmode, addr, op1);
                  addr = gen_rtx_CONST (Pmode, addr);
                  addr = force_const_mem (Pmode, addr);
                  emit_move_insn (temp, addr);

                  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
                  if (reg != 0)
                    {
                      emit_move_insn (reg, new);
                      new = reg;
                    }
                }
            }

          /* Now, check whether it is a GOT relative symbol plus offset
             that was pulled out of the literal pool.  Force it back in.  */
          else if (GET_CODE (op0) == UNSPEC
                   && GET_CODE (op1) == CONST_INT)
            {
              if (XVECLEN (op0, 0) != 1)
                abort ();
              if (XINT (op0, 1) != UNSPEC_GOTOFF)
                abort ();

              new = force_const_mem (Pmode, orig);
            }

          /* Otherwise, compute the sum.  */
          else
            {
              base = legitimize_pic_address (XEXP (addr, 0), reg);
              new  = legitimize_pic_address (XEXP (addr, 1),
                                             base == reg ? NULL_RTX : reg);
              if (GET_CODE (new) == CONST_INT)
                new = plus_constant (base, INTVAL (new));
              else
                {
                  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
                    {
                      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
                      new = XEXP (new, 1);
                    }
                  new = gen_rtx_PLUS (Pmode, base, new);
                }

              if (GET_CODE (new) == CONST)
                new = XEXP (new, 0);
              new = force_operand (new, 0);
            }
        }
    }
  return new;
}
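
/* Illustrative sketch (not part of the build): for an external symbol
   `sym', the cases above correspond roughly to these access sequences,
   assuming %r12 holds the GOT pointer and %r13 the literal pool base:

        small GOT (flag_pic == 1):   l     %r1,sym@GOT(%r12)
        zSeries (@GOTENT):           larl  %r1,sym@GOTENT
                                     lg    %r1,0(%r1)
        31-bit, large GOT (@GOT):    l     %r1,.Lconst(%r13)
                                     l     %r1,0(%r1,%r12)

   The exact mnemonics are chosen later by the insn patterns; this only
   sketches the addressing strategies implemented above.  */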
/* Load the thread pointer into a register.  */

static rtx
get_thread_pointer (void)
{
  rtx tp;

  tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
  tp = force_reg (Pmode, tp);
  mark_reg_pointer (tp, BITS_PER_WORD);

  return tp;
}
/* Construct the SYMBOL_REF for the tls_get_offset function.  */

static GTY(()) rtx s390_tls_symbol;

static rtx
s390_tls_get_offset (void)
{
  if (!s390_tls_symbol)
    s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");

  return s390_tls_symbol;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
        start_sequence ();
        r2 = gen_rtx_REG (Pmode, 2);
        tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
        new = gen_rtx_CONST (Pmode, tls_call);
        new = force_const_mem (Pmode, new);
        emit_move_insn (r2, new);
        emit_call_insn (gen_call_value_tls (r2, tls_call));
        insn = get_insns ();
        end_sequence ();

        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
        temp = gen_reg_rtx (Pmode);
        emit_libcall_block (insn, temp, r2, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_LOCAL_DYNAMIC:
        start_sequence ();
        r2 = gen_rtx_REG (Pmode, 2);
        tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
        new = gen_rtx_CONST (Pmode, tls_call);
        new = force_const_mem (Pmode, new);
        emit_move_insn (r2, new);
        emit_call_insn (gen_call_value_tls (r2, tls_call));
        insn = get_insns ();
        end_sequence ();

        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
        temp = gen_reg_rtx (Pmode);
        emit_libcall_block (insn, temp, r2, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        base = gen_reg_rtx (Pmode);
        s390_load_address (base, new);

        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
        new = gen_rtx_CONST (Pmode, new);
        new = force_const_mem (Pmode, new);
        temp = gen_reg_rtx (Pmode);
        emit_move_insn (temp, new);

        new = gen_rtx_PLUS (Pmode, base, temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_INITIAL_EXEC:
        if (flag_pic == 1)
          {
            /* Assume GOT offset < 4k.  This is handled the same way
               in both 31- and 64-bit code.  */

            if (reload_in_progress || reload_completed)
              regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);
          }
        else if (TARGET_CPU_ZARCH)
          {
            /* If the GOT offset might be >= 4k, we determine the position
               of the GOT entry via a PC-relative LARL.  */

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = gen_rtx_MEM (Pmode, temp);
            RTX_UNCHANGING_P (new) = 1;
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);
          }
        else if (flag_pic)
          {
            /* If the GOT offset might be >= 4k, we have to load it
               from the literal pool.  */

            if (reload_in_progress || reload_completed)
              regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = force_const_mem (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
            temp = gen_reg_rtx (Pmode);
            emit_insn (gen_rtx_SET (Pmode, temp, new));
          }
        else
          {
            /* In position-dependent code, load the absolute address of
               the GOT entry from the literal pool.  */

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = force_const_mem (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = temp;
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
            temp = gen_reg_rtx (Pmode);
            emit_insn (gen_rtx_SET (Pmode, temp, new));
          }

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_LOCAL_EXEC:
        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
        new = gen_rtx_CONST (Pmode, new);
        new = force_const_mem (Pmode, new);
        temp = gen_reg_rtx (Pmode);
        emit_move_insn (temp, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      default:
        abort ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (addr, 0), 1))
        {
        case UNSPEC_INDNTPOFF:
          if (TARGET_CPU_ZARCH)
            new = addr;
          else
            abort ();
          break;

        default:
          abort ();
        }
    }

  else
    abort ();  /* for now ... */

  return new;
}
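
/* Rough summary of the TLS models handled above (illustrative, not
   literal compiler output):

     global-dynamic:  load sym@TLSGD from the literal pool, call
                      __tls_get_offset, add the result to the thread
                      pointer.
     local-dynamic:   one __tls_get_offset call yields the module base;
                      each symbol then adds its sym@DTPOFF constant.
     initial-exec:    load the GOT slot (@GOTNTPOFF / @INDNTPOFF) and
                      add it to the thread pointer; no call needed.
     local-exec:      add a sym@NTPOFF literal pool constant to the
                      thread pointer directly.

   The thread pointer itself comes from get_thread_pointer () above;
   on this target it is kept in the access registers.  */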
/* Emit insns to move operands[1] into operands[0].  */

void
emit_symbolic_move (rtx *operands)
{
  rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);

  if (GET_CODE (operands[0]) == MEM)
    operands[1] = force_reg (Pmode, operands[1]);
  else if (TLS_SYMBOLIC_CONST (operands[1]))
    operands[1] = legitimize_tls_address (operands[1], temp);
  else if (flag_pic)
    operands[1] = legitimize_pic_address (operands[1], temp);
}
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

rtx
legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx constant_term = const0_rtx;

  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
        return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
          || (GET_CODE (x) == PLUS
              && (SYMBOLIC_CONST (XEXP (x, 0))
                  || SYMBOLIC_CONST (XEXP (x, 1)))))
        x = legitimize_pic_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
        return x;
    }

  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && !TARGET_LONG_DISPLACEMENT
      && !DISP_IN_RANGE (INTVAL (constant_term))
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val  = force_operand (GEN_INT (upper), temp);
      if (val != temp)
        emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
        {
          register rtx temp = gen_reg_rtx (Pmode);
          register rtx val  = force_operand (XEXP (x, 1), temp);
          if (val != temp)
            emit_move_insn (temp, val);

          x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
        }
      else if (GET_CODE (XEXP (x, 1)) == REG)
        {
          register rtx temp = gen_reg_rtx (Pmode);
          register rtx val  = force_operand (XEXP (x, 0), temp);
          if (val != temp)
            emit_move_insn (temp, val);

          x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
        }
    }

  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);

  return x;
}
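
/* Worked example (illustrative): without TARGET_LONG_DISPLACEMENT, an
   address like  base + 0x12345  is split above into

        lower = 0x12345 & 0xfff = 0x345   (fits the 12-bit field)
        upper = 0x12345 ^ 0x345 = 0x12000 (loaded into a pseudo)

   so neighboring accesses around the same large offset can CSE the
   register holding the 4K-aligned upper part.  */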
/* Emit code to copy LEN bytes from SRC to DST.  */

void
s390_expand_movstr (rtx dst, rtx src, rtx len)
{
  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
        emit_insn (gen_movstr_short (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      emit_insn (gen_movstr_long (dst, src, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = Pmode;

      type = lang_hooks.types.type_for_mode (mode, 1);
      if (!type)
        abort ();

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
                               EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
                                           make_tree (type, blocks),
                                           make_tree (type, const0_rtx)));

      emit_insn (gen_movstr_short (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
                         gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
                         gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      emit_insn (gen_movstr_short (dst, src,
                                   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
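
/* The fallback loop above has roughly this shape (a sketch, not the
   literal RTL; the remainder goes through the movstr_short pattern,
   which EXecutes an MVC template):

        count  = len - 1;
        blocks = count >> 8;
        while (blocks != 0)
          {
            copy 256 bytes (MVC), advance dst and src by 256;
            blocks--;
          }
        copy the remaining (count & 255) + 1 bytes via EX of MVC;  */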
/* Emit code to clear LEN bytes at DST.  */

void
s390_expand_clrstr (rtx dst, rtx len)
{
  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
        emit_insn (gen_clrstr_short (dst, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      emit_insn (gen_clrstr_long (dst, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = Pmode;

      type = lang_hooks.types.type_for_mode (mode, 1);
      if (!type)
        abort ();

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
                               EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
                                           make_tree (type, blocks),
                                           make_tree (type, const0_rtx)));

      emit_insn (gen_clrstr_short (dst, GEN_INT (255)));
      s390_load_address (dst_addr,
                         gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      emit_insn (gen_clrstr_short (dst, convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
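
/* The clearing itself is done by the clrstr_short/clrstr_long
   patterns; typically that is an XC of the destination with itself,
   e.g. (illustrative)

        xc      0(256,%r1),0(%r1)

   per 256-byte block, with the remainder handled through EX just as
   in s390_expand_movstr above.  */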
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET.  */

void
s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
{
  rtx (*gen_result) (rtx) =
    GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;

  op0 = protect_from_queue (op0, 0);
  op1 = protect_from_queue (op1, 0);
  len = protect_from_queue (len, 0);

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
        {
          emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
          emit_insn (gen_result (target));
        }
      else
        emit_move_insn (target, const0_rtx);
    }

  else /* if (TARGET_MVCLE) */
    {
      emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
      emit_insn (gen_result (target));
    }

#if 0
  /* Deactivate for now as profile code cannot cope with
     CC being live across basic block boundaries.  */
  else /* if (TARGET_MVCLE) */
    {
      rtx addr0, addr1, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = Pmode;

      type = lang_hooks.types.type_for_mode (mode, 1);
      if (!type)
        abort ();

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
                               EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
                                           make_tree (type, blocks),
                                           make_tree (type, const0_rtx)));

      emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
      temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
                        gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
                         gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
                         gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      emit_insn (gen_cmpmem_short (op0, op1,
                                   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);

      emit_insn (gen_result (target));
    }
#endif
}
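
/* The block comparison uses CLC, e.g. (illustrative)

        clc     0(16,%r2),0(%r3)

   which sets the condition code; gen_cmpint_si/gen_cmpint_di then
   convert the CC into the negative/zero/positive integer result that
   memcmp-style callers expect.  The disabled loop variant exits early
   once a 256-byte block compares unequal, which is why it needs CC to
   stay live across the basic block boundary.  */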
/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

void
s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs ("\t.quad\t", file);
      break;
    default:
      abort ();
    }
  output_addr_const (file, x);
  fputs ("@DTPOFF", file);
}
/* In the name of slightly smaller debug output, and to cater to
   general assembler lossage, recognize various UNSPEC sequences
   and turn them back into a direct symbol reference.  */

static rtx
s390_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x, y;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    {
      y = XEXP (XEXP (x, 1), 0);
      if (GET_CODE (y) == UNSPEC
          && XINT (y, 1) == UNSPEC_GOT)
        return XVECEXP (y, 0, 0);
      return orig_x;
    }

  if (GET_CODE (x) == CONST)
    {
      y = XEXP (x, 0);
      if (GET_CODE (y) == UNSPEC
          && XINT (y, 1) == UNSPEC_GOTENT)
        return XVECEXP (y, 0, 0);
      return orig_x;
    }

  return orig_x;
}
/* Output shift count operand OP to stdio stream FILE.  */

static void
print_shift_count_operand (FILE *file, rtx op)
{
  HOST_WIDE_INT offset = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      offset = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Sanity check.  */
  if (op && (GET_CODE (op) != REG
             || REGNO (op) >= FIRST_PSEUDO_REGISTER
             || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
    abort ();

  /* Shift counts are truncated to the low six bits anyway.  */
  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
  if (op)
    fprintf (file, "(%s)", reg_names[REGNO (op)]);
}
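
/* Examples (illustrative): (const_int 3) prints as `3';
   (plus (reg %r1) (const_int 3)) prints as `3(%r1)'; and
   (const_int 67) prints as `3', since 67 & 63 == 3, e.g. in

        sll     %r2,3(%r1)  */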
/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in local-dynamic base patterns.  */

static const char *
get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  abort ();
}

static int
get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      x = get_pool_constant (x);
      return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
    }

  if (GET_CODE (x) == SYMBOL_REF
      && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
    {
      cfun->machine->some_ld_name = XSTR (x, 0);
      return 1;
    }

  return 0;
}
/* Output machine-dependent UNSPECs occurring in address constant X
   in assembler syntax to stdio stream FILE.  Returns true if the
   constant X could be recognized, false otherwise.  */

bool
s390_output_addr_const_extra (FILE *file, rtx x)
{
  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
    switch (XINT (x, 1))
      {
      case UNSPEC_GOTENT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTENT");
        return true;
      case UNSPEC_GOT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOT");
        return true;
      case UNSPEC_GOTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTOFF");
        return true;
      case UNSPEC_PLT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLT");
        return true;
      case UNSPEC_PLTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLTOFF");
        return true;
      case UNSPEC_TLSGD:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@TLSGD");
        return true;
      case UNSPEC_TLSLDM:
        assemble_name (file, get_some_local_dynamic_name ());
        fprintf (file, "@TLSLDM");
        return true;
      case UNSPEC_DTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@DTPOFF");
        return true;
      case UNSPEC_NTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@NTPOFF");
        return true;
      case UNSPEC_GOTNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTNTPOFF");
        return true;
      case UNSPEC_INDNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@INDNTPOFF");
        return true;
      }

  return false;
}
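
/* For instance, an UNSPEC_GOTENT wrapper around `sym' prints as
   `sym@GOTENT', which the assembler resolves to a R_390_GOTENT
   relocation; the other suffixes map to their corresponding R_390_*
   relocation types as defined by the s390 ELF ABI supplement.
   (Relocation names given for illustration only.)  */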
/* Output address operand ADDR in assembler syntax to
   stdio stream FILE.  */

void
print_operand_address (FILE *file, rtx addr)
{
  struct s390_address ad;

  if (!s390_decompose_address (addr, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    output_operand_lossage ("Cannot decompose address.");

  if (ad.disp)
    output_addr_const (file, ad.disp);
  else
    fprintf (file, "0");

  if (ad.base && ad.indx)
    fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
             reg_names[REGNO (ad.base)]);
  else if (ad.base)
    fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
}
/* Output operand X in assembler syntax to stdio stream FILE.
   CODE specifies the format flag.  The following format flags
   are recognized:

    'C': print opcode suffix for branch condition.
    'D': print opcode suffix for inverse branch condition.
    'J': print tls_load/tls_gdcall/tls_ldcall suffix
    'O': print only the displacement of a memory reference.
    'R': print only the base register of a memory reference.
    'N': print the second word of a DImode operand.
    'M': print the second word of a TImode operand.
    'Y': print shift count operand.

    'b': print integer X as if it's an unsigned byte.
    'x': print integer X as if it's an unsigned word.
    'h': print integer X as if it's a signed word.
    'i': print the first nonzero HImode part of X.
    'j': print the first HImode part unequal to 0xffff of X.  */

void
print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'C':
      fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
      return;

    case 'D':
      fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
      return;

    case 'J':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          fprintf (file, "%s", ":tls_load:");
          output_addr_const (file, x);
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
        {
          fprintf (file, "%s", ":tls_gdcall:");
          output_addr_const (file, XVECEXP (x, 0, 0));
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
        {
          fprintf (file, "%s", ":tls_ldcall:");
          assemble_name (file, get_some_local_dynamic_name ());
        }
      else
        abort ();
      return;

    case 'O':
      {
        struct s390_address ad;

        if (GET_CODE (x) != MEM
            || !s390_decompose_address (XEXP (x, 0), &ad)
            || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
            || ad.indx)
          abort ();

        if (ad.disp)
          output_addr_const (file, ad.disp);
        else
          fprintf (file, "0");
      }
      return;

    case 'R':
      {
        struct s390_address ad;

        if (GET_CODE (x) != MEM
            || !s390_decompose_address (XEXP (x, 0), &ad)
            || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
            || ad.indx)
          abort ();

        if (ad.base)
          fprintf (file, "%s", reg_names[REGNO (ad.base)]);
        else
          fprintf (file, "0");
      }
      return;

    case 'N':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
      else
        abort ();
      break;

    case 'M':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
      else
        abort ();
      break;

    case 'Y':
      print_shift_count_operand (file, x);
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fprintf (file, "%s", reg_names[REGNO (x)]);
      break;

    case MEM:
      output_address (XEXP (x, 0));
      break;

    case CONST:
    case CODE_LABEL:
    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    case CONST_INT:
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
      else if (code == 'i')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, 0));
      else if (code == 'j')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, -1));
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) != VOIDmode)
        abort ();
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
        abort ();
      break;

    default:
      fatal_insn ("UNKNOWN in print_operand !?", x);
      break;
    }
}
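
/* Usage sketch (operand strings are hypothetical, not quoted from
   s390.md): a pattern might print "lm\t%0,%N0,%1" to name both halves
   of a register pair, or "%b1" to emit an immediate masked to an
   unsigned byte.  */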
/* Target hook for assembling integer objects.  We need to define it
   here to work around a bug in some versions of GAS, which couldn't
   handle values smaller than INT_MIN when printed in decimal.  */

static bool
s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == 8 && aligned_p
      && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
    {
      fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
               INTVAL (x));
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}
/* Returns true if register REGNO is used for forming
   a memory address in expression X.  */

static int
reg_used_in_mem_p (int regno, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == MEM)
    {
      if (refers_to_regno_p (regno, regno+1,
                             XEXP (x, 0), 0))
        return 1;
    }
  else if (code == SET
           && GET_CODE (SET_DEST (x)) == PC)
    {
      if (refers_to_regno_p (regno, regno+1,
                             SET_SRC (x), 0))
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && reg_used_in_mem_p (regno, XEXP (x, i)))
        return 1;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
            return 1;
    }
  return 0;
}
/* Returns true if expression DEP_RTX sets an address register
   used by instruction INSN to address memory.  */

static int
addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
  rtx target, pat;

  if (GET_CODE (dep_rtx) == INSN)
    dep_rtx = PATTERN (dep_rtx);

  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);
      if (GET_CODE (target) == STRICT_LOW_PART)
        target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
        {
          int regno = REGNO (target);

          if (s390_safe_attr_type (insn) == TYPE_LA)
            {
              pat = PATTERN (insn);
              if (GET_CODE (pat) == PARALLEL)
                {
                  if (XVECLEN (pat, 0) != 2)
                    abort ();
                  pat = XVECEXP (pat, 0, 0);
                }
              if (GET_CODE (pat) == SET)
                return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
              else
                abort ();
            }
          else if (get_attr_atype (insn) == ATYPE_AGEN)
            return reg_used_in_mem_p (regno, PATTERN (insn));
        }
    }
  return 0;
}
/* Return 1, if dep_insn sets register used in insn in the agen unit.  */

int
s390_agen_dep_p (rtx dep_insn, rtx insn)
{
  rtx dep_rtx = PATTERN (dep_insn);
  int i;

  if (GET_CODE (dep_rtx) == SET
      && addr_generation_dependency_p (dep_rtx, insn))
    return 1;
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
        {
          if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
            return 1;
        }
    }
  return 0;
}
/* Return the modified cost of the dependency of instruction INSN
   on instruction DEP_INSN through the link LINK.  COST is the
   default cost of that dependency.

   Data dependencies are all handled without delay.  However, if a
   register is modified and subsequently used as base or index
   register of a memory reference, at least 4 cycles need to pass
   between setting and using the register to avoid pipeline stalls.
   An exception is the LA instruction.  An address generated by LA can
   be used by introducing only a one cycle stall on the pipeline.  */

static int
s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  rtx dep_rtx;
  int i;

  /* If the dependence is an anti-dependence, there is no cost.  For an
     output dependence, there is sometimes a cost, but it doesn't seem
     worth handling those few cases.  */

  if (REG_NOTE_KIND (link) != 0)
    return 0;

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
    return cost;

  /* DFA based scheduling checks address dependency in md file.  */
  if (s390_use_dfa_pipeline_interface ())
    {
      /* Operand forward in case of lr, load and la.  */
      if (s390_tune == PROCESSOR_2084_Z990
          && cost == 1
          && (s390_safe_attr_type (dep_insn) == TYPE_LA
              || s390_safe_attr_type (dep_insn) == TYPE_LR
              || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
        return 0;
      return cost;
    }

  dep_rtx = PATTERN (dep_insn);

  if (GET_CODE (dep_rtx) == SET
      && addr_generation_dependency_p (dep_rtx, insn))
    cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
        {
          if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
            cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
        }
    }

  return cost;
}
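
/* Example of the address-generation interlock being modeled above
   (illustrative):

        lr      %r1,%r2         ; sets %r1
        l       %r3,0(%r1)      ; uses %r1 as base -> 4 extra cycles

   Had %r1 been produced by LA, only one extra cycle is charged.  The
   numbers are the scheduling model's estimates for the older in-order
   pipelines, not exact hardware timings.  */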
/* A C statement (sans semicolon) to update the integer scheduling priority
   INSN_PRIORITY (INSN).  Increase the priority to execute the INSN earlier,
   reduce the priority to execute INSN later.  Do not define this macro if
   you do not need to adjust the scheduling priorities of insns.

   A STD instruction should be scheduled earlier,
   in order to use the bypass.  */

static int
s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  if (! INSN_P (insn))
    return priority;

  if (s390_tune != PROCESSOR_2084_Z990)
    return priority;

  switch (s390_safe_attr_type (insn))
    {
    case TYPE_FSTORED:
    case TYPE_FSTORES:
      priority = priority << 3;
      break;
    case TYPE_STORE:
      priority = priority << 1;
      break;
    default:
      break;
    }
  return priority;
}
/* The number of instructions that can be issued per cycle.  */

static int
s390_issue_rate (void)
{
  if (s390_tune == PROCESSOR_2084_Z990)
    return 3;
  return 1;
}

/* If the following function returns TRUE, we will use the DFA
   insn scheduler.  */

static int
s390_use_dfa_pipeline_interface (void)
{
  if (s390_tune == PROCESSOR_2064_Z900
      || s390_tune == PROCESSOR_2084_Z990)
    return 1;

  return 0;
}

static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return s390_use_dfa_pipeline_interface () ? 4 : 0;
}
/* Split all branches that exceed the maximum distance.
   Returns true if this created a new literal pool entry.  */

static int
s390_split_branches (void)
{
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  int new_literal = 0;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
        pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
        continue;

      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
        {
          label = &SET_SRC (pat);
        }
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
        {
          if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 1);
          else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 2);
          else
            continue;
        }
      else
        continue;

      if (get_attr_length (insn) <= 4)
        continue;

      /* We are going to use the return register as scratch register,
         make sure it will be saved/restored by the prologue/epilogue.  */
      cfun->machine->save_return_addr_p = 1;

      if (!flag_pic)
        {
          new_literal = 1;
          tmp = force_const_mem (Pmode, *label);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
          INSN_ADDRESSES_NEW (tmp, -1);

          target = temp_reg;
        }
      else
        {
          new_literal = 1;
          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
                                   UNSPEC_LTREL_OFFSET);
          target = gen_rtx_CONST (Pmode, target);
          target = force_const_mem (Pmode, target);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
          INSN_ADDRESSES_NEW (tmp, -1);

          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (target, 0)),
                                   UNSPEC_LTREL_BASE);
          target = gen_rtx_PLUS (Pmode, temp_reg, target);
        }

      if (!validate_change (insn, label, target, 0))
        abort ();
    }

  return new_literal;
}
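
/* Sketch of the transformation (mnemonics illustrative; the actual
   insns come from the s390.md patterns): a conditional branch whose
   target lies beyond the +-64KB range, such as

        jne     .Lfar

   is rewritten to load the target address into the return register
   (from the literal pool, PC-relative when PIC) and branch on it:

        l       %r14,.Lconst-.Lpool(%r13)
        bner    %r14  */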
/* Find a literal pool symbol referenced in RTX X, and store
   it at REF.  Will abort if X contains references to more than
   one such pool symbol; multiple references to the same symbol
   are allowed, however.

   The rtx pointed to by REF must be initialized to NULL_RTX
   by the caller before calling this routine.  */

static void
find_constant_pool_ref (rtx x, rtx *ref)
{
  int i, j;
  const char *fmt;

  /* Ignore LTREL_BASE references.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return;
  /* Likewise POOL_ENTRY insns.  */
  if (GET_CODE (x) == UNSPEC_VOLATILE
      && XINT (x, 1) == UNSPECV_POOL_ENTRY)
    return;

  if (GET_CODE (x) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (x))
    {
      if (*ref == NULL_RTX)
        *ref = x;
      else if (*ref != x)
        abort ();
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          find_constant_pool_ref (XEXP (x, i), ref);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            find_constant_pool_ref (XVECEXP (x, i, j), ref);
        }
    }
}
/* Replace every reference to the literal pool symbol REF
   in X by the address ADDR.  Fix up MEMs as required.  */

static void
replace_constant_pool_ref (rtx *x, rtx ref, rtx addr)
{
  int i, j;
  const char *fmt;

  if (*x == ref)
    abort ();

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      if (memref == ref)
        {
          *x = replace_equiv_address (*x, addr);
          return;
        }

      if (GET_CODE (memref) == CONST
          && GET_CODE (XEXP (memref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
          && XEXP (XEXP (memref, 0), 0) == ref)
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
          *x = replace_equiv_address (*x, plus_constant (addr, off));
          return;
        }
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      if (addrref == ref)
        {
          SET_SRC (*x) = addr;
          return;
        }

      if (GET_CODE (addrref) == CONST
          && GET_CODE (XEXP (addrref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
          && XEXP (XEXP (addrref, 0), 0) == ref)
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
          SET_SRC (*x) = plus_constant (addr, off);
          return;
        }
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
        }
    }
}
/* Check whether X contains an UNSPEC_LTREL_BASE.
   Return its constant pool symbol if found, NULL_RTX otherwise.  */

static rtx
find_ltrel_base (rtx x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return XVECEXP (x, 0, 0);

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx fnd = find_ltrel_base (XEXP (x, i));
          if (fnd)
            return fnd;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
              if (fnd)
                return fnd;
            }
        }
    }

  return NULL_RTX;
}
/* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE.  */

static void
replace_ltrel_base (rtx *x, rtx base)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      *x = base;
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_ltrel_base (&XEXP (*x, i), base);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_ltrel_base (&XVECEXP (*x, i, j), base);
        }
    }
}
/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */

#define NR_C_MODES 7
enum machine_mode constant_modes[NR_C_MODES] =
{
  TImode,
  DFmode, DImode,
  SFmode, SImode,
  HImode,
  QImode
};

struct constant
{
  struct constant *next;
  rtx value;
  rtx label;
};

struct constant_pool
{
  struct constant_pool *next;
  rtx first_insn;
  rtx pool_insn;
  bitmap insns;

  struct constant *constants[NR_C_MODES];
  rtx label;
  int size;
};

static struct constant_pool * s390_mainpool_start (void);
static void s390_mainpool_finish (struct constant_pool *, rtx base_reg);
static void s390_mainpool_cancel (struct constant_pool *);

static struct constant_pool * s390_chunkify_start (rtx base_reg);
static void s390_chunkify_finish (struct constant_pool *, rtx base_reg);
static void s390_chunkify_cancel (struct constant_pool *);

static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
static void s390_end_pool (struct constant_pool *, rtx);
static void s390_add_pool_insn (struct constant_pool *, rtx);
static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
static rtx s390_dump_pool (struct constant_pool *, bool);
static struct constant_pool *s390_alloc_pool (void);
static void s390_free_pool (struct constant_pool *);
/* Create new constant pool covering instructions starting at INSN
   and chain it to the end of POOL_LIST.  */

static struct constant_pool *
s390_start_pool (struct constant_pool **pool_list, rtx insn)
{
  struct constant_pool *pool, **prev;

  pool = s390_alloc_pool ();
  pool->first_insn = insn;

  for (prev = pool_list; *prev; prev = &(*prev)->next)
    ;
  *prev = pool;

  return pool;
}
/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  */

static void
s390_end_pool (struct constant_pool *pool, rtx insn)
{
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}
/* Add INSN to the list of insns covered by POOL.  */

static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}
/* Return pool out of POOL_LIST that covers INSN.  */

static struct constant_pool *
s390_find_pool (struct constant_pool *pool_list, rtx insn)
{
  struct constant_pool *pool;

  for (pool = pool_list; pool; pool = pool->next)
    if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
      break;

  return pool;
}
/* Add constant VAL of mode MODE to the constant pool POOL.  */

static void
s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  if (i == NR_C_MODES)
    abort ();

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = val;
      c->label = gen_label_rtx ();
      c->next = pool->constants[i];
      pool->constants[i] = c;
      pool->size += GET_MODE_SIZE (mode);
    }
}
/* Find constant VAL of mode MODE in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the new constant.  */

static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
                    enum machine_mode mode)
{
  struct constant *c;
  rtx offset;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  if (i == NR_C_MODES)
    abort ();

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    abort ();

  offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
                          gen_rtx_LABEL_REF (Pmode, pool->label));
  offset = gen_rtx_CONST (Pmode, offset);
  return offset;
}
/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static rtx
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn;
  int i;

  /* Pool start insn switches to proper section
     and guarantees necessary alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
  else
    insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
  INSN_ADDRESSES_NEW (insn, -1);

  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
        /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
        rtx value = c->value;
        if (GET_CODE (value) == CONST
            && GET_CODE (XEXP (value, 0)) == UNSPEC
            && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
            && XVECLEN (XEXP (value, 0), 0) == 1)
          {
            value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
                                   gen_rtx_LABEL_REF (VOIDmode, pool->label));
            value = gen_rtx_CONST (VOIDmode, value);
          }

        insn = emit_label_after (c->label, insn);
        INSN_ADDRESSES_NEW (insn, -1);

        value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
                                         gen_rtvec (1, value),
                                         UNSPECV_POOL_ENTRY);
        insn = emit_insn_after (value, insn);
        INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Pool end insn switches back to previous section
     and guarantees necessary alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_end_64 (), insn);
  else
    insn = emit_insn_after (gen_pool_end_31 (), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);

  return insn;
}
/* Allocate new constant_pool structure.  */

static struct constant_pool *
s390_alloc_pool (void)
{
  struct constant_pool *pool;
  int i;

  pool = (struct constant_pool *) xmalloc (sizeof *pool);
  pool->next = NULL;
  for (i = 0; i < NR_C_MODES; i++)
    pool->constants[i] = NULL;

  pool->label = gen_label_rtx ();
  pool->first_insn = NULL_RTX;
  pool->pool_insn = NULL_RTX;
  pool->insns = BITMAP_XMALLOC ();
  pool->size = 0;

  return pool;
}
/* Free all memory used by POOL.  */

static void
s390_free_pool (struct constant_pool *pool)
{
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    {
      struct constant *c = pool->constants[i];
      while (c != NULL)
        {
          struct constant *next = c->next;
          free (c);
          c = next;
        }
    }

  BITMAP_XFREE (pool->insns);
  free (pool);
}
/* Collect main literal pool.  Return NULL on overflow.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
          && XINT (PATTERN (insn), 1) == UNSPECV_MAIN_POOL)
        {
          if (pool->pool_insn)
            abort ();
          pool->pool_insn = insn;
        }

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);
              s390_add_constant (pool, constant, mode);
            }
        }
    }

  if (!pool->pool_insn)
    abort ();

  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
         pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  return pool;
}
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  BASE_REG is the register to
   be used as pool base register.  */

static void
s390_mainpool_finish (struct constant_pool *pool, rtx base_reg)
{
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
           + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn), base_reg);

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              addr = s390_find_constant (pool, get_pool_constant (pool_ref),
                                               get_pool_mode (pool_ref));
              addr = gen_rtx_PLUS (Pmode, base_reg, addr);
              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}
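
/* The three strategies above correspond roughly to (illustrative,
   with %r13 standing for BASE_REG):

     zSeries:           larl  %r13,.Lpool       ; pool in .rodata
     S/390, small:      basr  %r13,0            ; code + pool < 4096,
                                                ; pool after the function
     S/390, large:      basr  %r13,0            ; then branch over an
                        b     .Lpool_end        ; inline pool
            .Lpool:     ... constants ...
        .Lpool_end:  */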
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
/* Chunkify the literal pool.  BASE_REG is to be used as pool
   base register.  */

#define S390_POOL_CHUNK_MIN     0xc00
#define S390_POOL_CHUNK_MAX     0xe00

static struct constant_pool *
s390_chunkify_start (rtx base_reg)
{
  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx pending_ltrel = NULL_RTX;
  rtx insn;

  rtx (*gen_reload_base) (rtx, rtx) =
    TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;


  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Check for pending LTREL_BASE.  */
      if (INSN_P (insn))
        {
          rtx ltrel_base = find_ltrel_base (PATTERN (insn));
          if (ltrel_base)
            {
              if (ltrel_base == pending_ltrel)
                pending_ltrel = NULL_RTX;
              else
                abort ();
            }
        }

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);

              if (!curr_pool)
                curr_pool = s390_start_pool (&pool_list, insn);

              s390_add_constant (curr_pool, constant, mode);
              s390_add_pool_insn (curr_pool, insn);

              /* Don't split the pool chunk between a LTREL_OFFSET load
                 and the corresponding LTREL_BASE.  */
              if (GET_CODE (constant) == CONST
                  && GET_CODE (XEXP (constant, 0)) == UNSPEC
                  && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
                {
                  if (pending_ltrel)
                    abort ();
                  pending_ltrel = pool_ref;
                }
            }
        }

      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
        {
          if (curr_pool)
            s390_add_pool_insn (curr_pool, insn);
          /* An LTREL_BASE must follow within the same basic block.  */
          if (pending_ltrel)
            abort ();
        }

      if (!curr_pool
          || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
          || INSN_ADDRESSES (INSN_UID (insn)) == -1)
        continue;

      if (TARGET_CPU_ZARCH)
        {
          if (curr_pool->size < S390_POOL_CHUNK_MAX)
            continue;

          s390_end_pool (curr_pool, NULL_RTX);
          curr_pool = NULL;
        }
      else
        {
          int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
                           - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
                           + extra_size;

          /* We will later have to insert base register reload insns.
             Those will have an effect on code size, which we need to
             consider here.  This calculation makes rather pessimistic
             worst-case assumptions.  */
          if (GET_CODE (insn) == CODE_LABEL)
            extra_size += 6;

          if (chunk_size < S390_POOL_CHUNK_MIN
              && curr_pool->size < S390_POOL_CHUNK_MIN)
            continue;

          /* Pool chunks can only be inserted after BARRIERs ...  */
          if (GET_CODE (insn) == BARRIER)
            {
              s390_end_pool (curr_pool, insn);
              curr_pool = NULL;
              extra_size = 0;
            }

          /* ... so if we don't find one in time, create one.  */
          else if ((chunk_size > S390_POOL_CHUNK_MAX
                    || curr_pool->size > S390_POOL_CHUNK_MAX))
            {
              rtx label, jump, barrier;

              /* We can insert the barrier only after a 'real' insn.  */
              if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
                continue;
              if (get_attr_length (insn) == 0)
                continue;

              /* Don't separate LTREL_BASE from the corresponding
                 LTREL_OFFSET load.  */
              if (pending_ltrel)
                continue;

              label = gen_label_rtx ();
              jump = emit_jump_insn_after (gen_jump (label), insn);
              barrier = emit_barrier_after (jump);
              insn = emit_label_after (label, barrier);
              JUMP_LABEL (jump) = label;
              LABEL_NUSES (label) = 1;

              INSN_ADDRESSES_NEW (jump, -1);
              INSN_ADDRESSES_NEW (barrier, -1);
              INSN_ADDRESSES_NEW (insn, -1);

              s390_end_pool (curr_pool, barrier);
              curr_pool = NULL;
              extra_size = 0;
            }
        }
    }

  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);
  if (pending_ltrel)
    abort ();


  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_XMALLOC ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
         of non-local jumps, so we have to mark them.
         The same holds for named labels.

         Don't do that, however, if it is the label before
         a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
          && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
        {
          rtx vec_insn = next_real_insn (insn);
          rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                        PATTERN (vec_insn) : NULL_RTX;
          if (!vec_pat
              || !(GET_CODE (vec_pat) == ADDR_VEC
                   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
            bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
        }

      /* If we have a direct jump (conditional or unconditional)
         or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
        {
          rtx pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
            pat = XVECEXP (pat, 0, 0);

          if (GET_CODE (pat) == SET)
            {
              rtx label = JUMP_LABEL (insn);
              if (label)
                {
                  if (s390_find_pool (pool_list, label)
                      != s390_find_pool (pool_list, insn))
                    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                }
            }
          else if (GET_CODE (pat) == PARALLEL
                   && XVECLEN (pat, 0) == 2
                   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
                   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
                   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
            {
              /* Find the jump table used by this casesi jump.  */
              rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
              rtx vec_insn = next_real_insn (vec_label);
              rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                            PATTERN (vec_insn) : NULL_RTX;
              if (vec_pat
                  && (GET_CODE (vec_pat) == ADDR_VEC
                      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
                {
                  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

                  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
                    {
                      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

                      if (s390_find_pool (pool_list, label)
                          != s390_find_pool (pool_list, insn))
                        bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                    }
                }
            }
        }
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
        struct constant_pool *pool = s390_find_pool (pool_list, insn);
        if (pool)
          {
            rtx new_insn = gen_reload_base (base_reg, pool->label);
            INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
          }
      }


  BITMAP_XFREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}
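
/* Note on the chunk limits: every constant is reached through a
   12-bit displacement from the chunk base, so a chunk may never grow
   past 4096 bytes.  S390_POOL_CHUNK_MAX (0xe00) stops filling well
   short of that, and S390_POOL_CHUNK_MIN (0xc00) avoids scattering
   many tiny chunks; the slack covers alignment padding and the
   pessimistic size estimates made during the scan above.  */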
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required.  BASE_REG is
   to be used as pool base register.  */

static void
s390_chunkify_finish (struct constant_pool *pool_list, rtx base_reg)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn), base_reg);

      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
        continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
                                                    get_pool_mode (pool_ref));
              addr = gen_rtx_PLUS (Pmode, base_reg, addr);
              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool, 0);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
          && barrier && GET_CODE (barrier) == BARRIER
          && label && GET_CODE (label) == CODE_LABEL
          && GET_CODE (PATTERN (jump)) == SET
          && SET_DEST (PATTERN (jump)) == pc_rtx
          && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
          && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
        {
          remove_insn (jump);
          remove_insn (barrier);
          remove_insn (label);
        }

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns.  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
        remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN.  */

void
s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
{
  REAL_VALUE_TYPE r;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (exp) != CONST_DOUBLE)
        abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
      assemble_real (r, mode, align);
      break;

    case MODE_INT:
      assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
      break;

    default:
      abort ();
    }
}
/* Rework the prolog/epilog to avoid saving/restoring
   registers unnecessarily.  BASE_USED specifies whether
   the literal pool base register needs to be saved.  */

static void
s390_optimize_prolog (bool base_used)
{
  int save_first, save_last, restore_first, restore_last;
  int i, j;
  rtx insn, new_insn, next_insn;

  /* Recompute regs_ever_live data for special registers.  */
  regs_ever_live[BASE_REGISTER] = base_used;
  regs_ever_live[RETURN_REGNUM] = cfun->machine->save_return_addr_p;
  regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;

  /* Find first and last gpr to be saved.  */

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      if (!global_regs[i]
          || i == STACK_POINTER_REGNUM
          || i == RETURN_REGNUM
          || i == BASE_REGISTER
          || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
        break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      if (!global_regs[j]
          || j == STACK_POINTER_REGNUM
          || j == RETURN_REGNUM
          || j == BASE_REGISTER
          || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
        break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      save_first = restore_first = -1;
      save_last = restore_last = -1;
    }
  else
    {
      /* Save/restore from gpr i to j.  */
      save_first = restore_first = i;
      save_last = restore_last = j;
    }

  /* Varargs functions need to save gprs 2 to 6.  */
  if (current_function_stdarg)
    {
      save_first = 2;
      if (save_last < 6)
        save_last = 6;
    }

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */
  if (i <= BASE_REGISTER && j >= BASE_REGISTER
      && (TARGET_CPU_ZARCH || (i <= RETURN_REGNUM && j >= RETURN_REGNUM)))
    return;

  /* Search for prolog/epilog insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset) - first * UNITS_PER_WORD;

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (first > BASE_REGISTER || last < BASE_REGISTER)
            continue;

          if (save_first != -1)
            {
              new_insn = save_gprs (base, off, save_first, save_last);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && REGNO (SET_SRC (PATTERN (insn))) == BASE_REGISTER
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset) - BASE_REGISTER * UNITS_PER_WORD;

          if (GET_CODE (base) != REG || off < 0)
            continue;

          if (save_first != -1)
            {
              new_insn = save_gprs (base, off, save_first, save_last);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset) - first * UNITS_PER_WORD;

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (first > BASE_REGISTER || last < BASE_REGISTER)
            continue;

          if (restore_first != -1)
            {
              new_insn = restore_gprs (base, off, restore_first, restore_last);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && REGNO (SET_DEST (PATTERN (insn))) == BASE_REGISTER
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset) - BASE_REGISTER * UNITS_PER_WORD;

          if (GET_CODE (base) != REG || off < 0)
            continue;

          if (restore_first != -1)
            {
              new_insn = restore_gprs (base, off, restore_first, restore_last);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }
    }
}
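/* Sketch of the effect (added commentary): if the generic prologue saved
   gprs 6..15 on 31-bit via

       stm     %r6,%r15,24(%r15)

   but only %r13..%r15 turn out to be needed, the store-multiple above is
   replaced by a save_gprs insn covering just %r13..%r15.  The offsets
   assume the standard save area layout, gpr I at I * UNITS_PER_WORD.  */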
/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
  bool base_used = false;
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* In small leaf functions, try to use an unused call-clobbered
     register as base register to avoid save/restore overhead.  */
  if (current_function_is_leaf && !regs_ever_live[5])
    base_reg = gen_rtx_REG (Pmode, 5);

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start (base_reg);

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if ((pool_overflow || pool->size > 0)
          && REGNO (base_reg) == BASE_REGISTER)
        base_used = true;

      if (pool_overflow)
        s390_chunkify_finish (pool, base_reg);
      else
        s390_mainpool_finish (pool, base_reg);

      break;
    }

  s390_optimize_prolog (base_used);
}
/* Return an RTL expression representing the value of the return address
   for the frame COUNT steps up from the current frame.  FRAME is the
   frame pointer of that frame.  */

rtx
s390_return_addr_rtx (int count, rtx frame)
{
  rtx addr;

  /* Without backchain, we fail for all but the current frame.  */

  if (!TARGET_BACKCHAIN && count > 0)
    return NULL_RTX;

  /* For the current frame, we need to make sure the initial
     value of RETURN_REGNUM is actually saved.  */

  if (count == 0)
    cfun->machine->save_return_addr_p = true;

  /* To retrieve the return address we read the stack slot where the
     corresponding RETURN_REGNUM value was saved.  */

  addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
  addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}
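/* Usage note (added): __builtin_return_address (0) ends up here with
   COUNT == 0 and expands to a load from the RETURN_REGNUM save slot,
   i.e. a MEM at frame + 14 * UNITS_PER_WORD, RETURN_REGNUM being
   gpr 14 on this target.  */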
/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */

static int
find_unused_clobbered_reg (void)
{
  int i;

  for (i = 0; i < 6; i++)
    if (!regs_ever_live[i])
      return i;
  return 0;
}
/* Fill FRAME with info about frame of current function.  */

static void
s390_frame_info (void)
{
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();

  if (!TARGET_64BIT && fsize > 0x7fff0000)
    fatal_error ("Total size of local variables exceeds architecture limit.");

  /* fprs 8 - 15 are caller saved for 64 Bit ABI.  */
  cfun->machine->save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
        {
          cfun->machine->save_fprs_p = 1;
          break;
        }

  cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;

  /* Does the function need to set up a frame and save area?  */

  if (! current_function_is_leaf
      || cfun->machine->frame_size > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    cfun->machine->frame_size += STARTING_FRAME_OFFSET;

  /* If we use the return register, we'll need to make sure
     it is going to be saved/restored.  */

  if (!current_function_is_leaf
      || regs_ever_live[RETURN_REGNUM])
    cfun->machine->save_return_addr_p = 1;

  /* Find first and last gpr to be saved.  Note that at this point,
     we assume the base register and -on S/390- the return register
     always need to be saved.  This is done because the usage of these
     registers might change even after the prolog was emitted.
     If it turns out later that we really don't need them, the
     prolog/epilog code is modified again.  */

  regs_ever_live[BASE_REGISTER] = 1;
  if (!TARGET_CPU_ZARCH || cfun->machine->save_return_addr_p)
    regs_ever_live[RETURN_REGNUM] = 1;
  regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      if (!global_regs[i]
          || i == STACK_POINTER_REGNUM
          || i == RETURN_REGNUM
          || i == BASE_REGISTER
          || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
        break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      if (!global_regs[j]
          || j == STACK_POINTER_REGNUM
          || j == RETURN_REGNUM
          || j == BASE_REGISTER
          || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
        break;

  /* Save / Restore from gpr i to j.  */
  cfun->machine->first_save_gpr = i;
  cfun->machine->first_restore_gpr = i;
  cfun->machine->last_save_gpr = j;

  /* Varargs functions need to save gprs 2 to 6.  */
  if (current_function_stdarg)
    cfun->machine->first_save_gpr = 2;
}
/* Return offset between argument pointer and frame pointer
   initially after prologue.  */

HOST_WIDE_INT
s390_arg_frame_offset (void)
{
  HOST_WIDE_INT fsize = get_frame_size ();
  int save_fprs_p, i;

  /* fprs 8 - 15 are caller saved for 64 Bit ABI.  */
  save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
        {
          save_fprs_p = 1;
          break;
        }

  fsize = fsize + save_fprs_p * 64;

  /* Does the function need to set up a frame and save area?  */

  if (! current_function_is_leaf
      || fsize > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    fsize += STARTING_FRAME_OFFSET;

  return fsize + STACK_POINTER_OFFSET;
}
/* Emit insn to save fpr REGNUM at offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
  set_mem_alias_set (addr, s390_sr_alias_set);

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}

/* Emit insn to restore fpr REGNUM from offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
  set_mem_alias_set (addr, s390_sr_alias_set);

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (base, offset + first * UNITS_PER_WORD);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, s390_sr_alias_set);

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
        insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }

  insn = gen_store_multiple (addr,
                             gen_rtx_REG (Pmode, first),
                             GEN_INT (last - first + 1));

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6)
    {
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
                                 gen_rtx_REG (Pmode, 6),
                                 GEN_INT (last - 6 + 1));
      note = PATTERN (note);

      REG_NOTES (insn) =
        gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                           note, REG_NOTES (insn));

      for (i = 0; i < XVECLEN (note, 0); i++)
        if (GET_CODE (XVECEXP (note, 0, i)) == SET)
          RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
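/* Note (added): for a varargs function saving gprs 2..15, the
   store-multiple itself covers %r2..%r15, but the REG_FRAME_RELATED_EXPR
   note attached above describes only %r6..%r15, so no DWARF save records
   are emitted for the argument registers stored purely for va_arg's
   benefit.  */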
/* Generate insn to restore registers FIRST to LAST from
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
restore_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn;

  addr = plus_constant (base, offset + first * UNITS_PER_WORD);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, s390_sr_alias_set);

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
      else
        insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);

      return insn;
    }

  insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
                            addr,
                            GEN_INT (last - first + 1));
  return insn;
}
/* Emit code to load the GOT register.  If MAYBE_DEAD is true,
   annotate generated insns with REG_MAYBE_DEAD notes.  */

static GTY(()) rtx got_symbol;

void
s390_load_got (int maybe_dead)
{
  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  if (TARGET_CPU_ZARCH)
    {
      rtx insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
      if (maybe_dead)
        REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                             REG_NOTES (insn));
    }
  else
    {
      rtx offset, insn;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
                               UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
        REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                             REG_NOTES (insn));

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
                               UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);

      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
        REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                             REG_NOTES (insn));
    }
}
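/* Illustration (added, based on the two code paths above): on
   z/Architecture the GOT pointer can be loaded directly, roughly

       larl    %r12,_GLOBAL_OFFSET_TABLE_

   while in 31-bit ESA mode the GOT offset is fetched from the literal
   pool and added to the pool base through the UNSPEC_LTREL machinery,
   %r12 being PIC_OFFSET_TABLE_REGNUM.  */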
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;

  /* Compute frame info.  */

  s390_frame_info ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use register 1.  */

  if (!current_function_is_leaf && !TARGET_TPF)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */

  insn = save_gprs (stack_pointer_rtx, 0,
                    cfun->machine->first_save_gpr,
                    cfun->machine->last_save_gpr);
  emit_insn (insn);

  /* Dummy insn to mark literal pool slot.  */

  emit_insn (gen_main_pool ());

  /* Save fprs for variable args.  */

  if (current_function_stdarg)
    for (i = 16; i < (TARGET_64BIT ? 20 : 18); i++)
      save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);

  /* Save fprs 4 and 6 if used (31 bit ABI).  */

  if (!TARGET_64BIT)
    for (i = 18; i < 20; i++)
      if (regs_ever_live[i] && !global_regs[i])
        {
          insn = save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

  /* Decrement stack pointer.  */

  if (cfun->machine->frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun->machine->frame_size);

      /* Save incoming stack pointer into temp reg.  */

      if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
        insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
        {
          insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            frame_off));
          insn = emit_insn (insn);
        }
      else
        {
          if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
            frame_off = force_const_mem (Pmode, frame_off);

          insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
        }

      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) =
        gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                           gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                             gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                               GEN_INT (-cfun->machine->frame_size))),
                           REG_NOTES (insn));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
        {
          addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
          set_mem_alias_set (addr, s390_sr_alias_set);
          insn = emit_insn (gen_move_insn (addr, temp_reg));
        }

      /* If we support asynchronous exceptions (e.g. for Java),
         we need to make sure the backchain pointer is set up
         before any possibly trapping memory access.  */

      if (TARGET_BACKCHAIN && flag_non_call_exceptions)
        {
          addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
          emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
        }
    }

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (cfun->machine->save_fprs_p)
    {
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT (-64)));

      for (i = 24; i < 32; i++)
        if (regs_ever_live[i] && !global_regs[i])
          {
            rtx addr = plus_constant (stack_pointer_rtx,
                                      cfun->machine->frame_size - 64 + (i-24)*8);

            insn = save_fpr (temp_reg, (i-24)*8, i);
            RTX_FRAME_RELATED_P (insn) = 1;
            REG_NOTES (insn) =
              gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                 gen_rtx_SET (VOIDmode,
                                              gen_rtx_MEM (DFmode, addr),
                                              gen_rtx_REG (DFmode, i)),
                                 REG_NOTES (insn));
          }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    s390_load_got (true);

  if (TARGET_TPF)
    {
      /* Generate a BAS instruction to serve as a function
         entry intercept to facilitate the use of tracing
         algorithms located at the branch target.

         This must use register 1.  */
      rtx addr;
      rtx unkn;
      rtx link;

      addr = GEN_INT (0xfe0);
      unkn = CONST0_RTX (SImode);
      link = gen_rtx_REG (Pmode, 1);

      emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));

      /* Emit a blockage here so that all code
         lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
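/* Typical result (illustrative sketch only; 31-bit, frame size 96,
   gprs 13..15 saved, backchain enabled):

       stm     %r13,%r15,52(%r15)   # save_gprs
       lr      %r1,%r15             # save incoming sp
       ahi     %r15,-96             # decrement stack pointer
       st      %r1,0(%r15)          # set backchain

   Actual register choice and offsets depend on s390_frame_info.  */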
/* Expand the epilogue into a bunch of separate insns.  */

void
s390_emit_epilogue (void)
{
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset = 0;
  rtvec p;
  int i;

  if (TARGET_TPF)
    {
      /* Generate a BAS instruction to serve as a function
         entry intercept to facilitate the use of tracing
         algorithms located at the branch target.

         This must use register 1.  */
      rtx addr;
      rtx unkn;
      rtx link;

      addr = GEN_INT (0xfe6);
      unkn = CONST0_RTX (SImode);
      link = gen_rtx_REG (Pmode, 1);

      /* Emit a blockage here so that all code
         lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());

      emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
    }

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = frame_pointer_needed ?
    hard_frame_pointer_rtx : stack_pointer_rtx;

  /* Compute which parts of the save area we need to access.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
      area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
    }
  else
    {
      area_bottom = INT_MAX;
      area_top = INT_MIN;
    }

  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
        {
          if (area_bottom > -64)
            area_bottom = -64;
          if (area_top < 0)
            area_top = 0;
        }
    }
  else
    {
      for (i = 18; i < 20; i++)
        if (regs_ever_live[i] && !global_regs[i])
          {
            if (area_bottom > 16*UNITS_PER_WORD + 8*(i-16))
              area_bottom = 16*UNITS_PER_WORD + 8*(i-16);
            if (area_top < 16*UNITS_PER_WORD + 8*(i-16) + 8)
              area_top = 16*UNITS_PER_WORD + 8*(i-16) + 8;
          }
    }

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
           && DISP_IN_RANGE (cfun->machine->frame_size + area_top - 1))
    {
      /* Area is in range.  */
      offset = cfun->machine->frame_size;
    }
  else
    {
      rtx insn, frame_off;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun->machine->frame_size - offset);

      if (DISP_IN_RANGE (INTVAL (frame_off)))
        {
          insn = gen_rtx_SET (VOIDmode, frame_pointer,
                              gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
          insn = emit_insn (insn);
        }
      else
        {
          if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
            frame_off = force_const_mem (Pmode, frame_off);

          insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
        }
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
        for (i = 24; i < 32; i++)
          if (regs_ever_live[i] && !global_regs[i])
            restore_fpr (frame_pointer,
                         offset - 64 + (i-24) * 8, i);
    }
  else
    {
      for (i = 18; i < 20; i++)
        if (regs_ever_live[i] && !global_regs[i])
          restore_fpr (frame_pointer,
                       offset + 16*UNITS_PER_WORD + 8*(i-16), i);
    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      rtx insn, addr;

      /* Check for global registers and store them
         to the stack locations from which they get restored.  */

      for (i = cfun->machine->first_restore_gpr;
           i <= cfun->machine->last_save_gpr;
           i++)
        {
          /* These registers are special and need to be
             restored in any case.  */
          if (i == STACK_POINTER_REGNUM
              || i == RETURN_REGNUM
              || i == BASE_REGISTER
              || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
            continue;

          if (global_regs[i])
            {
              addr = plus_constant (frame_pointer,
                                    offset + i * UNITS_PER_WORD);
              addr = gen_rtx_MEM (Pmode, addr);
              set_mem_alias_set (addr, s390_sr_alias_set);
              emit_move_insn (addr, gen_rtx_REG (Pmode, i));
            }
        }

      /* Fetch return address from stack before load multiple;
         this is beneficial for scheduling.  */

      if (cfun->machine->save_return_addr_p
          || (cfun->machine->first_restore_gpr < BASE_REGISTER
              && cfun->machine->last_save_gpr > RETURN_REGNUM))
        {
          int return_regnum = find_unused_clobbered_reg ();
          if (!return_regnum)
            return_regnum = 4;
          return_reg = gen_rtx_REG (Pmode, return_regnum);

          addr = plus_constant (frame_pointer,
                                offset + RETURN_REGNUM * UNITS_PER_WORD);
          addr = gen_rtx_MEM (Pmode, addr);
          set_mem_alias_set (addr, s390_sr_alias_set);
          emit_move_insn (return_reg, addr);
        }

      /* ??? As references to the base register are not made
         explicit in insn RTX code, we have to add a barrier here
         to prevent incorrect scheduling.  */

      emit_insn (gen_blockage ());

      insn = restore_gprs (frame_pointer, offset,
                           cfun->machine->first_restore_gpr,
                           cfun->machine->last_save_gpr);
      emit_insn (insn);
    }

  /* Return to caller.  */

  p = rtvec_alloc (2);

  RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
  RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
}
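/* Typical result (illustrative sketch only, matching the prologue sketch
   above; 31-bit, frame size 96, gprs 13..15 restored):

       lm      %r13,%r15,148(%r15)  # restore_gprs (52 + 96)
       br      %r14                 # return to caller  */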
/* Return the size in bytes of a function argument of
   type TYPE and/or mode MODE.  At least one of TYPE or
   MODE must be specified.  */

static int
s390_function_arg_size (enum machine_mode mode, tree type)
{
  if (type)
    return int_size_in_bytes (type);

  /* No type info available for some library calls ...  */
  if (mode != BLKmode)
    return GET_MODE_SIZE (mode);

  /* If we have neither type nor mode, abort.  */
  abort ();
}
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in a floating-point register, if available.  */

static bool
s390_function_arg_float (enum machine_mode mode, tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* Soft-float changes the ABI: no floating-point registers are used.  */
  if (TARGET_SOFT_FLOAT)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return mode == SFmode || mode == DFmode;

  /* The ABI says that record types with a single member are treated
     just like that member would be.  */
  while (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field, single = NULL_TREE;

      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (single == NULL_TREE)
            single = TREE_TYPE (field);
          else
            return false;
        }

      if (single == NULL_TREE)
        return false;
      else
        type = single;
    }

  return TREE_CODE (type) == REAL_TYPE;
}
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in an integer register, or a pair of integer
   registers, if available.  */

static bool
s390_function_arg_integer (enum machine_mode mode, tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return GET_MODE_CLASS (mode) == MODE_INT
           || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);

  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
    return true;

  /* We also accept structs of size 1, 2, 4, 8 that are not
     passed in floating-point registers.  */
  if (AGGREGATE_TYPE_P (type)
      && exact_log2 (size) >= 0
      && !s390_function_arg_float (mode, type))
    return true;

  return false;
}
/* Return 1 if a function argument of type TYPE and mode MODE
   is to be passed by reference.  The ABI specifies that only
   structures of size 1, 2, 4, or 8 bytes are passed by value,
   all other structures (and complex numbers) are passed by
   reference.  */

int
s390_function_arg_pass_by_reference (enum machine_mode mode, tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return 1;

  if (type)
    {
      if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
        return 1;

      if (TREE_CODE (type) == COMPLEX_TYPE
          || TREE_CODE (type) == VECTOR_TYPE)
        return 1;
    }

  return 0;
}
/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.)  The boolean NAMED specifies whether the
   argument is a named argument (as opposed to an unnamed argument
   matching an ellipsis).  */

void
s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                           tree type, int named ATTRIBUTE_UNUSED)
{
  if (s390_function_arg_pass_by_reference (mode, type))
    {
      cum->gprs += 1;
    }
  else if (s390_function_arg_float (mode, type))
    {
      cum->fprs += 1;
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
    }
  else
    abort ();
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On S/390, we use general purpose registers 2 through 6 to
   pass integer, pointer, and certain structure arguments, and
   floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
   to pass floating point arguments.  All remaining arguments
   are pushed to the stack.  */

rtx
s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
                   int named ATTRIBUTE_UNUSED)
{
  if (s390_function_arg_pass_by_reference (mode, type))
    return 0;

  if (s390_function_arg_float (mode, type))
    {
      if (cum->fprs + 1 > (TARGET_64BIT ? 4 : 2))
        return 0;
      else
        return gen_rtx_REG (mode, cum->fprs + 16);
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;

      if (cum->gprs + n_gprs > 5)
        return 0;
      else
        return gen_rtx_REG (mode, cum->gprs + 2);
    }

  /* After the real arguments, expand_call calls us once again
     with a void_type_node type.  Whatever we return here is
     passed as operand 2 to the call expanders.

     We don't need this feature ...  */
  else if (type == void_type_node)
    return const0_rtx;

  abort ();
}
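/* Example (added for illustration): for `int f (int a, double b, int c)'
   on 31-bit hard-float, `a' is passed in %r2, `b' in %f0, and `c' in
   %r3; an integer argument that would not fit below %r6 makes this
   function return 0 and goes to the stack instead.  */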
/* Return true if return values of type TYPE should be returned
   in a memory buffer whose address is passed by the caller as
   hidden first argument.  */

static bool
s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
{
  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || TREE_CODE (type) == REAL_TYPE)
    return int_size_in_bytes (type) > 8;

  /* Aggregates and similar constructs are always returned
     in memory.  */
  if (AGGREGATE_TYPE_P (type)
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    return true;

  /* ??? We get called on all sorts of random stuff from
     aggregate_value_p.  We can't abort, but it's not clear
     what's safe to return.  Pretend it's a struct I guess.  */
  return true;
}
/* Define where to return a (scalar) value of type TYPE.
   If TYPE is null, define where to return a (scalar)
   value of mode MODE from a libcall.  */

rtx
s390_function_value (tree type, enum machine_mode mode)
{
  if (type)
    {
      int unsignedp = TREE_UNSIGNED (type);
      mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
    }

  if (GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_FLOAT)
    abort ();
  if (GET_MODE_SIZE (mode) > 8)
    abort ();

  if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
    return gen_rtx_REG (mode, 16);
  else
    return gen_rtx_REG (mode, 2);
}
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;
        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

static tree
s390_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
                      long_integer_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
                      long_integer_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
                      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
                      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     current_function_args_info:
       holds number of gprs and fprs used for named arguments.
     current_function_arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
             (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));

  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
             build_int_2 (-STACK_POINTER_OFFSET, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement va_arg by updating the va_list structure
   VALIST as required to retrieve an argument of type
   TYPE, and returning that argument.

   Generates code equivalent to:

   if (integral value) {
     if (size  <= 4 && args.gpr < 5 ||
         size  > 4 && args.gpr < 4 )
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

rtx
s390_va_arg (tree valist, tree type)
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  rtx lab_false, lab_over, addr_rtx, r;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);

  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: aggregate type");
          debug_tree (type);
        }

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 2 * UNITS_PER_WORD;
      sav_scale = UNITS_PER_WORD;
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: float type");
          debug_tree (type);
        }

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT passes up to 4 parameters in fprs.  */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: other type");
          debug_tree (type);
        }

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      sav_ofs = 2 * UNITS_PER_WORD;

      if (size < UNITS_PER_WORD)
        sav_ofs += UNITS_PER_WORD - size;

      sav_scale = UNITS_PER_WORD;
      if (n_reg > 1)
        max_reg = 3;
      else
        max_reg = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
                           GEN_INT (max_reg),
                           GT, const1_rtx, Pmode, 0, lab_false);

  t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

  u = build (MULT_EXPR, long_integer_type_node,
             reg, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  emit_jump_insn (gen_jump (lab_over));

  emit_label (lab_false);

  /* ... Otherwise out of the overflow area.  */

  t = save_expr (ovf);

  /* On 64-bit, each argument on the stack gets a full 64-bit slot.  */
  if (size < UNITS_PER_WORD)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t,
                 build_int_2 (UNITS_PER_WORD - size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);
    }

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* Advance the register count past the argument just fetched
     out of the register save area.  */

  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
             build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;
  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
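/* Worked example (added): for a `double' on 31-bit hard-float, reg = fpr,
   n_reg = 1 and max_reg = 1, so the register branch is taken only while
   fpr <= 1, i.e. for the first two floating-point varargs (%f0 and %f2);
   all further ones come out of the overflow area.  */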
/* Builtins.  */

enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

/* Insn codes for the thread-pointer builtins; the table contents were
   lost in extraction and are assumed to be the usual get_tp/set_tp
   patterns.  */
static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};

static void
s390_init_builtins (void)
{
  tree ftype;

  ftype = build_function_type (ptr_type_node, void_list_node);
  builtin_function ("__builtin_thread_pointer", ftype,
                    S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
                    NULL, NULL_TREE);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  builtin_function ("__builtin_set_thread_pointer", ftype,
                    S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
                    NULL, NULL_TREE);
}
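/* Usage (illustrative):

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);

   Both map to the machine-specific thread-pointer patterns registered
   above and are expanded by s390_expand_builtin below.  */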
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define MAX_ARGS 2

static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
  unsigned int const *code_for_builtin =
    TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;

  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;

  if (fcode >= S390_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = code_for_builtin[fcode];
  if (icode == 0)
    internal_error ("bad builtin fcode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  for (arglist = TREE_OPERAND (exp, 1), arity = 0;
       arglist;
       arglist = TREE_CHAIN (arglist), arity++)
    {
      const struct insn_operand_data *insn_op;

      tree arg = TREE_VALUE (arglist);
      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity > MAX_ARGS)
        return NULL_RTX;

      insn_op = &insn_data[icode].operand[arity + nonvoid];

      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);

      if (!(*insn_op->predicate) (op[arity], insn_op->mode))
        op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      abort ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
/* Output assembly code for the trampoline template to
   stdio stream FILE.

   On S/390, we use gpr 1 internally in the trampoline code;
   gpr 0 is used to hold the static chain.  */

void
s390_trampoline_template (FILE *file)
{
  if (TARGET_64BIT)
    {
      fprintf (file, "larl\t%s,0f\n", reg_names[1]);
      fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
      fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
      fprintf (file, "br\t%s\n", reg_names[1]);
      fprintf (file, "0:\t.quad\t0\n");
      fprintf (file, ".quad\t0\n");
    }
  else
    {
      fprintf (file, "basr\t%s,0\n", reg_names[1]);
      fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
      fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
      fprintf (file, "br\t%s\n", reg_names[1]);
      fprintf (file, ".long\t0\n");
      fprintf (file, ".long\t0\n");
    }
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  emit_move_insn (gen_rtx_MEM (Pmode,
                   memory_address (Pmode,
                   plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
  emit_move_insn (gen_rtx_MEM (Pmode,
                   memory_address (Pmode,
                   plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
}
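/* Layout note (derived from the template and offsets above): the 64-bit
   trampoline is 20 bytes of code followed by two 8-byte slots at offsets
   20 (static chain) and 28 (function address); the 31-bit variant is
   12 bytes of code with 4-byte slots at offsets 12 and 16.  */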
/* Return rtx for 64-bit constant formed from the 32-bit subwords
   LOW and HIGH, independent of the host word size.  */

rtx
s390_gen_rtx_const_DI (int high, int low)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  HOST_WIDE_INT val;
  val = (HOST_WIDE_INT)high;
  val <<= 32;
  val |= (HOST_WIDE_INT)low;

  return GEN_INT (val);
#else
#if HOST_BITS_PER_WIDE_INT >= 32
  return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
#else
  abort ();
#endif
#endif
}
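/* Example (added): s390_gen_rtx_const_DI (0x1, 0x2) yields the DImode
   constant 0x100000002 whether the host HOST_WIDE_INT is 32 or 64 bits
   wide; only the RTX representation (CONST_INT vs. CONST_DOUBLE)
   differs between the two paths.  */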
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
/* Select section for constant in constant pool.  In 32-bit mode,
   constants go in the function section; in 64-bit mode in .rodata.  */

static void
s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
                         rtx x ATTRIBUTE_UNUSED,
                         unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (TARGET_CPU_ZARCH)
    readonly_data_section ();
  else
    function_section (current_function_decl);
}
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  /* If a variable has a forced alignment to < 2 bytes, mark it with
     SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand.  */
  if (TREE_CODE (decl) == VAR_DECL
      && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
}
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
            output_asm_insn ("aghi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Setup base pointer if required.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
            output_asm_insn ("ahi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-setup the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
}
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}

/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}

#include "gt-s390.h"